import codecs
import json
from keras.preprocessing import sequence
from keras_bert import Tokenizer, load_trained_model_from_checkpoint
from keras.models import load_model
from flask import request, Flask, jsonify
import tensorflow as tf
app = Flask(__name__)
def global_():
    # Define and load the models globally so they are loaded only once, speeding up inference.
    global model, graph, bert_model, maxlen, dict_path, config_path, checkpoint_path, token_dict
    base_path = 'D:/bert_textcls/chinese_L-12_H-768_A-12/chinese_L-12_H-768_A-12'
    config_path = '{}/bert_config.json'.format(base_path)
    checkpoint_path = '{}/bert_model.ckpt'.format(base_path)
    dict_path = '{}/vocab.txt'.format(base_path)
    maxlen = 100
    # Pre-trained BERT used as a feature extractor, plus the fine-tuned downstream classifier.
    bert_model = load_trained_model_from_checkpoint(config_path, checkpoint_path, seq_len=maxlen)
    model = load_model('model/keras_bert.h5')
    # Build the token -> id vocabulary from BERT's vocab file.
    token_dict = {}
    with codecs.open(dict_path, 'r', 'utf-8') as reader:
        for line in reader:
            token = line.strip()
            token_dict[token] = len(token_dict)
    # Capture the default graph so predictions made in Flask request threads use it.
    graph = tf.get_default_graph()
class CTokenizer(Tokenizer):
    # Character-level tokenizer: spaces map to [unused1] and out-of-vocabulary
    # characters map to [UNK], matching BERT's Chinese preprocessing.
    def _tokenize(self, text):
        tokens = []
        for character in text:
            if character in self._token_dict:
                tokens.append(character)
            elif self._is_space(character):
                tokens.append('[unused1]')
            else:
                tokens.append('[UNK]')
        return tokens
def get_encode(content, token_dict):
    # Encode a single text into token ids and segment ids, padded/truncated to maxlen.
    tokenizer = CTokenizer(token_dict)
    token_ids, segment_ids = tokenizer.encode(first=content)
    token_id_batch = sequence.pad_sequences([token_ids], maxlen=maxlen, padding='post', truncating='post')
    segment_id_batch = sequence.pad_sequences([segment_ids], maxlen=maxlen, padding='post', truncating='post')
    return [token_id_batch, segment_id_batch]
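# A quick sanity check of the encoder output (a hedged sketch; the sample text is
# hypothetical, and global_() must already have run so that maxlen and token_dict exist):
#
#   token_id_batch, segment_id_batch = get_encode("今天天气不错", token_dict)
#   # Both arrays have shape (1, 100): a batch of one text padded to maxlen.
#   # bert_model.predict([token_id_batch, segment_id_batch]) then returns features
#   # of shape (1, 100, 768) for the chinese_L-12_H-768_A-12 checkpoint.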
# Load models and vocabulary once at startup.
global_()
@app.route("/sentiment_analysis_api", methods=['POST'])
def predict():
    # Parse the JSON request body, e.g. {"content": "some text"}.
    data = json.loads(request.get_data().decode('utf-8'))
    content = data['content']
    encoder = get_encode(content, token_dict)
    # Run prediction inside the graph captured at load time; Flask serves each
    # request in its own thread, which would otherwise see an empty default graph.
    with graph.as_default():
        result = {}
        bert_vec = bert_model.predict(encoder)
        result["content"] = content
        result["sa"] = '%.4f' % model.predict(bert_vec)[0][0]
    return jsonify(result)
if __name__ == "__main__":
    # Start the Flask development server; enable debug mode if hot reload is needed.
    app.run()
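# Example client call (a minimal sketch; assumes the service is running on Flask's
# default host/port 127.0.0.1:5000, and the sample text is only an illustration):
#
#   import requests
#   resp = requests.post(
#       "http://127.0.0.1:5000/sentiment_analysis_api",
#       json={"content": "这家店的服务态度很好"},
#   )
#   print(resp.json())  # a dict with the original "content" and the "sa" score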