This article collects typical usage examples of the Python method bert_serving.client.BertClient. If you have been wondering how to use client.BertClient, how to call it, or what working example code looks like, the curated examples below may help. You can also explore further usage examples of the containing module, bert_serving.client.
The following shows 11 code examples of client.BertClient, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
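Before the individual examples, here is a minimal sketch of the basic workflow, assuming a bert-serving-server instance is already running locally on the default ports:

from bert_serving.client import BertClient

# Connect to a running bert-serving-server (defaults: localhost, ports 5555/5556).
bc = BertClient()

# encode() takes a list of strings and returns a numpy array of shape
# (num_sentences, embedding_dim), e.g. (2, 768) for a BERT-Base model.
vecs = bc.encode(['First sentence to embed.', 'Second sentence to embed.'])
print(vecs.shape)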
Example 1: getVectors
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def getVectors(passages, filename):
    i = 1
    bc = BertClient()
    passage = 'In June 1942, the United States Army Corps of Engineers began the Manhattan Project - the secret name for the 2 atomic bombs.'
    print("Testing bc\nTesting passages:{}\nVector:{}".format(passage, bc.encode([passage])[0]))
    passagePack = []
    passageIDs = []
    packSize = 100
    with open(filename, 'w') as w:
        for passageID in passages:
            if i % 100 == 0:
                print('{} vectors retrieved'.format(((i - 1) // packSize) * packSize))
            # Once a full pack is buffered, encode it and write it out via process().
            if len(passagePack) == packSize:
                response = bc.encode(passagePack)
                w.write(process(passageIDs, response))
                passageIDs = []
                passagePack = []
            i += 1
            passageIDs.append(passageID)
            passagePack.append(passages[passageID])
        # Flush the final, possibly partial pack that never reached packSize.
        if passagePack:
            response = bc.encode(passagePack)
            w.write(process(passageIDs, response))
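Both this example and example 6 call a helper named process() that is not shown here. A purely hypothetical sketch of what such a serializer might look like (the tab-separated format below is an assumption, not the original implementation):

def process(ids, vectors):
    # Hypothetical: one tab-separated line per (ID, vector) pair.
    lines = []
    for pid, vec in zip(ids, vectors):
        lines.append('{}\t{}\n'.format(pid, ' '.join(str(x) for x in vec)))
    return ''.join(lines)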
Example 2: __init__
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def __init__(self, architecture_path=None, weights_path=None):
    self.bc = None
    try:
        self.bc = BertClient()
    except Exception:
        raise Exception("PunchlineExtractor: Cannot instantiate BertClient. Is it running?")
    # Check whether we are loading a pre-trained model.
    if architecture_path is not None:
        assert weights_path is not None
        with open(architecture_path) as model_arch:
            model_arch_str = model_arch.read()
            self.model = model_from_json(model_arch_str)
        self.model.load_weights(weights_path)
    else:
        self.build_model()
Example 3: __init__
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def __init__(self, component_config=None):
    super(BertVectorsFeaturizer, self).__init__(component_config)
    # All connection settings come from the component configuration.
    ip = self.component_config['ip']
    port = self.component_config['port']
    port_out = self.component_config['port_out']
    show_server_config = self.component_config['show_server_config']
    output_fmt = self.component_config['output_fmt']
    check_version = self.component_config['check_version']
    timeout = self.component_config['timeout']
    identity = self.component_config['identity']
    self.bc = BertClient(
        ip=ip,
        port=int(port),
        port_out=int(port_out),
        show_server_config=show_server_config,
        output_fmt=output_fmt,
        check_version=check_version,
        timeout=int(timeout),
        identity=identity
    )
Example 4: __init__
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def __init__(self):
    super(ActionGetFAQAnswer, self).__init__()
    self.bc = BertClient()
    self.faq_data = json.load(open("./data/nlu/faq.json", "rt", encoding="utf-8"))
    # Pre-computed embeddings (and their L2 norms) of the standard FAQ questions.
    self.standard_questions_encoder = np.load("./data/standard_questions.npy")
    self.standard_questions_encoder_len = np.load("./data/standard_questions_len.npy")
    print(self.standard_questions_encoder.shape)
Example 5: encode_standard_question
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def encode_standard_question():
    bc = BertClient()
    data = json.load(open("./data/nlu/faq.json", "rt", encoding="utf-8"))
    standard_questions = [each['q'] for each in data]
    print("Standard question size", len(standard_questions))
    print("Start encoding standard questions...")
    standard_questions_encoder = bc.encode(standard_questions)
    np.save("./data/standard_questions", standard_questions_encoder)
    # Pre-compute the L2 norm of each question vector for later similarity scoring.
    standard_questions_encoder_len = np.sqrt(np.sum(standard_questions_encoder * standard_questions_encoder, axis=1))
    np.save("./data/standard_questions_len", standard_questions_encoder_len)
Example 6: getVectors
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def getVectors(queries, filename):
    i = 1
    bc = BertClient()
    query = 'who founded microsoft'
    print("Testing bc\nTesting Query:{}\nVector:{}".format(query, bc.encode([query])[0]))
    with open(filename, 'w') as w:
        # Encode the queries in slices of 100 to keep each request small.
        for j in range(0, len(queries), 100):
            if i % 100 == 0:
                print('{} vectors retrieved'.format((i - 1) * 100))
            queryPack = queries[j:j+100]
            response = bc.encode(queryPack)
            w.write(process(queryPack, response))
            i += 1
Example 7: client_clone
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def client_clone(id, idx):
    # A clone client reuses another client's identity so that it can fetch
    # the encodings produced for requests sent under that identity.
    bc = BertClient(port=int(sys.argv[1]), port_out=int(sys.argv[2]), identity=id)
    for j in bc.fetch():
        print('clone-client-%d: received %d x %d' % (idx, j.shape[0], j.shape[1]))
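In the bert-as-service broadcasting demo, a function like this is driven by a main client that submits the encode requests; the sketch below shows roughly how that driver side could look, assuming the same sys.argv port arguments and a placeholder sentence list:

import sys
import threading
from bert_serving.client import BertClient

sentences = ['hello world! good day'] * 16  # placeholder input (an assumption)

bc = BertClient(port=int(sys.argv[1]), port_out=int(sys.argv[2]))
# Start two clone clients that share this client's identity and pick up its results.
for j in range(2):
    threading.Thread(target=client_clone, args=(bc.identity, j)).start()
for _ in range(3):
    bc.encode(sentences)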
Example 8: run
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def run(self):
    try:
        from bert_serving.client import BertClient
    except ImportError:
        raise ImportError('BertClient module is not available, it is required for benchmarking. '
                          'Please use "pip install -U bert-serving-client" to install it.')
    with BertClient(port=self.port, port_out=self.port_out,
                    show_server_config=True, check_version=False, check_length=False) as bc:
        time_all = []
        for _ in range(self.num_repeat):
            start_t = time.perf_counter()
            bc.encode(self.batch)
            time_all.append(time.perf_counter() - start_t)
        # Drop the first two runs, which are often slow due to cold-start/warm-up effects.
        self.avg_time = mean(time_all[2:])
Example 9: __init__
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def __init__(self):
    """
    This bot tags sequences of words from abstracts as describing
    P, I, or O elements.
    """
    logging.debug("Loading PICO LSTM-CRF")
    config = Config()
    # Build the model and restore its trained weights.
    self.model = NERModel(config)
    self.model.build()
    self.model.restore_session(os.path.join(robotreviewer.DATA_ROOT, "pico_spans/model.weights/"))
    logging.debug("PICO classifiers loaded")
    self.bert = BertClient()
Example 10: chatbot_sentence_vec_by_bert_bertasserver
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def chatbot_sentence_vec_by_bert_bertasserver():
    """Sentence encoding via bert-as-service."""
    from conf.path_config import chicken_and_gossip_path
    from bert_serving.client import BertClient
    from utils.text_tools import txtRead
    import numpy as np

    topk = 5
    matrix_ques_save_path = "doc_vecs_chicken_and_gossip"
    questions = txtRead(chicken_and_gossip_path, encodeType='utf-8')
    ques = [ques.split('\t')[0] for ques in questions][0:100]

    bc = BertClient(ip='localhost')
    doc_vecs = bc.encode(ques)
    np.savetxt(matrix_ques_save_path, doc_vecs)
    # matrix_ques = np.loadtxt(matrix_ques_save_path)

    while True:
        query = input('你問: ')
        query_vec = bc.encode([query])[0]
        # Score by dot product normalized over the document vectors; the query norm
        # is a constant factor per query, so it does not change the ranking.
        score = np.sum(query_vec * doc_vecs, axis=1) / np.linalg.norm(doc_vecs, axis=1)
        topk_idx = np.argsort(score)[::-1][:topk]
        for idx in topk_idx:
            print('小薑機器人回答: %s\t%s' % (score[idx], questions[idx]))
Example 11: test
# Required import: from bert_serving import client [as alias]
# Or: from bert_serving.client import BertClient [as alias]
def test():
    print("Loading test data...")
    start_time = time.time()
    x_test = process_file_nolabel(test_dir, word_to_id, config.seq_length)
    x_test = list(x_test)
    bc = BertClient()
    x_test = bc.encode(x_test)
    # (test 2364, 80)

    session = tf.Session()
    session.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    saver.restore(sess=session, save_path=save_path)  # Load the saved model.

    # print('Testing...')
    # loss_test, acc_test = evaluate(session, x_test, y_test)
    # msg = 'Test Loss: {0:>6.2}, Test Acc: {1:>7.2%}'
    # print(msg.format(loss_test, acc_test))

    batch_size = 32
    data_len = len(x_test)
    num_batch = int((data_len - 1) / batch_size) + 1

    # y_test_cls = np.argmax(y_test, 1)
    y_pred_cls = np.zeros(shape=len(x_test), dtype=np.int32)  # Holds the predicted class for each example.
    for i in range(num_batch):  # Process batch by batch.
        start_id = i * batch_size
        end_id = min((i + 1) * batch_size, data_len)
        feed_dict = {
            model.input_x: x_test[start_id:end_id],
            model.keep_prob: 1.0
        }
        y_pred_cls[start_id:end_id] = session.run(model.y_pred_cls, feed_dict=feed_dict)

    # Evaluation
    print("Precision, Recall and F1-Score...")
    # print(metrics.classification_report(y_test_cls, y_pred_cls, target_names=categories))

    # Confusion matrix
    print("Confusion Matrix...")
    # cm = metrics.confusion_matrix(y_test_cls, y_pred_cls)
    # print(cm)

    time_dif = get_time_dif(start_time)
    print("Time usage:", time_dif)
    return y_pred_cls