Commit 97f998db authored by xulongteng

Merge remote-tracking branch 'refs/remotes/origin/bert' into bert

use infer lib
......@@ -14,6 +14,7 @@
#ifndef SERVING_DEMO_CLIENT_SRC_DATA_PRE_H_
#define SERVING_DEMO_CLIENT_SRC_DATA_PRE_H_
#include <sys/stat.h>
#include <iostream>
#include <map>
#include <string>
#include <vector>
......
engines {
name: "bert"
type: "FLUID_GPU_ANALYSIS_DIR"
reloadable_meta: "./data/model/paddle/fluid_time_file"
reloadable_type: "timestamp_ne"
model_data_path: "./data/model/paddle/fluid/bert"
runtime_thread_num: 0
batch_infer_size: 0
enable_batch_align: 0
}
services {
name: "BertService"
workflows: "workflow9"
}
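The engine block above points the serving framework at a Fluid model directory and selects the FLUID_GPU_ANALYSIS_DIR backend, which is where the commit's "use infer lib" change lands: the model is loaded through Paddle's native inference library. As a rough sketch only, the snippet below shows how a Paddle (Fluid 1.x era) AnalysisConfig is typically pointed at such a model directory and turned into a GPU predictor; the GPU memory-pool size and device id are assumptions, and exact API names may differ across Paddle versions — this is not code from this repository.

```cpp
// Hedged sketch: creating a Paddle inference predictor from a model
// directory like the one referenced by model_data_path above.
// The EnableUseGpu arguments (100 MB pool, device 0) are hypothetical.
#include <memory>
#include "paddle_inference_api.h"  // Paddle native inference ("infer") library

std::unique_ptr<paddle::PaddlePredictor> MakeBertPredictor() {
  paddle::AnalysisConfig config;
  // Directory form of the model, matching the FLUID_GPU_ANALYSIS_DIR engine type.
  config.SetModel("./data/model/paddle/fluid/bert");
  // Assumed GPU settings: 100 MB initial memory pool on device 0.
  config.EnableUseGpu(100, 0);
  return paddle::CreatePaddlePredictor(config);
}
```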
......@@ -137,7 +137,7 @@ int BertServiceOp::inference() {
<< " seq_len : " << out->at(0).shape[1]
<< " emb_size : " << out->at(0).shape[2];
- float *out_data = reinterpret_cast<float *>out->at(0).data.data();
+ float *out_data = reinterpret_cast<float *>(out->at(0).data.data());
for (uint32_t bi = 0; bi < batch_size; bi++) {
BertResInstance *res_instance = res->add_instances();
for (uint32_t si = 0; si < MAX_SEQ_LEN; si++) {
......@@ -151,7 +151,7 @@ int BertServiceOp::inference() {
#else
LOG(INFO) << "batch_size : " << out->at(0).shape[0]
<< " emb_size : " << out->at(0).shape[1];
- float *out_data = reinterpret_cast<float *> out->at(0).data.data();
+ float *out_data = reinterpret_cast<float *>(out->at(0).data.data());
for (uint32_t bi = 0; bi < batch_size; bi++) {
BertResInstance *res_instance = res->add_instances();
for (uint32_t si = 0; si < 1; si++) {
......
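The only functional change in the two hunks above is wrapping the cast operand in parentheses: `reinterpret_cast<float *>expr` is not valid C++, while `reinterpret_cast<float *>(expr)` is. Below is a minimal, self-contained sketch of the corrected pattern; the ToyTensor type and the fill values are hypothetical stand-ins for the framework's output tensor, kept only so the indexing over [batch, seq_len, emb] mirrors the loops in BertServiceOp::inference().

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical stand-in for the serving framework's output tensor:
// raw bytes that actually hold floats, plus a [batch, seq_len, emb] shape.
struct ToyTensor {
  std::vector<int> shape;
  std::vector<char> data;
};

int main() {
  const uint32_t batch_size = 2, seq_len = 3, emb_size = 4;
  ToyTensor t;
  t.shape = {static_cast<int>(batch_size), static_cast<int>(seq_len),
             static_cast<int>(emb_size)};
  t.data.resize(batch_size * seq_len * emb_size * sizeof(float));

  // Fill the raw buffer with recognizable values.
  float *fill = reinterpret_cast<float *>(t.data.data());
  for (uint32_t i = 0; i < batch_size * seq_len * emb_size; ++i) {
    fill[i] = 0.1f * i;
  }

  // The corrected cast from the diff: the operand must be parenthesized.
  // `reinterpret_cast<float *>t.data.data();` would not compile.
  float *out_data = reinterpret_cast<float *>(t.data.data());

  // Walk the flat buffer with the same bi/si indexing style as the op.
  for (uint32_t bi = 0; bi < batch_size; ++bi) {
    for (uint32_t si = 0; si < seq_len; ++si) {
      for (uint32_t ei = 0; ei < emb_size; ++ei) {
        std::cout << out_data[(bi * seq_len + si) * emb_size + ei] << " ";
      }
    }
  }
  std::cout << std::endl;
  return 0;
}
```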