Unverified commit 209aef88, authored by TeslaZhao, committed by GitHub

Merge pull request #1192 from zhangjun/cherrypick-0.6.0

Cherry-pick 1179 and 1182 to 0.6.0
@@ -136,8 +136,10 @@ LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib)
 SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib")
 LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib)
-ADD_LIBRARY(openblas STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET openblas PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/third_party/install/openblas/lib/libopenblas.a)
+if (NOT WITH_MKLML)
+    ADD_LIBRARY(openblas STATIC IMPORTED GLOBAL)
+    SET_PROPERTY(TARGET openblas PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/third_party/install/openblas/lib/libopenblas.a)
+endif()
 ADD_LIBRARY(paddle_inference STATIC IMPORTED GLOBAL)
 SET_PROPERTY(TARGET paddle_inference PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/lib/libpaddle_inference.a)
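In effect, the imported `openblas` target is now registered only for non-MKL builds; presumably this avoids requiring `libopenblas.a` to exist in the Paddle install tree when MKLML already supplies the BLAS symbols.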
@@ -22,7 +22,7 @@ cmake -DPYTHON_INCLUDE_DIR=/usr/include/python3.7m/ \
 -DSERVER=ON ..
 make -j10
 ```
-You can run `make install` to produce the target in `./output` directory. Add `-DCMAKE_INSTALL_PREFIX=./output` to specify the output path to CMake command shown above
+You can run `make install` to produce the target in the `./output` directory. Add `-DCMAKE_INSTALL_PREFIX=./output` to the CMake command shown above to specify the output path. Please specify `-DWITH_MKL=ON` on Intel CPU platforms with AVX2 support.
 * Compile the Serving Client
 ```
 mkdir -p client-build-arm && cd client-build-arm
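This documentation note pairs with the CMake change above: building with `-DWITH_MKL=ON` presumably takes the MKLML path (so the static OpenBLAS import is skipped), which is why it is only recommended on Intel CPUs with AVX2 support.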
@@ -26,7 +26,7 @@ this script will download Chinese Dictionary File vocab.txt and Chinese Sample D
 ### Start Service
 ```
-python3 bert_web_service.py serving_server 7703
+python3 -m paddle_serving_server.serve --model serving_server --port 7703 --use_lite --use_xpu --ir_optim
 ```
 ### Client Prediction
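For context, below is a minimal sketch of the client-side prediction step against this service. The client config path, the fetch variable name, the sample sentence, and the use of `ChineseBertReader` are assumptions for illustration, not taken from this diff; the `(1, 128)` reshape and `batch=True` follow the client changes shown below.

```
# Hypothetical BERT client sketch; config path and fetch name are assumed.
import numpy as np
from paddle_serving_client import Client
from paddle_serving_app.reader import ChineseBertReader

client = Client()
client.load_client_config("serving_client/serving_client_conf.prototxt")  # assumed path
client.connect(["127.0.0.1:7703"])  # same port the service was started on

reader = ChineseBertReader({"max_seq_len": 128})
feed_dict = reader.process("欢迎使用 Paddle Serving")  # one sample sentence
for key in feed_dict:
    # (batch_size, max_seq_len), matching the reshape in the client diff below
    feed_dict[key] = np.array(feed_dict[key]).reshape((1, 128))
result = client.predict(feed=feed_dict, fetch=["pooled_output"], batch=True)
print(result)
```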
@@ -31,7 +31,7 @@ client.connect(endpoint_list)
 for line in sys.stdin:
     feed_dict = reader.process(line)
     for key in feed_dict.keys():
-        feed_dict[key] = np.array(feed_dict[key]).reshape((128, 1))
+        feed_dict[key] = np.array(feed_dict[key]).reshape((1, 128))
     #print(feed_dict)
-    result = client.predict(feed=feed_dict, fetch=fetch, batch=False)
+    result = client.predict(feed=feed_dict, fetch=fetch, batch=True)
     print(result)
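The two changes belong together: with `batch=True` the client treats the leading dimension of each feed tensor as the batch dimension, so a single 128-token example is shaped `(1, 128)` rather than `(128, 1)`. A plain-NumPy illustration of the shape convention (no serving calls involved):

```
import numpy as np

ids = np.arange(128, dtype="int64")  # one tokenized example, max_seq_len = 128
single = ids.reshape((1, 128))       # batch of one: leading dim is batch size
pair = np.stack([ids, ids])          # batch of two -> shape (2, 128)
assert single.shape == (1, 128) and pair.shape == (2, 128)
```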
@@ -23,7 +23,7 @@ args = benchmark_args()
 reader = ChineseErnieReader({"max_seq_len": 128})
 fetch = ["save_infer_model/scale_0"]
-endpoint_list = ['127.0.0.1:12000']
+endpoint_list = ['127.0.0.1:7704']
 client = Client()
 client.load_client_config(args.model)
 client.connect(endpoint_list)
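Only the port changes here; 7704 presumably matches the `--port` that the corresponding ERNIE service is launched with, just as the BERT example above pairs port 7703 between server and client.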