Commit 440163f8 authored by barrierye

add ci part

Parent 898daba1
@@ -12,7 +12,7 @@ The gRPC interface is implemented in a form similar to a Web Service:
def load_model_config(self, server_config_paths, client_config_path=None)
```
In some examples the bRPC Server and bRPC Client use different configuration files (for instance, in the cube local example the Client's data is first handed to cube and only passed to the inference library after cube processing), so the gRPC Server needs access to the gRPC Client configuration. To also remove the manual configuration-loading step on the gRPC Client side, the gRPC Server is designed to load both configuration files; `client_config_path` defaults to `<server_config_path>/serving_server_conf.prototxt` (see the sketch below).
2. The gRPC Client side drops the `load_client_config` step:
......
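A minimal sketch of the two-config loading described above, assuming the `uci_housing_model` / `uci_housing_client` directories from the fit_a_line example; op setup and `prepare_server`/`run_server` are omitted (full server scripts appear later in this diff):

```python
from paddle_serving_server import MultiLangServer as Server

server = Server()
# With only the server config directory, client_config_path falls back to
# <server_config_path>/serving_server_conf.prototxt.
server.load_model_config("uci_housing_model")

# If the bRPC Client config differs (e.g. the cube local case), pass it explicitly:
# server.load_model_config(
#     "uci_housing_model",
#     client_config_path="uci_housing_client/serving_client_conf.prototxt")
```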
wget --no-check-certificate https://paddle-serving.bj.bcebos.com/uci_housing.tar.gz
tar -xzf uci_housing.tar.gz
@@ -13,16 +13,15 @@
# limitations under the License.
# pylint: disable=doc-string-missing
from paddle_serving_client import MultiLangClient as Client
import paddle
import functools
import sys
import time
import threading
client = Client()
client.connect(["127.0.0.1:9393"])
import paddle
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
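# Batch prediction over the gRPC client: each predict() call sends a list of
# feed dicts, one dict per sample, so a single request carries batch_size samples.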
from paddle_serving_client import MultiLangClient as Client
import paddle
client = Client()
client.connect(["127.0.0.1:9393"])
batch_size = 2
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
    batch_size=batch_size)
for data in test_reader():
    batch_feed = [{"x": x[0]} for x in data]
    fetch_map = client.predict(feed=batch_feed, fetch=["price"])
    print(fetch_map)
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
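# General-protobuf variant: is_python=False asks for the general protobuf format,
# presumably exchanging feed/fetch data through the typed proto fields instead of
# the default Python (numpy-bytes) encoding.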
from paddle_serving_client import MultiLangClient as Client
import paddle
client = Client()
client.connect(["127.0.0.1:9393"])
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
    batch_size=1)
for data in test_reader():
    fetch_map = client.predict(
        feed={"x": data[0][0]}, fetch=["price"], is_python=False)
    print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
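# Same flow as the basic client, but the numpy feed is converted with .tolist()
# to show that plain Python lists are accepted as input.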
from paddle_serving_client import MultiLangClient as Client
import paddle
client = Client()
client.connect(["127.0.0.1:9393"])
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
    batch_size=1)
for data in test_reader():
    fetch_map = client.predict(feed={"x": data[0][0].tolist()}, fetch=["price"])
    print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
@@ -17,7 +17,7 @@ import os
import sys
from paddle_serving_server import OpMaker
from paddle_serving_server import OpSeqMaker
from paddle_serving_server import MultiLangServer as Server
op_maker = OpMaker()
read_op = op_maker.create('general_reader')
@@ -29,7 +29,7 @@ op_seq_maker.add_op(read_op)
op_seq_maker.add_op(general_infer_op)
op_seq_maker.add_op(response_op)
server = Server()
server.set_op_sequence(op_seq_maker.get_op_sequence())
server.load_model_config(sys.argv[1])
server.prepare_server(workdir="work_dir1", port=9393, device="cpu")
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
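# GPU variant of the gRPC server: built from paddle_serving_server_gpu, pinned to
# GPU 0 via set_gpuid(0) and started with device="gpu".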
import os
import sys
from paddle_serving_server_gpu import OpMaker
from paddle_serving_server_gpu import OpSeqMaker
from paddle_serving_server_gpu import MultiLangServer as Server
op_maker = OpMaker()
read_op = op_maker.create('general_reader')
general_infer_op = op_maker.create('general_infer')
response_op = op_maker.create('general_response')
op_seq_maker = OpSeqMaker()
op_seq_maker.add_op(read_op)
op_seq_maker.add_op(general_infer_op)
op_seq_maker.add_op(response_op)
server = Server()
server.set_op_sequence(op_seq_maker.get_op_sequence())
server.load_model_config(sys.argv[1])
server.set_gpuid(0)
server.prepare_server(workdir="work_dir1", port=9393, device="gpu")
server.run_server()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
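# Synchronous gRPC client: predict() blocks and returns the fetch map for each sample.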
from paddle_serving_client import MultiLangClient as Client
import paddle
client = Client()
client.connect(["127.0.0.1:9393"])
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
    batch_size=1)
for data in test_reader():
    fetch_map = client.predict(feed={"x": data[0][0]}, fetch=["price"])
    print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing
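# Timeout handling: the RPC deadline is set to 1 ms via set_rpc_timeout_ms(1), so
# requests are expected to fail with grpc.StatusCode.DEADLINE_EXCEEDED, which the
# loop below catches and reports as 'timeout'.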
from paddle_serving_client import MultiLangClient as Client
import paddle
import grpc
client = Client()
client.connect(["127.0.0.1:9393"])
client.set_rpc_timeout_ms(1)
test_reader = paddle.batch(
    paddle.reader.shuffle(
        paddle.dataset.uci_housing.test(), buf_size=500),
    batch_size=1)
for data in test_reader():
    try:
        fetch_map = client.predict(feed={"x": data[0][0]}, fetch=["price"])
    except grpc.RpcError as e:
        status_code = e.code()
        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
            print('timeout')
    else:
        print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
@@ -171,12 +171,6 @@ function python_test_fit_a_line() {
fi
setproxy # recover proxy state
kill_server_process
# test grpc impl
check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 --use_multilang> /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_multilang_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
kill_server_process
;;
GPU)
export CUDA_VISIBLE_DEVICES=0
@@ -207,12 +201,6 @@ function python_test_fit_a_line() {
fi
setproxy # recover proxy state
kill_server_process
# test grpc impl
check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 --use_multilang> /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_multilang_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
kill_server_process
;;
*)
echo "error type"
@@ -511,6 +499,80 @@ function python_test_lac() {
cd ..
}
function python_test_grpc_impl() {
# pwd: /Serving/python/examples
cd grpc_impl_example # pwd: /Serving/python/examples/grpc_impl_example
local TYPE=$1
export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
case $TYPE in
CPU)
# test general case
cd fit_a_line # pwd: /Serving/python/examples/grpc_impl_example/fit_a_line
sh get_data.sh
# one line command start
check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 --use_multilang > /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_sync_client.py > /dev/null"
check_cmd "python test_asyn_client.py > /dev/null"
check_cmd "python test_general_pb_client.py > /dev/null"
check_cmd "python test_list_input_client.py > /dev/null"
check_cmd "python test_timeout_client.py > /dev/null"
check_cmd "python test_batch_client.py > /dev/null"
kill_server_process
check_cmd "python test_server.py > /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_sync_client.py > /dev/null"
check_cmd "python test_asyn_client.py > /dev/null"
check_cmd "python test_general_pb_client.py > /dev/null"
check_cmd "python test_list_input_client.py > /dev/null"
check_cmd "python test_timeout_client.py > /dev/null"
check_cmd "python test_batch_client.py > /dev/null"
kill_server_process
cd .. # pwd: /Serving/python/examples/grpc_impl_example
;;
GPU)
export CUDA_VISIBLE_DEVICES=0
# test general case
cd fit_a_line # pwd: /Serving/python/examples/grpc_impl_example/fit_a_line
sh get_data.sh
# one line command start
check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 --use_multilang > /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_sync_client.py > /dev/null"
check_cmd "python test_asyn_client.py > /dev/null"
check_cmd "python test_general_pb_client.py > /dev/null"
check_cmd "python test_list_input_client.py > /dev/null"
check_cmd "python test_timeout_client.py > /dev/null"
check_cmd "python test_batch_client.py > /dev/null"
kill_server_process
check_cmd "python test_server_gpu.py > /dev/null &"
sleep 5 # wait for the server to start
check_cmd "python test_sync_client.py > /dev/null"
check_cmd "python test_asyn_client.py > /dev/null"
check_cmd "python test_general_pb_client.py > /dev/null"
check_cmd "python test_list_input_client.py > /dev/null"
check_cmd "python test_timeout_client.py > /dev/null"
check_cmd "python test_batch_client.py > /dev/null"
kill_server_process
cd .. # pwd: /Serving/python/examples/grpc_impl_example
;;
*)
echo "error type"
exit 1
;;
esac
echo "test fit_a_line $TYPE part finished as expected."
rm -rf image kvdb log uci_housing* work*
unset SERVING_BIN
cd .. # pwd: /Serving/python/examples
}
function python_run_test() {
# Using the compiled binary
local TYPE=$1 # pwd: /Serving
@@ -522,6 +584,7 @@ function python_run_test() {
python_test_lac $TYPE # pwd: /Serving/python/examples
python_test_multi_process $TYPE # pwd: /Serving/python/examples
python_test_multi_fetch $TYPE # pwd: /Serving/python/examples
python_test_grpc_impl $TYPE # pwd: /Serving/python/examples
echo "test python $TYPE part finished as expected." echo "test python $TYPE part finished as expected."
cd ../.. # pwd: /Serving cd ../.. # pwd: /Serving
} }
......