diff --git a/doc/COMPILE.md b/doc/COMPILE.md
index 411620af2ee10a769384c36cebc3aa3ecb93ea49..3649b0a9355e25dad2544b7898f639fc44b9a0b0 100644
--- a/doc/COMPILE.md
+++ b/doc/COMPILE.md
@@ -36,6 +36,8 @@ cd Serving && git submodule update --init --recursive
 export PYTHONROOT=/usr/
 ```
 
+In the default CentOS 7 image we provide, the Python path is `/usr/bin/python`. If you want to use our CentOS 6 image instead, set `export PYTHONROOT=/usr/local/python2.7/`.
+
 ## Compile Server
 
 ### Integrated CPU version paddle inference library
diff --git a/doc/COMPILE_CN.md b/doc/COMPILE_CN.md
index 44802260719d37a3140ca15f6a2ccc15479e32d6..2b8a42d10368d924e04a67aa9720c14c3b4d95dd 100644
--- a/doc/COMPILE_CN.md
+++ b/doc/COMPILE_CN.md
@@ -36,6 +36,8 @@ cd Serving && git submodule update --init --recursive
 export PYTHONROOT=/usr/
 ```
 
+我们提供的默认CentOS 7镜像中Python路径为`/usr/bin/python`,如果您要使用我们的CentOS 6镜像,需要将其设置为`export PYTHONROOT=/usr/local/python2.7/`。
+
 ## 编译Server部分
 
 ### 集成CPU版本Paddle Inference Library
diff --git a/python/examples/fit_a_line/test_multi_process_client.py b/python/examples/fit_a_line/test_multi_process_client.py
index 46ba3b60b5ae09b568868531d32234ade50d8556..5272d095df5e74f25ce0e36ca22c8d6d1884f5f0 100644
--- a/python/examples/fit_a_line/test_multi_process_client.py
+++ b/python/examples/fit_a_line/test_multi_process_client.py
@@ -22,15 +22,19 @@ def single_func(idx, resource):
     client.load_client_config(
         "./uci_housing_client/serving_client_conf.prototxt")
     client.connect(["127.0.0.1:9293", "127.0.0.1:9292"])
-    test_reader = paddle.batch(
-        paddle.reader.shuffle(
-            paddle.dataset.uci_housing.test(), buf_size=500),
-        batch_size=1)
-    for data in test_reader():
-        fetch_map = client.predict(feed={"x": data[0][0]}, fetch=["price"])
+    x = [
+        0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584,
+        0.6283, 0.4919, 0.1856, 0.0795, -0.0332
+    ]
+    for i in range(1000):
+        fetch_map = client.predict(feed={"x": x}, fetch=["price"])
+        if fetch_map is None:
+            return [[None]]
     return [[0]]
 
 
 multi_thread_runner = MultiThreadRunner()
 thread_num = 4
 result = multi_thread_runner.run(single_func, thread_num, {})
+if None in result[0]:
+    exit(1)
diff --git a/python/paddle_serving_server_gpu/__init__.py b/python/paddle_serving_server_gpu/__init__.py
index d4631141f8173b4ae0cb41d42c615566ac81ae7e..e40c0fa48763eaa66373e9f2149552c4f8693eb7 100644
--- a/python/paddle_serving_server_gpu/__init__.py
+++ b/python/paddle_serving_server_gpu/__init__.py
@@ -384,7 +384,7 @@ class Server(object):
                 finally:
                     os.remove(tar_name)
         #release lock
-        version_file.cloes()
+        version_file.close()
         os.chdir(self.cur_path)
         self.bin_path = self.server_path + "/serving"
 
diff --git a/tools/serving_build.sh b/tools/serving_build.sh
index da662d322003fa4c1ec7f82379a474f89f1d27ea..989e48ead9864e717e573f7f0800a1afba2e934a 100644
--- a/tools/serving_build.sh
+++ b/tools/serving_build.sh
@@ -375,16 +375,17 @@ function python_test_multi_process(){
     sh get_data.sh
     case $TYPE in
         CPU)
-            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9292 &"
-            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9293 &"
+            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9292 --workdir test9292 &"
+            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9293 --workdir test9293 &"
             sleep 5
             check_cmd "python test_multi_process_client.py"
             kill_server_process
             echo "bert mutli rpc RPC inference pass"
             ;;
         GPU)
-            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9292 --gpu_ids 0 &"
-            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9293 --gpu_ids 0 &"
+            rm -rf ./image #TODO: the following code seems to create this folder, but no corresponding code was found
+            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9292 --workdir test9292 --gpu_ids 0 &"
+            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9293 --workdir test9293 --gpu_ids 0 &"
             sleep 5
             check_cmd "python test_multi_process_client.py"
             kill_server_process
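
A note on how the updated test reports failures: `client.predict` returning `None` makes `single_func` return `[[None]]`, the module-level check then calls `exit(1)`, and `check_cmd` in `serving_build.sh` can presumably flag the non-zero status as a CI failure. The sketch below mirrors that pattern for a single process, assuming a server started as in the script above is listening on one of the two ports; the `from paddle_serving_client import Client` import and the final `print` are assumptions, since the excerpt above starts inside `single_func`.

```python
# Minimal single-process sketch of the check in test_multi_process_client.py.
# Assumes a server is already running (see the serving_build.sh commands above)
# and that the client package exposes Client as shown below.
import sys

from paddle_serving_client import Client

client = Client()
client.load_client_config("./uci_housing_client/serving_client_conf.prototxt")
client.connect(["127.0.0.1:9293", "127.0.0.1:9292"])

# One normalized UCI housing sample (13 features), copied from the diff above.
x = [
    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584,
    0.6283, 0.4919, 0.1856, 0.0795, -0.0332
]

fetch_map = client.predict(feed={"x": x}, fetch=["price"])
if fetch_map is None:
    # A None result means the RPC failed; surface it through the exit code
    # so a wrapper such as check_cmd can detect the failure.
    sys.exit(1)
print(fetch_map)
```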