From c88444fb09fc3429017c02b0e23bbc8af18d8235 Mon Sep 17 00:00:00 2001
From: wangjiawei04
Date: Tue, 24 Mar 2020 11:17:00 +0000
Subject: [PATCH] support cube gpu ci

---
 .../criteo_ctr_with_cube/test_server_gpu.py  | 37 +++++++++++++++++++
 python/paddle_serving_server_gpu/__init__.py |  7 ++++
 tools/serving_build.sh                       | 25 +++++++++++++
 3 files changed, 69 insertions(+)
 create mode 100755 python/examples/criteo_ctr_with_cube/test_server_gpu.py

diff --git a/python/examples/criteo_ctr_with_cube/test_server_gpu.py b/python/examples/criteo_ctr_with_cube/test_server_gpu.py
new file mode 100755
index 00000000..382be99b
--- /dev/null
+++ b/python/examples/criteo_ctr_with_cube/test_server_gpu.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# pylint: disable=doc-string-missing
+
+import os
+import sys
+from paddle_serving_server_gpu import OpMaker
+from paddle_serving_server_gpu import OpSeqMaker
+from paddle_serving_server_gpu import Server
+
+op_maker = OpMaker()
+read_op = op_maker.create('general_reader')
+general_dist_kv_infer_op = op_maker.create('general_dist_kv_infer')
+response_op = op_maker.create('general_response')
+
+op_seq_maker = OpSeqMaker()
+op_seq_maker.add_op(read_op)
+op_seq_maker.add_op(general_dist_kv_infer_op)
+op_seq_maker.add_op(response_op)
+
+server = Server()
+server.set_op_sequence(op_seq_maker.get_op_sequence())
+server.set_num_threads(4)
+server.load_model_config(sys.argv[1])
+server.prepare_server(workdir="work_dir1", port=9292, device="cpu")
+server.run_server()
diff --git a/python/paddle_serving_server_gpu/__init__.py b/python/paddle_serving_server_gpu/__init__.py
index 02b55801..2fd35c6d 100644
--- a/python/paddle_serving_server_gpu/__init__.py
+++ b/python/paddle_serving_server_gpu/__init__.py
@@ -55,6 +55,7 @@ class OpMaker(object):
             "general_text_reader": "GeneralTextReaderOp",
             "general_text_response": "GeneralTextResponseOp",
             "general_single_kv": "GeneralSingleKVOp",
+            "general_dist_kv_infer": "GeneralDistKVInferOp",
             "general_dist_kv": "GeneralDistKVOp"
         }
 
@@ -104,6 +105,7 @@ class Server(object):
         self.infer_service_fn = "infer_service.prototxt"
         self.model_toolkit_fn = "model_toolkit.prototxt"
         self.general_model_config_fn = "general_model.prototxt"
+        self.cube_config_fn = "cube.conf"
         self.workdir = ""
         self.max_concurrency = 0
         self.num_threads = 4
@@ -184,6 +186,11 @@ class Server(object):
                   "w") as fout:
             fout.write(str(self.model_conf))
         self.resource_conf = server_sdk.ResourceConf()
+        for workflow in self.workflow_conf.workflows:
+            for node in workflow.nodes:
+                if "dist_kv" in node.name:
+                    self.resource_conf.cube_config_path = workdir
+                    self.resource_conf.cube_config_file = self.cube_config_fn
         self.resource_conf.model_toolkit_path = workdir
         self.resource_conf.model_toolkit_file = self.model_toolkit_fn
         self.resource_conf.general_model_path = workdir
diff --git a/tools/serving_build.sh b/tools/serving_build.sh
index 93c11012..bd144646 100644
--- a/tools/serving_build.sh
+++ b/tools/serving_build.sh
@@ -193,6 +193,7 @@ function python_run_criteo_ctr_with_cube() {
             check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
             python test_server.py ctr_serving_model_kv &
             check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
+            tail -n 2 score | awk 'NR==1'
             AUC=$(tail -n 2 score | awk 'NR==1')
             VAR2="0.70"
             RES=$( echo "$AUC>$VAR2" | bc )
@@ -205,6 +206,30 @@ function python_run_criteo_ctr_with_cube() {
             ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
             ;;
         GPU)
+            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
+            check_cmd "tar xf ctr_cube_unittest.tar.gz"
+            check_cmd "mv models/ctr_client_conf ./"
+            check_cmd "mv models/ctr_serving_model_kv ./"
+            check_cmd "mv models/data ./cube/"
+            check_cmd "mv models/ut_data ./"
+            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
+            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
+            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
+            sh cube_prepare.sh &
+            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
+            python test_server_gpu.py ctr_serving_model_kv &
+            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
+            tail -n 2 score | awk 'NR==1'
+            AUC=$(tail -n 2 score | awk 'NR==1')
+            VAR2="0.70"
+            RES=$( echo "$AUC>$VAR2" | bc )
+            if [[ $RES -eq 0 ]]; then
+                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.70"
+                exit 1
+            fi
+            echo "criteo_ctr_with_cube inference auc test success"
+            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
+            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
             ;;
         *)
             echo "error type"
-- 
GitLab