Commit c97d32a6 authored by Dong Daxiang, committed by GitHub

Merge pull request #306 from barrierye/gpu-devel-dockerfile

add ci test for gpu
@@ -12,7 +12,7 @@
``` python
git clone https://github.com/PaddlePaddle/Serving
- git submodule update --init --recursive
+ cd Serving && git submodule update --init --recursive
```
### Compile the Server part
@@ -44,3 +44,6 @@ make -j10
### Install the wheel packages
Whether on the client side or the server side, once compilation finishes simply install the whl package under python/dist/.
+ ### Notes
+ When the Python server is started, it checks the `SERVING_BIN` environment variable. If you want to use a binary you compiled yourself, set this variable to the path of that binary, typically `export SERVING_BIN=${BUILD_PATH}/core/general-server/serving`.
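For reference, a minimal sketch of these two steps, assuming a CPU server build directory named `build-server-CPU` (substitute your own build path):

``` shell
# install the wheels produced by the build (client and/or server side)
pip install python/dist/paddle_serving_client*.whl
pip install python/dist/paddle_serving_server*.whl

# point the Python server at the locally compiled binary;
# BUILD_PATH is assumed to be the server build directory used above
export BUILD_PATH=$PWD/build-server-CPU
export SERVING_BIN=${BUILD_PATH}/core/general-server/serving
```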
@@ -50,7 +50,6 @@ serving_io.save_model("serving_model", "client_conf",
The server-side prediction logic can be defined manually through the Paddle Serving Server API. An example:
``` python
- ``` python
import paddle_serving_server as serving
op_maker = serving.OpMaker()
read_op = op_maker.create('general_reader')
@@ -221,6 +220,3 @@ imdb_service.run_server()
### 5.3 Vector retrieval and tree-based retrieval
In the recall stage of recommendation and advertising systems, fast vector-based retrieval or fast tree-based retrieval is usually required. Paddle Serving will integrate or extend retrieval engines for these use cases.
@@ -91,15 +91,15 @@ if __name__ == "__main__":
    if args.name == "None":
        start_multi_card(args)
    else:
+       from .web_service import WebService
        web_service = WebService(name=args.name)
        web_service.load_model_config(args.model)
-       gpu_ids = []
-       if args.gpu_ids == "":
+       gpu_ids = args.gpu_ids
+       if gpu_ids == "":
            if "CUDA_VISIBLE_DEVICES" in os.environ:
                gpu_ids = os.environ["CUDA_VISIBLE_DEVICES"]
        if len(gpu_ids) > 0:
-           gpus = [int(x) for x in gpu_ids.split(",")]
-           web_service.set_gpus(gpus)
+           web_service.set_gpus(gpu_ids)
        web_service.prepare_server(
            workdir=args.workdir, port=args.port, device=args.device)
        web_service.run_server()
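With this change, the comma-separated `--gpu_ids` string is forwarded to `set_gpus` as-is. A hedged usage sketch of the entry point, mirroring the flags exercised by the CI script further down (model path and port are only examples):

``` shell
# serve on GPU card 0; --name uci additionally exposes the HTTP endpoint
python -m paddle_serving_server_gpu.serve \
    --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 --name uci &
# a comma-separated list such as --gpu_ids 0,1 is passed through unchanged
```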
@@ -10,6 +10,6 @@ RUN yum -y install wget && \
    curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python get-pip.py && rm get-pip.py && \
    ln -s /usr/local/cuda-9.0/lib64/libcublas.so.9.0 /usr/local/cuda-9.0/lib64/libcublas.so && \
-   echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64':$LD_LIBRARY_PATH >> /root/.bashrc && \
+   echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH' >> /root/.bashrc && \
    ln -s /usr/local/cuda-9.0/targets/x86_64-linux/lib/libcudnn.so.7 /usr/local/cuda-9.0/targets/x86_64-linux/lib/libcudnn.so && \
    echo 'export LD_LIBRARY_PATH=/usr/local/cuda-9.0/targets/x86_64-linux/lib:$LD_LIBRARY_PATH' >> /root/.bashrc
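The fix moves `:$LD_LIBRARY_PATH` inside the single quotes: the variable reference is now written literally to `.bashrc` and expanded every time a shell starts in the container, instead of being expanded once while the image is built. A minimal sketch of the difference:

``` shell
# before: $LD_LIBRARY_PATH expands during `docker build`, freezing whatever
# value the build shell happened to have into .bashrc
echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64':$LD_LIBRARY_PATH >> /root/.bashrc

# after: the single quotes keep the text literal, so expansion happens each
# time .bashrc is sourced in the running container
echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH' >> /root/.bashrc
```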
FROM nvidia/cuda:9.0-cudnn7-devel-centos7
RUN yum -y install wget >/dev/null \
&& yum -y install gcc gcc-c++ make glibc-static which >/dev/null \
&& yum -y install git openssl-devel curl-devel bzip2-devel python-devel >/dev/null \
&& wget https://cmake.org/files/v3.2/cmake-3.2.0-Linux-x86_64.tar.gz >/dev/null \
&& tar xzf cmake-3.2.0-Linux-x86_64.tar.gz \
&& mv cmake-3.2.0-Linux-x86_64 /usr/local/cmake3.2.0 \
&& echo 'export PATH=/usr/local/cmake3.2.0/bin:$PATH' >> /root/.bashrc \
&& rm cmake-3.2.0-Linux-x86_64.tar.gz \
&& wget https://dl.google.com/go/go1.14.linux-amd64.tar.gz >/dev/null \
&& tar xzf go1.14.linux-amd64.tar.gz \
&& mv go /usr/local/go \
&& echo 'export GOROOT=/usr/local/go' >> /root/.bashrc \
&& echo 'export PATH=/usr/local/go/bin:$PATH' >> /root/.bashrc \
&& rm go1.14.linux-amd64.tar.gz \
&& yum -y install python-devel sqlite-devel >/dev/null \
&& curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py >/dev/null \
&& python get-pip.py >/dev/null \
&& pip install google protobuf setuptools wheel flask >/dev/null \
&& rm get-pip.py \
&& yum -y install epel-release && yum -y install patchelf \
&& yum clean all
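For reference, a hedged sketch of building and entering this development image; the file name and image tag below are assumptions rather than something stated in the PR:

``` shell
# build the GPU development image (Dockerfile.gpu.devel is an assumed file name)
docker build -t paddle-serving:gpu-devel -f Dockerfile.gpu.devel .

# start a container with GPU access and an interactive shell
# (requires the nvidia-docker runtime for this CUDA 9.0 base image)
nvidia-docker run -it paddle-serving:gpu-devel /bin/bash
```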
#!/usr/bin/env bash
+function unsetproxy() {
+    HTTP_PROXY_TEMP=$http_proxy
+    HTTPS_PROXY_TEMP=$https_proxy
+    unset http_proxy
+    unset https_proxy
+}
+function setproxy() {
+    export http_proxy=$HTTP_PROXY_TEMP
+    export https_proxy=$HTTPS_PROXY_TEMP
+}
function init() {
    source /root/.bashrc
    set -v
-   #export http_proxy=http://172.19.56.199:3128
-   #export https_proxy=http://172.19.56.199:3128
    export PYTHONROOT=/usr
    cd Serving
+   export SERVING_WORKDIR=$PWD
}
function check_cmd() {
@@ -16,18 +27,40 @@ function check_cmd() {
    fi
}
+function rerun() {
+    if [ $# -ne 2 ]; then
+        echo "usage: rerun command rerun-times"
+        exit 1
+    fi
+    local command=$1
+    local times=$2
+    for((i=1;i<=${times};i++))
+    do
+        if [ ${i} != 1 ]; then
+            echo "${i}-th run command: ${command}..."
+        fi
+        eval $command
+        if [ $? -eq 0 ]; then
+            return 0
+        fi
+        echo "${i}-th run(command: ${command}) failed."
+    done
+    exit 1
+}
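The retry helper runs a command up to a fixed number of times and aborts the script if every attempt fails; it is used below for the make steps. A minimal usage sketch (the wget line is a hypothetical example, not taken from the script):

``` shell
# retry the build up to 3 times before aborting, as build_client/build_server do
rerun "make -j2 >/dev/null" 3

# hypothetical: retry a flaky download up to 5 times
rerun "wget https://example.com/data.tar.gz >/dev/null" 5
```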
function build_client() {
    local TYPE=$1
    local DIRNAME=build-client-$TYPE
-   mkdir $DIRNAME && cd $DIRNAME
+   mkdir $DIRNAME # pwd: /Serving
+   cd $DIRNAME # pwd: /Serving/build-client-$TYPE
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                -DCLIENT_ONLY=ON ..
-           check_cmd "make -j2 >/dev/null"
-           pip install python/dist/paddle_serving_client* >/dev/null
+           rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
+           pip install -U python/dist/paddle_serving_client* >/dev/null
            ;;
        *)
            echo "error type"
@@ -35,22 +68,24 @@ function build_client() {
            ;;
    esac
    echo "build client $TYPE part finished as expected."
-   cd ..
-   rm -rf $DIRNAME
+   cd .. # pwd: /Serving
+   # rm -rf $DIRNAME
}
function build_server() {
    local TYPE=$1
    local DIRNAME=build-server-$TYPE
-   mkdir $DIRNAME && cd $DIRNAME
+   mkdir $DIRNAME # pwd: /Serving
+   cd $DIRNAME # pwd: /Serving/build-server-$TYPE
    case $TYPE in
        CPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                -DCLIENT_ONLY=OFF ..
-           check_cmd "make -j2 >/dev/null && make install -j2 >/dev/null"
-           pip install python/dist/paddle_serving_server* >/dev/null
+           rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
+           check_cmd "make install -j2 >/dev/null"
+           pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
@@ -58,8 +93,9 @@ function build_server() {
                -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                -DCLIENT_ONLY=OFF \
                -DWITH_GPU=ON ..
-           check_cmd "make -j2 >/dev/null && make install -j2 >/dev/null"
-           pip install python/dist/paddle_serving_server* >/dev/null
+           rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
+           check_cmd "make install -j2 >/dev/null"
+           pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        *)
            echo "error type"
@@ -67,30 +103,62 @@ function build_server() {
            ;;
    esac
    echo "build server $TYPE part finished as expected."
-   cd ..
+   cd .. # pwd: /Serving
+   # rm -rf $DIRNAME for export SERVING_BIN
}
+function kill_server_process() {
+    ps -ef | grep "serving" | grep -v serving_build | grep -v grep | awk '{print $2}' | xargs kill
+}
function python_test_fit_a_line() {
-   cd fit_a_line
+   # pwd: /Serving/python/examples
+   cd fit_a_line # pwd: /Serving/python/examples/fit_a_line
    sh get_data.sh
    local TYPE=$1
-   echo $TYPE
+   export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            # test rpc
-           check_cmd "python test_server.py uci_housing_model/ > /dev/null &"
-           sleep 5
+           check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 > /dev/null &"
+           sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
-           ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
+           kill_server_process
            # test web
-           check_cmd "python -m paddle_serving_server.serve --model uci_housing_model/ --name uci --port 9399 --name uci > /dev/null &"
-           sleep 5
-           check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9399/uci/prediction"
-           ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
+           unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
+           check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --name uci --port 9393 --thread 4 --name uci > /dev/null &"
+           sleep 5 # wait for the server to start
+           check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
+           # check http code
+           http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
+           setproxy # recover proxy state
+           kill_server_process
+           if [ ${http_code} -ne 200 ]; then
+               echo "HTTP status code -ne 200"
+               exit 1
+           fi
            ;;
        GPU)
-           echo "not support yet"
-           exit 1
+           # test rpc
+           check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 > /dev/null &"
+           sleep 5 # wait for the server to start
+           check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
+           kill_server_process
+           # test web
+           unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
+           check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 2 --gpu_ids 0 --name uci > /dev/null &"
+           sleep 5 # wait for the server to start
+           check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
+           # check http code
+           http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
+           setproxy # recover proxy state
+           kill_server_process
+           if [ ${http_code} -ne 200 ]; then
+               echo "HTTP status code -ne 200"
+               exit 1
+           fi
            ;;
        *)
            echo "error type"
@@ -99,57 +167,69 @@ function python_test_fit_a_line() {
    esac
    echo "test fit_a_line $TYPE part finished as expected."
    rm -rf image kvdb log uci_housing* work*
-   cd ..
+   unset SERVING_BIN
+   cd .. # pwd: /Serving/python/examples
}
function python_run_criteo_ctr_with_cube() {
+   # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y bc >/dev/null
-   cd criteo_ctr_with_cube
+   cd criteo_ctr_with_cube # pwd: /Serving/python/examples/criteo_ctr_with_cube
+   case $TYPE in
+       CPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server.py ctr_serving_model_kv &
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            AUC=$(tail -n 2 score | awk 'NR==1')
            VAR2="0.70"
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.70"
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
+           ;;
+       GPU)
+           ;;
+       *)
+           echo "error type"
+           exit 1
+           ;;
+   esac
+   echo "test criteo_ctr_with_cube $TYPE part finished as expected."
+   cd .. # pwd: /Serving/python/examples
}
function python_run_test() {
-   cd python/examples
-   local TYPE=$1
-   # Frist time run, downloading PaddleServing components ...
-   python -c "from paddle_serving_server import Server; server = Server(); server.download_bin()"
-   python_test_fit_a_line $TYPE
-   python_run_criteo_ctr_with_cube $TYPE
+   # Using the compiled binary
+   local TYPE=$1 # pwd: /Serving
+   cd python/examples # pwd: /Serving/python/examples
+   python_test_fit_a_line $TYPE # pwd: /Serving/python/examples
+   python_run_criteo_ctr_with_cube $TYPE # pwd: /Serving/python/examples
    echo "test python $TYPE part finished as expected."
-   cd ../..
+   cd ../.. # pwd: /Serving
}
function main() {
-   local TYPE=$1
-   init
-   build_client $TYPE
-   build_server $TYPE
-   cd Serving/
-   python_run_test $TYPE
+   local TYPE=$1 # pwd: /
+   init # pwd: /Serving
+   build_client $TYPE # pwd: /Serving
+   build_server $TYPE # pwd: /Serving
+   python_run_test $TYPE # pwd: /Serving
    echo "serving $TYPE part finished as expected."
}
......
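For completeness, a hedged sketch of how this CI script is presumably invoked; the file name serving_build.sh is inferred from the `grep -v serving_build` filter in `kill_server_process`, and the argument is the build type handled by `main`:

``` shell
# run the full CPU or GPU pipeline from the directory containing the Serving checkout
bash serving_build.sh CPU
bash serving_build.sh GPU
```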