#!/usr/bin/env bash
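# Build-and-test script for Paddle Serving: it builds the client, server and
# app wheels for the given device type and then runs the Python example tests
# against the freshly built binaries.
# Usage: bash serving_build.sh <TYPE>   (TYPE is CPU or GPU)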

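# Temporarily clear the proxy environment variables (setproxy restores them),
# e.g. so that local web-service tests are not routed through the proxy.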
function unsetproxy() {
    HTTP_PROXY_TEMP=$http_proxy
    HTTPS_PROXY_TEMP=$https_proxy
    unset http_proxy
    unset https_proxy
}

function setproxy() {
    export http_proxy=$HTTP_PROXY_TEMP
    export https_proxy=$HTTPS_PROXY_TEMP
}

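# Prepare the build environment and record the Serving source directory in SERVING_WORKDIR.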
function init() {
    source /root/.bashrc
    set -v
    export PYTHONROOT=/usr
    cd Serving
    export SERVING_WORKDIR=$PWD
}

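# Run a command and abort the whole script if it fails.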
function check_cmd() {
    eval $@
    if [ $? -ne 0 ]; then
        exit 1
    fi
}

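# Run a command up to the given number of times and return on the first
# success; abort the script if every attempt fails.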
function rerun() {
    if [ $# -ne 2 ]; then
        echo "usage: rerun <command> <retry-times>"
        exit 1
    fi
    local command=$1
    local times=$2
    for((i=1;i<=${times};i++))
    do
        if [ ${i} != 1 ]; then
            echo "${i}-th run command: ${command}..."
        fi
        eval $command
        if [ $? -eq 0 ]; then
            return 0
        fi
        echo "${i}-th run(command: ${command}) failed."
    done
    exit 1
}

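# Build and install the paddle_serving_app wheel in build-app-$TYPE.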
function build_app() {
    local TYPE=$1
    local DIRNAME=build-app-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-app-$TYPE
    pip install numpy sentencepiece
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DAPP=ON ..
            rerun "make -j2 >/dev/null" 3 # compilation may fail because of transient network issues, so retry
            pip install -U python/dist/paddle_serving_app* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build app $TYPE part finished as expected."
    cd .. # pwd: /Serving
}

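# Build and install the paddle_serving_client wheel in build-client-$TYPE.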
function build_client() {
    local TYPE=$1
    local DIRNAME=build-client-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-client-$TYPE
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DCLIENT=ON ..
            rerun "make -j2 >/dev/null" 3 # compilation may fail because of transient network issues, so retry
            pip install -U python/dist/paddle_serving_client* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build client $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME
}

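# Build and install the paddle_serving_server wheel in build-server-$TYPE
# (the GPU build adds -DWITH_GPU=ON); the build directory is kept so that
# SERVING_BIN can later point at the compiled serving binary.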
function build_server() {
    local TYPE=$1
    local DIRNAME=build-server-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-server-$TYPE
    case $TYPE in
        CPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON ..
            rerun "make -j2 >/dev/null" 3 # compilation may fail because of transient network issues, so retry
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON \
                  -DWITH_GPU=ON ..
            rerun "make -j2 >/dev/null" 3 # compilation may fail because of transient network issues, so retry
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build server $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME  # keep the build dir so that SERVING_BIN can point to the compiled serving binary
}

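# Kill any running serving processes (but not this build script itself).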
function kill_server_process() {
    ps -ef | grep "serving" | grep -v serving_build | grep -v grep | awk '{print $2}' | xargs kill
}

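# fit_a_line example: test RPC inference with test_client.py and HTTP
# inference through the web service, checking that the endpoint returns 200.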
function python_test_fit_a_line() {
    # pwd: /Serving/python/examples
    cd fit_a_line # pwd: /Serving/python/examples/fit_a_line
    sh get_data.sh
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            # test rpc
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process

            # test web
            unsetproxy # the proxy configured on iPipe may make the web test fail
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --name uci --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
            setproxy # recover proxy state
            kill_server_process
            if [ ${http_code} -ne 200 ]; then
                echo "HTTP status code -ne 200"
                exit 1
            fi
            ;;
        GPU)
            # test rpc
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process
 
            # test web
            unsetproxy # the proxy configured on iPipe may make the web test fail
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 2 --gpu_ids 0 --name uci > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
            setproxy # recover proxy state
            kill_server_process
            if [ ${http_code} -ne 200 ]; then
                echo "HTTP status code -ne 200"
                exit 1
            fi
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "test fit_a_line $TYPE part finished as expected."
    rm -rf image kvdb log uci_housing* work*
    unset SERVING_BIN
    cd .. # pwd: /Serving/python/examples
}

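# criteo_ctr_with_cube example: start the cube service, serve the KV model,
# run inference on the unittest data and check that the AUC exceeds the threshold.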
function python_run_criteo_ctr_with_cube() {
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y bc >/dev/null
    cd criteo_ctr_with_cube # pwd: /Serving/python/examples/criteo_ctr_with_cube
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/ 
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"    
            python test_server.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            AUC=$(tail -n 2  score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67"
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        GPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server_gpu.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            AUC=$(tail -n 2  score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67"
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    unset SERVING_BIN
    echo "test criteo_ctr_with_cube $TYPE part finished as expected."
    cd .. # pwd: /Serving/python/examples
}

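# bert example: prepare the seq20 model (prepare_model.py, requires paddlehub)
# and test RPC inference with bert_client.py.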
function python_test_bert() {
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y libXext libSM libXrender >/dev/null
    pip install ujson
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd bert # pwd: /Serving/python/examples/bert
    case $TYPE in
        CPU)
            pip install paddlehub
            python prepare_model.py 20
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model bert_seq20_model/ --port 9292 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_seq20_client/serving_client_conf.prototxt"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "serving" | grep -v grep | awk '{print $2}' | xargs kill
            echo "bert RPC inference pass" 
            ;;
        GPU)
            pip install paddlehub
            python prepare_model.py 20
            sh get_data.sh
            check_cmd "python -m paddle_serving_server_gpu.serve --model bert_seq20_model/ --port 9292 --gpu_ids 0 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_seq20_client/serving_client_conf.prototxt"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            echo "bert RPC inference pass"
            ;;
        *)
    esac
    echo "test bert $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

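# imdb example (CPU only): test RPC inference with the CNN model and HTTP
# inference through text_classify_service.py.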
function python_test_imdb() {
    # pwd: /Serving/python/examples
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd imdb # pwd: /Serving/python/examples/imdb
    case $TYPE in
        CPU)
            sh get_data.sh
            sleep 5
            check_cmd "python -m paddle_serving_server.serve --model imdb_cnn_model/ --port 9292 &"
            check_cmd "head test_data/part-0 | python test_client.py imdb_cnn_client_conf/serving_client_conf.prototxt imdb.vocab"
            echo "imdb CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            rm -rf work_dir1
            sleep 5

            check_cmd "python text_classify_service.py imdb_cnn_model/workdir/9292 imdb.vocab &"
            sleep 5
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"i am very sad | 0\", \"fetch\":[\"prediction\"]}' http://127.0.0.1:9292/imdb/prediction"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill
            echo "imdb CPU HTTP inference pass"           
            ;;
        GPU)
            echo "imdb ignore GPU test"
            ;;
        *)
    esac
    echo "test imdb $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

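# lac word-segmentation example (CPU only): test RPC inference with
# lac_client.py and HTTP inference through lac_web_service.py.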
function python_test_lac() {
    # pwd: /Serving/python/examples
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd lac # pwd: /Serving/python/examples/lac
    case $TYPE in
        CPU)
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model jieba_server_model/ --port 9292 &"
            sleep 5
            check_cmd "echo \"我爱北京天安门\" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/"
            echo "lac CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill

            check_cmd "python lac_web_service.py jieba_server_model/ lac_workdir 9292 &"
            sleep 5
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"我爱北京天安门\", \"fetch\":[\"word_seg\"]}' http://127.0.0.1:9292/lac/prediction"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "lac_web_service" | grep -v grep | awk '{print $2}' | xargs kill
            echo "lac CPU HTTP inference pass"
            ;;
        GPU)
            echo "lac ignore GPU test"
            ;;
        *)
    esac
    echo "test lac $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

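# Run all Python example tests against the binaries built above.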
function python_run_test() {
    # Using the compiled binary
    local TYPE=$1 # pwd: /Serving
    cd python/examples # pwd: /Serving/python/examples
    python_test_fit_a_line $TYPE # pwd: /Serving/python/examples
    python_run_criteo_ctr_with_cube $TYPE # pwd: /Serving/python/examples
    python_test_bert $TYPE # pwd: /Serving/python/examples
    python_test_imdb $TYPE 
    python_test_lac $TYPE    
    echo "test python $TYPE part finished as expected."
    cd ../.. # pwd: /Serving
}

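# Entry point: build client, server and app for the requested type, then run the tests.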
function main() {
    local TYPE=$1 # pwd: /
    init # pwd: /Serving
    build_client $TYPE # pwd: /Serving
    build_server $TYPE # pwd: /Serving
    build_app $TYPE # pwd: /Serving
    python_run_test $TYPE # pwd: /Serving
    echo "serving $TYPE part finished as expected."
}

main $@