#!/usr/bin/env bash

function unsetproxy() {
    # Stash the current proxy settings so setproxy() can restore them later,
    # then clear them so local HTTP requests bypass the proxy.
    HTTP_PROXY_TEMP=$http_proxy
    HTTPS_PROXY_TEMP=$https_proxy
    unset http_proxy https_proxy
}

function setproxy() {
    # Re-export the proxy values previously stashed by unsetproxy().
    export http_proxy="$HTTP_PROXY_TEMP"
    export https_proxy="$HTTPS_PROXY_TEMP"
}

function init() {
    # Prepare the build environment: load the shell config, locate the
    # Python installation, and enter the Serving source tree.
    # Exports SERVING_WORKDIR for the later build/test stages.
    source /root/.bashrc
    set -v
    export PYTHONROOT=/usr
    # Abort immediately if the source tree is missing; continuing would
    # run every later build step in the wrong directory.
    cd Serving || exit 1
    export SERVING_WORKDIR=$PWD
}

function check_cmd() {
    # Run the given command line via eval; abort the whole script if it
    # fails. Quoting "$@" preserves the caller's argument boundaries
    # (the original unquoted $@ re-split the arguments before eval).
    if ! eval "$@"; then
        exit 1
    fi
}

function rerun() {
    # Retry a (possibly flaky) command up to a given number of times;
    # exit the whole script if every attempt fails.
    #   $1 - command line to eval
    #   $2 - maximum number of attempts
    if [ $# -ne 2 ]; then
        # Usage errors are diagnostics — send them to stderr.
        echo "usage: rerun command rerun-times" >&2
        exit 1
    fi
    local command=$1
    local times=$2
    for ((i = 1; i <= times; i++)); do
        if [ "${i}" != 1 ]; then
            echo "${i}-th run command: ${command}..."
        fi
        # Return as soon as one attempt succeeds.
        if eval "${command}"; then
            return 0
        fi
        echo "${i}-th run(command: ${command}) failed."
    done
    exit 1
}

function build_app() {
    # Build and install the paddle_serving_app wheel for the given
    # device type (CPU or GPU) in an isolated build directory.
    local TYPE=$1
    local DIRNAME=build-app-$TYPE
    mkdir "$DIRNAME" # pwd: /Serving
    # A failed cd must abort: otherwise cmake/make run in /Serving itself.
    cd "$DIRNAME" || exit 1 # pwd: /Serving/build-app-$TYPE
    pip install numpy sentencepiece
    case $TYPE in
        CPU|GPU)
            # NOTE(review): the other build functions point at
            # $PYTHONROOT/lib64 — confirm "lib" is intentional here.
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DAPP=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            pip install -U python/dist/paddle_serving_app* >/dev/null
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "build app $TYPE part finished as expected."
    cd .. # pwd: /Serving
}

function build_client() {
    # Build and install the paddle_serving_client wheel for the given
    # device type (CPU or GPU) in an isolated build directory.
    local TYPE=$1
    local DIRNAME=build-client-$TYPE
    mkdir "$DIRNAME" # pwd: /Serving
    # A failed cd must abort: otherwise cmake/make run in /Serving itself.
    cd "$DIRNAME" || exit 1 # pwd: /Serving/build-client-$TYPE
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DCLIENT=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            pip install -U python/dist/paddle_serving_client* >/dev/null
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "build client $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME
}

function build_server() {
    # Build and install the paddle_serving_server wheel for the given
    # device type; the GPU case additionally enables -DWITH_GPU.
    # The build directory is kept so tests can use the compiled binary
    # via SERVING_BIN.
    local TYPE=$1
    local DIRNAME=build-server-$TYPE
    mkdir "$DIRNAME" # pwd: /Serving
    # A failed cd must abort: otherwise cmake/make run in /Serving itself.
    cd "$DIRNAME" || exit 1 # pwd: /Serving/build-server-$TYPE
    case $TYPE in
        CPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON \
                  -DWITH_GPU=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "build server $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME    for export SERVING_BIN
}

function kill_server_process() {
    # Terminate serving processes, excluding this build script itself
    # (serving_build) and the transient grep in the pipeline.
    ps -ef \
        | grep "serving" \
        | grep -v serving_build \
        | grep -v grep \
        | awk '{print $2}' \
        | xargs kill
}

function python_test_fit_a_line() {
    # End-to-end test of the fit_a_line demo for the given device type:
    # first the RPC client path, then the HTTP web-service path (whose
    # response code must be 200).
    # pwd: /Serving/python/examples
    local TYPE=$1
    cd fit_a_line || exit 1 # pwd: /Serving/python/examples/fit_a_line
    sh get_data.sh
    # Use the freshly-built serving binary instead of the packaged one.
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            # test rpc
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process

            # test web
            unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
            # (fixed: the original passed --name uci twice)
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --name uci --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=$(curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction)
            setproxy # recover proxy state
            kill_server_process
            if [ "${http_code}" -ne 200 ]; then
                echo "HTTP status code -ne 200" >&2
                exit 1
            fi
            ;;
        GPU)
            export CUDA_VISIBLE_DEVICES=0
            # test rpc
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process

            # test web
            unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 2 --gpu_ids 0 --name uci > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=$(curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction)
            setproxy # recover proxy state
            kill_server_process
            if [ "${http_code}" -ne 200 ]; then
                echo "HTTP status code -ne 200" >&2
                exit 1
            fi
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "test fit_a_line $TYPE part finished as expected."
    rm -rf image kvdb log uci_housing* work*
    unset SERVING_BIN
    cd .. # pwd: /Serving/python/examples
}

function python_run_criteo_ctr_with_cube() {
    # Test the criteo CTR demo backed by the cube KV service and verify
    # that the inference AUC stays above the 0.67 threshold.
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y bc >/dev/null # bc is needed for the AUC comparison below
    cd criteo_ctr_with_cube || exit 1 # pwd: /Serving/python/examples/criteo_ctr_with_cube
    # Use the freshly-built serving binary instead of the packaged one.
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            # NOTE(review): the wheel's version directory is hard-coded —
            # must be bumped together with releases; confirm 0.1.3 is current.
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            # The second-to-last line of the score file holds the AUC.
            AUC=$(tail -n 2 score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67" >&2
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            kill_server_process
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        GPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            # NOTE(review): hard-coded version directory, see CPU branch.
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server_gpu.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            AUC=$(tail -n 2 score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67" >&2
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            kill_server_process
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    unset SERVING_BIN
    echo "test criteo_ctr_with_cube $TYPE part finished as expected."
    cd .. # pwd: /Serving/python/examples
}

function python_test_bert() {
    # Smoke-test the bert demo over RPC for the given device type.
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y libXext libSM libXrender >/dev/null
    pip install ujson
    # Use the freshly-built serving binary instead of the packaged one.
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd bert || exit 1 # pwd: /Serving/python/examples/bert
    case $TYPE in
        CPU)
            pip install paddlehub
            # Because download from paddlehub may timeout,
            # download the model from bos(max_seq_len=128).
            wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SemanticModel/bert_chinese_L-12_H-768_A-12.tar.gz
            tar -xzf bert_chinese_L-12_H-768_A-12.tar.gz
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model bert_chinese_L-12_H-768_A-12_model --port 9292 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_chinese_L-12_H-768_A-12_client/serving_client_conf.prototxt"
            kill_server_process
            echo "bert RPC inference pass"
            ;;
        GPU)
            export CUDA_VISIBLE_DEVICES=0
            pip install paddlehub
            # Because download from paddlehub may timeout,
            # download the model from bos(max_seq_len=128).
            wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SemanticModel/bert_chinese_L-12_H-768_A-12.tar.gz
            tar -xzf bert_chinese_L-12_H-768_A-12.tar.gz
            sh get_data.sh
            check_cmd "python -m paddle_serving_server_gpu.serve --model bert_chinese_L-12_H-768_A-12_model --port 9292 --gpu_ids 0 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_chinese_L-12_H-768_A-12_client/serving_client_conf.prototxt"
            kill_server_process
            echo "bert RPC inference pass"
            ;;
        *)
            # Fail fast on an unknown device type, consistent with the
            # other test helpers (the original fell through silently).
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "test bert $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_test_imdb() {
    # Test the imdb sentiment demo: RPC inference and the HTTP web service.
    # pwd: /Serving/python/examples
    local TYPE=$1
    # Use the freshly-built serving binary instead of the packaged one.
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd imdb || exit 1 # pwd: /Serving/python/examples/imdb
    case $TYPE in
        CPU)
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model imdb_cnn_model/ --port 9292 &"
            sleep 5 # wait for the server to start (the original slept *before* launching it)
            check_cmd "head test_data/part-0 | python test_client.py imdb_cnn_client_conf/serving_client_conf.prototxt imdb.vocab"
            echo "imdb CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            rm -rf work_dir1
            sleep 5

            # NOTE(review): argument spacing restored (model dir, workdir,
            # port, vocab) to match the lac web-service invocation — confirm.
            check_cmd "python text_classify_service.py imdb_cnn_model/ workdir/ 9292 imdb.vocab &"
            sleep 5
            # Inner quotes must be escaped so valid JSON reaches the server
            # (the original's nested quotes stripped the JSON key quoting).
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"i am very sad | 0\", \"fetch\":[\"prediction\"]}' http://127.0.0.1:9292/imdb/prediction"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill
            echo "imdb CPU HTTP inference pass"
            ;;
        GPU)
            echo "imdb ignore GPU test"
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "test imdb $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_test_lac() {
    # Test the lac word-segmentation demo: RPC inference and the HTTP
    # web service.
    # pwd: /Serving/python/examples
    local TYPE=$1
    # Use the freshly-built serving binary instead of the packaged one.
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd lac || exit 1 # pwd: /Serving/python/examples/lac
    case $TYPE in
        CPU)
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model jieba_server_model/ --port 9292 &"
            sleep 5
            # Inner quotes must be escaped so the sample text survives the
            # eval inside check_cmd (the original's nested quotes broke).
            check_cmd "echo \"我爱北京天安门\" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/"
            echo "lac CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill

            check_cmd "python lac_web_service.py jieba_server_model/ lac_workdir 9292 &"
            sleep 5
            # Escaped quotes keep the JSON body intact through the eval.
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"我爱北京天安门\", \"fetch\":[\"word_seg\"]}' http://127.0.0.1:9292/lac/prediction"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "lac_web_service" | grep -v grep | awk '{print $2}' | xargs kill
            echo "lac CPU HTTP inference pass"
            ;;
        GPU)
            echo "lac ignore GPU test"
            ;;
        *)
            echo "error type" >&2
            exit 1
            ;;
    esac
    echo "test lac $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_run_test() {
    # Run every python example test against the compiled binaries.
    # Each helper restores the cwd to /Serving/python/examples on return.
    local TYPE=$1 # pwd: /Serving
    cd python/examples || exit 1 # pwd: /Serving/python/examples
    python_test_fit_a_line "$TYPE" # pwd: /Serving/python/examples
    python_run_criteo_ctr_with_cube "$TYPE" # pwd: /Serving/python/examples
    python_test_bert "$TYPE" # pwd: /Serving/python/examples
    python_test_imdb "$TYPE" # pwd: /Serving/python/examples
    python_test_lac "$TYPE"
    echo "test python $TYPE part finished as expected."
    cd ../.. # pwd: /Serving
}

function main() {
    # Entry point: build client/server/app wheels for the requested
    # device type ($1: CPU or GPU), then run the python example tests.
    local TYPE=$1 # pwd: /
    init # pwd: /Serving
    build_client "$TYPE" # pwd: /Serving
    build_server "$TYPE" # pwd: /Serving
    build_app "$TYPE" # pwd: /Serving
    python_run_test "$TYPE" # pwd: /Serving
    echo "serving $TYPE part finished as expected."
}

# Quote "$@" so argument word boundaries are preserved (SC2068).
main "$@"