#!/usr/bin/env bash
function unsetproxy() {
    # Stash the current proxy settings so setproxy() can restore them,
    # then clear them for proxy-sensitive local tests.
    HTTP_PROXY_TEMP=$http_proxy
    HTTPS_PROXY_TEMP=$https_proxy
    unset http_proxy https_proxy
}

function setproxy() {
    # Restore the proxy settings previously stashed by unsetproxy().
    export http_proxy=$HTTP_PROXY_TEMP
    export https_proxy=$HTTPS_PROXY_TEMP
}

function init() {
    # Prepare the CI environment: load the shell profile, echo commands as
    # they run, point PYTHONROOT at the system python, and enter the repo.
    source /root/.bashrc
    set -v
    export PYTHONROOT=/usr
    cd Serving # pwd: /Serving
    # Remember the repo root so later stages can locate build output.
    export SERVING_WORKDIR=$PWD
    $PYTHONROOT/bin/python -m pip install -r python/requirements.txt
}

function check_cmd() {
    # Run a command string and abort the whole build if it fails.
    # Fix: quote "$@" so argument boundaries survive word-splitting,
    # and branch on eval's status directly instead of re-reading $?.
    if ! eval "$@"; then
        exit 1
    fi
}

function rerun() {
    # Retry a command up to N times; exit the script if every attempt fails.
    #   $1 - command string (eval'd)
    #   $2 - maximum number of attempts
    if [ $# -ne 2 ]; then
        echo "usage: rerun command rerun-times"
        exit 1
    fi
    local command=$1
    local times=$2
    local i   # fix: keep the loop counter out of the caller's scope
    for (( i = 1; i <= times; i++ )); do
        if [ "${i}" != 1 ]; then
            echo "${i}-th run command: ${command}..."
        fi
        # fix: quote the eval argument; branch on status directly
        if eval "${command}"; then
            return 0
        fi
        echo "${i}-th run(command: ${command}) failed."
    done
    exit 1
}

function build_app() {
    # Configure, build and install the paddle_serving_app wheel in an
    # out-of-tree directory named after the build type (CPU/GPU).
    local TYPE=$1
    local DIRNAME=build-app-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-app-$TYPE
    pip install numpy sentencepiece
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DAPP=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            pip install -U python/dist/paddle_serving_app* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build app $TYPE part finished as expected."
    cd .. # pwd: /Serving
}

function build_client() {
    # Configure, build and install the paddle_serving_client wheel.
    # NOTE(review): this uses $PYTHONROOT/lib64 while build_app uses
    # $PYTHONROOT/lib — confirm which layout the CI image actually has.
    local TYPE=$1
    local DIRNAME=build-client-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-client-$TYPE
    case $TYPE in
        CPU|GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DCLIENT=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            pip install -U python/dist/paddle_serving_client* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build client $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME
}

function build_server() {
    # Configure, build and install the paddle_serving_server wheel.
    # The build tree is kept afterwards because SERVING_BIN points into it.
    local TYPE=$1
    local DIRNAME=build-server-$TYPE
    mkdir $DIRNAME # pwd: /Serving
    cd $DIRNAME # pwd: /Serving/build-server-$TYPE
    case $TYPE in
        CPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        GPU)
            cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
                  -DPYTHON_LIBRARIES=$PYTHONROOT/lib64/libpython2.7.so \
                  -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
                  -DSERVER=ON \
                  -DWITH_GPU=ON ..
            rerun "make -j2 >/dev/null" 3 # due to some network reasons, compilation may fail
            check_cmd "make install -j2 >/dev/null"
            pip install -U python/dist/paddle_serving_server* >/dev/null
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "build server $TYPE part finished as expected."
    cd .. # pwd: /Serving
    # rm -rf $DIRNAME    for export SERVING_BIN
}

function kill_server_process() {
    # Terminate any running "serving" processes, excluding this build
    # script itself (serving_build) and the grep in the pipeline.
    ps -ef | grep "serving" | grep -v serving_build | grep -v grep | awk '{print $2}' | xargs kill
}

function python_test_fit_a_line() {
    # Smoke-test the fit_a_line demo over RPC and HTTP for the given
    # build type (CPU/GPU), using the freshly compiled serving binary.
    # pwd: /Serving/python/examples
    cd fit_a_line # pwd: /Serving/python/examples/fit_a_line
    sh get_data.sh
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            # test rpc
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process

            # test web
            unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
            # fix: "--name uci" was passed twice on this command line
            check_cmd "python -m paddle_serving_server.serve --model uci_housing_model --name uci --port 9393 --thread 4 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
            setproxy # recover proxy state
            kill_server_process
            if [ ${http_code} -ne 200 ]; then
                echo "HTTP status code -ne 200"
                exit 1
            fi
            ;;
        GPU)
            export CUDA_VISIBLE_DEVICES=0
            # test rpc
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 4 --gpu_ids 0 > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "python test_client.py uci_housing_client/serving_client_conf.prototxt > /dev/null"
            kill_server_process

            # test web
            unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed.
            check_cmd "python -m paddle_serving_server_gpu.serve --model uci_housing_model --port 9393 --thread 2 --gpu_ids 0 --name uci > /dev/null &"
            sleep 5 # wait for the server to start
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"x\": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], \"fetch\":[\"price\"]}' http://127.0.0.1:9393/uci/prediction"
            # check http code
            http_code=`curl -H "Content-Type:application/json" -X POST -d '{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332], "fetch":["price"]}' -s -w "%{http_code}" -o /dev/null http://127.0.0.1:9393/uci/prediction`
            setproxy # recover proxy state
            kill_server_process
            if [ ${http_code} -ne 200 ]; then
                echo "HTTP status code -ne 200"
                exit 1
            fi
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    echo "test fit_a_line $TYPE part finished as expected."
    rm -rf image kvdb log uci_housing* work*
    unset SERVING_BIN
    cd .. # pwd: /Serving/python/examples
}

function python_run_criteo_ctr_with_cube() {
    # Run the criteo_ctr_with_cube demo end-to-end and check that the
    # reported AUC exceeds the (relaxed) 0.67 threshold.
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y bc >/dev/null
    cd criteo_ctr_with_cube # pwd: /Serving/python/examples/criteo_ctr_with_cube
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    case $TYPE in
        CPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server/serving-cpu-avx-openblas-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            AUC=$(tail -n 2  score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67"
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            kill_server_process
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        GPU)
            check_cmd "wget https://paddle-serving.bj.bcebos.com/unittest/ctr_cube_unittest.tar.gz"
            check_cmd "tar xf ctr_cube_unittest.tar.gz"
            check_cmd "mv models/ctr_client_conf ./"
            check_cmd "mv models/ctr_serving_model_kv ./"
            check_cmd "mv models/data ./cube/"
            check_cmd "mv models/ut_data ./"
            cp ../../../build-server-$TYPE/output/bin/cube* ./cube/
            mkdir -p $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            yes | cp ../../../build-server-$TYPE/output/demo/serving/bin/serving $PYTHONROOT/lib/python2.7/site-packages/paddle_serving_server_gpu/serving-gpu-0.1.3/
            sh cube_prepare.sh &
            check_cmd "mkdir work_dir1 && cp cube/conf/cube.conf ./work_dir1/"
            python test_server_gpu.py ctr_serving_model_kv &
            sleep 5
            check_cmd "python test_client.py ctr_client_conf/serving_client_conf.prototxt ./ut_data >score"
            tail -n 2 score | awk 'NR==1'
            AUC=$(tail -n 2  score | awk 'NR==1')
            VAR2="0.67" #TODO: temporarily relax the threshold to 0.67
            RES=$( echo "$AUC>$VAR2" | bc )
            if [[ $RES -eq 0 ]]; then
                echo "error with criteo_ctr_with_cube inference auc test, auc should > 0.67"
                exit 1
            fi
            echo "criteo_ctr_with_cube inference auc test success"
            kill_server_process
            ps -ef | grep "cube" | grep -v grep | awk '{print $2}' | xargs kill
            ;;
        *)
            echo "error type"
            exit 1
            ;;
    esac
    unset SERVING_BIN
    echo "test criteo_ctr_with_cube $TYPE part finished as expected."
    cd .. # pwd: /Serving/python/examples
}

function python_test_bert() {
    # Smoke-test the bert demo over RPC for the given build type.
    # pwd: /Serving/python/examples
    local TYPE=$1
    yum install -y libXext libSM libXrender >/dev/null
    pip install ujson
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd bert # pwd: /Serving/python/examples/bert
    case $TYPE in
        CPU)
            pip install paddlehub
            # Because download from paddlehub may timeout,
            # download the model from bos(max_seq_len=128).
            wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SemanticModel/bert_chinese_L-12_H-768_A-12.tar.gz
            tar -xzf bert_chinese_L-12_H-768_A-12.tar.gz
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model bert_chinese_L-12_H-768_A-12_model --port 9292 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_chinese_L-12_H-768_A-12_client/serving_client_conf.prototxt"
            kill_server_process
            echo "bert RPC inference pass"
            ;;
        GPU)
            export CUDA_VISIBLE_DEVICES=0
            pip install paddlehub
            # Because download from paddlehub may timeout,
            # download the model from bos(max_seq_len=128).
            wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SemanticModel/bert_chinese_L-12_H-768_A-12.tar.gz
            tar -xzf bert_chinese_L-12_H-768_A-12.tar.gz
            sh get_data.sh
            check_cmd "python -m paddle_serving_server_gpu.serve --model bert_chinese_L-12_H-768_A-12_model --port 9292 --gpu_ids 0 &"
            sleep 5
            pip install paddle_serving_app
            check_cmd "head -n 10 data-c.txt | python bert_client.py --model bert_chinese_L-12_H-768_A-12_client/serving_client_conf.prototxt"
            kill_server_process
            echo "bert RPC inference pass"
            ;;
        *)
    esac
    echo "test bert $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_test_imdb() {
    # Smoke-test the imdb demo over RPC and HTTP (CPU only).
    # pwd: /Serving/python/examples
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd imdb # pwd: /Serving/python/examples/imdb
    case $TYPE in
        CPU)
            sh get_data.sh
            sleep 5
            check_cmd "python -m paddle_serving_server.serve --model imdb_cnn_model/ --port 9292 &"
            check_cmd "head test_data/part-0 | python test_client.py imdb_cnn_client_conf/serving_client_conf.prototxt imdb.vocab"
            echo "imdb CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            rm -rf work_dir1
            sleep 5

            # NOTE(review): "imdb_cnn_model/workdir/9292" looks like fused
            # arguments — confirm whether it should be "imdb_cnn_model/ workdir/ 9292".
            check_cmd "python text_classify_service.py imdb_cnn_model/workdir/9292 imdb.vocab &"
            sleep 5
            # fix: inner double quotes must be escaped, otherwise they
            # terminate the outer string and curl receives a mangled body.
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"i am very sad | 0\", \"fetch\":[\"prediction\"]}' http://127.0.0.1:9292/imdb/prediction"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill
            echo "imdb CPU HTTP inference pass"
            ;;
        GPU)
            echo "imdb ignore GPU test"
            ;;
        *)
    esac
    echo "test imdb $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_test_lac() {
    # Smoke-test the lac (word segmentation) demo over RPC and HTTP (CPU only).
    # pwd: /Serving/python/examples
    local TYPE=$1
    export SERVING_BIN=${SERVING_WORKDIR}/build-server-${TYPE}/core/general-server/serving
    cd lac # pwd: /Serving/python/examples/lac
    case $TYPE in
        CPU)
            sh get_data.sh
            check_cmd "python -m paddle_serving_server.serve --model jieba_server_model/ --port 9292 &"
            sleep 5
            # fix: inner double quotes must be escaped so the command
            # string passed to check_cmd stays intact.
            check_cmd "echo \"我爱北京天安门\" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/"
            echo "lac CPU RPC inference pass"
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill

            check_cmd "python lac_web_service.py jieba_server_model/ lac_workdir 9292 &"
            sleep 5
            # fix: same nested-quote problem on the curl command line.
            check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"words\": \"我爱北京天安门\", \"fetch\":[\"word_seg\"]}' http://127.0.0.1:9292/lac/prediction"
            kill_server_process
            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
            ps -ef | grep "lac_web_service" | grep -v grep | awk '{print $2}' | xargs kill
            echo "lac CPU HTTP inference pass"
            ;;
        GPU)
            echo "lac ignore GPU test"
            ;;
        *)
    esac
    echo "test lac $TYPE finished as expected."
    unset SERVING_BIN
    cd ..
}

function python_run_test() {
    # Drive every python example test against the compiled binary.
    local TYPE=$1 # pwd: /Serving
    cd python/examples # pwd: /Serving/python/examples
    python_test_fit_a_line $TYPE # pwd: /Serving/python/examples
    python_run_criteo_ctr_with_cube $TYPE # pwd: /Serving/python/examples
    python_test_bert $TYPE # pwd: /Serving/python/examples
    python_test_imdb $TYPE # pwd: /Serving/python/examples
    python_test_lac $TYPE
    echo "test python $TYPE part finished as expected."
    cd ../.. # pwd: /Serving
}

function main() {
    # Full pipeline: init environment, build client/server/app wheels,
    # then run the python example tests for the given build type.
    local TYPE=$1 # pwd: /
    init # pwd: /Serving
    build_client $TYPE # pwd: /Serving
    build_server $TYPE # pwd: /Serving
    build_app $TYPE # pwd: /Serving
    python_run_test $TYPE # pwd: /Serving
    echo "serving $TYPE part finished as expected."
}

main $@