From 140ce16a82e70893df51e1c19c346b3b5ea66ffb Mon Sep 17 00:00:00 2001
From: barrierye
Date: Wed, 29 Apr 2020 23:34:32 +0800
Subject: [PATCH] add http batch predict ut

---
 python/examples/imdb/benchmark_batch.py | 12 +++++++-----
 tools/serving_build.sh                  | 19 ++++++++++++++++++-
 2 files changed, 25 insertions(+), 6 deletions(-)

diff --git a/python/examples/imdb/benchmark_batch.py b/python/examples/imdb/benchmark_batch.py
index 107008f5..7e873821 100644
--- a/python/examples/imdb/benchmark_batch.py
+++ b/python/examples/imdb/benchmark_batch.py
@@ -50,18 +50,20 @@ def single_func(idx, resource):
             print("unsupport batch size {}".format(args.batch_size))

     elif args.request == "http":
-        #TODO: not support yet
-        raise ("no batch predict for http")
         if args.batch_size >= 1:
             feed_batch = []
             for bi in range(args.batch_size):
                 word_ids, label = imdb_dataset.get_words_and_label(dataset[bi])
-                feed_batch.append(word_ids)
+                feed_batch.append({"words": word_ids})
             r = requests.post(
                 "http://{}/imdb/prediction".format(args.endpoint),
-                data={"words": feed_batch,
+                data={"feed": feed_batch,
                       "fetch": ["prediction"]})
-            print(r)
+            if r.status_code != 200:
+                print('HTTP status code != 200')
+                exit(1)
+        else:
+            print("unsupport batch size {}".format(args.batch_size))

     end = time.time()
     return [[end - start]]
diff --git a/tools/serving_build.sh b/tools/serving_build.sh
index e39d6974..1ecbe06c 100644
--- a/tools/serving_build.sh
+++ b/tools/serving_build.sh
@@ -332,12 +332,19 @@ function python_test_imdb() {
             rm -rf work_dir1
             sleep 5

-            check_cmd "python text_classify_service.py imdb_cnn_model/workdir/9292 imdb.vocab &"
+            unsetproxy  # a proxy may be set on iPipe, which makes the web test fail
+            check_cmd "python text_classify_service.py imdb_cnn_model/ workdir/ 9292 imdb.vocab &"
             sleep 5
             check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"feed\":[{\"words\": \"i am very sad | 0\"}], \"fetch\":[\"prediction\"]}' http://127.0.0.1:9292/imdb/prediction"
+            http_code=`curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"words": "i am very sad | 0"}], "fetch":["prediction"]}' -s -o /dev/null -w "%{http_code}" http://127.0.0.1:9292/imdb/prediction`
+            setproxy  # restore the proxy state
             kill_server_process
             ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
             ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill
+            if [ ${http_code} -ne 200 ]; then
+                echo "HTTP status code is not 200"
+                exit 1
+            fi
             echo "imdb CPU HTTP inference pass"

             # test batch predict
@@ -346,6 +353,16 @@
             check_cmd "python benchmark_batch.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request rpc --endpoint 127.0.0.1:9292"
             kill_server_process
             echo "imdb CPU rpc batch inference pass"
+
+            unsetproxy  # a proxy may be set on iPipe, which makes the web test fail
+            check_cmd "python text_classify_service.py imdb_cnn_model/ workdir/ 9292 imdb.vocab &"
+            sleep 5
+            check_cmd "python benchmark_batch.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request http --endpoint 127.0.0.1:9292"
+            setproxy  # restore the proxy state
+            kill_server_process
+            ps -ef | grep "paddle_serving_server" | grep -v grep | awk '{print $2}' | xargs kill
+            ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill
+            echo "imdb CPU http batch inference pass"
             ;;
         GPU)
             echo "imdb ignore GPU test"
--
GitLab
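
Note (not part of the patch): a minimal sketch of the batched HTTP call that the new --request http path issues, for anyone exercising the endpoint by hand. It assumes the web service started by text_classify_service.py is listening on 127.0.0.1:9292 and accepts a JSON body, as the curl check in serving_build.sh suggests; the word-id lists below are made-up placeholders, not entries from imdb.vocab.

    # Hand-testing sketch for the HTTP batch path (assumption-laden, see note above).
    import requests

    # One dict per sample in the batch, mirroring feed_batch in benchmark_batch.py;
    # the word ids are placeholders, not real vocabulary entries.
    feed_batch = [{"words": [8, 233, 52, 601]},
                  {"words": [13, 5, 442, 9]}]
    r = requests.post(
        "http://127.0.0.1:9292/imdb/prediction",
        json={"feed": feed_batch,
              "fetch": ["prediction"]})
    if r.status_code != 200:
        raise SystemExit("HTTP status code != 200")
    print(r.json())  # expected to contain the fetched "prediction" values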