diff --git a/deploy/pdserving/ocr_reader.py b/deploy/pdserving/ocr_reader.py
index 3f219784fca79715d09ae9353a32d95e2e427cb6..67099786ea73b66412dac8f965e20201f0ac1fdc 100644
--- a/deploy/pdserving/ocr_reader.py
+++ b/deploy/pdserving/ocr_reader.py
@@ -433,3 +433,54 @@ class OCRReader(object):
         text = self.label_ops.decode(
             preds_idx, preds_prob, is_remove_duplicate=True)
         return text
+
+from argparse import ArgumentParser,RawDescriptionHelpFormatter
+import yaml
+class ArgsParser(ArgumentParser):
+    def __init__(self):
+        super(ArgsParser, self).__init__(
+            formatter_class=RawDescriptionHelpFormatter)
+        self.add_argument("-c", "--config", help="configuration file to use")
+        self.add_argument(
+            "-o", "--opt", nargs='+', help="set configuration options")
+
+    def parse_args(self, argv=None):
+        args = super(ArgsParser, self).parse_args(argv)
+        assert args.config is not None, \
+            "Please specify --config=configure_file_path."
+        args.conf_dict = self._parse_opt(args.opt, args.config)
+        print("args config:", args.conf_dict)
+        return args
+
+    def _parse_helper(self, v):
+        if v.isnumeric():
+            if "." in v:
+                v = float(v)
+            else:
+                v = int(v)
+        elif v == "True" or v == "False":
+            v = (v == "True")
+        return v
+
+    def _parse_opt(self, opts, conf_path):
+        f = open(conf_path)
+        config = yaml.load(f, Loader=yaml.Loader)
+        if not opts:
+            return config
+        for s in opts:
+            s = s.strip()
+            k, v = s.split('=')
+            v = self._parse_helper(v)
+            print(k, v, type(v))
+            cur = config
+            parent = cur
+            for kk in k.split("."):
+                if kk not in cur:
+                    cur[kk] = {}
+                    parent = cur
+                    cur = cur[kk]
+                else:
+                    parent = cur
+                    cur = cur[kk]
+            parent[k.split(".")[-1]] = v
+        return config
\ No newline at end of file
diff --git a/deploy/pdserving/web_service_det.py b/deploy/pdserving/web_service_det.py
index 38814ea8d1744ec4c89b54884bfb34b2a6a9fb7a..0ca8dbc41bbdde4caf76bcfddabe4b9c2e94cb4b 100644
--- a/deploy/pdserving/web_service_det.py
+++ b/deploy/pdserving/web_service_det.py
@@ -18,63 +18,13 @@ import numpy as np
 import cv2
 import base64
 # from paddle_serving_app.reader import OCRReader
-from ocr_reader import OCRReader, DetResizeForTest
+from ocr_reader import OCRReader, DetResizeForTest, ArgsParser
 from paddle_serving_app.reader import Sequential, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes, GetRotateCropImage, SortedBoxes
-import yaml
-from argparse import ArgumentParser,RawDescriptionHelpFormatter
-
 _LOGGER = logging.getLogger()
 
-class ArgsParser(ArgumentParser):
-    def __init__(self):
-        super(ArgsParser, self).__init__(
-            formatter_class=RawDescriptionHelpFormatter)
-        self.add_argument("-c", "--config", help="configuration file to use")
-        self.add_argument(
-            "-o", "--opt", nargs='+', help="set configuration options")
-
-    def parse_args(self, argv=None):
-        args = super(ArgsParser, self).parse_args(argv)
-        assert args.config is not None, \
-            "Please specify --config=configure_file_path."
-        args.conf_dict = self._parse_opt(args.opt, args.config)
-        return args
-
-    def _parse_helper(self, v):
-        if v.isnumeric():
-            if "." in v:
-                v = float(v)
-            else:
-                v = int(v)
-        elif v == "True" or v == "False":
-            v = (v == "True")
-        return v
-
-    def _parse_opt(self, opts, conf_path):
-        f = open(conf_path)
-        config = yaml.load(f, Loader=yaml.Loader)
-        if not opts:
-            return config
-        for s in opts:
-            s = s.strip()
-            k, v = s.split('=')
-            v = self._parse_helper(v)
-            print(k, v, type(v))
-            cur = config
-            parent = cur
-            for kk in k.split("."):
-                if kk not in cur:
-                    cur[kk] = {}
-                    parent = cur
-                    cur = cur[kk]
-                else:
-                    parent = cur
-                    cur = cur[kk]
-            parent[k.split(".")[-1]] = v
-        return config
 
 class DetOp(Op):
     def init_op(self):
diff --git a/deploy/pdserving/web_service_rec.py b/deploy/pdserving/web_service_rec.py
index 842728edddd179e28704e4e39e7bf771db6d21de..25d8962f93b5dcbbf7ec3c19374f0a19d1071d6d 100644
--- a/deploy/pdserving/web_service_rec.py
+++ b/deploy/pdserving/web_service_rec.py
@@ -18,10 +18,9 @@ import numpy as np
 import cv2
 import base64
 # from paddle_serving_app.reader import OCRReader
-from ocr_reader import OCRReader, DetResizeForTest
+from ocr_reader import OCRReader, DetResizeForTest, ArgsParser
 from paddle_serving_app.reader import Sequential, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
-from web_service_det import ArgsParser
 
 _LOGGER = logging.getLogger()
 
@@ -79,10 +78,11 @@ class RecOp(Op):
 class OcrService(WebService):
     def get_pipeline_response(self, read_op):
         rec_op = RecOp(name="rec", input_ops=[read_op])
+        print("rec op:", rec_op)
         return rec_op
 
 
 uci_service = OcrService(name="ocr")
 FLAGS = ArgsParser().parse_args()
 uci_service.prepare_pipeline_config(yml_dict=FLAGS.conf_dict)
-uci_service.run_service()
+uci_service.run_service()
\ No newline at end of file
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
index 6e5cecf632a42294006cffdf4cf3a466a326260b..2326c9d2a7a785bf5f94124476fb3c21f91ceed2 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
@@ -8,8 +8,8 @@ trans_model:-m paddle_serving_client.convert
 --serving_server:./deploy/pdserving/ppocr_det_mobile_2.0_serving/
 --serving_client:./deploy/pdserving/ppocr_det_mobile_2.0_client/
 serving_dir:./deploy/pdserving
-web_service:web_service_det.py --config=config.yml --opt op.det.concurrency=1
-op.det.local_service_conf.devices:null|0
+web_service:web_service_det.py --config=config.yml --opt op.det.concurrency="1"
+op.det.local_service_conf.devices:"0"|null
 op.det.local_service_conf.use_mkldnn:True|False
 op.det.local_service_conf.thread_num:1|6
 op.det.local_service_conf.use_trt:False|True
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
index 7351e5bd6d5d8ffc5d49b313ad662b1e2fd55bd2..f890eff469ba82b87d2d83000add24cc9d380c49 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
@@ -9,7 +9,7 @@ trans_model:-m paddle_serving_client.convert
 --serving_client:./deploy/pdserving/ppocr_rec_mobile_2.0_client/
 serving_dir:./deploy/pdserving
 web_service:web_service_rec.py --config=config.yml --opt op.rec.concurrency=1
-op.rec.local_service_conf.devices:null|0
+op.rec.local_service_conf.devices:"0"|null
 op.rec.local_service_conf.use_mkldnn:True|False
 op.rec.local_service_conf.thread_num:1|6
 op.rec.local_service_conf.use_trt:False|True
diff --git a/test_tipc/configs/ch_ppocr_server_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
index 09b7ab750408a54fa292f1168d8de01bd962ca43..ec5464604697e15bdd4e0f7282d23a8e09f4a0b5 100644
--- a/test_tipc/configs/ch_ppocr_server_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
@@ -9,10 +9,10 @@ trans_model:-m paddle_serving_client.convert
 --serving_client:./deploy/pdserving/ppocr_det_server_2.0_client/
 serving_dir:./deploy/pdserving
 web_service:web_service_det.py --config=config.yml --opt op.det.concurrency=1
-op.det.local_service_conf.devices:null|0
+op.det.local_service_conf.devices:"0"|null
 op.det.local_service_conf.use_mkldnn:True|False
 op.det.local_service_conf.thread_num:1|6
 op.det.local_service_conf.use_trt:False|True
 op.det.local_service_conf.precision:fp32|fp16|int8
 pipline:pipeline_rpc_client.py|pipeline_http_client.py
---image_dir:../../doc/imgs_words_en
\ No newline at end of file
+--image_dir:../../doc/imgs
\ No newline at end of file
diff --git a/test_tipc/configs/ch_ppocr_server_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
index 24e7a8f3e0364f2a0a14c74a27da7372508cd414..d72abc6054d5f2eccf35f305076b7062fdf49848 100644
--- a/test_tipc/configs/ch_ppocr_server_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2.0_rec/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt
@@ -1,6 +1,6 @@
 ===========================serving_params===========================
 model_name:ocr_rec_server
-python:python3.7
+python:python3.7|cpp
 trans_model:-m paddle_serving_client.convert
 --dirname:./inference/ch_ppocr_server_v2.0_rec_infer/
 --model_filename:inference.pdmodel
@@ -9,7 +9,7 @@ trans_model:-m paddle_serving_client.convert
 --serving_client:./deploy/pdserving/ppocr_rec_server_2.0_client/
 serving_dir:./deploy/pdserving
 web_service:web_service_rec.py --config=config.yml --opt op.rec.concurrency=1
-op.rec.local_service_conf.devices:null|0
+op.rec.local_service_conf.devices:"0"|null
 op.rec.local_service_conf.use_mkldnn:True|False
 op.rec.local_service_conf.thread_num:1|6
 op.rec.local_service_conf.use_trt:False|True
diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh
index 62451417287228868c33f778f3aae796b53dabcf..bd4af1923c0e00a613ea2734c6fa90232d35469f 100644
--- a/test_tipc/prepare.sh
+++ b/test_tipc/prepare.sh
@@ -308,10 +308,9 @@ if [ ${MODE} = "serving_infer" ];then
     IFS='|'
     array=(${python_name_list})
     python_name=${array[0]}
-    wget -nc https://paddle-serving.bj.bcebos.com/chain/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl
-    ${python_name} -m pip install install paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl
-    ${python_name} -m pip install paddle_serving_client==0.6.1
-    ${python_name} -m pip install paddle-serving-app==0.6.3
+    ${python_name} -m pip install paddle-serving-server-gpu==0.8.3.post101
+    ${python_name} -m pip install paddle_serving_client==0.8.3
+    ${python_name} -m pip install paddle-serving-app==0.8.3
     wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
     wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar
     wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar
diff --git a/test_tipc/test_serving.sh b/test_tipc/test_serving.sh
index 1318d012d401c4f4e8540a5d0d227ea75f677004..fa698a43f9b0d78e1c84fdc5031d1a79e7bccdf2 100644
--- a/test_tipc/test_serving.sh
+++ b/test_tipc/test_serving.sh
@@ -62,25 +62,30 @@ function func_serving(){
     unset https_proxy
     unset http_proxy
     for python in ${python_list[*]}; do
+        echo ${python}
         if [ ${python} = "cpp" ]; then
            for use_gpu in ${web_use_gpu_list[*]}; do
                if [ ${use_gpu} = "null" ]; then
-                    web_service_cpp_cmd="${python} -m paddle_serving_server.serve --model ppocr_det_mobile_2.0_serving/ ppocr_rec_mobile_2.0_serving/ --port 9293"
-                    eval $web_service_cmd
+                    web_service_cpp_cmd="${python_list[0]} -m paddle_serving_server.serve --model ppocr_det_mobile_2.0_serving/ ppocr_rec_mobile_2.0_serving/ --port 9293"
+                    eval $web_service_cpp_cmd
+                    last_status=${PIPESTATUS[0]}
+                    status_check $last_status "${web_service_cpp_cmd}" "${status_log}"
                     sleep 2s
                     _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
-                    pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
+                    pipeline_cmd="${python_list[0]} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
                     eval $pipeline_cmd
+                    last_status=${PIPESTATUS[0]}
                     status_check $last_status "${pipeline_cmd}" "${status_log}"
                     sleep 2s
                     ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
                else
-                    web_service_cpp_cmd="${python} -m paddle_serving_server.serve --model ppocr_det_mobile_2.0_serving/ ppocr_rec_mobile_2.0_serving/ --port 9293 --gpu_id=0"
-                    eval $web_service_cmd
+                    web_service_cpp_cmd="${python_list[0]} -m paddle_serving_server.serve --model ppocr_det_mobile_2.0_serving/ ppocr_rec_mobile_2.0_serving/ --port 9293 --gpu_id=0"
+                    eval $web_service_cpp_cmd
                     sleep 2s
                     _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
-                    pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
+                    pipeline_cmd="${python_list[0]} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
                     eval $pipeline_cmd
+                    last_status=${PIPESTATUS[0]}
                     status_check $last_status "${pipeline_cmd}" "${status_log}"
                     sleep 2s
                     ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
@@ -88,14 +93,17 @@ function func_serving(){
            done
         else
            # python serving
+           echo ${web_use_gpu_list[*]}
            for use_gpu in ${web_use_gpu_list[*]}; do
                echo ${ues_gpu}
                if [ ${use_gpu} = "null" ]; then
                    for use_mkldnn in ${web_use_mkldnn_list[*]}; do
                        for threads in ${web_cpu_threads_list[*]}; do
                            set_cpu_threads=$(func_set_params "${web_cpu_threads_key}" "${threads}")
-                           web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}=${use_gpu} ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} &"
+                           web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} &"
                            eval $web_service_cmd
+                           last_status=${PIPESTATUS[0]}
+                           status_check $last_status "${web_service_cmd}" "${status_log}"
                           sleep 2s
                           for pipeline in ${pipeline_py[*]}; do
                               _save_log_path="${LOG_PATH}/server_infer_cpu_${pipeline%_client*}_usemkldnn_${use_mkldnn}_threads_${threads}_batchsize_1.log"
@@ -128,6 +136,8 @@ function func_serving(){
                           set_precision=$(func_set_params "${web_precision_key}" "${precision}")
                           web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}=${use_gpu} ${set_tensorrt} ${set_precision} & "
                           eval $web_service_cmd
+                          last_status=${PIPESTATUS[0]}
+                          status_check $last_status "${web_service_cmd}" "${status_log}"
                           sleep 2s
                           for pipeline in ${pipeline_py[*]}; do
@@ -151,15 +161,15 @@ function func_serving(){
 }
 
 
-# set cuda device
+#set cuda device
 GPUID=$2
 if [ ${#GPUID} -le 0 ];then
-    env=" "
+    env="export CUDA_VISIBLE_DEVICES=0"
 else
     env="export CUDA_VISIBLE_DEVICES=${GPUID}"
 fi
-set CUDA_VISIBLE_DEVICES
 eval $env
+echo $env
 
 echo "################### run test ###################"
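
For reference, a minimal usage sketch (not part of the patch) of the ArgsParser that this change relocates into ocr_reader.py. It assumes a config.yml such as the one the TIPC configs above pass via --config is present in the working directory, and uses an op.det.concurrency override mirroring those configs; the argument list passed here is only an illustration of what web_service_det.py receives on its command line.

# Hypothetical invocation, equivalent to:
#   python3.7 web_service_det.py --config=config.yml --opt op.det.concurrency=1
from ocr_reader import ArgsParser

FLAGS = ArgsParser().parse_args(
    ["--config", "config.yml", "--opt", "op.det.concurrency=1"])

# _parse_helper converts the string "1" to int 1, and _parse_opt walks the
# dotted key to write it into the nested config dict loaded from config.yml,
# so the override ends up under conf_dict["op"]["det"]["concurrency"].
print(FLAGS.conf_dict["op"]["det"]["concurrency"])  # 1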