diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh
index 5a18c3868db078a35862291867142ae449807ab0..cd2ad6fc55b267c2bb8f6ffccbf406dc6fefe3c7 100644
--- a/test_tipc/prepare.sh
+++ b/test_tipc/prepare.sh
@@ -200,7 +200,7 @@ fi
 if [[ ${MODE} = "serving_infer" ]]; then
     # prepare serving env
     python_name=$(func_parser_value "${lines[2]}")
-    ${python_name} -m pip install install paddle-serving-server-gpu==0.7.0.post102
+    ${python_name} -m pip install paddle-serving-server-gpu==0.7.0.post102
     ${python_name} -m pip install paddle_serving_client==0.7.0
     ${python_name} -m pip install paddle-serving-app==0.7.0
     if [[ ${model_name} =~ "ShiTu" ]]; then
@@ -231,7 +231,7 @@ if [[ ${MODE} = "paddle2onnx_infer" ]]; then
     inference_model_url=$(func_parser_value "${lines[10]}")
     tar_name=${inference_model_url##*/}

-    ${python_name} -m pip install install paddle2onnx
+    ${python_name} -m pip install paddle2onnx
     ${python_name} -m pip install onnxruntime
     cd deploy
     mkdir models
diff --git a/test_tipc/test_paddle2onnx.sh b/test_tipc/test_paddle2onnx.sh
index 45eb9387922aa0e3a4de82d6f6245d178c8ec6fc..c869f8f3bf9df900d779dcf98355ca56eeece207 100644
--- a/test_tipc/test_paddle2onnx.sh
+++ b/test_tipc/test_paddle2onnx.sh
@@ -55,7 +55,7 @@ function func_paddle2onnx(){
     trans_model_cmd="${padlle2onnx_cmd} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_save_model} ${set_opset_version} ${set_enable_onnx_checker}"
     eval $trans_model_cmd
     last_status=${PIPESTATUS[0]}
-    status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}" "${model_name}"
+    status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}"

     # python inference
     set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}")
@@ -64,7 +64,7 @@ function func_paddle2onnx(){
     set_inference_config=$(func_set_params "${inference_config_key}" "${inference_config_value}")
     infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} ${set_inference_config} > ${_save_log_path} 2>&1 && cd ../"
     eval $infer_model_cmd
-    status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}" "${model_name}"
+    status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}"
 }