Commit 3f7f8c37 authored by MissPenguin

refine

Parent 08166b83
@@ -75,7 +75,7 @@ elif [ ${MODE} = "infer" ] || [ ${MODE} = "cpp_infer" ];then
fi
if [ ${MODE} = "cpp_infer" ];then
################### build opencv ###################
echo "################### build opencv ###################"
cd deploy/cpp_infer
rm -rf 3.4.7.tar.gz opencv-3.4.7/
wget https://github.com/opencv/opencv/archive/3.4.7.tar.gz
@@ -109,34 +109,10 @@ if [ ${MODE} = "cpp_infer" ];then
make -j
make install
cd ../
################### build opencv finished ###################
echo "################### build opencv finished ###################"
# ################### build paddle inference ###################
# rm -rf Paddle
# git clone https://github.com/PaddlePaddle/Paddle.git
# cd Paddle
# rm -rf build
# mkdir build
# cd build
# cmake .. \
# -DWITH_CONTRIB=OFF \
# -DWITH_MKL=ON \
# -DWITH_MKLDNN=ON \
# -DWITH_TESTING=OFF \
# -DCMAKE_BUILD_TYPE=Release \
# -DWITH_INFERENCE_API_TEST=OFF \
# -DON_INFER=ON \
# -DWITH_PYTHON=ON
# make -j
# make inference_lib_dist
# cd ../
# ################### build paddle inference finished ###################
################### build PaddleOCR demo ###################
echo "################### build PaddleOCR demo ####################"
OPENCV_DIR=$(pwd)/opencv-3.4.7/opencv3/
LIB_DIR=$(pwd)/Paddle/build/paddle_inference_install_dir/
CUDA_LIB_DIR=/usr/local/cuda/lib64/
@@ -158,5 +134,5 @@ if [ ${MODE} = "cpp_infer" ];then
-DTENSORRT_DIR=${TENSORRT_DIR} \
make -j
################### build PaddleOCR demo finished ###################
echo "################### build PaddleOCR demo finished ###################"
fi
\ No newline at end of file
@@ -145,34 +145,32 @@ benchmark_value=$(func_parser_value "${lines[49]}")
infer_key1=$(func_parser_key "${lines[50]}")
infer_value1=$(func_parser_value "${lines[50]}")
# parser cpp inference model
cpp_infer_model_dir_list=$(func_parser_value "${lines[52]}")
cpp_infer_is_quant=$(func_parser_value "${lines[53]}")
# parser cpp inference
inference_cmd=$(func_parser_value "${lines[54]}")
cpp_use_gpu_key=$(func_parser_key "${lines[55]}")
cpp_use_gpu_list=$(func_parser_value "${lines[55]}")
cpp_use_mkldnn_key=$(func_parser_key "${lines[56]}")
cpp_use_mkldnn_list=$(func_parser_value "${lines[56]}")
cpp_cpu_threads_key=$(func_parser_key "${lines[57]}")
cpp_cpu_threads_list=$(func_parser_value "${lines[57]}")
cpp_batch_size_key=$(func_parser_key "${lines[58]}")
cpp_batch_size_list=$(func_parser_value "${lines[58]}")
cpp_use_trt_key=$(func_parser_key "${lines[59]}")
cpp_use_trt_list=$(func_parser_value "${lines[59]}")
cpp_precision_key=$(func_parser_key "${lines[60]}")
cpp_precision_list=$(func_parser_value "${lines[60]}")
cpp_infer_model_key=$(func_parser_key "${lines[61]}")
cpp_image_dir_key=$(func_parser_key "${lines[62]}")
cpp_infer_img_dir=$(func_parser_value "${lines[62]}")
cpp_save_log_key=$(func_parser_key "${lines[63]}")
cpp_benchmark_key=$(func_parser_key "${lines[64]}")
cpp_benchmark_value=$(func_parser_value "${lines[64]}")
if [ ${MODE} = "cpp_infer" ]; then
# parser cpp inference model
cpp_infer_model_dir_list=$(func_parser_value "${lines[52]}")
cpp_infer_is_quant=$(func_parser_value "${lines[53]}")
# parser cpp inference
inference_cmd=$(func_parser_value "${lines[54]}")
cpp_use_gpu_key=$(func_parser_key "${lines[55]}")
cpp_use_gpu_list=$(func_parser_value "${lines[55]}")
cpp_use_mkldnn_key=$(func_parser_key "${lines[56]}")
cpp_use_mkldnn_list=$(func_parser_value "${lines[56]}")
cpp_cpu_threads_key=$(func_parser_key "${lines[57]}")
cpp_cpu_threads_list=$(func_parser_value "${lines[57]}")
cpp_batch_size_key=$(func_parser_key "${lines[58]}")
cpp_batch_size_list=$(func_parser_value "${lines[58]}")
cpp_use_trt_key=$(func_parser_key "${lines[59]}")
cpp_use_trt_list=$(func_parser_value "${lines[59]}")
cpp_precision_key=$(func_parser_key "${lines[60]}")
cpp_precision_list=$(func_parser_value "${lines[60]}")
cpp_infer_model_key=$(func_parser_key "${lines[61]}")
cpp_image_dir_key=$(func_parser_key "${lines[62]}")
cpp_infer_img_dir=$(func_parser_value "${lines[62]}")
cpp_save_log_key=$(func_parser_key "${lines[63]}")
cpp_benchmark_key=$(func_parser_key "${lines[64]}")
cpp_benchmark_value=$(func_parser_value "${lines[64]}")
fi
echo $inference_cmd
echo $cpp_cpu_threads_key $cpp_cpu_threads_list
echo $cpp_precision_key $cpp_precision_list
echo $cpp_benchmark_key $cpp_benchmark_value
LOG_PATH="./tests/output"
mkdir -p ${LOG_PATH}