Commit eb22ce42 authored by: LDOUBLEV

fix kl quant

Parent 08f25ca4
...
@@ -82,17 +82,17 @@ pipline:pipeline_http_client.py --image_dir=../../doc/imgs
 ===========================kl_quant_params===========================
 infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer/
 infer_export:tools/export_model.py -c configs/det/ch_ppocr_v2.0/ch_det_mv3_db_v2.0.yml -o
-infer_quant:False
+infer_quant:True
 inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --enable_mkldnn:True|False
 --cpu_threads:1|6
 --rec_batch_num:1
 --use_tensorrt:False|True
---precision:fp32|fp16|int8
+--precision:int8
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
 null:null
 --benchmark:True
 null:null
 null:null
\ No newline at end of file
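Each line in this kl_quant_params block is a key:value entry that the test driver splits on the first colon. A minimal sketch of that split, assuming a helper in the spirit of the repo's func_parser_value (its exact implementation is not shown in this diff):

# Sketch only; not necessarily the repo's exact func_parser_value.
# Split a "key:value" config line on ':' and echo the value field.
function parse_value(){
    local line="$1"
    IFS=":" read -r -a fields <<< "${line}"
    echo "${fields[1]}"
}

parse_value "infer_quant:True"     # prints: True
parse_value "--precision:int8"     # prints: int8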
...
@@ -57,7 +57,7 @@ function func_cpp_inference(){
         for threads in ${cpp_cpu_threads_list[*]}; do
             for batch_size in ${cpp_batch_size_list[*]}; do
                 precision="fp32"
-                if [ ${_flag_quant} = "True" ]; then
+                if [ ${use_mkldnn} = "False" ] && [ ${_flag_quant} = "True" ]; then
                    precison="int8"
                 fi
                 _save_log_path="${_log_path}/cpp_infer_cpu_usemkldnn_${use_mkldnn}_threads_${threads}_precision_${precision}_batchsize_${batch_size}.log"
...
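With the changed condition, the CPU branch only relabels the run as int8 when MKL-DNN is off and the model is quantized; note that the precison/precision spelling mismatch inside the if comes from the script itself. An isolated sketch of the guard, using the variable names from the hunk with hypothetical values:

use_mkldnn="False"
_flag_quant="True"
precision="fp32"
if [ ${use_mkldnn} = "False" ] && [ ${_flag_quant} = "True" ]; then
    precision="int8"    # only the non-MKLDNN quantized path is labelled int8
fi
echo "precision=${precision}"    # prints: precision=int8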
...
@@ -5,11 +5,7 @@ FILENAME=$1
 # MODE be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer', 'infer', 'klquant_infer']
 MODE=$2
-if [ ${MODE} = "klquant_infer" ]; then
-    dataline=$(awk 'NR==82, NR==98{print}' $FILENAME)
-else
-    dataline=$(awk 'NR==1, NR==51{print}' $FILENAME)
-fi
+dataline=$(awk 'NR==1, NR==51{print}' $FILENAME)
 # parser params
 IFS=$'\n'
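The unconditional read of lines 1-51 replaces the earlier mode switch; the KL-quant block (lines 82-98 of the params file) is now read later, inside the klquant_infer branch shown in the next hunk. As a reminder of what the awk call does, a small sketch with a hypothetical file name:

# awk 'NR==A, NR==B{print}' prints every line whose number is in [A, B].
FILENAME="params.txt"                                   # hypothetical config file
dataline=$(awk 'NR==1, NR==51{print}' "$FILENAME")      # common params block
# for KL-quant runs the later branch re-reads the quant block instead:
# dataline=$(awk 'NR==82, NR==98{print}' "$FILENAME")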
...
@@ -93,6 +89,8 @@ infer_value1=$(func_parser_value "${lines[50]}")
 # parser klquant_infer
 if [ ${MODE} = "klquant_infer" ]; then
+    dataline=$(awk 'NR==82, NR==98{print}' $FILENAME)
+    lines=(${dataline})
     # parser inference model
     infer_model_dir_list=$(func_parser_value "${lines[1]}")
     infer_export_list=$(func_parser_value "${lines[2]}")
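The two added lines re-read the kl_quant_params section and rebuild the lines array before the existing parsers index into it; the word-splitting in lines=(${dataline}) relies on IFS still being a newline, as set near the top of the script. A condensed sketch of that re-parse step:

IFS=$'\n'                                             # one array element per line
dataline=$(awk 'NR==82, NR==98{print}' $FILENAME)     # kl_quant_params section
lines=(${dataline})
# lines[0] is the section header, lines[1] the infer_model entry, and so on.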
...
@@ -144,7 +142,7 @@ function func_inference(){
         for threads in ${cpu_threads_list[*]}; do
             for batch_size in ${batch_size_list[*]}; do
                 precison="fp32"
-                if [ ${_flag_quant} = "True" ]; then
+                if [ ${use_mkldnn} = "False" ] && [ ${_flag_quant} = "True" ]; then
                     precision="int8"
                 fi
                 _save_log_path="${_log_path}/python_infer_cpu_usemkldnn_${use_mkldnn}_threads_${threads}_precision_${precision}_batchsize_${batch_size}.log"
...
@@ -228,6 +226,9 @@ if [ ${MODE} = "infer" ] || [ ${MODE} = "klquant_infer" ]; then
         fi
         #run inference
         is_quant=${infer_quant_flag[Count]}
+        if [ ${MODE} = "klquant_infer" ]; then
+            is_quant="True"
+        fi
         func_inference "${python}" "${inference_py}" "${save_infer_dir}" "${LOG_PATH}" "${infer_img_dir}" ${is_quant}
         Count=$(($Count + 1))
     done
...
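The last hunk forces is_quant to True whenever the script runs in klquant_infer mode, regardless of what the per-model infer_quant_flag entry says. An isolated sketch of that override, with hypothetical concrete values:

MODE="klquant_infer"
infer_quant_flag=("False")                 # hypothetical parsed flag list
Count=0
is_quant=${infer_quant_flag[Count]}
if [ ${MODE} = "klquant_infer" ]; then
    is_quant="True"                        # KL-quant models always run as quantized
fi
echo ${is_quant}                           # prints: True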