Unverified · Commit 81d28246 authored by pk_hk, committed by GitHub

[test_tipc] add paddle2onnx; fix onnxruntime infer_demo (#5857)

Co-authored-by: lyuwenyu <wenyu.lyu@gmail.com>
Parent: 4d597840
......@@ -38,8 +38,12 @@ class PicoDet():
so = ort.SessionOptions()
so.log_severity_level = 3
self.net = ort.InferenceSession(model_pb_path, so)
self.input_shape = (self.net.get_inputs()[0].shape[2],
self.net.get_inputs()[0].shape[3])
inputs_name = [a.name for a in self.net.get_inputs()]
inputs_shape = {
k: v.shape
for k, v in zip(inputs_name, self.net.get_inputs())
}
self.input_shape = inputs_shape['image'][2:]
def _normalize(self, img):
img = img.astype(np.float32)
......@@ -51,6 +55,8 @@ class PicoDet():
origin_shape = srcimg.shape[:2]
im_scale_y = newh / float(origin_shape[0])
im_scale_x = neww / float(origin_shape[1])
img_shape = np.array([[float(origin_shape[0]), float(origin_shape[1])]
]).astype('float32')
scale_factor = np.array([[im_scale_y, im_scale_x]]).astype('float32')
if keep_ratio and srcimg.shape[0] != srcimg.shape[1]:
......@@ -87,7 +93,7 @@ class PicoDet():
img = cv2.resize(
srcimg, self.input_shape, interpolation=cv2.INTER_AREA)
return img, scale_factor
return img, img_shape, scale_factor
def get_color_map_list(self, num_classes):
color_map = num_classes * [0, 0, 0]
......@@ -104,15 +110,20 @@ class PicoDet():
return color_map
def detect(self, srcimg):
img, scale_factor = self.resize_image(srcimg)
img, im_shape, scale_factor = self.resize_image(srcimg)
img = self._normalize(img)
blob = np.expand_dims(np.transpose(img, (2, 0, 1)), axis=0)
outs = self.net.run(None, {
self.net.get_inputs()[0].name: blob,
self.net.get_inputs()[1].name: scale_factor
})
inputs_dict = {
'im_shape': im_shape,
'image': blob,
'scale_factor': scale_factor
}
inputs_name = [a.name for a in self.net.get_inputs()]
net_inputs = {k: inputs_dict[k] for k in inputs_name}
outs = self.net.run(None, net_inputs)
outs = np.array(outs[0])
expect_boxes = (outs[:, 1] > 0.5) & (outs[:, 0] > -1)
......@@ -181,7 +192,7 @@ if __name__ == '__main__':
parser.add_argument(
"--img_fold", dest="img_fold", type=str, default="./imgs")
parser.add_argument(
"--result_fold", dest="result_fold", type=str, default="./results")
"--result_fold", dest="result_fold", type=str, default="results")
args = parser.parse_args()
net = PicoDet(
......@@ -191,3 +202,6 @@ if __name__ == '__main__':
iou_threshold=args.nmsThreshold)
net.detect_folder(args.img_fold, args.result_fold)
print(
f'infer results in ./deploy/third_engine/demo_onnxruntime/{args.result_fold}'
)
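The change above switches the demo from positional input binding to name-based binding, so it runs whether the exported model declares two inputs (image, scale_factor) or three (im_shape, image, scale_factor). A minimal standalone sketch of the same pattern, assuming the converted model from the config below already exists at onnx_file/picodet_s_320_coco.onnx:

# Name-driven input binding: prepare every candidate tensor, then feed only
# the inputs this particular model actually declares.
import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession('onnx_file/picodet_s_320_coco.onnx')
input_names = [inp.name for inp in sess.get_inputs()]

h, w = 320, 320  # PicoDet-S 320 input size, matching the config below
candidates = {
    'image': np.zeros((1, 3, h, w), dtype=np.float32),        # NCHW blob
    'im_shape': np.array([[h, w]], dtype=np.float32),          # original H, W
    'scale_factor': np.array([[1.0, 1.0]], dtype=np.float32),  # resize scales
}
outs = sess.run(None, {name: candidates[name] for name in input_names})
print([o.shape for o in outs])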
===========================paddle2onnx_params===========================
model_name:picodet_s_320_coco_lcnet
python:python3.7
filename:null
##
--output_dir:./output_inference
weights:https://paddledet.bj.bcebos.com/models/picodet_s_320_coco_lcnet.pdparams
norm_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml -o
quant_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
fpgm_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
distill_export:null
export1:null
export2:null
kl_quant_export:tools/post_quant.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
##
2onnx: paddle2onnx
--model_dir:./output_inference/picodet_s_320_coco_lcnet/
--model_filename:model.pdmodel
--params_filename:model.pdiparams
--save_file:./deploy/third_engine/demo_onnxruntime/onnx_file/picodet_s_320_coco.onnx
--opset_version:11
##
inference:infer_demo.py
--modelpath:./onnx_file/picodet_s_320_coco.onnx
--img_fold:./imgs
--result_fold:results
infer_mode:norm
null:null
\ No newline at end of file
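For reference, the 2onnx block of this config expands into a single conversion command. A sketch of the equivalent call in Python, with every flag and path taken verbatim from the block above and assuming the exported Paddle inference model already exists under ./output_inference/picodet_s_320_coco_lcnet/:

# Run the conversion the config encodes; check=True raises if it fails.
import subprocess

subprocess.run(
    [
        'paddle2onnx',
        '--model_dir', './output_inference/picodet_s_320_coco_lcnet/',
        '--model_filename', 'model.pdmodel',
        '--params_filename', 'model.pdiparams',
        '--save_file',
        './deploy/third_engine/demo_onnxruntime/onnx_file/picodet_s_320_coco.onnx',
        '--opset_version', '11',
    ],
    check=True,
)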
......@@ -4,7 +4,7 @@ source test_tipc/utils_func.sh
FILENAME=$1
# MODE be one of ['lite_train_lite_infer' 'lite_train_whole_infer'
# 'whole_train_whole_infer', 'whole_infer', 'klquant_whole_infer',
# 'cpp_infer', 'serving_infer', 'lite_infer']
# 'cpp_infer', 'serving_infer', 'lite_infer', 'paddle2onnx_infer']
MODE=$2
# parse params
......@@ -67,6 +67,10 @@ elif [ ${MODE} = "benchmark_train" ];then
wget -nc -P ./dataset/mot/ https://paddledet.bj.bcebos.com/data/mot_benchmark.tar
cd ./dataset/mot/ && tar -xvf mot_benchmark.tar && mv -u mot_benchmark/* .
rm -rf mot_benchmark/ && cd ../../
elif [ ${MODE} = "paddle2onnx_infer" ];then
# set up the paddle2onnx_infer env
${python} -m pip install paddle2onnx
${python} -m pip install onnxruntime==1.10.0
else
# download coco lite data
wget -nc -P ./dataset/coco/ https://paddledet.bj.bcebos.com/data/tipc/coco_tipc.tar
......
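The new paddle2onnx_infer branch only installs two packages. A minimal sanity check of that environment (package names and the pinned onnxruntime version come from the pip commands above):

# Confirm the paddle2onnx_infer environment installed by prepare.sh.
import onnxruntime as ort
import paddle2onnx  # imported only to confirm the install succeeded

print('onnxruntime:', ort.__version__)  # prepare.sh pins 1.10.0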
#!/bin/bash
source test_tipc/utils_func.sh
FILENAME=$1
# parse model_name
dataline=$(cat ${FILENAME})
IFS=$'\n'
lines=(${dataline})
model_name=$(func_parser_value "${lines[1]}")
echo "ppdet onnx_infer: ${model_name}"
python=$(func_parser_value "${lines[2]}")
filename_key=$(func_parser_key "${lines[3]}")
filename_value=$(func_parser_value "${lines[3]}")
# export params
save_export_key=$(func_parser_key "${lines[5]}")
save_export_value=$(func_parser_value "${lines[5]}")
export_weight_key=$(func_parser_key "${lines[6]}")
export_weight_value=$(func_parser_value "${lines[6]}")
norm_export=$(func_parser_value "${lines[7]}")
pact_export=$(func_parser_value "${lines[8]}")
fpgm_export=$(func_parser_value "${lines[9]}")
distill_export=$(func_parser_value "${lines[10]}")
export_key1=$(func_parser_key "${lines[11]}")
export_value1=$(func_parser_value "${lines[11]}")
export_key2=$(func_parser_key "${lines[12]}")
export_value2=$(func_parser_value "${lines[12]}")
kl_quant_export=$(func_parser_value "${lines[13]}")
# parse paddle2onnx params
paddle2onnx_cmd=$(func_parser_value "${lines[15]}")
infer_model_dir_key=$(func_parser_key "${lines[16]}")
infer_model_dir_value=$(func_parser_value "${lines[16]}")
model_filename_key=$(func_parser_key "${lines[17]}")
model_filename_value=$(func_parser_value "${lines[17]}")
params_filename_key=$(func_parser_key "${lines[18]}")
params_filename_value=$(func_parser_value "${lines[18]}")
save_file_key=$(func_parser_key "${lines[19]}")
save_file_value=$(func_parser_value "${lines[19]}")
opset_version_key=$(func_parser_key "${lines[20]}")
opset_version_value=$(func_parser_value "${lines[20]}")
# parse onnx inference params
inference_py=$(func_parser_value "${lines[22]}")
model_file_key=$(func_parser_key "${lines[23]}")
model_file_value=$(func_parser_value "${lines[23]}")
img_fold_key=$(func_parser_key "${lines[24]}")
img_fold_value=$(func_parser_value "${lines[24]}")
results_fold_key=$(func_parser_key "${lines[25]}")
results_fold_value=$(func_parser_value "${lines[25]}")
onnx_infer_mode_list=$(func_parser_value "${lines[26]}")
LOG_PATH="./test_tipc/output"
mkdir -p ${LOG_PATH}
status_log="${LOG_PATH}/results_paddle2onnx.log"
function func_paddle2onnx(){
IFS='|'
_script=$1
# paddle2onnx
echo "################### run onnx export ###################"
_save_log_path="${LOG_PATH}/paddle2onnx_infer_cpu.log"
set_dirname=$(func_set_params "${infer_model_dir_key}" "${infer_model_dir_value}")
set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
set_save_model=$(func_set_params "${save_file_key}" "${save_file_value}")
set_opset_version=$(func_set_params "${opset_version_key}" "${opset_version_value}")
trans_model_cmd="${paddle2onnx_cmd} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_save_model} ${set_opset_version}"
eval $trans_model_cmd
last_status=${PIPESTATUS[0]}
status_check $last_status "${trans_model_cmd}" "${status_log}"
# python inference
echo "################### run infer ###################"
cd ./deploy/third_engine/demo_onnxruntime/
model_file=$(func_set_params "${model_file_key}" "${model_file_value}")
img_fold=$(func_set_params "${img_fold_key}" "${img_fold_value}")
results_fold=$(func_set_params "${results_fold_key}" "${results_fold_value}")
infer_model_cmd="${python} ${inference_py} ${model_file} ${img_fold} ${results_fold}"
eval $infer_model_cmd
last_status=${PIPESTATUS[0]}
status_check $last_status "${infer_model_cmd}" "${status_log}"
}
export Count=0
IFS="|"
echo "################### run paddle export ###################"
for infer_mode in ${onnx_infer_mode_list[*]}; do
# run export
case ${infer_mode} in
norm) run_export=${norm_export} ;;
quant) run_export=${pact_export} ;;
fpgm) run_export=${fpgm_export} ;;
distill) run_export=${distill_export} ;;
kl_quant) run_export=${kl_quant_export} ;;
*) echo "Undefined infer_mode!"; exit 1;
esac
if [ ${run_export} = "null" ]; then
continue
fi
set_export_weight=$(func_set_params "${export_weight_key}" "${export_weight_value}")
set_save_export_dir=$(func_set_params "${save_export_key}" "${save_export_value}")
set_filename=$(func_set_params "${filename_key}" "${model_name}")
export_cmd="${python} ${run_export} ${set_export_weight} ${set_filename} ${set_save_export_dir} "
echo $export_cmd
eval $export_cmd
status_export=$?
status_check $status_export "${export_cmd}" "${status_log}"
done
func_paddle2onnx
\ No newline at end of file
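The script addresses the params file purely by line index (lines[1] is model_name, lines[15] is the 2onnx command, lines[16] is --model_dir, and so on), so the config layout above must not be reordered. A rough Python illustration of that parsing, assuming the func_parser_key/func_parser_value helpers from utils_func.sh (not shown in this diff) split each line on the first ':'; the filename below is hypothetical:

# Line-indexed 'key:value' parsing, mirroring func_parser_key / func_parser_value.
def parser_key(line):
    return line.split(':', 1)[0].strip()

def parser_value(line):
    return line.split(':', 1)[1].strip() if ':' in line else ''

with open('paddle2onnx_params.txt') as f:   # hypothetical filename
    lines = [l.rstrip('\n') for l in f]

print(parser_value(lines[1]))                          # picodet_s_320_coco_lcnet
print(parser_key(lines[16]), parser_value(lines[16]))  # --model_dir ./output_inference/...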