PaddlePaddle / PaddleClas
Commit 605f6a2c
Authored January 20, 2022 by lubin10
add onnx tipc for ResNet50_vd
Parent: bc52b800
Showing 3 changed files with 42 additions and 12 deletions (+42 −12)
test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt  +15 −0
test_tipc/prepare.sh  +12 −0
test_tipc/test_paddle2onnx.sh  +15 −12
test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt (new file, mode 100644)
===========================paddle2onnx_params===========================
model_name:ResNet50_vd
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/ResNet50_vd_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/ResNet50_vd_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference: python/predict_cls.py -c configs/inference_cls.yaml
Global.use_onnx:True
Global.inference_model_dir:models/ResNet50_vd_infer/
Global.use_gpu:False
Global.infer_imgs:./images/ILSVRC2012_val_00000010.jpeg
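
These key:value pairs drive the conversion step of the TIPC paddle2onnx chain. As a rough sketch of what the harness assembles from them (assuming each key is simply joined to its value as key=value; the exact quoting is handled by the test script), the conversion call comes out to:

    paddle2onnx --model_dir=./deploy/models/ResNet50_vd_infer/ \
        --model_filename=inference.pdmodel \
        --params_filename=inference.pdiparams \
        --save_file=./deploy/models/ResNet50_vd_infer/inference.onnx \
        --opset_version=10 \
        --enable_onnx_checker=True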
test_tipc/prepare.sh
@@ -165,3 +165,15 @@ if [ ${MODE} = "serving_infer" ];then
     cd ./deploy/paddleserving
     wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar && tar xf ResNet50_vd_infer.tar
 fi
+if [ ${MODE} = "paddle2onnx_infer" ];then
+    # prepare paddle2onnx env
+    python_name=$(func_parser_value "${lines[2]}")
+    ${python_name} -m pip install paddle2onnx
+    ${python_name} -m pip install onnxruntime
+
+    # wget model
+    cd deploy && mkdir models && cd models
+    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar && tar xf ResNet50_vd_infer.tar
+    cd ../../
+fi
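
As a usage sketch (the argument order is an assumption based on the usual TIPC convention of passing the config file first and the mode string second; the file and mode names are the ones introduced in this commit), the new branch would be exercised with something like:

    bash test_tipc/prepare.sh test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt paddle2onnx_infer
    bash test_tipc/test_paddle2onnx.sh test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt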
test_tipc/test_paddle2onnx.sh
@@ -11,7 +11,7 @@ python=$(func_parser_value "${lines[2]}")
 # parser params
-dataline=$(awk 'NR==1, NR==12{print}' $FILENAME)
+dataline=$(awk 'NR==1, NR==15{print}' $FILENAME)
 IFS=$'\n'
 lines=(${dataline})
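
The awk range grows from NR==12 to NR==15 because the new config file above has 15 lines, each of the form key:value. A minimal sketch of how one such line splits into the key and value that the (assumed) helpers func_parser_key / func_parser_value return:

    line="Global.use_onnx:True"
    key=${line%%:*}      # Global.use_onnx
    value=${line#*:}     # True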
@@ -33,12 +33,14 @@ enable_onnx_checker_key=$(func_parser_key "${lines[9]}")
 enable_onnx_checker_value=$(func_parser_value "${lines[9]}")
 # parser onnx inference
 inference_py=$(func_parser_value "${lines[10]}")
-use_gpu_key=$(func_parser_key "${lines[11]}")
-use_gpu_value=$(func_parser_value "${lines[11]}")
-det_model_key=$(func_parser_key "${lines[12]}")
-image_dir_key=$(func_parser_key "${lines[13]}")
-image_dir_value=$(func_parser_value "${lines[13]}")
+use_onnx_key=$(func_parser_key "${lines[11]}")
+use_onnx_value=$(func_parser_value "${lines[11]}")
+inference_model_dir_key=$(func_parser_key "${lines[12]}")
+inference_model_dir_value=$(func_parser_value "${lines[12]}")
+inference_hardware_key=$(func_parser_key "${lines[13]}")
+inference_hardware_value=$(func_parser_value "${lines[13]}")
+inference_imgs_key=$(func_parser_key "${lines[14]}")
+inference_imgs_value=$(func_parser_value "${lines[14]}")
 
 LOG_PATH="./test_tipc/output"
 mkdir -p ./test_tipc/output
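
With the lines array holding the 15 config lines zero-based, the renamed variables resolve against the new config as:

    lines[11] -> Global.use_onnx:True
    lines[12] -> Global.inference_model_dir:models/ResNet50_vd_infer/
    lines[13] -> Global.use_gpu:False          (the "inference hardware": CPU in this config)
    lines[14] -> Global.infer_imgs:./images/ILSVRC2012_val_00000010.jpeg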
@@ -50,7 +52,7 @@ function func_paddle2onnx(){
 _script=$1
 
 # paddle2onnx
-_save_log_path="${LOG_PATH}/paddle2onnx_infer_cpu.log"
+_save_log_path=".${LOG_PATH}/paddle2onnx_infer_cpu.log"
 set_dirname=$(func_set_params "${infer_model_dir_key}" "${infer_model_dir_value}")
 set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
 set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
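
The leading dot is deliberate: the rewritten inference command below first runs cd deploy, so the log path must resolve from inside deploy/. With LOG_PATH="./test_tipc/output", prefixing a dot turns it into a path one level up:

    LOG_PATH="./test_tipc/output"
    _save_log_path=".${LOG_PATH}/paddle2onnx_infer_cpu.log"
    echo "${_save_log_path}"    # ../test_tipc/output/paddle2onnx_infer_cpu.log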
@@ -62,10 +64,11 @@ function func_paddle2onnx(){
 last_status=${PIPESTATUS[0]}
 status_check $last_status "${trans_model_cmd}" "${status_log}"
 # python inference
-set_gpu=$(func_set_params "${use_gpu_key}" "${use_gpu_value}")
-set_model_dir=$(func_set_params "${det_model_key}" "${save_file_value}")
-set_img_dir=$(func_set_params "${image_dir_key}" "${image_dir_value}")
-infer_model_cmd="${python} ${inference_py} ${set_gpu} ${set_img_dir} ${set_model_dir} --use_onnx=True > ${_save_log_path} 2>&1 "
+set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}")
+set_use_onnx=$(func_set_params "${use_onnx_key}" "${use_onnx_value}")
+set_hardware=$(func_set_params "${inference_hardware_key}" "${inference_hardware_value}")
+set_infer_imgs=$(func_set_params "${inference_imgs_key}" "${inference_imgs_value}")
+infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} -o ${set_infer_imgs} > ${_save_log_path} 2>&1 && cd ../"
 eval $infer_model_cmd
 status_check $last_status "${infer_model_cmd}" "${status_log}"
 }
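
Putting the pieces together with the config values above, and assuming func_set_params simply joins each key with its value as key=value, the new infer_model_cmd expands to roughly:

    cd deploy && python3.7 python/predict_cls.py -c configs/inference_cls.yaml \
        -o Global.inference_model_dir=models/ResNet50_vd_infer/ \
        -o Global.use_onnx=True \
        -o Global.use_gpu=False \
        -o Global.infer_imgs=./images/ILSVRC2012_val_00000010.jpeg \
        > ../test_tipc/output/paddle2onnx_infer_cpu.log 2>&1 && cd ../

That is, the ONNX model produced in the conversion step is exercised through deploy/python/predict_cls.py on CPU, and the output is logged back to test_tipc/output at the repository root.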