Commit bc736d9c authored by root

Update onnxruntime to 1.1.0; before conversion, onnxruntime now runs the model to check that it is correct.

Parent cd805fa9
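Review note: the pre-conversion check added in this commit loads the model, runs ONNX shape inference, then feeds random inputs through onnxruntime; a successful run is taken as evidence that the model is executable. A minimal standalone sketch of that idea (not the committed code; it assumes all input dimensions are concrete integers and that every input type appears in the dtype map):

```python
import numpy as np
import onnx
import onnxruntime as rt

def validate_onnx_model(model_path):
    # Structural validation plus shape inference, as the decoder does.
    model = onnx.load(model_path)
    onnx.checker.check_model(model)
    onnx.shape_inference.infer_shapes(model)

    # Build a random feed for every declared graph input and run it;
    # onnxruntime raises if the model cannot be executed.
    dtype_map = {'tensor(float)': 'float32',
                 'tensor(int32)': 'int32',
                 'tensor(int64)': 'int64'}
    sess = rt.InferenceSession(model_path)
    feed = {
        ipt.name: np.random.random(ipt.shape).astype(dtype_map[ipt.type])
        for ipt in sess.get_inputs()
    }
    return sess.run(None, input_feed=feed)
```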
@@ -15,7 +15,7 @@ paddlepaddle >= 1.6.0
 **Install the following dependencies as needed**
 tensorflow : tensorflow == 1.14.0
 caffe : none
-onnx : onnx == 1.5.0 onnxruntime == 0.4.0
+onnx : onnx == 1.6.0 onnxruntime == 1.1.0
 ## Installation
 ### Installation method 1 (recommended)
......
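Review note: with the updated pins, the matching install command is `pip install onnx==1.6.0 onnxruntime==1.1.0`.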
@@ -26,6 +26,6 @@ setuptools.setup(
     entry_points={
         'console_scripts': [
             'x2paddle=x2paddle.convert:main',
-            'onnx_infer=x2paddle.onnx_infer:main'
+            # 'onnx_infer=x2paddle.onnx_infer:main'
         ]
     })
@@ -155,18 +155,18 @@ def onnx2paddle(model_path, save_dir, params_merge=False):
     try:
         import onnx
         version = onnx.version.version
-        if version != '1.5.0':
-            print("onnx==1.5.0 is required")
+        if version != '1.6.0':
+            print("onnx==1.6.0 is required")
             return
     except:
-        print("onnx is not installed, use \"pip install onnx==1.5.0\".")
+        print("onnx is not installed, use \"pip install onnx==1.6.0\".")
         return
     print("Now translating model from onnx to paddle.")
     from x2paddle.op_mapper.onnx_op_mapper import ONNXOpMapper
     from x2paddle.decoder.onnx_decoder import ONNXDecoder
     from x2paddle.optimizer.onnx_optimizer import ONNXOptimizer
+    import onnxruntime
     model = ONNXDecoder(model_path)
     mapper = ONNXOpMapper(model, save_dir)
     optimizer = ONNXOptimizer(mapper)
......
This diff is collapsed.
@@ -316,12 +316,14 @@ class ONNXDecoder(object):
             model.ir_version, model.opset_import[0].version))
         if model.opset_import[0].version < 9:
             _logger.warning(
-                'Now, onnx2paddle main support convert onnx model opset_verison == 9,'
-                'opset_verison of your onnx model is %d < 9,'
-                'some operator may cannot convert.',
+                'Now, onnx2paddle supports converting onnx models with opset_version == 9; '
+                'the opset_version of your onnx model is %d < 9, '
+                'so some operators may fail to convert.',
                 model.opset_import[0].version)
         check_model(model)
+        self.check_model_running_state(onnx_model)
         model = onnx.shape_inference.infer_shapes(model)
         model = self.optimize_model_skip_op_for_inference(model)
         model = self.optimize_model_strip_initializer(model)
@@ -471,7 +473,44 @@ class ONNXDecoder(object):
             raise ValueError('name should not be empty')
         for s in ' .*?\\/-:':
             name = name.replace(s, '_')
-        return '_' + name
+        return 'x2paddle_' + name
+
+    def check_model_running_state(self, model_path):
+        try:
+            import onnxruntime as rt
+            version = rt.__version__
+            if version != '1.1.0':
+                print("onnxruntime==1.1.0 is required")
+                return
+        except:
+            raise Exception(
+                "onnxruntime is not installed, use \"pip install onnxruntime==1.1.0\".")
+        model = onnx.load(model_path)
+        model = onnx.shape_inference.infer_shapes(model)
+        if len(model.graph.value_info) < len(model.graph.node) - 1:
+            print("Shape inference failed for some operators; those operators "
+                  "will be assigned node.out_shape == None. See "
+                  "https://github.com/onnx/onnx/blob/master/docs/ShapeInference.md")
+        try:
+            datatype_map = {
+                'tensor(int64)': 'int',
+                'tensor(float)': 'float32',
+                'tensor(int32)': 'int32'
+            }
+            input_dict = {}
+            sess = rt.InferenceSession(model_path)
+            for ipt in sess.get_inputs():
+                datatype = datatype_map[ipt.type]
+                input_dict[ipt.name] = np.random.random(ipt.shape).astype(datatype)
+            res = sess.run(None, input_feed=input_dict)
+        except:
+            raise Exception(
+                "onnxruntime failed to run the onnx model. Please verify the "
+                "model with onnxruntime; if the model is valid, please open an "
+                "issue on GitHub.")
+
     def standardize_variable_name(self, graph):
         """
......
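Review note: two details of check_model_running_state are easy to miss. The guard `len(model.graph.value_info) < len(model.graph.node) - 1` is a heuristic: after shape inference there should be roughly one value_info entry per internal tensor, so a shortfall suggests inference gave up on some operators. And onnxruntime reports input types as strings such as 'tensor(float)', which must be translated to numpy dtype names before random feeds can be built; mapping 'tensor(int64)' to 'int' relies on the platform int being 64 bits. A small illustration of that translation (the model path is hypothetical):

```python
import onnxruntime as rt

# onnxruntime describes each graph input with a type string; these must be
# translated to numpy dtype names before np.random.random(...).astype(...).
DATATYPE_MAP = {
    'tensor(float)': 'float32',
    'tensor(int32)': 'int32',
    'tensor(int64)': 'int64',  # explicit, unlike the platform-dependent 'int'
}

sess = rt.InferenceSession('model.onnx')  # hypothetical path
for ipt in sess.get_inputs():
    print(ipt.name, ipt.type, '->', DATATYPE_MAP.get(ipt.type))
```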
x2paddle/onnx_infer.py (deleted; its console-script entry point is commented out in setup.py above, and the op_mapper below now runs inference in-process):

-import os
-import sys
-import numpy as np
-import onnx
-import json
-import argparse
-from six import text_type as _text_type
-
-
-def arg_parser():
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--save_dir",
-                        "-s",
-                        type=_text_type,
-                        default=None,
-                        help="define save_dir")
-    return parser
-
-
-def main():
-    try:
-        import onnxruntime as rt
-        version = rt.__version__
-        if version != '0.4.0':
-            print("onnxruntime==0.4.0 is required")
-            return
-    except:
-        print("onnxruntime is not installed, use \"pip install onnxruntime==0.4.0\".")
-        return
-    parser = arg_parser()
-    args = parser.parse_args()
-    save_dir = args.save_dir
-    model_dir = os.path.join(save_dir, 'onnx_model_infer.onnx')
-    model = onnx.load(model_dir)
-    sess = rt.InferenceSession(model_dir)
-    inputs_dict = {}
-    for ipt in sess.get_inputs():
-        data_dir = os.path.join(save_dir, ipt.name + '.npy')
-        inputs_dict[ipt.name] = np.load(data_dir, allow_pickle=True)
-    res = sess.run(None, input_feed=inputs_dict)
-    for idx, value_info in enumerate(model.graph.output):
-        np.save(os.path.join(save_dir, value_info.name), res[idx])
-
-
-if __name__ == "__main__":
-    main()
@@ -32,7 +32,7 @@ import math
 import os
 import shutil
 from functools import reduce
+import onnxruntime as rt
 _logger = _logging.getLogger(__name__)
@@ -71,6 +71,7 @@ class ONNXOpMapper(OpMapper):
         self.used_custom_layers = dict()
         self.is_inference = False
         self.tmp_data_dir = os.path.join(save_dir, 'tmp_data')
+        self.tmp_outputs_dict = {}
         self.get_output_shapes()
         if not self.op_checker():
@@ -119,7 +120,7 @@ class ONNXOpMapper(OpMapper):
     def get_results_of_inference(self, model, value_infos, data_nodes):
         if not os.path.exists(self.tmp_data_dir):
             os.makedirs(self.tmp_data_dir)
+        inputs_dict = {}
         for data_node in data_nodes:
             value_info = value_infos[data_node]
             shape = value_info['shape']
@@ -129,34 +130,32 @@ class ONNXOpMapper(OpMapper):
                 if dim_shape == 0 and i != 0:
                     assert 'shape of input is not assigned'
             ipt = np.random.random(shape).astype(value_info['dtype'])
-            np.save(os.path.join(self.tmp_data_dir, data_node), ipt)
+            inputs_dict[data_node] = ipt
         model = onnx.shape_inference.infer_shapes(model)
         outputs = []
         for value_info in model.graph.value_info:
-            outputs.append(value_info)
+            outputs.append(value_info.name)
         model.graph.ClearField('output')
-        model.graph.output.MergeFrom(outputs)
+        model.graph.output.MergeFrom(model.graph.value_info)
         onnx.save(model, os.path.join(self.tmp_data_dir,
                                       'onnx_model_infer.onnx'))
-        is_success = os.system('onnx_infer --save_dir=' + self.tmp_data_dir)
-        if is_success != 0:
-            raise Exception("onnxruntime inference onnx model failed, Please \
-                confirm the correctness of onnx model by onnxruntime, \
-                if onnx model is valid, you can submit issue in github.")
+        sess = rt.InferenceSession(
+            os.path.join(self.tmp_data_dir, 'onnx_model_infer.onnx'))
+        res = sess.run(None, input_feed=inputs_dict)
+        self.tmp_outputs_dict = dict(zip(outputs, res))
         return

     def get_dynamic_shape(self, layer):
         """
         get dynamic shape from infer_result
         """
-        path = os.path.join(self.tmp_data_dir, layer + '.npy')
-        if not os.path.exists(path):
+        if layer not in self.tmp_outputs_dict:
             return [None, None, None]
-        output = np.load(path)
+        output = self.tmp_outputs_dict[layer]
         return output.tolist(), output.dtype, output.shape

     def get_output_shapes(self):
......
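Review note: taken together, these op_mapper changes replace the old cross-process flow (dump random inputs as .npy files, shell out to the onnx_infer console script, reload outputs from disk) with a single in-process InferenceSession whose results are cached in self.tmp_outputs_dict, so get_dynamic_shape() now reads shapes from memory. Usage is unchanged; conversion is still driven by the x2paddle entry point, e.g. `x2paddle --framework=onnx --model=onnx_model.onnx --save_dir=pd_model` (flags as documented in the project README).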