Unverified commit d275d9ef, authored by Jason, committed by GitHub

Merge pull request #434 from SunAhong1993/paddle-2.0

onnx2paddle
@@ -177,7 +177,7 @@ def caffe2paddle(proto, weight, save_dir, caffe_proto,
    mapper.paddle_graph.gen_model(save_dir)


def onnx2paddle(model_path, save_dir, params_merge=False):
def onnx2paddle(model_path, save_dir, paddle_type, params_merge=False):
    # check onnx installation and version
    try:
        import onnx
@@ -190,19 +190,23 @@ def onnx2paddle(model_path, save_dir, params_merge=False):
        return
    print("Now translating model from onnx to paddle.")
    from x2paddle.op_mapper.onnx2paddle.onnx_op_mapper import ONNXOpMapper
    from x2paddle.decoder.onnx_decoder import ONNXDecoder
    from x2paddle.optimizer.onnx_optimizer import ONNXOptimizer
    if paddle_type == "dygraph":
        from x2paddle.op_mapper.dygraph.onnx2paddle.onnx_op_mapper import ONNXOpMapper
    else:
        from x2paddle.op_mapper.static.onnx2paddle.onnx_op_mapper import ONNXOpMapper
    model = ONNXDecoder(model_path)
    mapper = ONNXOpMapper(model)
    print("Model optimizing ...")
    optimizer = ONNXOptimizer(mapper)
    optimizer.delete_redundance_code()
    print("Model optimized.")
    print("Paddle model and code generating ...")
    mapper.save_inference_model(save_dir, params_merge)
    print("Paddle model and code generated.")
    if paddle_type == "dygraph":
        mapper.paddle_graph.build()
        mapper.paddle_graph.gen_model(save_dir)
    else:
        from x2paddle.optimizer.onnx_optimizer import ONNXOptimizer
        print("Model optimizing ...")
        optimizer = ONNXOptimizer(mapper)
        optimizer.delete_redundance_code()
        print("Model optimized.")
        mapper.save_inference_model(save_dir, params_merge)

def pytorch2paddle(model_path, save_dir, jit_type, input_files):
@@ -318,7 +322,7 @@ def main():
        if args.params_merge:
            params_merge = True
        onnx2paddle(args.model, args.save_dir, params_merge)
        onnx2paddle(args.model, args.save_dir, args.paddle_type, params_merge)
    elif args.framework == "pytorch":
        assert args.model is not None, "--model should be defined while translating pytorch model"
        pytorch2paddle(args.model, args.save_dir, args.jit_type, args.input_files)
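For orientation, a minimal sketch of how the updated entry point is driven after this change, assuming the converter is still importable from x2paddle's convert module; the model path and output directory below are placeholders:

from x2paddle.convert import onnx2paddle

# "dygraph" selects the new Paddle 2.0 code path; "static" keeps the previous behaviour.
onnx2paddle("model.onnx", "saved_paddle_model", paddle_type="dygraph", params_merge=False)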
@@ -41,6 +41,8 @@ class Layer(object):
            layer_code = layer_code
        elif self.use_fluid:
            layer_code = layer_code + "fluid." + self.op + "("
        elif self.op == "full_like":
            layer_code = layer_code + "paddle." + self.op + "("
        else:
            layer_code = layer_code + "fluid.layers." + self.op + "("
@@ -128,6 +128,7 @@ class OpMapper(object):
        self.add_codes("from paddle.fluid.initializer import Constant")
        self.add_codes("from paddle.fluid.param_attr import ParamAttr")
        self.add_codes("import paddle.fluid as fluid")
        self.add_codes("import paddle")
        self.add_codes("")

    def save_inference_model(self, save_dir, params_merge):
@@ -214,6 +215,7 @@ class OpMapper(object):
        self.add_codes("", 0)
        self.add_codes("\ndef x2paddle_net():", 0)
        self.add_codes("paddle.enable_static()", 1)
        for i in range(len(self.graph.topo_sort)):
            node_name = self.graph.topo_sort[i]
            node = self.graph.get_node(node_name)
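Based on the add_codes calls above, the header of the emitted static-graph script would now begin roughly like this (illustrative sketch, not taken verbatim from the generator):

from paddle.fluid.initializer import Constant
from paddle.fluid.param_attr import ParamAttr
import paddle.fluid as fluid
import paddle


def x2paddle_net():
    paddle.enable_static()
    ...  # converted layers and the network outputs are emitted here, node by node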
@@ -488,11 +488,12 @@ class PaddleGraph(object):
            gen_codes(
                comment_list,
                indent=1))
        use_structured_name = False if self.source_type in ["tf", "onnx"] else True
        self.run_func.extend(
            gen_codes(["paddle.disable_static()",
                       "params, _ = fluid.load_dygraph('{}/model')".format(code_dir),
                       "model = {}()".format(self.name),
                       "model.set_dict(params)",
                       "model.set_dict(params, use_structured_name={})".format(use_structured_name),
                       "model.eval()",
                       "out = model({})".format(input_data_name),
                       "return out"], indent=1))
@@ -624,7 +625,7 @@ class PaddleGraph(object):
        paddle.disable_static()
        restore, _ = fluid.load_dygraph(osp.join(save_dir, "model"))
        model = getattr(x2paddle_code, self.name)()
        if self.source_type == "tf":
        if self.source_type in ["tf", "onnx"]:
            model.set_dict(restore, use_structured_name=False)
        else:
            model.set_dict(restore)
@@ -145,6 +145,7 @@ class ONNXGraph(Graph):
        self.build()
        self.collect_value_infos()
        self.allocate_shapes()
        self.graph_name = "ONNXModel"

    def get_inner_nodes(self):
        """
@@ -578,9 +578,11 @@ class CaffeOpMapper(OpMapper):
        mode_bool = params.channel_shared
        output_shape = node.output_shape[0]
        if mode_bool:
            num_parameters = 1
            mode = 'all'
            channel = None
        else:
            num_parameters = output_shape[1]
            mode = 'channel'
            channel = output_shape[1]
        data = node.data
        self.params[prelu_name + '._weight'] = np.squeeze(data[0])
        assert data is not None, "The parameter of {} (type is {}) is not set. You need to use python package of caffe to set the default value.".format(
@@ -589,7 +591,8 @@ class CaffeOpMapper(OpMapper):
            "paddle.nn.PReLU",
            inputs={"input": self.get_input_name(input)},
            outputs=layer_outputs,
            num_parameters=num_parameters)
            channel=channel,
            mode=string(mode))

    def Eltwise(self, node):
        assert len(
@@ -745,12 +748,29 @@ class CaffeOpMapper(OpMapper):
        inputs_dict = {}
        inputs_dict['x'] = node.layer_name + "_mul"
        inputs_dict['y'] = node.layer_name + "_cparam2"
        self.paddle_graph.add_layer(
            "fluid.layers.elementwise_add",
            inputs=inputs_dict,
            outputs=[node.layer_name],
            axis=axis)
        output_shape = node.output_shape[0]
        if axis == -1:
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.layer_name])
        else:
            if axis < 0:
                axis = axis + len(output_shape)
            param2_shape = self.params[node.layer_name + "_cparam2"].shape
            param2_shape_len = len(param2_shape)
            diff_len = len(output_shape) - axis - param2_shape_len
            new_shape = param2_shape + [1] * diff_len
            self.paddle_graph.add_layer(
                "paddle.reshape",
                inputs={"x": node.layer_name + "_cparam2"},
                outputs=[node.layer_name + "_cparam2"],
                shape=new_shape)
            self.paddle_graph.add_layer(
                "paddle.add",
                inputs=inputs_dict,
                outputs=[node.layer_name])

    def Reshape(self, node):
        input = self.graph.get_bottom_node(node, idx=0, copy=True)
        output_shape = node.output_shape[0]
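The new else branch emulates the axis argument of fluid.layers.elementwise_add, which paddle.add does not have: the second operand is reshaped so that plain broadcasting aligns it with the requested axis. A minimal NumPy sketch of the same trick, with assumed shapes:

import numpy as np

x = np.zeros((2, 3, 4, 4))                                # e.g. an NCHW feature map
y = np.ones(3)                                            # per-channel parameter, axis == 1
axis = 1
diff_len = x.ndim - axis - y.ndim                         # trailing dims y must cover -> 2
y_aligned = y.reshape(list(y.shape) + [1] * diff_len)     # shape (3, 1, 1)
out = x + y_aligned                                       # broadcasts like elementwise_add(x, y, axis=1)
print(out.shape)                                          # (2, 3, 4, 4)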
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from x2paddle.op_mapper.dygraph.onnx2paddle.opset9 import OpSet9
from x2paddle.core.op_mapper import OpMapper
from x2paddle.decoder.onnx_decoder import ONNXGraphNode
from x2paddle.core.program import PaddleGraph

class ONNXOpMapper(OpMapper):
    def __init__(self, decoder):
        super(ONNXOpMapper, self).__init__()
        self.support_op_sets = [9, ]
        self.default_op_set = 9
        self.graph = decoder.graph
        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="onnx")
        self.opset = self.create_opset(decoder)
        if not self.op_checker():
            raise Exception("Model are not supported yet.")
        #mapping op
        print("Total nodes: {}".format(
            sum([
                isinstance(node, ONNXGraphNode)
                for name, node in self.graph.node_map.items()
            ])))
        print("Nodes converting ...")
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if hasattr(self.opset, op):
                func = getattr(self.opset, op)
                func(node)
            elif op in self.opset.default_op_mapping:
                self.opset.directly_map(node)
            elif op in self.opset.elementwise_ops:
                self.opset.elementwise_map(node)
        print("Nodes converted.")
        self.weights = self.opset.weights
        self.inputs_info = self.opset.inputs_info
        self.paddle_graph.set_name(self.graph.graph_name)
        self.paddle_graph.set_parameters(self.weights)
        self.paddle_graph.set_inputs_info(self.inputs_info)
        self.paddle_graph.outputs = self.graph.output_nodes

    def op_checker(self):
        unsupported_ops = set()
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self.opset, op) and \
                    op not in self.opset.default_op_mapping and \
                    op not in self.opset.elementwise_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
        else:
            print("There are {} ops not supported yet, list as below".format(
                len(unsupported_ops)))
            for op in unsupported_ops:
                print(op)
            return False

    def create_opset(self, decoder):
        run_op_set = self.default_op_set
        opset = ''
        if decoder.op_set in self.support_op_sets:
            opset = 'OpSet' + str(decoder.op_set)
        elif decoder.op_set < self.default_op_set:
            opset = 'OpSet' + str(self.default_op_set)
        else:
            for op_set in self.support_op_sets:
                if decoder.op_set > op_set:
                    run_op_set = op_set
                else:
                    break
            opset = 'OpSet' + str(run_op_set)
        print(
            'Now, onnx2paddle support convert onnx model opset_verison {},'
            'opset_verison of your onnx model is {}, automatically treated as op_set: {}.'
            .format(self.support_op_sets, decoder.op_set, run_op_set))
        return eval(opset)(decoder, self.paddle_graph)
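A minimal sketch of how this new mapper is driven, mirroring the dygraph branch of onnx2paddle() earlier in this change (the model path and output directory are placeholders):

from x2paddle.decoder.onnx_decoder import ONNXDecoder
from x2paddle.op_mapper.dygraph.onnx2paddle.onnx_op_mapper import ONNXOpMapper

model = ONNXDecoder("model.onnx")
mapper = ONNXOpMapper(model)
mapper.paddle_graph.build()
mapper.paddle_graph.gen_model("saved_paddle_model")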
This diff is collapsed.
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from x2paddle.op_mapper.onnx2paddle.opset9 import OpSet9, custom_layers
from x2paddle.op_mapper.static.onnx2paddle.opset9 import OpSet9, custom_layers
from x2paddle.core.op_mapper import OpMapper
from x2paddle.decoder.onnx_decoder import ONNXGraph, ONNXGraphNode, ONNXGraphDataNode
@@ -17,7 +17,7 @@ from x2paddle.core.graph import GraphNode
from x2paddle.core.fluid_code import Layer
from x2paddle.core.fluid_code import FluidCode
from x2paddle.core.util import string
from x2paddle.op_mapper.onnx2paddle.opset9.custom_layer import *
from x2paddle.op_mapper.static.onnx2paddle.opset9.custom_layer import *
from functools import reduce
import numpy as np
import onnx