From 4519e2e8ff3cf65b0a804a4ce8e9f2f121b80cd6 Mon Sep 17 00:00:00 2001
From: Channingss
Date: Thu, 30 Jul 2020 07:50:07 +0000
Subject: [PATCH] merge paddle/develop

---
 .../op_mapper/paddle2onnx/opset11/opset.py | 27 +++++++++++++++++++++++
 .../op_mapper/paddle2onnx/opset9/opset.py  | 26 ++++++++++++++++++++--
 2 files changed, 51 insertions(+), 2 deletions(-)

diff --git a/x2paddle/op_mapper/paddle2onnx/opset11/opset.py b/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
index 9e40c85..385b377 100644
--- a/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
+++ b/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
@@ -41,6 +41,33 @@ class OpSet11(OpSet10):
             outputs=op.output('Out'), )
         return [min_node, max_node, node]
 
+    def pad2d(self, op, block):
+        x_shape = block.var(op.input('X')[0]).shape
+        paddings = op.attr('paddings')
+        onnx_pads = []
+        #TODO support pads is Variable
+        if op.attr('data_format') == 'NCHW':
+            pads = [
+                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
+            ]
+        else:
+            pads = [
+                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
+            ]
+        pads_name = self.get_name(op.type, 'pads')
+        pads_node = self.make_constant_node(pads_name,
+                                            onnx_pb.TensorProto.INT64, pads)
+        constant_value_name = self.get_name(op.type, 'constant_value')
+        constant_value_node = self.make_constant_node(constant_value_name,
+                                                      onnx_pb.TensorProto.FLOAT,
+                                                      op.attr('pad_value'))
+        # Opset 11 Pad takes pads/constant_value as inputs, not attributes.
+        node = helper.make_node(
+            'Pad',
+            inputs=op.input('X') + [pads_name, constant_value_name],
+            outputs=op.output('Out'),
+            mode=op.attr('mode'))
+        return [pads_node, constant_value_node, node]
+
     def clip(self, op, block):
         min_name = self.get_name(op.type, 'min')
         max_name = self.get_name(op.type, 'max')
diff --git a/x2paddle/op_mapper/paddle2onnx/opset9/opset.py b/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
index 2317031..a0436e6 100644
--- a/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
+++ b/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
@@ -59,7 +59,7 @@ class OpSet9(object):
             'Constant', inputs=[], outputs=[name], value=tensor)
         return node
 
-    def convert_weights(self, program):
+    def convert_weights(self, program, scope=None):
         var_names = program.global_block().vars
         nodes = list()
         for name in var_names:
@@ -68,7 +68,7 @@ class OpSet9(object):
                 continue
             if not var.persistable:
                 continue
-            weight = np.array(fluid.global_scope().find_var(name).get_tensor())
+            weight = np.array((scope or fluid.global_scope()).find_var(name).get_tensor())
             tensor = helper.make_tensor(
                 name=name,
                 dims=var.shape,
@@ -236,6 +236,28 @@ class OpSet9(object):
             pads=op.attr('paddings') + op.attr('paddings'))
         return node
 
+    def pad2d(self, op, block):
+        x_shape = block.var(op.input('X')[0]).shape
+        paddings = op.attr('paddings')
+        onnx_pads = []
+        if op.attr('data_format') == 'NCHW':
+            pads = [
+                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
+            ]
+        else:
+            pads = [
+                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
+            ]
+        #TODO support pads is Variable
+        node = helper.make_node(
+            'Pad',
+            inputs=op.input('X'),
+            outputs=op.output('Out'),
+            mode=op.attr('mode'),
+            value=op.attr('pad_value'),
+            pads=pads)
+        return node
+
     def softmax(self, op, block):
         axis = op.attr('axis')
         shape = block.var(op.output('Out')[0]).shape
-- 
GitLab