From d43f75b29fe435e14c4dfc19397034d6c86556b6 Mon Sep 17 00:00:00 2001
From: Channingss
Date: Thu, 30 Jul 2020 07:39:25 +0000
Subject: [PATCH] add clip,tanh,log

---
 .../op_mapper/paddle2onnx/opset11/opset.py | 36 +++++---------
 .../op_mapper/paddle2onnx/opset9/opset.py  | 47 +++++++++----------
 2 files changed, 34 insertions(+), 49 deletions(-)

diff --git a/x2paddle/op_mapper/paddle2onnx/opset11/opset.py b/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
index 4ec88b5..9e40c85 100644
--- a/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
+++ b/x2paddle/op_mapper/paddle2onnx/opset11/opset.py
@@ -41,32 +41,18 @@ class OpSet11(OpSet10):
             outputs=op.output('Out'), )
         return [min_node, max_node, node]
 
-    def pad2d(self, op, block):
-        x_shape = block.var(op.input('X')[0]).shape
-        paddings = op.attr('paddings')
-        onnx_pads = []
-        #TODO support pads is Variable
-        if op.attr('data_format') == 'NCHW':
-            pads = [
-                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
-            ]
-        else:
-            pads = [
-                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
-            ]
-        pads_name = self.get_name(op.type, 'pads')
-        pads_node = self.make_constant_node(pads_name,
-                                            onnx_pb.TensorProto.INT64, pads)
-        constant_value_name = self.get_name(op.type, 'constant_value')
-        constant_value_node = self.make_constant_node(constant_value_name,
-                                                      onnx_pb.TensorProto.FLOAT,
-                                                      op.attr('pad_value'))
+    def clip(self, op, block):
+        min_name = self.get_name(op.type, 'min')
+        max_name = self.get_name(op.type, 'max')
+        min_node = self.make_constant_node(min_name, onnx_pb.TensorProto.FLOAT,
+                                           op.attr('min'))
+        max_node = self.make_constant_node(max_name, onnx_pb.TensorProto.FLOAT,
+                                           op.attr('max'))
         node = helper.make_node(
-            'Pad',
-            inputs=op.input('X') + [pads_name, constant_value_name],
-            outputs=op.output('Out'),
-            mode=op.attr('mode'))
-        return [pads_node, constant_value_node, node]
+            'Clip',
+            inputs=[op.input('X')[0], min_name, max_name],
+            outputs=op.output('Out'))
+        return [min_node, max_node, node]
 
     def bilinear_interp(self, op, block):
         input_names = op.input_names
diff --git a/x2paddle/op_mapper/paddle2onnx/opset9/opset.py b/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
index 0ab8e69..2317031 100644
--- a/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
+++ b/x2paddle/op_mapper/paddle2onnx/opset9/opset.py
@@ -59,7 +59,7 @@ class OpSet9(object):
             'Constant', inputs=[], outputs=[name], value=tensor)
         return node
 
-    def convert_weights(self, program, scope=None):
+    def convert_weights(self, program):
         var_names = program.global_block().vars
         nodes = list()
         for name in var_names:
@@ -68,7 +68,7 @@ class OpSet9(object):
                 continue
             if not var.persistable:
                 continue
-            weight = np.array(scope.find_var(name).get_tensor())
+            weight = np.array(fluid.global_scope().find_var(name).get_tensor())
             tensor = helper.make_tensor(
                 name=name,
                 dims=var.shape,
@@ -110,11 +110,32 @@ class OpSet9(object):
             'Relu', inputs=op.input('X'), outputs=op.output('Out'))
         return node
 
+    def tanh(self, op, block):
+        node = helper.make_node(
+            'Tanh', inputs=op.input('X'), outputs=op.output('Out'))
+        return node
+
+    def log(self, op, block):
+        node = helper.make_node(
+            'Log', inputs=op.input('X'), outputs=op.output('Out'))
+        return node
+
     def sigmoid(self, op, block):
         node = helper.make_node(
             'Sigmoid', inputs=op.input('X'), outputs=op.output('Out'))
         return node
 
+    def clip(self, op, block):
+        min_value = op.attr('min')
+        max_value = op.attr('max')
+        node = helper.make_node(
+            'Clip',
+            inputs=[op.input('X')[0]],
+            outputs=op.output('Out'),
+            max=max_value,
+            min=min_value)
+        return node
+
     def exp(self, op, block):
         node = helper.make_node(
             'Exp', inputs=op.input('X'), outputs=op.output('Out'))
@@ -215,28 +236,6 @@ class OpSet9(object):
             pads=op.attr('paddings') + op.attr('paddings'))
         return node
 
-    def pad2d(self, op, block):
-        x_shape = block.var(op.input('X')[0]).shape
-        paddings = op.attr('paddings')
-        onnx_pads = []
-        if op.attr('data_format') == 'NCHW':
-            pads = [
-                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
-            ]
-        else:
-            pads = [
-                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
-            ]
-        #TODO support pads is Variable
-        node = helper.make_node(
-            'Pad',
-            inputs=op.input('X'),
-            outputs=op.output('Out'),
-            mode=op.attr('mode'),
-            value=op.attr('pad_value'),
-            pads=pads)
-        return node
-
     def softmax(self, op, block):
         axis = op.attr('axis')
         shape = block.var(op.output('Out')[0]).shape
-- 
GitLab
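
Note (not part of the patch): a minimal sketch of the two 'Clip' forms the mappers
above emit. ONNX keeps min/max as node attributes through opset 10 and moves them to
optional tensor inputs in opset 11, which is why the opset-11 mapper creates extra
Constant nodes. The tensor names below ('x', 'y', 'clip_min', 'clip_max') are
illustrative only.

    # Sketch assuming only the onnx package; names are made up for illustration.
    from onnx import helper, TensorProto

    # Opset 9/10 style: min/max are attributes on the Clip node itself.
    clip_v9 = helper.make_node(
        'Clip', inputs=['x'], outputs=['y'], min=0.0, max=6.0)

    # Opset 11 style: min/max become inputs, fed here by Constant nodes.
    min_const = helper.make_node(
        'Constant', inputs=[], outputs=['clip_min'],
        value=helper.make_tensor('clip_min', TensorProto.FLOAT, [], [0.0]))
    max_const = helper.make_node(
        'Constant', inputs=[], outputs=['clip_max'],
        value=helper.make_tensor('clip_max', TensorProto.FLOAT, [], [6.0]))
    clip_v11 = helper.make_node(
        'Clip', inputs=['x', 'clip_min', 'clip_max'], outputs=['y'])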