diff --git a/x2paddle/op_mapper/onnx_opsets/opset9.py b/x2paddle/op_mapper/onnx_opsets/opset9.py
index 5cf058f58ed3c35f54ff26cec469739cded0a23c..931900b2d7949725cedc3b91086e9b10c4c8413a 100644
--- a/x2paddle/op_mapper/onnx_opsets/opset9.py
+++ b/x2paddle/op_mapper/onnx_opsets/opset9.py
@@ -107,6 +107,10 @@ class OpSet9():
             'reduce_min', ['X'], ['Out'], dict(
                 axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
         ],
+        'ReduceMax': [
+            'reduce_max', ['X'], ['Out'], dict(
+                axes='dim', keepdims='keep_dim'), dict(keep_dim=1)
+        ],
         #active function
         'Relu': ['relu', ['X'], ['Out']],
         'LeakyRelu': ['leaky_relu', ['X'], ['Out'], dict(), dict(alpha=.01)],
@@ -131,10 +135,7 @@ class OpSet9():
         'Abs': ['abs', ['X'], ['Out']],
     }
 
-    default_ioa_constraint = {
-        'Gather':
-        [(lambda i, o, a: a.get('axis', 0) == 0, 'only axis = 0 is supported')],
-    }
+    default_ioa_constraint = {}
 
     def __init__(self, decoder):
         super(OpSet9, self).__init__()
@@ -1082,6 +1083,17 @@ class OpSet9():
             output=node,
             param_attr=None)
 
+    @print_mapping_info
+    def Greater(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
+        node.fluid_code.add_layer(
+            "greater_than",
+            inputs={'x': val_x,
+                    'y': val_y},
+            output=node,
+            param_attr=None)
+
     @print_mapping_info
     def Where(self, node):
         condition = self.graph.get_input_node(node, idx=0, copy=True)
diff --git a/x2paddle/op_mapper/paddle_op_mapper.py b/x2paddle/op_mapper/paddle_op_mapper.py
index 0ba7ad682528b4062dea381964835271f0177432..5d8a3c01a4b81d17497d0155f642ba10d62f5383 100644
--- a/x2paddle/op_mapper/paddle_op_mapper.py
+++ b/x2paddle/op_mapper/paddle_op_mapper.py
@@ -42,7 +42,6 @@ class PaddleOpMapper(object):
         op_nodes = list()
         input_nodes = list()
         output_nodes = list()
-        unsupported_ops = set()
 
         print("Translating PaddlePaddle to ONNX...\n")
 
@@ -183,6 +182,41 @@ class PaddleOpMapper(object):
             alpha=op.attr('alpha'))
         return node
 
+    def swish(self, op, block):
+        """
+        The activation swish, y = x / (1 + exp(-beta * x))
+        """
+        beta = op.attr('beta')
+        beta_name = self.get_name(op.type, 'beta')
+        beta_node = onnx.helper.make_node(
+            'Constant',
+            name=beta_name,
+            inputs=[],
+            outputs=[beta_name],
+            value=onnx.helper.make_tensor(
+                name=beta_name,
+                data_type=onnx.TensorProto.FLOAT,
+                dims=(),
+                vals=[beta]))
+
+        beta_x_name = self.get_name(op.type, 'beta_x')
+        beta_x_node = onnx.helper.make_node(
+            'Mul',
+            name=beta_x_name,
+            inputs=[op.input('X')[0], beta_name],
+            outputs=[beta_x_name])
+        sigmoid_name = self.get_name(op.type, 'sigmoid')
+        sigmoid_node = onnx.helper.make_node(
+            'Sigmoid',
+            name=sigmoid_name,
+            inputs=[beta_x_name],
+            outputs=[sigmoid_name])
+        swish_node = onnx.helper.make_node(
+            'Mul',
+            inputs=[op.input('X')[0], sigmoid_name],
+            outputs=op.output('Out'))
+        return [beta_node, beta_x_node, sigmoid_node, swish_node]
+
     def elementwise_add(self, op, block):
         axis = op.attr('axis')
         x_shape = block.var(op.input('X')[0]).shape
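
For reviewers: the patch lowers swish to primitive ONNX ops (`Constant` → `Mul` → `Sigmoid` → `Mul`) since ONNX has no dedicated swish operator. Below is a small self-contained sketch, not part of the patch, that mirrors that four-node layout and checks it against the docstring formula `y = x / (1 + exp(-beta * x))`. The tensor names (`x`, `beta_x`, `sigmoid`, `y`), the fixed shape, and the `beta` value are illustrative, not taken from the converter; it assumes only `numpy` and `onnx` are installed.

```python
import numpy as np
import onnx
from onnx import TensorProto, helper

beta = 2.0  # illustrative; the converter reads this from op.attr('beta')

# Same node layout as PaddleOpMapper.swish: Constant(beta) -> Mul -> Sigmoid -> Mul.
nodes = [
    helper.make_node(
        'Constant', inputs=[], outputs=['beta'],
        value=helper.make_tensor('beta', TensorProto.FLOAT, (), [beta])),
    helper.make_node('Mul', ['x', 'beta'], ['beta_x']),
    helper.make_node('Sigmoid', ['beta_x'], ['sigmoid']),
    helper.make_node('Mul', ['x', 'sigmoid'], ['y']),
]
graph = helper.make_graph(
    nodes, 'swish_check',
    [helper.make_tensor_value_info('x', TensorProto.FLOAT, [4])],
    [helper.make_tensor_value_info('y', TensorProto.FLOAT, [4])])
onnx.checker.check_model(helper.make_model(graph))  # structural validity only

# Numeric check of the same decomposition, evaluated with numpy.
x = np.array([-2.0, -1.0, 1.0, 2.0], dtype=np.float32)
decomposed = x * (1.0 / (1.0 + np.exp(-beta * x)))  # x * sigmoid(beta * x)
reference = x / (1.0 + np.exp(-beta * x))           # docstring formula
assert np.allclose(decomposed, reference)
```

The two expressions agree because `x * sigmoid(beta * x) = x / (1 + exp(-beta * x))`, so the emitted subgraph computes exactly the formula stated in the docstring.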