未验证 提交 29bb1208 编写于 作者: W WJJ1995 提交者: GitHub

add ONNX ReduceL1/L2 op and fixed clip op (#635)

* add ReduceL1/L2 op and fixed clip op

* remove print

* fixed comments
上级 fd0205c4
......@@ -99,7 +99,6 @@ class TFGraphNode(GraphNode):
@property
def name(self):
    """Return the unique output name of this graph node.

    When the node carries an ``index`` attribute (i.e. it is one of
    several outputs of a multi-output layer), the name is suffixed with
    ``_p{index}`` to disambiguate; otherwise the plain layer name is
    returned.
    """
    if hasattr(self, 'index'):
        # Fix: removed leftover debug `print(self.layer_type)` that
        # spammed stdout on every name lookup.
        return self.layer_name + "_p{}".format(self.index)
    return self.layer_name
......
......@@ -1173,15 +1173,15 @@ class OpSet9():
max_value = max_value[0]
if min_value.shape == (1, ):
min_value = min_value[0]
if max_value is not None and min_value is not None:
layer_attrs = {'max': max_value, 'min': min_value}
self.paddle_graph.add_layer(
'paddle.clip',
inputs={"x": val_x.name},
outputs=[node.name],
**layer_attrs)
else:
raise
if max_value is not None and min_value is not None:
layer_attrs = {'max': max_value, 'min': min_value}
self.paddle_graph.add_layer(
'paddle.clip',
inputs={"x": val_x.name},
outputs=[node.name],
**layer_attrs)
else:
raise Exception("max_value or min_value can't be None")
@print_mapping_info
def Split(self, node):
......@@ -2226,3 +2226,31 @@ class OpSet9():
"scores": scores.name},
outputs=layer_outputs,
**layer_attrs)
@print_mapping_info
def ReduceL1(self, node):
    """Map ONNX ReduceL1 (sum of absolute values along axes) onto
    ``paddle.norm`` with ``p=1``.

    Reads the ``axes`` and ``keepdims`` ONNX attributes; a missing or
    non-zero ``keepdims`` keeps the reduced dimensions, matching the
    ONNX default of 1.
    """
    input_node = self.graph.get_input_node(node, idx=0, copy=True)
    reduce_axes = node.get_attr('axes')
    # Any value other than the explicit integer 0 means "keep dims".
    keep_dim = node.get_attr('keepdims') != 0
    self.paddle_graph.add_layer(
        "paddle.norm",
        inputs={"x": input_node.name},
        outputs=[node.name],
        p=1,
        axis=reduce_axes,
        keepdim=keep_dim)
@print_mapping_info
def ReduceL2(self, node):
    """Map ONNX ReduceL2 (Euclidean norm along axes) onto
    ``paddle.norm`` with ``p=2``.

    Reads the ``axes`` and ``keepdims`` ONNX attributes; a missing or
    non-zero ``keepdims`` keeps the reduced dimensions, matching the
    ONNX default of 1.
    """
    input_node = self.graph.get_input_node(node, idx=0, copy=True)
    reduce_axes = node.get_attr('axes')
    # Any value other than the explicit integer 0 means "keep dims".
    keep_dim = node.get_attr('keepdims') != 0
    self.paddle_graph.add_layer(
        "paddle.norm",
        inputs={"x": input_node.name},
        outputs=[node.name],
        p=2,
        axis=reduce_axes,
        keepdim=keep_dim)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册