Commit d5c2a8be authored by: W wjj19950828

Merge remote-tracking branch 'upstream/develop' into Support_GPT2

@@ -76,6 +76,7 @@
| 81 | Add | 82 | Concat | 83 | Max | 84 | Min |
| 85 | GreaterOrEqual | 86 | GatherND | 87 | And | 88 | Cos |
| 89 | Neg | 90 | SpaceToDepth | 91 | GatherElement | 92 | Sin |
| 93 | CumSum | | | | | | |
## PyTorch
......
@@ -457,6 +457,19 @@ class OpSet9():
            outputs=[node.name],
            **attrs)

    @print_mapping_info
    def CumSum(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axis = self.graph.get_input_node(node, idx=1, copy=True)
        axis_values = _const_weight_or_none(axis)
        assert axis_values is not None, 'Axis only support constant tensor!'
        layer_attrs = {'axis': axis_values}
        self.paddle_graph.add_layer(
            'paddle.cumsum',
            inputs={"x": val_x.name},
            outputs=[node.name],
            **layer_attrs)

    @print_mapping_info
    def HardSigmoid(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
......
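For reference, a minimal sketch (not part of the commit) of what the new mapping produces: the converter reads the constant `axis` input of the ONNX CumSum node and passes it as the `axis` argument of `paddle.cumsum`. The tensor values below are illustrative only and assume a working PaddlePaddle install.

```python
# Minimal sketch of the behavior the new CumSum mapping emits.
# The input values here are illustrative, not taken from the commit.
import numpy as np
import paddle

x = paddle.to_tensor(np.array([[1., 2., 3.],
                               [4., 5., 6.]], dtype="float32"))
axis = 1  # in the converter, taken from the constant second input of the ONNX node

y = paddle.cumsum(x, axis=axis)
print(y.numpy())
# [[ 1.  3.  6.]
#  [ 4.  9. 15.]]
```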