Unverified commit bfb861f5, authored by xiaoguoguo626807, committed by GitHub

recover tanh_triple (#55372)

Parent 0a4d1999
@@ -2265,6 +2265,7 @@
    func : tanh_double_grad
  composite : tanh_double_grad(out, grad_out, grad_x_grad, out_grad, grad_out_grad)
  inplace : (grad_x_grad -> grad_out_grad)
  backward : tanh_triple_grad

- backward_op : tanh_grad
  forward : tanh (Tensor x) -> Tensor(out)
@@ -2290,6 +2291,18 @@
    func : tanh_shrink_grad
  inplace : (out_grad -> x_grad)

- backward_op : tanh_triple_grad
  forward : tanh_double_grad (Tensor out, Tensor grad_out_forward, Tensor grad_x_grad_forward) -> Tensor(grad_out_new), Tensor(grad_out_grad)
  args : (Tensor out, Tensor grad_out_forward, Tensor grad_x_grad_forward, Tensor grad_out_new_grad, Tensor grad_out_grad_grad)
  output : Tensor(out_grad), Tensor(grad_out_forward_grad), Tensor(grad_x_grad_forward_grad)
  infer_meta :
    func : GeneralTernaryGradInferMeta
    param : [out, out, grad_x_grad_forward]
  kernel :
    func : tanh_triple_grad
  inplace : (grad_x_grad_forward -> grad_out_forward_grad)
  optional : grad_out_new_grad, grad_out_grad_grad

- backward_op : temporal_shift_grad
  forward : temporal_shift(Tensor x, int seg_num, float shift_ratio = 0.25f, str data_format = "NCHW") -> Tensor(out)
  args : (Tensor out_grad, int seg_num, float shift_ratio, str data_format)
...
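
A minimal usage sketch (not part of this commit), assuming Paddle's dynamic-graph autograd dispatches the third nested `paddle.grad` call on `tanh` through the restored `tanh_triple_grad` backward chain registered above:

```python
# Hypothetical example: three nested gradient calls on tanh should exercise
# tanh_grad, tanh_double_grad, and the re-registered tanh_triple_grad in turn.
import paddle

x = paddle.to_tensor([0.5], dtype='float64', stop_gradient=False)
y = paddle.tanh(x)

(g1,) = paddle.grad([y], [x], create_graph=True)   # dy/dx = 1 - tanh(x)^2
(g2,) = paddle.grad([g1], [x], create_graph=True)  # second derivative
(g3,) = paddle.grad([g2], [x])                     # third derivative
print(g1.numpy(), g2.numpy(), g3.numpy())
```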
@@ -2688,7 +2688,7 @@
  attrs : [bool use_mkldnn = false, bool use_cudnn = false]

- op : tanh
  backward : tanh_grad, tanh_double_grad (tanh_grad_grad), tanh_triple_grad
  inputs :
    x : X
  outputs :
...
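
For completeness, a hedged numeric check (also not part of the commit): with the backward chain, the new op entry, and the op_compat mapping in place, the third-order autograd result can be compared against the closed form d³/dx³ tanh(x) = -2(1 - t²)(1 - 3t²), where t = tanh(x):

```python
# Hypothetical verification against the analytic third derivative of tanh.
import numpy as np
import paddle

xs = paddle.linspace(-2.0, 2.0, 9, dtype='float64')
xs.stop_gradient = False
y = paddle.tanh(xs)

(g1,) = paddle.grad([y], [xs], create_graph=True)
(g2,) = paddle.grad([g1], [xs], create_graph=True)
(g3,) = paddle.grad([g2], [xs])

t = np.tanh(xs.numpy())
expected = -2.0 * (1.0 - t**2) * (1.0 - 3.0 * t**2)
np.testing.assert_allclose(g3.numpy(), expected, rtol=1e-6, atol=1e-8)
print("third-order gradient matches the closed form")
```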