Unverified · Commit 65dd828e authored by Charles-hit, committed by GitHub

support assign op backward refuse forward (#45879)

Parent 654eff5f
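In short: the hand-written backward configuration for the assign op is replaced by an invoke entry, so assign_grad now reuses the forward assign op directly ("backward refuse forward", i.e. the backward pass reusing the forward op). Because the invoked forward op carries its own differentiation rule, second- and third-order gradients compose automatically, which is why the explicit assign_double_grad and assign_triple_grad entries can be deleted; new unit tests cover the double- and triple-grad paths in both static and dynamic mode.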
@@ -182,27 +182,11 @@
     func : asinh_grad
   inplace : (out_grad -> x_grad)
 
-- backward_api : assign_double_grad
-  forward : assign_grad (Tensor grad_out) -> Tensor(grad_x)
-  args : (Tensor grad_x_grad)
-  output : Tensor(grad_out_grad)
-  infer_meta :
-    func : UnchangedInferMeta
-  kernel :
-    func : assign
-  backward: assign_triple_grad
-  inplace : (grad_x_grad -> grad_out_grad)
-
 - backward_api : assign_grad
   forward : assign (Tensor x) -> Tensor(out)
   args : (Tensor out_grad)
   output : Tensor(x_grad)
-  infer_meta :
-    func : UnchangedInferMeta
-  kernel :
-    func : assign
-  backward: assign_double_grad
-  inplace : (out_grad -> x_grad)
+  invoke : assign(out_grad)
 
 - backward_api : assign_out__grad
   forward : assign_out_ (Tensor x, Tensor output) -> Tensor(out)
@@ -214,16 +198,6 @@
     func : assign
   inplace : (out_grad -> x_grad)
 
-- backward_api : assign_triple_grad
-  forward : assign_double_grad (Tensor grad_out) -> Tensor(grad_x)
-  args : (Tensor grad_x_grad)
-  output : Tensor(grad_out_grad)
-  infer_meta :
-    func : UnchangedInferMeta
-  kernel :
-    func : assign
-  inplace : (grad_x_grad -> grad_out_grad)
-
 - backward_api : atan_grad
   forward : atan (Tensor x) -> Tensor(out)
   args : (Tensor x, Tensor out_grad)
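To make the effect of the yaml change concrete, here is a minimal dygraph sketch (not part of the commit; it assumes a Paddle build where paddle.assign and paddle.grad behave as documented):

# Minimal sketch (not part of this commit): with assign_grad defined as
# invoke : assign(out_grad), the first-order grad of assign is itself an
# assign op, so it can be differentiated again for higher-order gradients.
import paddle

x = paddle.randn([3, 4])
x.stop_gradient = False
y = paddle.assign(x)  # forward: y = x

v = paddle.ones_like(y)  # injected cotangent, kept differentiable
v.stop_gradient = False

# First-order grad: dx = assign(v), built by reusing the forward op.
(dx,) = paddle.grad(y, x, grad_outputs=v, create_graph=True)

# Because dx is produced by a differentiable assign, we can take the grad
# of dx w.r.t. v -- this is the path double_grad_check exercises.
(dv,) = paddle.grad(dx, v)
assert (dv.numpy() == 1.0).all()  # d(assign)/d(input) is identity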
@@ -24,6 +24,9 @@ import paddle.fluid as fluid
 from paddle.fluid import compiler, Program, program_guard
 from paddle.fluid.backward import append_backward
 import paddle.fluid.framework as framework
+import gradient_checker
+from decorator_helper import prog_scope
+import paddle.fluid.layers as layers
 
 
 class TestAssignOp(op_test.OpTest):
@@ -258,5 +261,79 @@ class TestAssignOpErrorApi(unittest.TestCase):
         paddle.disable_static()
+
+
+class TestAssignDoubleGradCheck(unittest.TestCase):
+
+    def assign_wrapper(self, x):
+        return paddle.fluid.layers.assign(x[0])
+
+    @prog_scope()
+    def func(self, place):
+        # the shape of input variable should be clearly specified, not include -1.
+        eps = 0.005
+        dtype = np.float32
+
+        data = layers.data('data', [3, 4, 5], False, dtype)
+        data.persistable = True
+        out = paddle.fluid.layers.assign(data)
+        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)
+
+        gradient_checker.double_grad_check([data],
+                                           out,
+                                           x_init=[data_arr],
+                                           place=place,
+                                           eps=eps)
+        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
+        gradient_checker.double_grad_check_for_dygraph(self.assign_wrapper,
+                                                       [data],
+                                                       out,
+                                                       x_init=[data_arr],
+                                                       place=place)
+
+    def test_grad(self):
+        paddle.enable_static()
+        places = [fluid.CPUPlace()]
+        if core.is_compiled_with_cuda():
+            places.append(fluid.CUDAPlace(0))
+        for p in places:
+            self.func(p)
+
+
+class TestAssignTripleGradCheck(unittest.TestCase):
+
+    def assign_wrapper(self, x):
+        return paddle.fluid.layers.assign(x[0])
+
+    @prog_scope()
+    def func(self, place):
+        # the shape of input variable should be clearly specified, not include -1.
+        eps = 0.005
+        dtype = np.float32
+
+        data = layers.data('data', [3, 4, 5], False, dtype)
+        data.persistable = True
+        out = paddle.fluid.layers.assign(data)
+        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)
+
+        gradient_checker.triple_grad_check([data],
+                                           out,
+                                           x_init=[data_arr],
+                                           place=place,
+                                           eps=eps)
+        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
+        gradient_checker.triple_grad_check_for_dygraph(self.assign_wrapper,
+                                                       [data],
+                                                       out,
+                                                       x_init=[data_arr],
+                                                       place=place)
+
+    def test_grad(self):
+        paddle.enable_static()
+        places = [fluid.CPUPlace()]
+        if core.is_compiled_with_cuda():
+            places.append(fluid.CUDAPlace(0))
+        for p in places:
+            self.func(p)
 
 
 if __name__ == '__main__':
     unittest.main()
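For background on what the checks above verify: gradient_checker compares analytic gradients against central finite differences at the given eps. The following is a rough, self-contained NumPy illustration of that idea (a conceptual sketch only, not Paddle's actual gradient_checker implementation):

import numpy as np

def numeric_jacobian(f, x, eps=0.005):
    # Conceptual central-difference Jacobian, the numeric side of a grad check.
    x = x.astype(np.float64)          # fresh contiguous copy we may perturb
    y0 = f(x)
    jac = np.zeros((y0.size, x.size))
    flat = x.reshape(-1)              # view into x, so edits perturb x itself
    for i in range(flat.size):
        orig = flat[i]
        flat[i] = orig + eps
        y_pos = f(x).reshape(-1)
        flat[i] = orig - eps
        y_neg = f(x).reshape(-1)
        flat[i] = orig                # restore before the next coordinate
        jac[:, i] = (y_pos - y_neg) / (2 * eps)
    return jac

# For assign (an identity map), the Jacobian is the identity matrix, and the
# same holds for every higher-order grad map the double/triple checks probe.
jac = numeric_jacobian(lambda a: a.copy(), np.random.uniform(-1, 1, (3, 4)))
assert np.allclose(jac, np.eye(12))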