From 5bd7c82bc571c9fe72fa47a3b8058070141a5b4b Mon Sep 17 00:00:00 2001
From: qingqing01
Date: Thu, 25 Feb 2021 11:26:08 +0800
Subject: [PATCH] [Cherry-pick] Double grad for clip op #31109

Cherry-pick double grad for clip
---
 paddle/fluid/operators/clip_op.cc             | 27 ++++++++++++++++++-
 .../fluid/tests/unittests/test_nn_grad.py     | 21 +++++++++++++++
 2 files changed, 47 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/operators/clip_op.cc b/paddle/fluid/operators/clip_op.cc
index ad61d61d4cc..eb27df8a367 100644
--- a/paddle/fluid/operators/clip_op.cc
+++ b/paddle/fluid/operators/clip_op.cc
@@ -109,6 +109,29 @@ DECLARE_INPLACE_OP_INFERER(ClipGradInplaceInferer,
                            {framework::GradVarName("Out"),
                             framework::GradVarName("X")});
 
+template <typename T>
+class ClipDoubleGradOpMaker : public framework::SingleGradOpMaker<T> {
+ public:
+  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
+
+ protected:
+  void Apply(GradOpPtr<T> op) const override {
+    op->SetType("clip_grad");
+    op->SetInput("X", this->Input("X"));
+    if (this->HasInput("Min")) {
+      op->SetInput("Min", this->Input("Min"));
+    }
+    if (this->HasInput("Max")) {
+      op->SetInput("Max", this->Input("Max"));
+    }
+    op->SetInput(framework::GradVarName("Out"),
+                 this->OutputGrad(framework::GradVarName("X")));
+    op->SetOutput(framework::GradVarName("X"),
+                  this->InputGrad(framework::GradVarName("Out")));
+    op->SetAttrMap(this->Attrs());
+  }
+};
+
 }  // namespace operators
 }  // namespace paddle
 
@@ -117,7 +140,9 @@ REGISTER_OPERATOR(clip, ops::ClipOp, ops::ClipOpMaker<float>,
                   ops::ClipGradOpMaker<paddle::framework::OpDesc>,
                   ops::ClipGradOpMaker<paddle::imperative::OpBase>,
                   ops::ClipInplaceInferer);
-REGISTER_OPERATOR(clip_grad, ops::ClipOpGrad, ops::ClipGradInplaceInferer);
+REGISTER_OPERATOR(clip_grad, ops::ClipOpGrad, ops::ClipGradInplaceInferer,
+                  ops::ClipDoubleGradOpMaker<paddle::framework::OpDesc>,
+                  ops::ClipDoubleGradOpMaker<paddle::imperative::OpBase>);
 REGISTER_OP_CPU_KERNEL(
     clip, ops::ClipKernel<paddle::platform::CPUDeviceContext, float>,
     ops::ClipKernel<paddle::platform::CPUDeviceContext, double>);
diff --git a/python/paddle/fluid/tests/unittests/test_nn_grad.py b/python/paddle/fluid/tests/unittests/test_nn_grad.py
index 9711b84d613..d7bbc355d5d 100644
--- a/python/paddle/fluid/tests/unittests/test_nn_grad.py
+++ b/python/paddle/fluid/tests/unittests/test_nn_grad.py
@@ -329,6 +329,27 @@ class TestUnsqueezeDoubleGradCheck(unittest.TestCase):
             self.func(p)
 
 
+class TestClipDoubleGradCheck(unittest.TestCase):
+    @prog_scope()
+    def func(self, place):
+        x_shape = [2, 4, 10]
+        dtype = np.float64
+
+        x = layers.data('x', x_shape, False, dtype)
+        x.persistable = True
+        out = paddle.clip(x, min=-1., max=1.)
+        x_arr = np.random.uniform(-5., 5., x_shape).astype(dtype)
+
+        gradient_checker.double_grad_check([x], out, x_init=x_arr, place=place)
+
+    def test_grad(self):
+        places = [fluid.CPUPlace()]
+        if core.is_compiled_with_cuda():
+            places.append(fluid.CUDAPlace(0))
+        for p in places:
+            self.func(p)
+
+
 class TestTransposeDoubleGradCheck(unittest.TestCase):
     @prog_scope()
     def func(self, place):
-- 
GitLab