From 96bc335216f418a8682e49f75ddaf50eedb71704 Mon Sep 17 00:00:00 2001
From: Yang Yu
Date: Thu, 28 Dec 2017 12:49:02 +0800
Subject: [PATCH] Fix SumKernel in-place handling and update gradient-check
 tests

Remove a leftover debug VLOG from Variable::GetMutable, allocate the
output tensor before taking the Eigen view in SumKernel so the view is
valid on the non-in-place path, and switch the dynamic RNN
gradient-check tests to the renamed fluid.backward.append_backward API.
A debug print of the parameter gradient is added to
TestSimpleMulWithMemory.

---
 paddle/framework/variable.h                                | 1 -
 paddle/operators/sum_op.h                                  | 6 ++++--
 python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py | 5 +++--
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/paddle/framework/variable.h b/paddle/framework/variable.h
index 3720393601..e5a94759f9 100644
--- a/paddle/framework/variable.h
+++ b/paddle/framework/variable.h
@@ -35,7 +35,6 @@ class Variable {
   template <typename T>
   T* GetMutable() {
     if (!IsType<T>()) {
-      VLOG(10) << "Resetting " << *this->name_;
       holder_.reset(new PlaceholderImpl<T>(new T()));
     }
     return static_cast<T*>(holder_->Ptr());
diff --git a/paddle/operators/sum_op.h b/paddle/operators/sum_op.h
index d1277d3edd..552b48f608 100644
--- a/paddle/operators/sum_op.h
+++ b/paddle/operators/sum_op.h
@@ -37,10 +37,12 @@ class SumKernel : public framework::OpKernel<T> {
     bool in_place = out_var == in_vars[0];
 
     if (out_var->IsType<framework::LoDTensor>()) {
-      auto *out = context.Output<Tensor>("Out");
-      auto result = EigenVector<T>::Flatten(*out);
+      auto *out = context.Output<Tensor>("Out");
       if (!in_place) {
         out->mutable_data<T>(context.GetPlace());
+      }
+      auto result = EigenVector<T>::Flatten(*out);
+      if (!in_place) {
         math::SetConstant<DeviceContext, T> constant_functor;
         constant_functor(context.template device_context<DeviceContext>(), out,
                          0.0);
diff --git a/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py b/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
index 7f61b966fd..238fd1a8cb 100644
--- a/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
+++ b/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
@@ -242,7 +242,7 @@ class TestSimpleMul(unittest.TestCase):
         out = rnn()
         out = fluid.layers.sequence_pool(out, pool_type='last')
         loss = fluid.layers.mean(x=out)
-        fluid.backward.append_backward_ops(loss)
+        fluid.backward.append_backward(loss)
 
         cpu = fluid.CPUPlace()
         exe = fluid.Executor(cpu)
@@ -317,7 +317,7 @@ class TestSimpleMulWithMemory(unittest.TestCase):
         out = rnn()
         last = fluid.layers.sequence_pool(input=out, pool_type='last')
         loss = fluid.layers.mean(x=last)
-        fluid.backward.append_backward_ops(loss)
+        fluid.backward.append_backward(loss)
 
         cpu = fluid.CPUPlace()
         exe = fluid.Executor(cpu)
@@ -330,6 +330,7 @@ class TestSimpleMulWithMemory(unittest.TestCase):
             ], return_numpy=False))
         last_by_py, = py_rnn.exe().values()
+        print w_g[0]
         self.assertTrue(numpy.allclose(last_np, last_by_py))
 
         w_g_num = py_rnn.get_numeric_gradient_of_param(self.PARAM_NAME)
         # print w_g_num[0], w_g[0]
--
GitLab