diff --git a/python/paddle/fluid/imperative/nn.py b/python/paddle/fluid/imperative/nn.py
index 59db26824cc7ef481408c1f85c83b5d1237f6dea..d7d73df45fb46128f936914ee1258d39266c00f4 100644
--- a/python/paddle/fluid/imperative/nn.py
+++ b/python/paddle/fluid/imperative/nn.py
@@ -324,7 +324,7 @@ class SimpleRNNCell(layers.Layer):
             outputs={"Out": tmp_i2h},
             attrs={"x_num_col_dims": 1,
                    "y_num_col_dims": 1})
-        # print("mul op 1")
+
         self._helper.append_op(
             type="mul",
             inputs={"X": pre_hidden,
@@ -332,7 +332,7 @@ class SimpleRNNCell(layers.Layer):
             outputs={"Out": tmp_h2h},
             attrs={"x_num_col_dims": 1,
                    "y_num_col_dims": 1})
-        # print("mul op 2")
+
         self._helper.append_op(
             type="elementwise_add",
             inputs={'X': tmp_h2h,
@@ -340,21 +340,6 @@ class SimpleRNNCell(layers.Layer):
             outputs={'Out': hidden},
             attrs={'axis': -1,
                    'use_mkldnn': False})
-        # print("elementwise op 1")
-
-        # self._helper.append_op(
-        #     type='print',
-        #     inputs={'In': hidden},
-        #     attrs={
-        #         'first_n': -1,
-        #         'summarize': -1,
-        #         'message': None or "",
-        #         'print_tensor_name': True,
-        #         'print_tensor_type': True,
-        #         'print_tensor_shape': True,
-        #         'print_tensor_lod': True,
-        #         'print_phase': 'BOTH'
-        #     })
         hidden = self._helper.append_activation(hidden)
 
         self._helper.append_op(
@@ -364,14 +349,12 @@ class SimpleRNNCell(layers.Layer):
             outputs={"Out": out},
             attrs={"x_num_col_dims": 1,
                    "y_num_col_dims": 1})
-        # print("mul op 3")
 
         self._helper.append_op(
             type="softmax",
             inputs={"X": out},
             outputs={"Out": softmax_out},
             attrs={"use_cudnn": False})
-        # print("softmax op 1")
 
         self._helper.append_op(
             type='reduce_sum',
@@ -380,5 +363,5 @@ class SimpleRNNCell(layers.Layer):
             attrs={'dim': None,
                    'keep_dim': False,
                    'reduce_all': True})
-        # print("reduce_sum op 1")
+
         return reduce_out, hidden
diff --git a/python/paddle/fluid/tests/unittests/test_imperative.py b/python/paddle/fluid/tests/unittests/test_imperative.py
index 6ec3a4620e52b3db250f334a0b5cb4ea9552842a..0110a8dd47611d6adfdb5e980dab15e24ab5e2ac 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative.py
@@ -245,7 +245,6 @@ class TestImperative(unittest.TestCase):
             dy_grad_h2o = simple_rnn._cell._h2o_w._gradient()
             dy_grad_h2h = simple_rnn._cell._h2h_w._gradient()
             dy_grad_i2h = simple_rnn._cell._i2h_w._gradient()
-            # print("dy_grad is {}".format(dy_grad))
 
         with new_program_scope():
             print("im here")
@@ -262,10 +261,10 @@ class TestImperative(unittest.TestCase):
                     outs[3].name, param_grads[0][1].name,
                     param_grads[1][1].name, param_grads[2][1].name
                 ])
-            self.assertTrue(np.allclose(dy_out, static_out))
-            self.assertTrue(np.allclose(dy_grad_h2o, static_grad_h2o))
-            self.assertTrue(np.allclose(dy_grad_h2h, static_grad_h2h))
-            self.assertTrue(np.allclose(dy_grad_i2h, static_grad_i2h))
+        self.assertTrue(np.allclose(dy_out, static_out))
+        self.assertTrue(np.allclose(dy_grad_h2o, static_grad_h2o))
+        self.assertTrue(np.allclose(dy_grad_h2h, static_grad_h2h))
+        self.assertTrue(np.allclose(dy_grad_i2h, static_grad_i2h))
 
 
 if __name__ == '__main__':
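For reviewers who want the cleaned-up forward pass at a glance: the hunks above only strip debug prints, so the op sequence appended by SimpleRNNCell.forward is unchanged (mul -> mul -> elementwise_add -> activation -> mul -> softmax -> reduce_sum). Below is a minimal NumPy sketch of that sequence, not the Paddle implementation itself; the shapes, the tanh activation, and the random weights are illustrative assumptions.

# NumPy sketch of the op sequence in SimpleRNNCell.forward.
# Shapes, the tanh activation, and weight values are assumptions for illustration.
import numpy as np

input_size, hidden_size, output_size = 3, 3, 3
rng = np.random.RandomState(0)

i2h_w = rng.uniform(-0.1, 0.1, (input_size, hidden_size))   # plays the role of self._i2h_w
h2h_w = rng.uniform(-0.1, 0.1, (hidden_size, hidden_size))  # plays the role of self._h2h_w
h2o_w = rng.uniform(-0.1, 0.1, (hidden_size, output_size))  # plays the role of self._h2o_w

step_input = rng.uniform(-1.0, 1.0, (1, input_size))
pre_hidden = np.zeros((1, hidden_size))

tmp_i2h = step_input.dot(i2h_w)               # type="mul" (input x i2h weights)
tmp_h2h = pre_hidden.dot(h2h_w)               # type="mul" (pre_hidden x h2h weights)
hidden = tmp_h2h + tmp_i2h                    # type="elementwise_add"
hidden = np.tanh(hidden)                      # append_activation (tanh assumed)
out = hidden.dot(h2o_w)                       # type="mul" (hidden x h2o weights)
softmax_out = np.exp(out) / np.exp(out).sum(axis=1, keepdims=True)  # type="softmax"
reduce_out = softmax_out.sum()                # type="reduce_sum", reduce_all=True

print(reduce_out, hidden)                     # forward returns (reduce_out, hidden)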