From 4450a312a9228d0237b794d05a75c6de71b3aa55 Mon Sep 17 00:00:00 2001
From: Yang Yu
Date: Tue, 26 Dec 2017 13:13:01 +0800
Subject: [PATCH] Polish Unittest

---
 python/paddle/v2/fluid/tests/decorators.py    | 27 +++++++
 .../fluid/tests/test_dynrnn_gradient_check.py | 80 +++++++++----------
 2 files changed, 67 insertions(+), 40 deletions(-)
 create mode 100644 python/paddle/v2/fluid/tests/decorators.py

diff --git a/python/paddle/v2/fluid/tests/decorators.py b/python/paddle/v2/fluid/tests/decorators.py
new file mode 100644
index 00000000000..d3dcf3562df
--- /dev/null
+++ b/python/paddle/v2/fluid/tests/decorators.py
@@ -0,0 +1,27 @@
+import paddle.v2.fluid as fluid
+
+__all__ = ['many_times', 'prog_scope']
+
+
+def many_times(times):
+    def __impl__(fn):
+        def __fn__(*args, **kwargs):
+            for _ in range(times):
+                fn(*args, **kwargs)
+
+        return __fn__
+
+    return __impl__
+
+
+def prog_scope():
+    def __impl__(fn):
+        def __fn__(*args, **kwargs):
+            prog = fluid.Program()
+            startup_prog = fluid.Program()
+            with fluid.program_guard(prog, startup_prog):
+                fn(*args, **kwargs)
+
+        return __fn__
+
+    return __impl__
diff --git a/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py b/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
index 99b92854663..3018588c3a1 100644
--- a/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
+++ b/python/paddle/v2/fluid/tests/test_dynrnn_gradient_check.py
@@ -3,7 +3,7 @@ import random
 import collections
 import paddle.v2.fluid as fluid
 import unittest
-import copy
+from decorators import *
 
 
 class Memory(object):
@@ -78,7 +78,7 @@ class BaseRNN(object):
             self.outputs[oname] = Output()
 
     def step(self, **kwargs):
-        pass
+        raise NotImplementedError()
 
     def exe(self):
         retv = dict()
@@ -141,18 +141,22 @@ class BaseRNN(object):
                 feed_dict[pname] = self.params[pname]
         return feed_dict
 
-    def get_numeric_gradient_of_param(self, param_name, delta=0.01):
+    def get_numeric_gradient_of_param(self, param_name, delta=0.001):
         p = self.params[param_name]
+        if len(p.shape) != 2:
+            raise ValueError("Does not support getting the numeric gradient"
+                             " of a parameter which is not a matrix")
         g = numpy.zeros(shape=p.shape, dtype=p.dtype)
 
-        for p_it, g_it in numpy.nditer([p, g], op_flags=['readwrite']):
-            o = float(p_it)
-            p_it[...] = o + delta
-            pos = self._exe_mean_out_()
-            p_it[...] = o - delta
-            neg = self._exe_mean_out_()
-            p_it[...] = o
-            g[:] = (pos - neg) / (delta * 2)
+        for i in xrange(p.shape[0]):
+            for j in xrange(p.shape[1]):
+                o = p[i][j]
+                p[i][j] += delta
+                pos = self._exe_mean_out_()
+                p[i][j] -= 2 * delta
+                neg = self._exe_mean_out_()
+                p[i][j] = o
+                g[i][j] = (pos - neg) / (delta * 2)
         return g
 
     def _exe_mean_out_(self):
@@ -175,40 +179,36 @@ class SimpleMul(BaseRNN):
 
 
 class TestSimpleMul(unittest.TestCase):
-    def setUp(self):
-        self.python_impl = SimpleMul()
-
-    def test_forward(self):
-        program = fluid.Program()
-        startup_program = fluid.Program()
-        with fluid.program_guard(program, startup_program):
-            dat = fluid.layers.data(name='X', shape=[32], lod_level=1)
-
-            rnn = fluid.layers.DynamicRNN()
-            with rnn.block():
-                d = rnn.step_input(dat)
-                o = fluid.layers.fc(input=d,
-                                    param_attr='W',
-                                    bias_attr=False,
-                                    size=10,
-                                    act=None)
-                rnn.output(o)
-
-            out = rnn()
-            out = fluid.layers.sequence_pool(out, pool_type='last')
-            loss = fluid.layers.mean(x=out)
-            fluid.backward.append_backward_ops(loss)
+    # Run this test many times locally to ensure the random seed cannot break CI
+    # @many_times(10)
+    @prog_scope()
+    def test_forward_backward(self):
+        python_impl = SimpleMul()
+        dat = fluid.layers.data(name='X', shape=[32], lod_level=1)
+
+        rnn = fluid.layers.DynamicRNN()
+        with rnn.block():
+            d = rnn.step_input(dat)
+            o = fluid.layers.fc(input=d,
+                                param_attr='W',
+                                bias_attr=False,
+                                size=10,
+                                act=None)
+            rnn.output(o)
+
+        out = rnn()
+        out = fluid.layers.sequence_pool(out, pool_type='last')
+        loss = fluid.layers.mean(x=out)
+        fluid.backward.append_backward_ops(loss)
 
         cpu = fluid.CPUPlace()
         exe = fluid.Executor(cpu)
-        out, w_g = exe.run(program,
-                           feed=self.python_impl.to_feed(cpu),
+        out, w_g = exe.run(feed=python_impl.to_feed(cpu),
                            fetch_list=[out, "W@GRAD"])
-        out_by_python = self.python_impl.exe()['Out']
+        out_by_python = python_impl.exe()['Out']
         self.assertTrue(numpy.allclose(out, out_by_python))
-        w_g_num = self.python_impl.get_numeric_gradient_of_param("W")
-        print w_g_num[0][0]
-        print w_g_num - w_g
+        w_g_num = python_impl.get_numeric_gradient_of_param("W")
+        self.assertTrue(numpy.allclose(w_g_num, w_g, rtol=0.05))
 
 
 if __name__ == '__main__':
--
GitLab
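
Note: the rewritten get_numeric_gradient_of_param above is a standard
central-difference check: each entry of the weight matrix is perturbed by
+/-delta and the mean network output is re-evaluated. A minimal,
framework-free sketch of the same technique follows; the numeric_gradient
helper and the quadratic loss used to exercise it are illustrative stand-ins
for _exe_mean_out_ and are not part of this patch.

    import numpy


    def numeric_gradient(p, loss, delta=0.001):
        # Central-difference estimate of d loss / d p for a 2-D parameter.
        g = numpy.zeros(shape=p.shape, dtype=p.dtype)
        for i in range(p.shape[0]):
            for j in range(p.shape[1]):
                o = p[i][j]
                p[i][j] = o + delta
                pos = loss(p)            # f(p + delta)
                p[i][j] = o - delta
                neg = loss(p)            # f(p - delta)
                p[i][j] = o              # restore the original entry
                g[i][j] = (pos - neg) / (delta * 2)
        return g


    if __name__ == '__main__':
        w = numpy.random.random((3, 4))
        # For mean(w ** 2) the analytic gradient is 2 * w / w.size, so the
        # numeric estimate should agree within the rtol the test uses.
        g_num = numeric_gradient(w, lambda p: float(numpy.mean(p ** 2)))
        assert numpy.allclose(g_num, 2.0 * w / w.size, rtol=0.05)

Since the central difference is second-order accurate, a quadratic loss makes
the estimate exact up to floating-point error, which is why the assertion
passes comfortably at rtol=0.05.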