提交 2a36e8ad 编写于 作者: Y Yang Yu

Make as const name

上级 32313994
...@@ -164,35 +164,44 @@ class BaseRNN(object): ...@@ -164,35 +164,44 @@ class BaseRNN(object):
return numpy.array([o.mean() for o in outs.itervalues()]).mean() return numpy.array([o.mean() for o in outs.itervalues()]).mean()
class TestSimpleMul(unittest.TestCase):
    # Named constants shared by the Python reference implementation and the
    # fluid network below, so the two definitions cannot drift apart.
    DATA_NAME = 'X'
    DATA_WIDTH = 32
    PARAM_NAME = 'W'
    HIDDEN_WIDTH = 10
    OUT_NAME = 'Out'

    class SimpleMul(BaseRNN):
        """Python reference RNN whose step is Out = matmul(X, W).

        Declares one sequence input ``X`` of shape [DATA_WIDTH], no memory
        states, one parameter ``W`` of shape [DATA_WIDTH, HIDDEN_WIDTH], and
        one output ``Out`` (see BaseRNN.__init__ for the argument layout).
        """

        def __init__(self):
            # The nested class cannot refer to the enclosing class by a bare
            # name from inside its body, so alias it locally.
            base = TestSimpleMul
            super(base.SimpleMul, self).__init__({
                base.DATA_NAME: {
                    'shape': [base.DATA_WIDTH]
                }
            }, {}, {
                base.PARAM_NAME: {
                    'shape': [base.DATA_WIDTH, base.HIDDEN_WIDTH]
                }
            }, [base.OUT_NAME])

        def step(self, X, W, Out):
            # One time step: project the step input with the shared weight.
            Out.out(numpy.matmul(X, W))
class TestSimpleMul(unittest.TestCase):
    # Run many times locally to ensure the random seed cannot break CI
    # @many_times(10)
@prog_scope() @prog_scope()
def test_forward_backward(self): def test_forward_backward(self):
python_impl = SimpleMul() python_impl = TestSimpleMul.SimpleMul()
dat = fluid.layers.data(name='X', shape=[32], lod_level=1) dat = fluid.layers.data(
name=self.DATA_NAME, shape=[self.DATA_WIDTH], lod_level=1)
rnn = fluid.layers.DynamicRNN() rnn = fluid.layers.DynamicRNN()
with rnn.block(): with rnn.block():
d = rnn.step_input(dat) d = rnn.step_input(dat)
o = fluid.layers.fc(input=d, o = fluid.layers.fc(input=d,
param_attr='W', param_attr=self.PARAM_NAME,
bias_attr=False, bias_attr=False,
size=10, size=self.HIDDEN_WIDTH,
act=None) act=None)
rnn.output(o) rnn.output(o)
...@@ -204,10 +213,10 @@ class TestSimpleMul(unittest.TestCase): ...@@ -204,10 +213,10 @@ class TestSimpleMul(unittest.TestCase):
cpu = fluid.CPUPlace() cpu = fluid.CPUPlace()
exe = fluid.Executor(cpu) exe = fluid.Executor(cpu)
out, w_g = exe.run(feed=python_impl.to_feed(cpu), out, w_g = exe.run(feed=python_impl.to_feed(cpu),
fetch_list=[out, "W@GRAD"]) fetch_list=[out, self.PARAM_NAME + "@GRAD"])
out_by_python = python_impl.exe()['Out'] out_by_python = python_impl.exe()[self.OUT_NAME]
self.assertTrue(numpy.allclose(out, out_by_python)) self.assertTrue(numpy.allclose(out, out_by_python))
w_g_num = python_impl.get_numeric_gradient_of_param("W") w_g_num = python_impl.get_numeric_gradient_of_param(self.PARAM_NAME)
self.assertTrue(numpy.allclose(w_g_num, w_g, rtol=0.05)) self.assertTrue(numpy.allclose(w_g_num, w_g, rtol=0.05))
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册