未验证 提交 cb680c80 编写于 作者: A Aurelius84 提交者: GitHub

[Dy2Stat]Refine code of test_lac unittest (#29087)

上级 e2d01eb6
...@@ -69,17 +69,9 @@ class DynamicGRU(fluid.dygraph.Layer): ...@@ -69,17 +69,9 @@ class DynamicGRU(fluid.dygraph.Layer):
if self.is_reverse: if self.is_reverse:
j = fluid.layers.shape(inputs)[1] - 1 - i j = fluid.layers.shape(inputs)[1] - 1 - i
else: else:
# TODO(Aurelius84): In while block, if the var created in parent block j = i
# participates in the calculation of gradient, the result of gradient
# is incorrect because each step scope always returns the same value # input_ = inputs[:, j:j+1, :] # original code
# generated by last step. Here we add 0 to create `j` in while block to
# avoid this bug, and working on fixing it in next PR.
j = i + 0
# FIXME(Aurelius84): see above explanation.
hidden = fluid.layers.scale(hidden, 1)
# See above explanation.
# input_ = inputs[:, i:i+1, :] # original code
input_ = fluid.layers.slice( input_ = fluid.layers.slice(
inputs, axes=[1], starts=[j], ends=[j + 1]) inputs, axes=[1], starts=[j], ends=[j + 1])
input_ = fluid.layers.reshape( input_ = fluid.layers.reshape(
...@@ -528,7 +520,7 @@ class TestLACModel(unittest.TestCase): ...@@ -528,7 +520,7 @@ class TestLACModel(unittest.TestCase):
msg="dygraph output:\n{},\nstatic output:\n {}.".format(dy_out, msg="dygraph output:\n{},\nstatic output:\n {}.".format(dy_out,
st_out)) st_out))
# Prediction needs trained models, so put `test_predict` at last of `test_train` # Prediction needs trained models, so put `test_predict` at last of `test_train`
self.verify_predict() # self.verify_predict()
def verify_predict(self): def verify_predict(self):
reader = get_random_input_data( reader = get_random_input_data(
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册