未验证 提交 7c370e42 编写于 作者: C chengduo 提交者: GitHub

Fix test_recurrent_op (#17001)

* fix ramdom fail
test=develop
上级 73a360b5
...@@ -182,7 +182,7 @@ class RecurrentOpTest1(unittest.TestCase): ...@@ -182,7 +182,7 @@ class RecurrentOpTest1(unittest.TestCase):
fetch_list=fetch_list, fetch_list=fetch_list,
return_numpy=False) return_numpy=False)
def test_backward(self): def test_backward(self, rtol=0.1):
self.check_forward() self.check_forward()
with fluid.program_guard(self.main_program, self.startup_program): with fluid.program_guard(self.main_program, self.startup_program):
...@@ -195,7 +195,10 @@ class RecurrentOpTest1(unittest.TestCase): ...@@ -195,7 +195,10 @@ class RecurrentOpTest1(unittest.TestCase):
self.assertEqual(num_grad[idx].shape, ana_grad[idx].shape) self.assertEqual(num_grad[idx].shape, ana_grad[idx].shape)
self.assertTrue( self.assertTrue(
np.isclose( np.isclose(
num_grad[idx], ana_grad[idx], rtol=0.1).all()) num_grad[idx], ana_grad[idx], rtol=rtol).all(),
"num_grad (" + name + ") has diff at " + str(self.place) +
"\nExpect " + str(num_grad[idx]) + "\n" + "But Got" +
str(ana_grad[idx]) + " in class " + self.__class__.__name__)
def check_forward(self): def check_forward(self):
pd_output = self.forward() pd_output = self.forward()
...@@ -287,6 +290,9 @@ class RecurrentOpTest2(RecurrentOpTest1): ...@@ -287,6 +290,9 @@ class RecurrentOpTest2(RecurrentOpTest1):
return rnn() return rnn()
def test_backward(self):
super(RecurrentOpTest2, self).test_backward(rtol=0.2)
class RecurrentOpMultipleMemoryTest(RecurrentOpTest1): class RecurrentOpMultipleMemoryTest(RecurrentOpTest1):
''' '''
......
...@@ -95,7 +95,6 @@ class TestWeightDecay(unittest.TestCase): ...@@ -95,7 +95,6 @@ class TestWeightDecay(unittest.TestCase):
place, place,
feed_list, feed_list,
loss, loss,
use_cuda=True,
use_reduce=False, use_reduce=False,
use_fast_executor=False, use_fast_executor=False,
use_ir_memory_optimize=False): use_ir_memory_optimize=False):
...@@ -136,11 +135,9 @@ class TestWeightDecay(unittest.TestCase): ...@@ -136,11 +135,9 @@ class TestWeightDecay(unittest.TestCase):
startup_prog = fluid.framework.Program() startup_prog = fluid.framework.Program()
startup_prog.random_seed = 1 startup_prog.random_seed = 1
with prog_scope_guard(main_prog=main_prog, startup_prog=startup_prog): with prog_scope_guard(main_prog=main_prog, startup_prog=startup_prog):
data = fluid.layers.data( data = fluid.layers.data(
name="words", shape=[1], dtype="int64", lod_level=1) name="words", shape=[1], dtype="int64", lod_level=1)
label = fluid.layers.data(name="label", shape=[1], dtype="int64") label = fluid.layers.data(name="label", shape=[1], dtype="int64")
avg_cost = model(data, label, len(self.word_dict)) avg_cost = model(data, label, len(self.word_dict))
param_list = [(var, var * self.learning_rate) param_list = [(var, var * self.learning_rate)
...@@ -148,7 +145,6 @@ class TestWeightDecay(unittest.TestCase): ...@@ -148,7 +145,6 @@ class TestWeightDecay(unittest.TestCase):
optimizer = fluid.optimizer.Adagrad( optimizer = fluid.optimizer.Adagrad(
learning_rate=self.learning_rate) learning_rate=self.learning_rate)
optimizer.minimize(avg_cost) optimizer.minimize(avg_cost)
for params in param_list: for params in param_list:
...@@ -158,10 +154,7 @@ class TestWeightDecay(unittest.TestCase): ...@@ -158,10 +154,7 @@ class TestWeightDecay(unittest.TestCase):
if use_parallel_exe: if use_parallel_exe:
loss = self.run_parallel_exe( loss = self.run_parallel_exe(
place, [data, label], place, [data, label], loss=avg_cost, use_reduce=use_reduce)
loss=avg_cost,
use_cuda=True,
use_reduce=use_reduce)
else: else:
loss = self.run_executor(place, [data, label], loss=avg_cost) loss = self.run_executor(place, [data, label], loss=avg_cost)
...@@ -176,13 +169,21 @@ class TestWeightDecay(unittest.TestCase): ...@@ -176,13 +169,21 @@ class TestWeightDecay(unittest.TestCase):
place, model, use_parallel_exe=True, use_reduce=False) place, model, use_parallel_exe=True, use_reduce=False)
for i in range(len(loss)): for i in range(len(loss)):
assert np.isclose(a=loss[i], b=loss2[i], rtol=5e-5) self.assertTrue(
np.isclose(
a=loss[i], b=loss2[i], rtol=5e-5),
"Expect " + str(loss[i]) + "\n" + "But Got" + str(loss2[i])
+ " in class " + self.__class__.__name__)
loss3 = self.check_weight_decay( loss3 = self.check_weight_decay(
place, model, use_parallel_exe=True, use_reduce=True) place, model, use_parallel_exe=True, use_reduce=True)
for i in range(len(loss)): for i in range(len(loss)):
assert np.isclose(a=loss[i], b=loss3[i], rtol=5e-5) self.assertTrue(
np.isclose(
a=loss[i], b=loss3[i], rtol=5e-5),
"Expect " + str(loss[i]) + "\n" + "But Got" + str(loss2[i])
+ " in class " + self.__class__.__name__)
if __name__ == '__main__': if __name__ == '__main__':
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册