From 4dfc10ccf73aa7872b41684cda0e407cc6a3ba00 Mon Sep 17 00:00:00 2001
From: Peng Li
Date: Fri, 29 Sep 2017 14:34:40 +0800
Subject: [PATCH] Fix random seeds in gradient checkers

---
 python/paddle/v2/framework/tests/op_test.py         | 16 ++++++++++++++++
 .../tests/test_softmax_with_cross_entropy_op.py     |  2 +-
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/python/paddle/v2/framework/tests/op_test.py b/python/paddle/v2/framework/tests/op_test.py
index 23794151bdb..75df2eeddfe 100644
--- a/python/paddle/v2/framework/tests/op_test.py
+++ b/python/paddle/v2/framework/tests/op_test.py
@@ -1,5 +1,6 @@
 import unittest
 import numpy as np
+import random
 import itertools
 import paddle.v2.framework.core as core
 from paddle.v2.framework.op import Operator
@@ -192,6 +193,21 @@ def get_gradient(scope, op, inputs, outputs, grad_name, place,
 
 
 class OpTest(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        '''Fix random seeds to remove randomness from tests'''
+        cls._np_rand_state = np.random.get_state()
+        cls._py_rand_state = random.getstate()
+
+        np.random.seed(123)
+        random.seed(124)
+
+    @classmethod
+    def tearDownClass(cls):
+        '''Restore random seeds'''
+        np.random.set_state(cls._np_rand_state)
+        random.setstate(cls._py_rand_state)
+
     def check_output_with_place(self, place, atol):
         self.scope = core.Scope()
         op_inputs = self.inputs if hasattr(self, "inputs") else dict()
diff --git a/python/paddle/v2/framework/tests/test_softmax_with_cross_entropy_op.py b/python/paddle/v2/framework/tests/test_softmax_with_cross_entropy_op.py
index 428395b76c8..377d07fb592 100644
--- a/python/paddle/v2/framework/tests/test_softmax_with_cross_entropy_op.py
+++ b/python/paddle/v2/framework/tests/test_softmax_with_cross_entropy_op.py
@@ -43,7 +43,7 @@ class TestSoftmaxWithCrossEntropyOp2(OpTest):
     def setUp(self):
         self.op_type = "softmax_with_cross_entropy"
         batch_size = 2
-        class_num = 17
+        class_num = 37
 
         logits = np.random.uniform(0.1, 1.0,
                                    [batch_size, class_num]).astype("float32")
-- 
GitLab
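
For reference, below is a minimal standalone sketch (not part of the patch) of the
same save/seed/restore pattern the patch adds to OpTest. The class and test names
(DeterministicTestCase, TestDeterminism) are hypothetical, chosen only for
illustration; the seed values mirror those in the diff but carry no special meaning.

    # A minimal sketch (not part of the patch) of the save/seed/restore
    # pattern added to OpTest above. Class names and seeds are illustrative.
    import random
    import unittest

    import numpy as np


    class DeterministicTestCase(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            # Remember the global RNG states so they can be put back later.
            cls._np_rand_state = np.random.get_state()
            cls._py_rand_state = random.getstate()
            # Fix both generators so the test data is the same on every run.
            np.random.seed(123)
            random.seed(124)

        @classmethod
        def tearDownClass(cls):
            # Undo the seeding so unrelated tests see the original RNG states.
            np.random.set_state(cls._np_rand_state)
            random.setstate(cls._py_rand_state)


    class TestDeterminism(DeterministicTestCase):
        def test_reproducible_draw(self):
            # The first draw after setUpClass is identical on every run,
            # which is what makes a failing gradient check reproducible.
            x = np.random.uniform(0.1, 1.0, [2, 3])
            self.assertEqual(x.shape, (2, 3))


    if __name__ == "__main__":
        unittest.main()

Restoring the saved state in tearDownClass matters because seeding is
process-global: without it, every test that runs after this class would
silently inherit the fixed seed.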