diff --git a/python/paddle/v2/framework/tests/op_test.py b/python/paddle/v2/framework/tests/op_test.py
index 4fec4c9109bf247abb2068177583acb47a8ebd97..9936fd76baf3e64aed01b8ae1d54e50b39793925 100644
--- a/python/paddle/v2/framework/tests/op_test.py
+++ b/python/paddle/v2/framework/tests/op_test.py
@@ -85,7 +85,7 @@ def get_numeric_gradient(scope,
                          op,
                          inputs,
                          input_to_check,
-                         output_name,
+                         output_names,
                          delta=0.005,
                          in_place=False):
 
@@ -100,8 +100,11 @@ def get_numeric_gradient(scope,
     ctx = core.DeviceContext.create(core.CPUPlace())
 
     def get_output():
-        op.run(scope, ctx)
-        return np.array(scope.find_var(output_name).get_tensor()).sum()
+        sum = 0.0
+        for output_name in output_names:
+            op.run(scope, ctx)
+            sum += np.array(scope.find_var(output_name).get_tensor()).sum()
+        return sum
 
     tensor_to_check = scope.find_var(input_to_check).get_tensor()
     tensor_size = product(tensor_to_check.get_dims())
@@ -225,7 +228,7 @@ class OpTest(unittest.TestCase):
 
     def check_grad(self,
                    inputs_to_check,
-                   output_name,
+                   output_names,
                    no_grad_set=None,
                    in_place=False,
                    max_relative_error=0.005):
@@ -237,13 +240,16 @@ class OpTest(unittest.TestCase):
         if no_grad_set is None:
             no_grad_set = set()
 
+        if not type(output_names) is list:
+            output_names = [output_names]
+
         numeric_grads = [
             get_numeric_gradient(
                 self.scope,
                 self.op,
                 self.inputs,
                 input_to_check,
-                output_name,
+                output_names,
                 in_place=in_place) for input_to_check in inputs_to_check
         ]
         grad_names = [
diff --git a/python/paddle/v2/framework/tests/test_gradient_checker.py b/python/paddle/v2/framework/tests/test_gradient_checker.py
index abeb01cb34158a43b5dcce5e39efc0e21e9fe638..85117bf9600975ea5d61dfb5b34335792bf6d8b2 100644
--- a/python/paddle/v2/framework/tests/test_gradient_checker.py
+++ b/python/paddle/v2/framework/tests/test_gradient_checker.py
@@ -12,7 +12,8 @@ class GetNumericGradientTest(unittest.TestCase):
         z = x + y
         scope = core.Scope()
         add_op = create_op(scope, "add", {'X': x, 'Y': y}, {'Out': z}, dict())
-        arr = get_numeric_gradient(scope, add_op, {'X': x, 'Y': y}, 'X', 'Out')
+        arr = get_numeric_gradient(scope, add_op, {'X': x,
+                                                   'Y': y}, 'X', ['Out'])
         self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)
 
     def test_softmax_op(self):
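For context, a minimal standalone sketch of what the `output_names` change computes. This is plain NumPy, not the Paddle test harness; `numeric_gradient` and `two_output_op` are hypothetical stand-ins introduced only for illustration. The key idea matches the patched `get_output`: for an op with several outputs, the numeric gradient is taken of the sum over every output named in `output_names`, which is the scalar whose derivative the backward pass accumulates into each input's gradient.

```python
import numpy as np


def numeric_gradient(f, x, delta=0.005):
    # Central-difference gradient of a scalar-valued f at x, perturbing
    # one element at a time (same scheme as get_numeric_gradient above).
    grad = np.zeros_like(x)
    flat_x, flat_g = x.reshape(-1), grad.reshape(-1)  # views into x, grad
    for i in range(flat_x.size):
        orig = flat_x[i]
        flat_x[i] = orig + delta
        y_pos = f(x)
        flat_x[i] = orig - delta
        y_neg = f(x)
        flat_x[i] = orig
        flat_g[i] = (y_pos - y_neg) / delta / 2
    return grad


def two_output_op(x):
    # Hypothetical op with two outputs, standing in for an operator
    # checked via check_grad(..., output_names=['Out0', 'Out1']).
    return {'Out0': 2.0 * x, 'Out1': x * x}


x = np.random.random((3, 4)).astype('float64')
output_names = ['Out0', 'Out1']


def get_output(x):
    # Mirrors the patched get_output(): sum every named output, so the
    # numeric gradient matches what backward accumulates into X@GRAD.
    outs = two_output_op(x)
    return sum(outs[name].sum() for name in output_names)


numeric = numeric_gradient(get_output, x)
analytic = 2.0 + 2.0 * x  # d(2x)/dx + d(x*x)/dx, elementwise
assert np.allclose(numeric, analytic, atol=1e-4)
```

One side note on the patch itself: the new `get_output` calls `op.run(scope, ctx)` once per entry in `output_names`. For a deterministic op the repeated runs do not change the sum, only add runtime, which is why the sketch above runs the op once and then sums every output.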