From 47975870aa043d1d4e6c71335c6e4d09df94e13d Mon Sep 17 00:00:00 2001
From: Yancey
Date: Wed, 13 Sep 2017 20:18:00 +0800
Subject: [PATCH] Fix check grad with multioutput (#4067)

Fix check grad with multi outputs
---
 python/paddle/v2/framework/tests/op_test.py      | 16 +++++++++++-----
 .../v2/framework/tests/test_gradient_checker.py  |  3 ++-
 2 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/python/paddle/v2/framework/tests/op_test.py b/python/paddle/v2/framework/tests/op_test.py
index 4fec4c9109b..9936fd76baf 100644
--- a/python/paddle/v2/framework/tests/op_test.py
+++ b/python/paddle/v2/framework/tests/op_test.py
@@ -85,7 +85,7 @@ def get_numeric_gradient(scope,
                          op,
                          inputs,
                          input_to_check,
-                         output_name,
+                         output_names,
                          delta=0.005,
                          in_place=False):

@@ -100,8 +100,11 @@ def get_numeric_gradient(scope,
     ctx = core.DeviceContext.create(core.CPUPlace())

     def get_output():
-        op.run(scope, ctx)
-        return np.array(scope.find_var(output_name).get_tensor()).sum()
+        sum = 0.0
+        for output_name in output_names:
+            op.run(scope, ctx)
+            sum += np.array(scope.find_var(output_name).get_tensor()).sum()
+        return sum

     tensor_to_check = scope.find_var(input_to_check).get_tensor()
     tensor_size = product(tensor_to_check.get_dims())
@@ -225,7 +228,7 @@ class OpTest(unittest.TestCase):

     def check_grad(self,
                    inputs_to_check,
-                   output_name,
+                   output_names,
                    no_grad_set=None,
                    in_place=False,
                    max_relative_error=0.005):
@@ -237,13 +240,16 @@ class OpTest(unittest.TestCase):
         if no_grad_set is None:
             no_grad_set = set()

+        if not type(output_names) is list:
+            output_names = [output_names]
+
         numeric_grads = [
             get_numeric_gradient(
                 self.scope,
                 self.op,
                 self.inputs,
                 input_to_check,
-                output_name,
+                output_names,
                 in_place=in_place) for input_to_check in inputs_to_check
         ]
         grad_names = [
diff --git a/python/paddle/v2/framework/tests/test_gradient_checker.py b/python/paddle/v2/framework/tests/test_gradient_checker.py
index abeb01cb341..85117bf9600 100644
--- a/python/paddle/v2/framework/tests/test_gradient_checker.py
+++ b/python/paddle/v2/framework/tests/test_gradient_checker.py
@@ -12,7 +12,8 @@ class GetNumericGradientTest(unittest.TestCase):
         z = x + y
         scope = core.Scope()
         add_op = create_op(scope, "add", {'X': x, 'Y': y}, {'Out': z}, dict())
-        arr = get_numeric_gradient(scope, add_op, {'X': x, 'Y': y}, 'X', 'Out')
+        arr = get_numeric_gradient(scope, add_op, {'X': x,
+                                                   'Y': y}, 'X', ['Out'])
         self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)

     def test_softmax_op(self):
--
GitLab
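The heart of the change is the new get_output() helper: when an operator has several outputs, the numeric gradient is taken against the sum of every listed output's element sum, so one finite-difference pass covers all outputs at once. Below is a minimal, self-contained NumPy sketch of that idea, assuming a made-up two-output toy operator; toy_op, numeric_gradient, and the output names are illustrative only and are not part of the patch or of Paddle's API.

import numpy as np


def toy_op(x):
    # Hypothetical two-output operator, standing in for an op whose
    # outputs would be registered as "Out0" and "Out1" (names made up).
    return {"Out0": 2.0 * x, "Out1": x * x}


def numeric_gradient(x, output_names, delta=0.005):
    # Central-difference gradient of the summed scalar
    # sum_k(outputs[k].sum()), mirroring the loop over output_names
    # that the patched get_output() performs.
    grad = np.zeros_like(x)
    flat_x, flat_g = x.ravel(), grad.ravel()
    for i in range(flat_x.size):
        orig = flat_x[i]
        flat_x[i] = orig + delta
        pos = sum(toy_op(x)[name].sum() for name in output_names)
        flat_x[i] = orig - delta
        neg = sum(toy_op(x)[name].sum() for name in output_names)
        flat_x[i] = orig
        flat_g[i] = (pos - neg) / (2.0 * delta)
    return grad


if __name__ == "__main__":
    x = np.random.rand(3, 4)
    g = numeric_gradient(x, ["Out0", "Out1"])
    # Analytic gradient of sum(2x) + sum(x*x) is 2 + 2x, element-wise.
    assert np.allclose(g, 2.0 + 2.0 * x, atol=1e-3)

Summing the outputs gives a single scalar objective whose analytic gradient is the sum of the per-output gradients; that combined scalar is what the patched get_output() feeds into the finite-difference loop, which is why check_grad can now accept either a single output name or a list of them.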