diff --git a/python/paddle/fluid/backward.py b/python/paddle/fluid/backward.py
index 8e19650823333107c7ef2102ed9e32d97a10eeb6..f34060358cd3a8f6d648fbc34f88fd9f036fb32e 100755
--- a/python/paddle/fluid/backward.py
+++ b/python/paddle/fluid/backward.py
@@ -1313,8 +1313,8 @@ def _append_backward_vars_(block, start_op_idx, grad_to_var, grad_info_map):
             if grad_var_ins:
                 existing_grad_var_ins = [
                     var for var in grad_var_ins
-                    if block.desc.has_var_recursive(cpt.to_bytes(var)) or
-                    var in parent_op_vars
+                    if block.desc.has_var_recursive(cpt.to_bytes(var)) or var in
+                    parent_op_vars
                 ]
                 if not existing_grad_var_ins:
                     '''
diff --git a/python/paddle/fluid/tests/unittests/gradient_checker.py b/python/paddle/fluid/tests/unittests/gradient_checker.py
index de084b7967262ba9dd874d4f9c8a52f78007dbb5..5080fbfd64858c52f5575d957d002ad9096f0f71 100644
--- a/python/paddle/fluid/tests/unittests/gradient_checker.py
+++ b/python/paddle/fluid/tests/unittests/gradient_checker.py
@@ -312,7 +312,7 @@ def grad_check(x,
             _compute_analytical_jacobian(prog, clone_x, clone_y, place, scope))
 
     for i, (x_idx,
-            y_idx) in enumerate(product(* [range(len(x)), range(len(y))])):
+            y_idx) in enumerate(product(*[range(len(x)), range(len(y))])):
         a = analytical[y_idx][x_idx]
         n = numerical[x_idx][y_idx]
         if not np.allclose(a, n, rtol, atol):