From 3a442a6a6e3bebbe0566e22ba314dc39158ba229 Mon Sep 17 00:00:00 2001
From: Tony Cao <57024921+caolonghao@users.noreply.github.com>
Date: Mon, 10 Oct 2022 18:18:54 +0800
Subject: [PATCH] [CodeStyle][F402] Change the loop variable name which is the
 same with an imported module (#46698)

* Update README.md

* Update README.md

* Fix F402 by changing the name of loop variable

* Change variable name

Change variable name 'temp_tensor' to 'var' to avoid confusion, as var
also appears in the same file.
---
 .../fluid/tests/unittests/test_auto_parallel_partitioner.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_auto_parallel_partitioner.py b/python/paddle/fluid/tests/unittests/test_auto_parallel_partitioner.py
index 0a4e8d3d9e2..95063b85f7e 100644
--- a/python/paddle/fluid/tests/unittests/test_auto_parallel_partitioner.py
+++ b/python/paddle/fluid/tests/unittests/test_auto_parallel_partitioner.py
@@ -234,9 +234,8 @@ def distributed_attr_check_for_dist_op(serial_main_prog, dist_main_prog,
 def distributed_attr_check_for_program(dist_main_prog, dist_context):
     have_dist_attr = True
     for block in dist_main_prog.blocks:
-        for tensor in block.vars.values():
-            var_dist_attr = dist_context.get_tensor_dist_attr_for_program(
-                tensor)
+        for var in block.vars.values():
+            var_dist_attr = dist_context.get_tensor_dist_attr_for_program(var)
             if var_dist_attr is None:
                 have_dist_attr = False
--
GitLab
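
For readers unfamiliar with the F402 lint rule this patch addresses, the following is a minimal sketch (not part of the PR; the function names and data are made up for illustration) of the hazard: a loop variable that reuses an imported module's name shadows that module for the rest of the scope, which is why the loop variable above is renamed from `tensor` to `var`.

import math

def shadowed_mean(values):
    # F402: the loop variable reuses the imported module's name; after the
    # loop, `math` is bound to the last element of `values`, not the module,
    # so a call like `math.fsum(values)` here would raise AttributeError.
    for math in values:
        pass
    return sum(values) / len(values)

def renamed_mean(values):
    # With a distinct loop-variable name (the same kind of rename the patch
    # applies, `tensor` -> `var`), the `math` module stays reachable.
    for value in values:
        pass
    return math.fsum(values) / len(values)

print(renamed_mean([1.0, 2.0, 3.0]))  # prints 2.0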