diff --git a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
index 5ae57faa37bfeabc4df04467245f8a306a23ec43..308e0348a21afb355248ae1ee5e6dff3d5a7814d 100644
--- a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
+++ b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
@@ -1055,7 +1055,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
                 heavily_flag = True
             if len(layout_autotune_attr) == 0:
                 layout_autotune_attr_code_list.append(
-                    f"auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
+                    "auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
                 )
             elif len(layout_autotune_attr) == 1:
                 layout_autotune_attr_code_list.append(
@@ -1387,7 +1387,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
             amp_tensors_vector_list) + " }"
         amp_tensors_vector_optional_list_str = " ".join(
             amp_tensors_vector_optional_list)
-        amp_get_dst_dtype_str = f"auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n"
+        amp_get_dst_dtype_str = "auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n"
         amp_autocast_list_str = " ".join(
             amp_autocast_list) + " " + " ".join(
                 amp_autocast_optional_list)
diff --git a/python/paddle/distributed/fleet/elastic/manager.py b/python/paddle/distributed/fleet/elastic/manager.py
index c2cef68f73e82d239655444ea2e2e0ef9ee38c4c..4cc23df2e5f0a2efc28daf568dcb3a653b69bf81 100644
--- a/python/paddle/distributed/fleet/elastic/manager.py
+++ b/python/paddle/distributed/fleet/elastic/manager.py
@@ -178,10 +178,10 @@ class ElasticManager(object):
         if self.min_np == self.max_np or \
                 (self.min_np > 0 and self.max_np == 0):
             self.elastic_level = ElasticLevel.FAULT_TOLERANCE
-            logger.info(f'start job with ElasticLevel.FAULT_TOLERANCE')
+            logger.info('start job with ElasticLevel.FAULT_TOLERANCE')
         if self.min_np > 0 and self.max_np > self.min_np:
             self.elastic_level = ElasticLevel.ELASTIC
-            logger.info(f'start job with ElasticLevel.ELASTIC')
+            logger.info('start job with ElasticLevel.ELASTIC')
 
         # compatible with kuberntes service discovery
         if not server and os.getenv(
diff --git a/python/paddle/distributed/fleet/utils/hybrid_parallel_inference.py b/python/paddle/distributed/fleet/utils/hybrid_parallel_inference.py
index 7e9152f92a33f0d27744a6b2f5b7efe850e348f2..04955db3c1811570c0a9c76074a4b61c27730018 100644
--- a/python/paddle/distributed/fleet/utils/hybrid_parallel_inference.py
+++ b/python/paddle/distributed/fleet/utils/hybrid_parallel_inference.py
@@ -739,9 +739,9 @@ class HybridParallelInferenceHelper(object):
         startup_block = self._startup_program.global_block()
 
         if debug:
-            with open(f'main_program.txt', 'w') as f:
+            with open('main_program.txt', 'w') as f:
                 f.write(str(self._main_program))
-            with open(f'startup_program.txt', 'w') as f:
+            with open('startup_program.txt', 'w') as f:
                 f.write(str(self._startup_program))
 
         # step1: add op_device attribute for all ops
diff --git a/python/paddle/fluid/tests/unittests/ipu/custom_ops/test_custom_ops_ipu.py b/python/paddle/fluid/tests/unittests/ipu/custom_ops/test_custom_ops_ipu.py
index 0dc182354e5e08685cfc6c188e48a887c656248a..744ea86663a7c86122daf91a7961c85f58b3e52d 100644
--- a/python/paddle/fluid/tests/unittests/ipu/custom_ops/test_custom_ops_ipu.py
+++ b/python/paddle/fluid/tests/unittests/ipu/custom_ops/test_custom_ops_ipu.py
@@ -28,7 +28,7 @@ from op_test_ipu import IPUOpTest
 # just load one custom-op for the data race issue under parallel mode
 def load_custom_detach():
     cur_dir = os.path.dirname(os.path.realpath(__file__))
-    custom_ops = load(name=f"custom_detach",
+    custom_ops = load(name="custom_detach",
                       sources=[
                           f"{cur_dir}/custom_detach.cc",
                       ],
@@ -39,7 +39,7 @@ def load_custom_detach():
 
 def load_custom_identity():
     cur_dir = os.path.dirname(os.path.realpath(__file__))
-    custom_ops = load(name=f"custom_identity",
+    custom_ops = load(name="custom_identity",
                       sources=[
                           f"{cur_dir}/custom_identity.cc",
                       ],
@@ -50,7 +50,7 @@ def load_custom_identity():
 
 def load_custom_nll():
     cur_dir = os.path.dirname(os.path.realpath(__file__))
-    custom_ops = load(name=f"custom_nll",
+    custom_ops = load(name="custom_nll",
                       sources=[
                           f"{cur_dir}/custom_nll.cc",
                       ],
diff --git a/python/paddle/fluid/tests/unittests/test_coalesce_tensor_op.py b/python/paddle/fluid/tests/unittests/test_coalesce_tensor_op.py
index e503e904d8417e84c6aa545ec5f1b51ace475962..2bbac7a09e9b303ce432216a75dc9c10f6232062 100644
--- a/python/paddle/fluid/tests/unittests/test_coalesce_tensor_op.py
+++ b/python/paddle/fluid/tests/unittests/test_coalesce_tensor_op.py
@@ -131,7 +131,7 @@ class TestAllocContinuousSpace(OpTest):
         np.testing.assert_allclose(self.outputs['FusedOutput'],
                                    eager_fused_output,
                                    atol=1e-5,
-                                   err_msg=f'not equal fusedoutput')
+                                   err_msg='not equal fusedoutput')
 
     def test_check_output(self):
         self.check_output_with_place(place=core.CUDAPlace(0),
diff --git a/python/paddle/geometric/sampling/neighbors.py b/python/paddle/geometric/sampling/neighbors.py
index 48fb22d4fe8a95b00cda522dac03ea5b22be9dbb..63ec44b4f39d35e405b80f91eff13def55601d98 100644
--- a/python/paddle/geometric/sampling/neighbors.py
+++ b/python/paddle/geometric/sampling/neighbors.py
@@ -90,7 +90,7 @@ def sample_neighbors(row,
     if return_eids:
         if eids is None:
             raise ValueError(
-                f"`eids` should not be None if `return_eids` is True.")
+                "`eids` should not be None if `return_eids` is True.")
 
     use_perm_buffer = True if perm_buffer is not None else False
 
diff --git a/python/paddle/incubate/autograd/primx.py b/python/paddle/incubate/autograd/primx.py
index d102dd2ac61867772e1b3e148a93dbdf274537cd..e61a77791fd19a77fd0e479400ee29178d0cd98c 100644
--- a/python/paddle/incubate/autograd/primx.py
+++ b/python/paddle/incubate/autograd/primx.py
@@ -51,7 +51,7 @@ def topo_path(xs, ys, block=None):
 
     # Initialize reached vars
     for x in xs:
-        assert x is None or x.block == block, f'x is not None and x.block != block'
+        assert x is None or x.block == block, 'x is not None and x.block != block'
         reached_vars[id(x)] = x
 
     # Reaching test, returning whether an op is reached from the given input
@@ -174,7 +174,7 @@ class Transform(object):
 
     def __init__(self, block):
         assert block == default_main_program().current_block(
-        ), f'only support transform on current block of main program.'
+        ), 'only support transform on current block of main program.'
         self.block = block
         self.vars = self.init_vars(block)
         self.var2dot = VarMap('var2dot', self.vars)
@@ -318,8 +318,8 @@ class Transform(object):
             the list outputs of the resulting transposed program
 
         """
-        assert all(v is not None for v in xs_dot), f'`xs_dot` includes None.'
-        assert all(v is not None for v in ys_dot), f'`ys_dot` includes None.'
+        assert all(v is not None for v in xs_dot), '`xs_dot` includes None.'
+        assert all(v is not None for v in ys_dot), '`ys_dot` includes None.'
 
         if ys_bar is None:
             ys_bar = []
@@ -537,7 +537,7 @@ def orig2prim(block=None):
 
     block = default_main_program().current_block() if block is None else block
     assert block == default_main_program().current_block(
-    ), f'block is neither None nor current block of main program'
+    ), 'block is neither None nor current block of main program'
 
     _lower(block, reverse=False, blacklist=[])
 
@@ -582,6 +582,6 @@ def prim2orig(block=None, blacklist=None):
 
     block = default_main_program().current_block() if block is None else block
     assert block == default_main_program().current_block(
-    ), f'block is neither None nor current block of main program'
+    ), 'block is neither None nor current block of main program'
     blacklist = [] if blacklist is None else blacklist
     _lower(block, reverse=True, blacklist=blacklist)
diff --git a/python/paddle/incubate/operators/graph_khop_sampler.py b/python/paddle/incubate/operators/graph_khop_sampler.py
index 91fa9488ca8855b851dfd8c58144d0bb5befea29..11c9a9853bf4463ec781f1a3f1d4491dd6cd9138 100644
--- a/python/paddle/incubate/operators/graph_khop_sampler.py
+++ b/python/paddle/incubate/operators/graph_khop_sampler.py
@@ -88,8 +88,8 @@ def graph_khop_sampler(row,
     if _non_static_mode():
         if return_eids:
             if sorted_eids is None:
-                raise ValueError(f"`sorted_eid` should not be None "
-                                 f"if return_eids is True.")
+                raise ValueError("`sorted_eid` should not be None "
+                                 "if return_eids is True.")
             edge_src, edge_dst, sample_index, reindex_nodes, edge_eids = \
                 _legacy_C_ops.graph_khop_sampler(row, sorted_eids,
                                                  colptr, input_nodes,
@@ -109,8 +109,8 @@ def graph_khop_sampler(row,
 
     if return_eids:
         if sorted_eids is None:
-            raise ValueError(f"`sorted_eid` should not be None "
-                             f"if return_eids is True.")
+            raise ValueError("`sorted_eid` should not be None "
+                             "if return_eids is True.")
         check_variable_and_dtype(sorted_eids, "Eids", ("int32", "int64"),
                                  "graph_khop_sampler")
 
diff --git a/python/paddle/incubate/operators/graph_reindex.py b/python/paddle/incubate/operators/graph_reindex.py
index 8f223f36a0447970ff28444c3bdee1147bfecb07..fee9b170c1a5bd5bcb5ac0f687a40c379a0613f7 100644
--- a/python/paddle/incubate/operators/graph_reindex.py
+++ b/python/paddle/incubate/operators/graph_reindex.py
@@ -109,7 +109,7 @@ def graph_reindex(x,
     """
     if flag_buffer_hashtable:
         if value_buffer is None or index_buffer is None:
-            raise ValueError(f"`value_buffer` and `index_buffer` should not"
+            raise ValueError("`value_buffer` and `index_buffer` should not"
                              "be None if `flag_buffer_hashtable` is True.")
 
     if _non_static_mode():
diff --git a/python/paddle/incubate/operators/graph_sample_neighbors.py b/python/paddle/incubate/operators/graph_sample_neighbors.py
index 81839fda1378413ebd93f42ebf7d936fff2b8acc..8b0d6158b2917cae5b6a502bc9619ad530db3b6c 100644
--- a/python/paddle/incubate/operators/graph_sample_neighbors.py
+++ b/python/paddle/incubate/operators/graph_sample_neighbors.py
@@ -98,12 +98,12 @@ def graph_sample_neighbors(row,
     if return_eids:
         if eids is None:
             raise ValueError(
-                f"`eids` should not be None if `return_eids` is True.")
+                "`eids` should not be None if `return_eids` is True.")
 
     if flag_perm_buffer:
         if perm_buffer is None:
             raise ValueError(
-                f"`perm_buffer` should not be None if `flag_perm_buffer`"
+                "`perm_buffer` should not be None if `flag_perm_buffer`"
                 "is True.")
 
     if _non_static_mode():
diff --git a/python/paddle/optimizer/optimizer.py b/python/paddle/optimizer/optimizer.py
index 36847032d18a095c21388a5cacd1fac34d89c555..b90bd5ae3ff1e97f8e0709adc61926963d59e823 100644
--- a/python/paddle/optimizer/optimizer.py
+++ b/python/paddle/optimizer/optimizer.py
@@ -48,7 +48,7 @@ def append_backward_new(loss_list,
     assert program.num_blocks == 1, "The append_backward_new interface is designed to process only one block."
     block = program.current_block()
     for el in loss_list:
-        assert el.block == block, f'variable in loss_list should be in current block of main program'
+        assert el.block == block, 'variable in loss_list should be in current block of main program'
     orig2prim(block)
     ad = Transform(block)
 
diff --git a/python/paddle/tensor/einsum.py b/python/paddle/tensor/einsum.py
index dff4e4d13f3e3f009505214ce0717506953e0ba4..1903c30824e06f44adc7a6bcfe5145742d79f9a1 100644
--- a/python/paddle/tensor/einsum.py
+++ b/python/paddle/tensor/einsum.py
@@ -57,7 +57,7 @@ def parse_op_labels(labelstr, operand):
     )
 
     assert labelstr.replace('...', '', 1).find('.') == -1, (
-        f"Invalid equation: `.` is found outside of an ellipsis.")
+        "Invalid equation: `.` is found outside of an ellipsis.")
 
     # Check shape. Note, in Paddle a tensor rank is always nonzero
     ndims = len(operand.shape)
@@ -102,7 +102,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
     # Sanity check.
     if n_bcast_dims > 0:
         assert '...' in rhs, (
-            f"Invalid equation: missing ellipsis in output labels.")
+            "Invalid equation: missing ellipsis in output labels.")
 
     rhs = rhs.replace('...', '')
     rhs_set = set(rhs)
@@ -117,7 +117,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
         f"output label {sorted(non_input_labels)} not used by any input.")
     # Verify that output labels are not duplicate
     assert len(rhs) == len(rhs_set), (
-        f"Invalid equation: duplicate output labels are found.")
+        "Invalid equation: duplicate output labels are found.")
 
 
 def build_view(in_labels, out_labels):
@@ -298,7 +298,7 @@ def diagonalize(labels, operand):
     'ijj...i' would be merged into 'ij...'
     '''
     assert not has_duplicated_labels(labels), (
-        f'Duplicate labels are not supported.')
+        'Duplicate labels are not supported.')
 
     return labels, operand
 
@@ -695,13 +695,13 @@ def preprocess(equation, *operands):
         f"but found {len(lhs.split(','))} segments in the label equation.")
 
     assert not ('...' in lhs and '...' not in rhs
-                ), f'Invalid equation: missing ellipsis in output labels.'
+                ), 'Invalid equation: missing ellipsis in output labels.'
 
     assert not (len(list(filter(has_duplicated_labels, lhs.split(',')))) >
-                0), f'Duplicate labels are not supported.'
+                0), 'Duplicate labels are not supported.'
 
     assert not has_duplicated_labels(
-        rhs), f'Invalid equation: duplicate output labels are found.'
+        rhs), 'Invalid equation: duplicate output labels are found.'
 
     return lhs, rhs, labels
 
diff --git a/tools/gen_ut_cmakelists.py b/tools/gen_ut_cmakelists.py
index 32ec30106021c49761961a0e8d9a2c67f24706c2..62deef8c21b05066fcfc060df49f8b680ad50626 100644
--- a/tools/gen_ut_cmakelists.py
+++ b/tools/gen_ut_cmakelists.py
@@ -158,8 +158,8 @@ def _process_name(name, curdir):
     """
    name = name.strip()
     assert re.compile("^test_[0-9a-zA-Z_]+").search(name), \
-        f"""If line is not the header of table, the test name must begin with "test_" """ \
-        f"""and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_"."""
+        """If line is not the header of table, the test name must begin with "test_" """ \
+        """and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_"."""
     filepath_prefix = os.path.join(curdir, name)
     suffix = [".py", ".sh"]
     assert _file_with_extension(filepath_prefix, suffix), \
@@ -319,7 +319,7 @@ class DistUTPortManager():
             self.process_dist_port_num(num_port)
 
         # step 2
-        err_msg = f"""==================[No Old CMakeLists.txt Error]==================================
+        err_msg = """==================[No Old CMakeLists.txt Error]==================================
         Following directories has no CmakeLists.txt files:
         """
         for c in self.no_cmake_dirs:
@@ -452,7 +452,7 @@ class CMakeGenerator():
             set_properties = ""
         cmd = cmd % set_properties
         for _ in conditions:
-            cmd += f"endif()\n"
+            cmd += "endif()\n"
         return cmd
 
     def _gen_cmakelists(self, current_work_dir, depth=0):
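
Note (not part of the patch): every hunk above makes the same mechanical fix, dropping a redundant `f` prefix from a string literal that contains no `{...}` placeholder. The prefix is harmless at runtime but misleading, and pyflakes flags such literals as F541 ("f-string is missing placeholders"). As a rough sketch of how they can be found automatically, the standalone script below (a hypothetical helper, not used by this patch) relies on the fact that an f-string parses to an `ast.JoinedStr` node, and that a `JoinedStr` with no `ast.FormattedValue` child contains no placeholders:

    import ast
    import sys

    def find_redundant_fstrings(source, filename="<string>"):
        """Yield (line, col) of every f-string literal without placeholders."""
        tree = ast.parse(source, filename=filename)
        for node in ast.walk(tree):
            # A {...} placeholder parses to an ast.FormattedValue child, so a
            # JoinedStr without one is a plain string with a redundant prefix.
            if isinstance(node, ast.JoinedStr) and not any(
                    isinstance(v, ast.FormattedValue) for v in node.values):
                yield node.lineno, node.col_offset

    if __name__ == "__main__":
        # Usage: python find_redundant_fstrings.py <file.py>
        path = sys.argv[1]
        with open(path) as fp:
            source = fp.read()
        for line, col in find_redundant_fstrings(source, path):
            print(f"{path}:{line}:{col}: f-string without placeholders")

Run over the files touched above, this check should report exactly the literals the patch rewrites, while leaving legitimate f-strings (such as f"{cur_dir}/custom_detach.cc") alone.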