未验证 提交 c64e1dcf 编写于 作者: T Tony Cao 提交者: GitHub

[CodeStyle][F541] Convert f-strings without curly braces to normal strings (#46700)

* Update README.md

* Update README.md

* Fix F541 by converting f-string to normal strings
上级 1b697a5d
...@@ -1055,7 +1055,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase): ...@@ -1055,7 +1055,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
heavily_flag = True heavily_flag = True
if len(layout_autotune_attr) == 0: if len(layout_autotune_attr) == 0:
layout_autotune_attr_code_list.append( layout_autotune_attr_code_list.append(
f"auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n" "auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
) )
elif len(layout_autotune_attr) == 1: elif len(layout_autotune_attr) == 1:
layout_autotune_attr_code_list.append( layout_autotune_attr_code_list.append(
...@@ -1387,7 +1387,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase): ...@@ -1387,7 +1387,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
amp_tensors_vector_list) + " }" amp_tensors_vector_list) + " }"
amp_tensors_vector_optional_list_str = " ".join( amp_tensors_vector_optional_list_str = " ".join(
amp_tensors_vector_optional_list) amp_tensors_vector_optional_list)
amp_get_dst_dtype_str = f"auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n" amp_get_dst_dtype_str = "auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n"
amp_autocast_list_str = " ".join( amp_autocast_list_str = " ".join(
amp_autocast_list) + " " + " ".join( amp_autocast_list) + " " + " ".join(
amp_autocast_optional_list) amp_autocast_optional_list)
......
...@@ -178,10 +178,10 @@ class ElasticManager(object): ...@@ -178,10 +178,10 @@ class ElasticManager(object):
if self.min_np == self.max_np or \ if self.min_np == self.max_np or \
(self.min_np > 0 and self.max_np == 0): (self.min_np > 0 and self.max_np == 0):
self.elastic_level = ElasticLevel.FAULT_TOLERANCE self.elastic_level = ElasticLevel.FAULT_TOLERANCE
logger.info(f'start job with ElasticLevel.FAULT_TOLERANCE') logger.info('start job with ElasticLevel.FAULT_TOLERANCE')
if self.min_np > 0 and self.max_np > self.min_np: if self.min_np > 0 and self.max_np > self.min_np:
self.elastic_level = ElasticLevel.ELASTIC self.elastic_level = ElasticLevel.ELASTIC
logger.info(f'start job with ElasticLevel.ELASTIC') logger.info('start job with ElasticLevel.ELASTIC')
# compatible with kubernetes service discovery # compatible with kubernetes service discovery
if not server and os.getenv( if not server and os.getenv(
......
...@@ -739,9 +739,9 @@ class HybridParallelInferenceHelper(object): ...@@ -739,9 +739,9 @@ class HybridParallelInferenceHelper(object):
startup_block = self._startup_program.global_block() startup_block = self._startup_program.global_block()
if debug: if debug:
with open(f'main_program.txt', 'w') as f: with open('main_program.txt', 'w') as f:
f.write(str(self._main_program)) f.write(str(self._main_program))
with open(f'startup_program.txt', 'w') as f: with open('startup_program.txt', 'w') as f:
f.write(str(self._startup_program)) f.write(str(self._startup_program))
# step1: add op_device attribute for all ops # step1: add op_device attribute for all ops
......
...@@ -28,7 +28,7 @@ from op_test_ipu import IPUOpTest ...@@ -28,7 +28,7 @@ from op_test_ipu import IPUOpTest
# just load one custom-op for the data race issue under parallel mode # just load one custom-op for the data race issue under parallel mode
def load_custom_detach(): def load_custom_detach():
cur_dir = os.path.dirname(os.path.realpath(__file__)) cur_dir = os.path.dirname(os.path.realpath(__file__))
custom_ops = load(name=f"custom_detach", custom_ops = load(name="custom_detach",
sources=[ sources=[
f"{cur_dir}/custom_detach.cc", f"{cur_dir}/custom_detach.cc",
], ],
...@@ -39,7 +39,7 @@ def load_custom_detach(): ...@@ -39,7 +39,7 @@ def load_custom_detach():
def load_custom_identity(): def load_custom_identity():
cur_dir = os.path.dirname(os.path.realpath(__file__)) cur_dir = os.path.dirname(os.path.realpath(__file__))
custom_ops = load(name=f"custom_identity", custom_ops = load(name="custom_identity",
sources=[ sources=[
f"{cur_dir}/custom_identity.cc", f"{cur_dir}/custom_identity.cc",
], ],
...@@ -50,7 +50,7 @@ def load_custom_identity(): ...@@ -50,7 +50,7 @@ def load_custom_identity():
def load_custom_nll(): def load_custom_nll():
cur_dir = os.path.dirname(os.path.realpath(__file__)) cur_dir = os.path.dirname(os.path.realpath(__file__))
custom_ops = load(name=f"custom_nll", custom_ops = load(name="custom_nll",
sources=[ sources=[
f"{cur_dir}/custom_nll.cc", f"{cur_dir}/custom_nll.cc",
], ],
......
...@@ -131,7 +131,7 @@ class TestAllocContinuousSpace(OpTest): ...@@ -131,7 +131,7 @@ class TestAllocContinuousSpace(OpTest):
np.testing.assert_allclose(self.outputs['FusedOutput'], np.testing.assert_allclose(self.outputs['FusedOutput'],
eager_fused_output, eager_fused_output,
atol=1e-5, atol=1e-5,
err_msg=f'not equal fusedoutput') err_msg='not equal fusedoutput')
def test_check_output(self): def test_check_output(self):
self.check_output_with_place(place=core.CUDAPlace(0), self.check_output_with_place(place=core.CUDAPlace(0),
......
...@@ -90,7 +90,7 @@ def sample_neighbors(row, ...@@ -90,7 +90,7 @@ def sample_neighbors(row,
if return_eids: if return_eids:
if eids is None: if eids is None:
raise ValueError( raise ValueError(
f"`eids` should not be None if `return_eids` is True.") "`eids` should not be None if `return_eids` is True.")
use_perm_buffer = True if perm_buffer is not None else False use_perm_buffer = True if perm_buffer is not None else False
......
...@@ -51,7 +51,7 @@ def topo_path(xs, ys, block=None): ...@@ -51,7 +51,7 @@ def topo_path(xs, ys, block=None):
# Initialize reached vars # Initialize reached vars
for x in xs: for x in xs:
assert x is None or x.block == block, f'x is not None and x.block != block' assert x is None or x.block == block, 'x is not None and x.block != block'
reached_vars[id(x)] = x reached_vars[id(x)] = x
# Reaching test, returning whether an op is reached from the given input # Reaching test, returning whether an op is reached from the given input
...@@ -174,7 +174,7 @@ class Transform(object): ...@@ -174,7 +174,7 @@ class Transform(object):
def __init__(self, block): def __init__(self, block):
assert block == default_main_program().current_block( assert block == default_main_program().current_block(
), f'only support transform on current block of main program.' ), 'only support transform on current block of main program.'
self.block = block self.block = block
self.vars = self.init_vars(block) self.vars = self.init_vars(block)
self.var2dot = VarMap('var2dot', self.vars) self.var2dot = VarMap('var2dot', self.vars)
...@@ -318,8 +318,8 @@ class Transform(object): ...@@ -318,8 +318,8 @@ class Transform(object):
the list outputs of the resulting transposed program the list outputs of the resulting transposed program
""" """
assert all(v is not None for v in xs_dot), f'`xs_dot` includes None.' assert all(v is not None for v in xs_dot), '`xs_dot` includes None.'
assert all(v is not None for v in ys_dot), f'`ys_dot` includes None.' assert all(v is not None for v in ys_dot), '`ys_dot` includes None.'
if ys_bar is None: if ys_bar is None:
ys_bar = [] ys_bar = []
...@@ -537,7 +537,7 @@ def orig2prim(block=None): ...@@ -537,7 +537,7 @@ def orig2prim(block=None):
block = default_main_program().current_block() if block is None else block block = default_main_program().current_block() if block is None else block
assert block == default_main_program().current_block( assert block == default_main_program().current_block(
), f'block is neither None nor current block of main program' ), 'block is neither None nor current block of main program'
_lower(block, reverse=False, blacklist=[]) _lower(block, reverse=False, blacklist=[])
...@@ -582,6 +582,6 @@ def prim2orig(block=None, blacklist=None): ...@@ -582,6 +582,6 @@ def prim2orig(block=None, blacklist=None):
block = default_main_program().current_block() if block is None else block block = default_main_program().current_block() if block is None else block
assert block == default_main_program().current_block( assert block == default_main_program().current_block(
), f'block is neither None nor current block of main program' ), 'block is neither None nor current block of main program'
blacklist = [] if blacklist is None else blacklist blacklist = [] if blacklist is None else blacklist
_lower(block, reverse=True, blacklist=blacklist) _lower(block, reverse=True, blacklist=blacklist)
...@@ -88,8 +88,8 @@ def graph_khop_sampler(row, ...@@ -88,8 +88,8 @@ def graph_khop_sampler(row,
if _non_static_mode(): if _non_static_mode():
if return_eids: if return_eids:
if sorted_eids is None: if sorted_eids is None:
raise ValueError(f"`sorted_eid` should not be None " raise ValueError("`sorted_eid` should not be None "
f"if return_eids is True.") "if return_eids is True.")
edge_src, edge_dst, sample_index, reindex_nodes, edge_eids = \ edge_src, edge_dst, sample_index, reindex_nodes, edge_eids = \
_legacy_C_ops.graph_khop_sampler(row, sorted_eids, _legacy_C_ops.graph_khop_sampler(row, sorted_eids,
colptr, input_nodes, colptr, input_nodes,
...@@ -109,8 +109,8 @@ def graph_khop_sampler(row, ...@@ -109,8 +109,8 @@ def graph_khop_sampler(row,
if return_eids: if return_eids:
if sorted_eids is None: if sorted_eids is None:
raise ValueError(f"`sorted_eid` should not be None " raise ValueError("`sorted_eid` should not be None "
f"if return_eids is True.") "if return_eids is True.")
check_variable_and_dtype(sorted_eids, "Eids", ("int32", "int64"), check_variable_and_dtype(sorted_eids, "Eids", ("int32", "int64"),
"graph_khop_sampler") "graph_khop_sampler")
......
...@@ -109,7 +109,7 @@ def graph_reindex(x, ...@@ -109,7 +109,7 @@ def graph_reindex(x,
""" """
if flag_buffer_hashtable: if flag_buffer_hashtable:
if value_buffer is None or index_buffer is None: if value_buffer is None or index_buffer is None:
raise ValueError(f"`value_buffer` and `index_buffer` should not" raise ValueError("`value_buffer` and `index_buffer` should not"
"be None if `flag_buffer_hashtable` is True.") "be None if `flag_buffer_hashtable` is True.")
if _non_static_mode(): if _non_static_mode():
......
...@@ -98,12 +98,12 @@ def graph_sample_neighbors(row, ...@@ -98,12 +98,12 @@ def graph_sample_neighbors(row,
if return_eids: if return_eids:
if eids is None: if eids is None:
raise ValueError( raise ValueError(
f"`eids` should not be None if `return_eids` is True.") "`eids` should not be None if `return_eids` is True.")
if flag_perm_buffer: if flag_perm_buffer:
if perm_buffer is None: if perm_buffer is None:
raise ValueError( raise ValueError(
f"`perm_buffer` should not be None if `flag_perm_buffer`" "`perm_buffer` should not be None if `flag_perm_buffer`"
"is True.") "is True.")
if _non_static_mode(): if _non_static_mode():
......
...@@ -48,7 +48,7 @@ def append_backward_new(loss_list, ...@@ -48,7 +48,7 @@ def append_backward_new(loss_list,
assert program.num_blocks == 1, "The append_backward_new interface is designed to process only one block." assert program.num_blocks == 1, "The append_backward_new interface is designed to process only one block."
block = program.current_block() block = program.current_block()
for el in loss_list: for el in loss_list:
assert el.block == block, f'variable in loss_list should be in current block of main program' assert el.block == block, 'variable in loss_list should be in current block of main program'
orig2prim(block) orig2prim(block)
ad = Transform(block) ad = Transform(block)
......
...@@ -57,7 +57,7 @@ def parse_op_labels(labelstr, operand): ...@@ -57,7 +57,7 @@ def parse_op_labels(labelstr, operand):
) )
assert labelstr.replace('...', '', 1).find('.') == -1, ( assert labelstr.replace('...', '', 1).find('.') == -1, (
f"Invalid equation: `.` is found outside of an ellipsis.") "Invalid equation: `.` is found outside of an ellipsis.")
# Check shape. Note, in Paddle a tensor rank is always nonzero # Check shape. Note, in Paddle a tensor rank is always nonzero
ndims = len(operand.shape) ndims = len(operand.shape)
...@@ -102,7 +102,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims): ...@@ -102,7 +102,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
# Sanity check. # Sanity check.
if n_bcast_dims > 0: if n_bcast_dims > 0:
assert '...' in rhs, ( assert '...' in rhs, (
f"Invalid equation: missing ellipsis in output labels.") "Invalid equation: missing ellipsis in output labels.")
rhs = rhs.replace('...', '') rhs = rhs.replace('...', '')
rhs_set = set(rhs) rhs_set = set(rhs)
...@@ -117,7 +117,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims): ...@@ -117,7 +117,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
f"output label {sorted(non_input_labels)} not used by any input.") f"output label {sorted(non_input_labels)} not used by any input.")
# Verify that output labels are not duplicate # Verify that output labels are not duplicate
assert len(rhs) == len(rhs_set), ( assert len(rhs) == len(rhs_set), (
f"Invalid equation: duplicate output labels are found.") "Invalid equation: duplicate output labels are found.")
def build_view(in_labels, out_labels): def build_view(in_labels, out_labels):
...@@ -298,7 +298,7 @@ def diagonalize(labels, operand): ...@@ -298,7 +298,7 @@ def diagonalize(labels, operand):
'ijj...i' would be merged into 'ij...' 'ijj...i' would be merged into 'ij...'
''' '''
assert not has_duplicated_labels(labels), ( assert not has_duplicated_labels(labels), (
f'Duplicate labels are not supported.') 'Duplicate labels are not supported.')
return labels, operand return labels, operand
...@@ -695,13 +695,13 @@ def preprocess(equation, *operands): ...@@ -695,13 +695,13 @@ def preprocess(equation, *operands):
f"but found {len(lhs.split(','))} segments in the label equation.") f"but found {len(lhs.split(','))} segments in the label equation.")
assert not ('...' in lhs and '...' not in rhs assert not ('...' in lhs and '...' not in rhs
), f'Invalid equation: missing ellipsis in output labels.' ), 'Invalid equation: missing ellipsis in output labels.'
assert not (len(list(filter(has_duplicated_labels, lhs.split(',')))) > assert not (len(list(filter(has_duplicated_labels, lhs.split(',')))) >
0), f'Duplicate labels are not supported.' 0), 'Duplicate labels are not supported.'
assert not has_duplicated_labels( assert not has_duplicated_labels(
rhs), f'Invalid equation: duplicate output labels are found.' rhs), 'Invalid equation: duplicate output labels are found.'
return lhs, rhs, labels return lhs, rhs, labels
......
...@@ -158,8 +158,8 @@ def _process_name(name, curdir): ...@@ -158,8 +158,8 @@ def _process_name(name, curdir):
""" """
name = name.strip() name = name.strip()
assert re.compile("^test_[0-9a-zA-Z_]+").search(name), \ assert re.compile("^test_[0-9a-zA-Z_]+").search(name), \
f"""If line is not the header of table, the test name must begin with "test_" """ \ """If line is not the header of table, the test name must begin with "test_" """ \
f"""and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_".""" """and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_"."""
filepath_prefix = os.path.join(curdir, name) filepath_prefix = os.path.join(curdir, name)
suffix = [".py", ".sh"] suffix = [".py", ".sh"]
assert _file_with_extension(filepath_prefix, suffix), \ assert _file_with_extension(filepath_prefix, suffix), \
...@@ -319,7 +319,7 @@ class DistUTPortManager(): ...@@ -319,7 +319,7 @@ class DistUTPortManager():
self.process_dist_port_num(num_port) self.process_dist_port_num(num_port)
# step 2 # step 2
err_msg = f"""==================[No Old CMakeLists.txt Error]================================== err_msg = """==================[No Old CMakeLists.txt Error]==================================
Following directories has no CmakeLists.txt files: Following directories has no CmakeLists.txt files:
""" """
for c in self.no_cmake_dirs: for c in self.no_cmake_dirs:
...@@ -452,7 +452,7 @@ class CMakeGenerator(): ...@@ -452,7 +452,7 @@ class CMakeGenerator():
set_properties = "" set_properties = ""
cmd = cmd % set_properties cmd = cmd % set_properties
for _ in conditions: for _ in conditions:
cmd += f"endif()\n" cmd += "endif()\n"
return cmd return cmd
def _gen_cmakelists(self, current_work_dir, depth=0): def _gen_cmakelists(self, current_work_dir, depth=0):
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册