Unverified commit c64e1dcf, authored by Tony Cao and committed by GitHub

[CodeStyle][F541] Convert f-strings without curly braces to normal strings (#46700)

* Update README.md

* Update README.md

* Fix F541 by converting f-strings to normal strings
Parent 1b697a5d
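For context, flake8's F541 rule flags f-string literals that contain no `{...}` placeholders, where the `f` prefix has no effect. Below is a minimal, self-contained sketch of the pattern this PR cleans up; the variable names are made up for illustration and do not come from the Paddle sources.

```python
op_name = "matmul_v2"  # hypothetical value, only for the demo

# F541: an f-string with no placeholder — the `f` prefix does nothing.
before = f"auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"

# The fix applied throughout this PR: drop the `f` prefix; the literal is unchanged.
after = "auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
assert before == after  # byte-for-byte identical at runtime

# An f-string that actually interpolates a value keeps its prefix and is not flagged.
print(f"lowering op: {op_name}")
```

A check such as `flake8 --select=F541` reports these redundant prefixes.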
@@ -1055,7 +1055,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
heavily_flag = True
if len(layout_autotune_attr) == 0:
layout_autotune_attr_code_list.append(
- f"auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
+ "auto transformer = egr::EagerLayoutAutotune(op_name, tensors_vector);\n"
)
elif len(layout_autotune_attr) == 1:
layout_autotune_attr_code_list.append(
@@ -1387,7 +1387,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
amp_tensors_vector_list) + " }"
amp_tensors_vector_optional_list_str = " ".join(
amp_tensors_vector_optional_list)
- amp_get_dst_dtype_str = f"auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n"
+ amp_get_dst_dtype_str = "auto amp_dst_dtype = egr::GetAmpDestDtype(op_name, amp_tensors_vector);\n"
amp_autocast_list_str = " ".join(
amp_autocast_list) + " " + " ".join(
amp_autocast_optional_list)
......
@@ -178,10 +178,10 @@ class ElasticManager(object):
if self.min_np == self.max_np or \
(self.min_np > 0 and self.max_np == 0):
self.elastic_level = ElasticLevel.FAULT_TOLERANCE
- logger.info(f'start job with ElasticLevel.FAULT_TOLERANCE')
+ logger.info('start job with ElasticLevel.FAULT_TOLERANCE')
if self.min_np > 0 and self.max_np > self.min_np:
self.elastic_level = ElasticLevel.ELASTIC
- logger.info(f'start job with ElasticLevel.ELASTIC')
+ logger.info('start job with ElasticLevel.ELASTIC')
# compatible with kuberntes service discovery
if not server and os.getenv(
......
@@ -739,9 +739,9 @@ class HybridParallelInferenceHelper(object):
startup_block = self._startup_program.global_block()
if debug:
- with open(f'main_program.txt', 'w') as f:
+ with open('main_program.txt', 'w') as f:
f.write(str(self._main_program))
- with open(f'startup_program.txt', 'w') as f:
+ with open('startup_program.txt', 'w') as f:
f.write(str(self._startup_program))
# step1: add op_device attribute for all ops
......
@@ -28,7 +28,7 @@ from op_test_ipu import IPUOpTest
# just load one custom-op for the data race issue under parallel mode
def load_custom_detach():
cur_dir = os.path.dirname(os.path.realpath(__file__))
- custom_ops = load(name=f"custom_detach",
+ custom_ops = load(name="custom_detach",
sources=[
f"{cur_dir}/custom_detach.cc",
],
@@ -39,7 +39,7 @@ def load_custom_detach():
def load_custom_identity():
cur_dir = os.path.dirname(os.path.realpath(__file__))
- custom_ops = load(name=f"custom_identity",
+ custom_ops = load(name="custom_identity",
sources=[
f"{cur_dir}/custom_identity.cc",
],
@@ -50,7 +50,7 @@ def load_custom_identity():
def load_custom_nll():
cur_dir = os.path.dirname(os.path.realpath(__file__))
- custom_ops = load(name=f"custom_nll",
+ custom_ops = load(name="custom_nll",
sources=[
f"{cur_dir}/custom_nll.cc",
],
......
@@ -131,7 +131,7 @@ class TestAllocContinuousSpace(OpTest):
np.testing.assert_allclose(self.outputs['FusedOutput'],
eager_fused_output,
atol=1e-5,
- err_msg=f'not equal fusedoutput')
+ err_msg='not equal fusedoutput')
def test_check_output(self):
self.check_output_with_place(place=core.CUDAPlace(0),
......
@@ -90,7 +90,7 @@ def sample_neighbors(row,
if return_eids:
if eids is None:
raise ValueError(
- f"`eids` should not be None if `return_eids` is True.")
+ "`eids` should not be None if `return_eids` is True.")
use_perm_buffer = True if perm_buffer is not None else False
......
@@ -51,7 +51,7 @@ def topo_path(xs, ys, block=None):
# Initialize reached vars
for x in xs:
- assert x is None or x.block == block, f'x is not None and x.block != block'
+ assert x is None or x.block == block, 'x is not None and x.block != block'
reached_vars[id(x)] = x
# Reaching test, returning whether an op is reached from the given input
@@ -174,7 +174,7 @@ class Transform(object):
def __init__(self, block):
assert block == default_main_program().current_block(
- ), f'only support transform on current block of main program.'
+ ), 'only support transform on current block of main program.'
self.block = block
self.vars = self.init_vars(block)
self.var2dot = VarMap('var2dot', self.vars)
@@ -318,8 +318,8 @@ class Transform(object):
the list outputs of the resulting transposed program
"""
- assert all(v is not None for v in xs_dot), f'`xs_dot` includes None.'
- assert all(v is not None for v in ys_dot), f'`ys_dot` includes None.'
+ assert all(v is not None for v in xs_dot), '`xs_dot` includes None.'
+ assert all(v is not None for v in ys_dot), '`ys_dot` includes None.'
if ys_bar is None:
ys_bar = []
@@ -537,7 +537,7 @@ def orig2prim(block=None):
block = default_main_program().current_block() if block is None else block
assert block == default_main_program().current_block(
- ), f'block is neither None nor current block of main program'
+ ), 'block is neither None nor current block of main program'
_lower(block, reverse=False, blacklist=[])
@@ -582,6 +582,6 @@ def prim2orig(block=None, blacklist=None):
block = default_main_program().current_block() if block is None else block
assert block == default_main_program().current_block(
- ), f'block is neither None nor current block of main program'
+ ), 'block is neither None nor current block of main program'
blacklist = [] if blacklist is None else blacklist
_lower(block, reverse=True, blacklist=blacklist)
@@ -88,8 +88,8 @@ def graph_khop_sampler(row,
if _non_static_mode():
if return_eids:
if sorted_eids is None:
- raise ValueError(f"`sorted_eid` should not be None "
- f"if return_eids is True.")
+ raise ValueError("`sorted_eid` should not be None "
+ "if return_eids is True.")
edge_src, edge_dst, sample_index, reindex_nodes, edge_eids = \
_legacy_C_ops.graph_khop_sampler(row, sorted_eids,
colptr, input_nodes,
@@ -109,8 +109,8 @@ def graph_khop_sampler(row,
if return_eids:
if sorted_eids is None:
- raise ValueError(f"`sorted_eid` should not be None "
- f"if return_eids is True.")
+ raise ValueError("`sorted_eid` should not be None "
+ "if return_eids is True.")
check_variable_and_dtype(sorted_eids, "Eids", ("int32", "int64"),
"graph_khop_sampler")
......
@@ -109,7 +109,7 @@ def graph_reindex(x,
"""
if flag_buffer_hashtable:
if value_buffer is None or index_buffer is None:
- raise ValueError(f"`value_buffer` and `index_buffer` should not"
+ raise ValueError("`value_buffer` and `index_buffer` should not"
"be None if `flag_buffer_hashtable` is True.")
if _non_static_mode():
......
@@ -98,12 +98,12 @@ def graph_sample_neighbors(row,
if return_eids:
if eids is None:
raise ValueError(
- f"`eids` should not be None if `return_eids` is True.")
+ "`eids` should not be None if `return_eids` is True.")
if flag_perm_buffer:
if perm_buffer is None:
raise ValueError(
- f"`perm_buffer` should not be None if `flag_perm_buffer`"
+ "`perm_buffer` should not be None if `flag_perm_buffer`"
"is True.")
if _non_static_mode():
......
@@ -48,7 +48,7 @@ def append_backward_new(loss_list,
assert program.num_blocks == 1, "The append_backward_new interface is designed to process only one block."
block = program.current_block()
for el in loss_list:
- assert el.block == block, f'variable in loss_list should be in current block of main program'
+ assert el.block == block, 'variable in loss_list should be in current block of main program'
orig2prim(block)
ad = Transform(block)
......
@@ -57,7 +57,7 @@ def parse_op_labels(labelstr, operand):
)
assert labelstr.replace('...', '', 1).find('.') == -1, (
- f"Invalid equation: `.` is found outside of an ellipsis.")
+ "Invalid equation: `.` is found outside of an ellipsis.")
# Check shape. Note, in Paddle a tensor rank is always nonzero
ndims = len(operand.shape)
@@ -102,7 +102,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
# Sanity check.
if n_bcast_dims > 0:
assert '...' in rhs, (
- f"Invalid equation: missing ellipsis in output labels.")
+ "Invalid equation: missing ellipsis in output labels.")
rhs = rhs.replace('...', '')
rhs_set = set(rhs)
@@ -117,7 +117,7 @@ def validate_rhs(rhs, input_labels, n_bcast_dims):
f"output label {sorted(non_input_labels)} not used by any input.")
# Verify that output labels are not duplicate
assert len(rhs) == len(rhs_set), (
- f"Invalid equation: duplicate output labels are found.")
+ "Invalid equation: duplicate output labels are found.")
def build_view(in_labels, out_labels):
@@ -298,7 +298,7 @@ def diagonalize(labels, operand):
'ijj...i' would be merged into 'ij...'
'''
assert not has_duplicated_labels(labels), (
- f'Duplicate labels are not supported.')
+ 'Duplicate labels are not supported.')
return labels, operand
@@ -695,13 +695,13 @@ def preprocess(equation, *operands):
f"but found {len(lhs.split(','))} segments in the label equation.")
assert not ('...' in lhs and '...' not in rhs
- ), f'Invalid equation: missing ellipsis in output labels.'
+ ), 'Invalid equation: missing ellipsis in output labels.'
assert not (len(list(filter(has_duplicated_labels, lhs.split(',')))) >
- 0), f'Duplicate labels are not supported.'
+ 0), 'Duplicate labels are not supported.'
assert not has_duplicated_labels(
- rhs), f'Invalid equation: duplicate output labels are found.'
+ rhs), 'Invalid equation: duplicate output labels are found.'
return lhs, rhs, labels
......
@@ -158,8 +158,8 @@ def _process_name(name, curdir):
"""
name = name.strip()
assert re.compile("^test_[0-9a-zA-Z_]+").search(name), \
- f"""If line is not the header of table, the test name must begin with "test_" """ \
- f"""and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_"."""
+ """If line is not the header of table, the test name must begin with "test_" """ \
+ """and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_"."""
filepath_prefix = os.path.join(curdir, name)
suffix = [".py", ".sh"]
assert _file_with_extension(filepath_prefix, suffix), \
@@ -319,7 +319,7 @@ class DistUTPortManager():
self.process_dist_port_num(num_port)
# step 2
- err_msg = f"""==================[No Old CMakeLists.txt Error]==================================
+ err_msg = """==================[No Old CMakeLists.txt Error]==================================
Following directories has no CmakeLists.txt files:
"""
for c in self.no_cmake_dirs:
@@ -452,7 +452,7 @@ class CMakeGenerator():
set_properties = ""
cmd = cmd % set_properties
for _ in conditions:
- cmd += f"endif()\n"
+ cmd += "endif()\n"
return cmd
def _gen_cmakelists(self, current_work_dir, depth=0):
......