Unverified commit 52a31b87, authored by iSerendipity, committed by GitHub

[CodeStyle][UP018] Unnecessary call to `str` (#51922)

Parent: db599258
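Background on the rule: UP018 ("native literals", a pyupgrade check implemented in ruff) flags redundant calls to `str` — both `str("literal")`, which returns the literal unchanged, and a bare `str()`, which is just `''`. A minimal before/after sketch of the two patterns this PR removes (illustrative code, not taken from the diff below; `suffix` is a hypothetical name):

-namescope = str('/') + suffix   # str() on a literal is an identity call
-path = str()                    # bare str() is the empty string
+namescope = '/' + suffix
+path = ''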
@@ -49,6 +49,7 @@ select = [
     "UP013",
     "UP014",
     "UP017",
+    "UP018",
     "UP019",
     "UP020",
     "UP021",
@@ -440,9 +440,7 @@ def sync_and_scale_gradients(dist_ctx, op, dp_group, allreduce_var_names):
                 OP_ROLE_KEY: OpRole.Backward,
             },
         )
-        allreduce_op._set_attr(
-            'op_namescope', str('/') + ParallelMode.DataParallel
-        )
+        allreduce_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
         added_ops.append(allreduce_op)
 
     if dist_ctx.gradient_scale:
@@ -452,9 +450,7 @@ def sync_and_scale_gradients(dist_ctx, op, dp_group, allreduce_var_names):
                 outputs={'Out': grad_var},
                 attrs={'scale': 1.0 / dp_degree, OP_ROLE_KEY: OpRole.Backward},
             )
-            scale_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
-            )
+            scale_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
            added_ops.append(scale_op)
 
            dims_mapping = op_dist_attr.get_output_dims_mapping(grad_var.name)
@@ -169,7 +169,7 @@ class DistributedCheckFiniteAndUnscaleImpl(DistributedOperatorImpl):
                 OP_ROLE_KEY: OpRole.Optimize,
             },
         )
-        allreduce_op._set_attr('op_namescope', str('/') + SyncMode.AmpFlagSync)
+        allreduce_op._set_attr('op_namescope', '/' + SyncMode.AmpFlagSync)
         cast_op2 = main_block.append_op(
             type='cast',
             inputs={'X': inf_var_int32},
@@ -456,7 +456,7 @@ class ClipGradByGloblNormPass(PassBase):
                 )
                 # TODO better regular the usage of op namescope
                 allreduce_op._set_attr(
-                    'op_namescope', str('/') + SyncMode.GlobalNormSync
+                    'op_namescope', '/' + SyncMode.GlobalNormSync
                 )
                 self.clip_helper._init_dist_attr(allreduce_op)
@@ -492,7 +492,7 @@ class ShardingPass(PassBase):
                 },
             )
             new_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
+                'op_namescope', '/' + ParallelMode.DataParallel
             )
             param_dist_attr = (
                 self._dist_context.get_tensor_dist_attr_for_program(param)
@@ -545,7 +545,7 @@ class ShardingPass(PassBase):
                 else:
                     op._set_attr("ring_id", self.outer_dp_group.id)
                 op._set_attr(
-                    'op_namescope', str('/') + ParallelMode.DataParallel
+                    'op_namescope', '/' + ParallelMode.DataParallel
                 )
 
         # NOTE:
@@ -843,9 +843,7 @@ class ShardingPass(PassBase):
                     },
                 )
                 self.op_to_stream_idx[new_op] = comm_stream_idx
-                new_op._set_attr(
-                    'op_namescope', str('/') + ParallelMode.DataParallel
-                )
+                new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
                 if self.enable_overlap:
                     new_op.dist_attr.execution_stream = comm_stream
                     new_op.dist_attr.scheduling_priority = (
@@ -1374,7 +1372,7 @@ class ShardingPass(PassBase):
                 },
             )
             new_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
+                'op_namescope', '/' + ParallelMode.DataParallel
             )
             if self.enable_overlap:
@@ -1424,7 +1422,7 @@ def _insert_init_and_broadcast_op(
             OP_ROLE_KEY: op_role,
         },
     )
-    new_op._set_attr('op_namescope', str('/') + ParallelMode.DataParallel)
+    new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
     naive_set_dist_op_attr_for_program_by_mesh_and_mapping(
         new_op,
         broadcast_var_dist_attr.process_mesh,
@@ -1484,7 +1482,7 @@ def _insert_reduce_op(
     naive_set_dist_op_attr_for_program_by_mesh_and_mapping(
         new_op, dist_attr.process_mesh, dist_attr.dims_mapping, dist_context
     )
-    new_op._set_attr('op_namescope', str('/') + ParallelMode.DataParallel)
+    new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
     return new_op
@@ -16,8 +16,8 @@ import os
 import numpy as np
 
-os.environ[str("FLAGS_check_nan_inf")] = str("1")
-os.environ[str("GLOG_vmodule")] = str("nan_inf_utils_detail=10")
+os.environ["FLAGS_check_nan_inf"] = "1"
+os.environ["GLOG_vmodule"] = "nan_inf_utils_detail=10"
 
 import paddle
 import paddle.fluid as fluid
@@ -16,8 +16,8 @@ import os
 import numpy as np
 
-os.environ[str("FLAGS_check_nan_inf")] = str("1")
-os.environ[str("GLOG_vmodule")] = str("nan_inf_utils_detail=10")
+os.environ["FLAGS_check_nan_inf"] = "1"
+os.environ["GLOG_vmodule"] = "nan_inf_utils_detail=10"
 
 import paddle
 import paddle.nn as nn
@@ -18,7 +18,7 @@ from multiprocessing import Process
 from launch_function_helper import _find_free_port, wait
 
-os.environ['GLOG_vmodule'] = str("gen_nccl_id_op*=10,gen_comm_id*=10")
+os.environ['GLOG_vmodule'] = "gen_nccl_id_op*=10,gen_comm_id*=10"
 
 import paddle
 from paddle.fluid import core
@@ -42,7 +42,7 @@ def check():
     )
     a_np = np.random.uniform(-2, 2, (10, 20, 30)).astype(np.float32)
     b_np = np.random.uniform(-5, 5, (10, 20, 30)).astype(np.float32)
-    helper = LayerHelper(fluid.unique_name.generate(str("test")), act="relu")
+    helper = LayerHelper(fluid.unique_name.generate("test"), act="relu")
     func = helper.append_activation
     with fluid.dygraph.guard(fluid.core.CPUPlace()):
         a = fluid.dygraph.to_variable(a_np)
@@ -32,7 +32,7 @@ def check():
     )
     print("check: DNNL_VERBOSE=", os.environ['DNNL_VERBOSE'])
     a_np = np.random.uniform(-2, 2, (10, 20, 30)).astype(np.float32)
-    helper = LayerHelper(fluid.unique_name.generate(str("test")), act="relu")
+    helper = LayerHelper(fluid.unique_name.generate("test"), act="relu")
     func = helper.append_activation
     with fluid.dygraph.guard(fluid.core.CPUPlace()):
         a = fluid.dygraph.to_variable(a_np)
@@ -25,8 +25,8 @@ class TestFlagsUseMkldnn(unittest.TestCase):
         self._python_interp += " check_flags_mkldnn_ops_on_off.py"
 
         self.env = os.environ.copy()
-        self.env[str("DNNL_VERBOSE")] = str("1")
-        self.env[str("FLAGS_use_mkldnn")] = str("1")
+        self.env["DNNL_VERBOSE"] = "1"
+        self.env["FLAGS_use_mkldnn"] = "1"
 
         self.relu_regex = b"^onednn_verbose,exec,cpu,eltwise,.+alg:eltwise_relu alpha:0 beta:0,10x20x20"
         self.ew_add_regex = (
@@ -73,28 +73,28 @@ class TestFlagsUseMkldnn(unittest.TestCase):
         assert self.found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_on(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_on"): str("relu")}
+        env = {"FLAGS_tracer_mkldnn_ops_on": "relu"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.not_found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_on_multiple(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_on"): str("relu,elementwise_add")}
+        env = {"FLAGS_tracer_mkldnn_ops_on": "relu,elementwise_add"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_off(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2")}
+        env = {"FLAGS_tracer_mkldnn_ops_off": "matmul_v2"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_off_multiple(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2,relu")}
+        env = {"FLAGS_tracer_mkldnn_ops_off": "matmul_v2,relu"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.not_found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
@@ -102,8 +102,8 @@ class TestFlagsUseMkldnn(unittest.TestCase):
     def test_flags_use_mkl_dnn_on_off(self):
         env = {
-            str("FLAGS_tracer_mkldnn_ops_on"): str("elementwise_add"),
-            str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2"),
+            "FLAGS_tracer_mkldnn_ops_on": "elementwise_add",
+            "FLAGS_tracer_mkldnn_ops_off": "matmul_v2",
         }
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.not_found(self.relu_regex, out, err)
@@ -25,9 +25,9 @@ class TestFlagsUseMkldnn(unittest.TestCase):
         self._python_interp += " check_flags_use_mkldnn.py"
 
         self.env = os.environ.copy()
-        self.env[str("GLOG_v")] = str("1")
-        self.env[str("DNNL_VERBOSE")] = str("1")
-        self.env[str("FLAGS_use_mkldnn")] = str("1")
+        self.env["GLOG_v"] = "1"
+        self.env["DNNL_VERBOSE"] = "1"
+        self.env["FLAGS_use_mkldnn"] = "1"
 
         self.relu_regex = b"^onednn_verbose,exec,cpu,eltwise,.+alg:eltwise_relu alpha:0 beta:0,10x20x30"
@@ -63,11 +63,9 @@ class TestNanInfEnv(TestNanInf):
         super().setUp()
         # windows python have some bug with env, so need use str to pass ci
         # otherwise, "TypeError: environment can only contain strings"
-        self.env[str("PADDLE_INF_NAN_SKIP_OP")] = str("mul")
-        self.env[str("PADDLE_INF_NAN_SKIP_ROLE")] = str("loss")
-        self.env[str("PADDLE_INF_NAN_SKIP_VAR")] = str(
-            "elementwise_add:fc_0.tmp_1"
-        )
+        self.env["PADDLE_INF_NAN_SKIP_OP"] = "mul"
+        self.env["PADDLE_INF_NAN_SKIP_ROLE"] = "loss"
+        self.env["PADDLE_INF_NAN_SKIP_VAR"] = "elementwise_add:fc_0.tmp_1"
 
 
 class TestNanInfCheckResult(unittest.TestCase):
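A note on the two comment lines kept in the hunk above: on Windows under Python 2, `os.environ` rejected unicode values with exactly the quoted error ("TypeError: environment can only contain strings"), which is why these tests once wrapped every key and value in `str()`. Under Python 3 a string literal is already `str`, so the wrapper is a no-op and UP018 can drop it safely. A quick sketch of why the fix is behavior-preserving:

import os

# Python 3: the literals are already str, no wrapper needed
os.environ["PADDLE_INF_NAN_SKIP_OP"] = "mul"
assert os.environ["PADDLE_INF_NAN_SKIP_OP"] == str("mul")  # str() adds nothing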
@@ -21,7 +21,7 @@ from multiprocessing import Process
 from launch_function_helper import _find_free_port, wait
 
-os.environ['GLOG_vmodule'] = str("gen_bkcl_id_op*=10,gen_comm_id*=10")
+os.environ['GLOG_vmodule'] = "gen_bkcl_id_op*=10,gen_comm_id*=10"
 
 import paddle
 from paddle.fluid import core
@@ -34,7 +34,7 @@ except ModuleNotFoundError:
 class RegisterPassHelper:
     _register_helpers = list()
 
-    def __init__(self, pass_pairs, pass_type=str(), input_specs=dict()):
+    def __init__(self, pass_pairs, pass_type='', input_specs=dict()):
         self._pass_type = pass_type
         self._pass_pairs = pass_pairs
         self._input_specs = input_specs
@@ -994,7 +994,7 @@ def save_vars(
         for name in sorted(save_var_map.keys()):
             save_var_list.append(save_var_map[name])
 
-        save_path = str()
+        save_path = ''
         if save_to_memory is False:
             save_path = os.path.join(os.path.normpath(dirname), filename)