[CodeStyle][UP018] remove unnecessary call to str #51922

Merged: 1 commit, Mar 22, 2023.
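For background: UP018 is Ruff's port of pyupgrade's "native literals" rule. It flags calls to built-in constructors such as str() that merely restate a literal: a bare str() is just '', and str('/') is just '/'. A minimal sketch of the before/after shape of every change in this PR (illustrative only, not a hunk from the diff):

import os

# Before: str() wraps values that are already string literals (a no-op).
os.environ[str("FLAGS_check_nan_inf")] = str("1")
save_path = str()  # a bare str() call is just the empty-string literal

# After the UP018 autofix: the literals stand on their own.
os.environ["FLAGS_check_nan_inf"] = "1"
save_path = ''

# In CPython, str() applied to an exact str instance even returns the
# same object, so the rewrite cannot change behavior:
s = "/"
assert str(s) is s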
1 change: 1 addition & 0 deletions pyproject.toml
@@ -44,6 +44,7 @@ select = [
     "UP013",
     "UP014",
     "UP017",
+    "UP018",
     "UP019",
     "UP020",
     "UP021",
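With "UP018" added to Ruff's select list above, CI will reject redundant native-literal constructor calls, and the remaining hunks below are the corresponding autofixable cleanups. Assuming a stock Ruff setup (the exact invocation is not part of this PR), they could be reproduced with something like ruff check --select UP018 --fix python/.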
8 changes: 2 additions & 6 deletions python/paddle/distributed/auto_parallel/operators/common.py
@@ -430,9 +430,7 @@ def sync_and_scale_gradients(dist_ctx, op, dp_group, allreduce_var_names):
                 OP_ROLE_KEY: OpRole.Backward,
             },
         )
-        allreduce_op._set_attr(
-            'op_namescope', str('/') + ParallelMode.DataParallel
-        )
+        allreduce_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
         added_ops.append(allreduce_op)
 
     if dist_ctx.gradient_scale:
@@ -442,9 +440,7 @@ def sync_and_scale_gradients(dist_ctx, op, dp_group, allreduce_var_names):
                 outputs={'Out': grad_var},
                 attrs={'scale': 1.0 / dp_degree, OP_ROLE_KEY: OpRole.Backward},
             )
-            scale_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
-            )
+            scale_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
             added_ops.append(scale_op)
 
     dims_mapping = op_dist_attr.get_output_dims_mapping(grad_var.name)
@@ -169,7 +169,7 @@ def backward(ctx, *args, **kwargs):
                 OP_ROLE_KEY: OpRole.Optimize,
             },
         )
-        allreduce_op._set_attr('op_namescope', str('/') + SyncMode.AmpFlagSync)
+        allreduce_op._set_attr('op_namescope', '/' + SyncMode.AmpFlagSync)
         cast_op2 = main_block.append_op(
             type='cast',
             inputs={'X': inf_var_int32},
@@ -456,7 +456,7 @@ def _remove_no_need_ops_vars(self, block):
                 )
                 # TODO better regular the usage of op namescope
                 allreduce_op._set_attr(
-                    'op_namescope', str('/') + SyncMode.GlobalNormSync
+                    'op_namescope', '/' + SyncMode.GlobalNormSync
                 )
                 self.clip_helper._init_dist_attr(allreduce_op)
 
14 changes: 6 additions & 8 deletions python/paddle/distributed/passes/auto_parallel_sharding.py
@@ -492,7 +492,7 @@ def _insert_optimizer_broadcasts(self, main_block, startup_block):
                 },
             )
             new_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
+                'op_namescope', '/' + ParallelMode.DataParallel
             )
             param_dist_attr = (
                 self._dist_context.get_tensor_dist_attr_for_program(param)
@@ -545,7 +545,7 @@ def _shard_gradient_synchronization(self, main_block):
             else:
                 op._set_attr("ring_id", self.outer_dp_group.id)
             op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
+                'op_namescope', '/' + ParallelMode.DataParallel
             )
 
         # NOTE:
@@ -843,9 +843,7 @@ def _fuse_overlap_parameter_comm_stage_two(self, sharding_info):
                 },
             )
             self.op_to_stream_idx[new_op] = comm_stream_idx
-            new_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
-            )
+            new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
             if self.enable_overlap:
                 new_op.dist_attr.execution_stream = comm_stream
                 new_op.dist_attr.scheduling_priority = (
@@ -1374,7 +1372,7 @@ def _overlap_grad_comm(
                 },
             )
             new_op._set_attr(
-                'op_namescope', str('/') + ParallelMode.DataParallel
+                'op_namescope', '/' + ParallelMode.DataParallel
             )
 
             if self.enable_overlap:
@@ -1424,7 +1422,7 @@ def _insert_init_and_broadcast_op(
             OP_ROLE_KEY: op_role,
         },
     )
-    new_op._set_attr('op_namescope', str('/') + ParallelMode.DataParallel)
+    new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
     naive_set_dist_op_attr_for_program_by_mesh_and_mapping(
         new_op,
         broadcast_var_dist_attr.process_mesh,
@@ -1484,7 +1482,7 @@ def _insert_reduce_op(
     naive_set_dist_op_attr_for_program_by_mesh_and_mapping(
         new_op, dist_attr.process_mesh, dist_attr.dims_mapping, dist_context
     )
-    new_op._set_attr('op_namescope', str('/') + ParallelMode.DataParallel)
+    new_op._set_attr('op_namescope', '/' + ParallelMode.DataParallel)
     return new_op
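All of the op_namescope hunks above and below share one shape: str('/') wrapped a one-character string literal before concatenation with a constant such as ParallelMode.DataParallel (which must itself already be a str for the + to succeed). A one-line check of why dropping the wrapper is behavior-preserving, with the right-hand operand stubbed as a plain string for illustration:

# str('/') and '/' are the same value, so the concatenation is unchanged.
assert str('/') + "DataParallel" == '/' + "DataParallel"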
4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/check_nan_inf_base.py
@@ -16,8 +16,8 @@
 
 import numpy as np
 
-os.environ[str("FLAGS_check_nan_inf")] = str("1")
-os.environ[str("GLOG_vmodule")] = str("nan_inf_utils_detail=10")
+os.environ["FLAGS_check_nan_inf"] = "1"
+os.environ["GLOG_vmodule"] = "nan_inf_utils_detail=10"
 
 import paddle
 import paddle.fluid as fluid
@@ -16,8 +16,8 @@
 
 import numpy as np
 
-os.environ[str("FLAGS_check_nan_inf")] = str("1")
-os.environ[str("GLOG_vmodule")] = str("nan_inf_utils_detail=10")
+os.environ["FLAGS_check_nan_inf"] = "1"
+os.environ["GLOG_vmodule"] = "nan_inf_utils_detail=10"
 
 import paddle
 import paddle.nn as nn
@@ -18,7 +18,7 @@
 
 from launch_function_helper import _find_free_port, wait
 
-os.environ['GLOG_vmodule'] = str("gen_nccl_id_op*=10,gen_comm_id*=10")
+os.environ['GLOG_vmodule'] = "gen_nccl_id_op*=10,gen_comm_id*=10"
 
 import paddle
 from paddle.fluid import core
@@ -42,7 +42,7 @@ def check():
     )
     a_np = np.random.uniform(-2, 2, (10, 20, 30)).astype(np.float32)
     b_np = np.random.uniform(-5, 5, (10, 20, 30)).astype(np.float32)
-    helper = LayerHelper(fluid.unique_name.generate(str("test")), act="relu")
+    helper = LayerHelper(fluid.unique_name.generate("test"), act="relu")
     func = helper.append_activation
     with fluid.dygraph.guard(fluid.core.CPUPlace()):
         a = fluid.dygraph.to_variable(a_np)
@@ -32,7 +32,7 @@ def check():
     )
     print("check: DNNL_VERBOSE=", os.environ['DNNL_VERBOSE'])
     a_np = np.random.uniform(-2, 2, (10, 20, 30)).astype(np.float32)
-    helper = LayerHelper(fluid.unique_name.generate(str("test")), act="relu")
+    helper = LayerHelper(fluid.unique_name.generate("test"), act="relu")
     func = helper.append_activation
     with fluid.dygraph.guard(fluid.core.CPUPlace()):
         a = fluid.dygraph.to_variable(a_np)
@@ -25,8 +25,8 @@ def setUp(self):
         self._python_interp += " check_flags_mkldnn_ops_on_off.py"
 
         self.env = os.environ.copy()
-        self.env[str("DNNL_VERBOSE")] = str("1")
-        self.env[str("FLAGS_use_mkldnn")] = str("1")
+        self.env["DNNL_VERBOSE"] = "1"
+        self.env["FLAGS_use_mkldnn"] = "1"
 
         self.relu_regex = b"^onednn_verbose,exec,cpu,eltwise,.+alg:eltwise_relu alpha:0 beta:0,10x20x20"
         self.ew_add_regex = (
@@ -73,37 +73,37 @@ def test_flags_use_mkl_dnn_on_empty_off_empty(self):
         assert self.found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_on(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_on"): str("relu")}
+        env = {"FLAGS_tracer_mkldnn_ops_on": "relu"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.not_found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_on_multiple(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_on"): str("relu,elementwise_add")}
+        env = {"FLAGS_tracer_mkldnn_ops_on": "relu,elementwise_add"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_off(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2")}
+        env = {"FLAGS_tracer_mkldnn_ops_off": "matmul_v2"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_off_multiple(self):
-        env = {str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2,relu")}
+        env = {"FLAGS_tracer_mkldnn_ops_off": "matmul_v2,relu"}
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.not_found(self.relu_regex, out, err)
         assert self.found(self.ew_add_regex, out, err)
         assert self.not_found(self.matmul_regex, out, err)
 
     def test_flags_use_mkl_dnn_on_off(self):
         env = {
-            str("FLAGS_tracer_mkldnn_ops_on"): str("elementwise_add"),
-            str("FLAGS_tracer_mkldnn_ops_off"): str("matmul_v2"),
+            "FLAGS_tracer_mkldnn_ops_on": "elementwise_add",
+            "FLAGS_tracer_mkldnn_ops_off": "matmul_v2",
         }
         out, err = self.flags_use_mkl_dnn_common(env)
         assert self.not_found(self.relu_regex, out, err)
@@ -25,9 +25,9 @@ def setUp(self):
         self._python_interp += " check_flags_use_mkldnn.py"
 
         self.env = os.environ.copy()
-        self.env[str("GLOG_v")] = str("1")
-        self.env[str("DNNL_VERBOSE")] = str("1")
-        self.env[str("FLAGS_use_mkldnn")] = str("1")
+        self.env["GLOG_v"] = "1"
+        self.env["DNNL_VERBOSE"] = "1"
+        self.env["FLAGS_use_mkldnn"] = "1"
 
         self.relu_regex = b"^onednn_verbose,exec,cpu,eltwise,.+alg:eltwise_relu alpha:0 beta:0,10x20x30"
 
8 changes: 3 additions & 5 deletions python/paddle/fluid/tests/unittests/test_nan_inf.py
@@ -63,11 +63,9 @@ def setUp(self):
         super().setUp()
         # windows python have some bug with env, so need use str to pass ci
         # otherwise, "TypeError: environment can only contain strings"
-        self.env[str("PADDLE_INF_NAN_SKIP_OP")] = str("mul")
-        self.env[str("PADDLE_INF_NAN_SKIP_ROLE")] = str("loss")
-        self.env[str("PADDLE_INF_NAN_SKIP_VAR")] = str(
-            "elementwise_add:fc_0.tmp_1"
-        )
+        self.env["PADDLE_INF_NAN_SKIP_OP"] = "mul"
+        self.env["PADDLE_INF_NAN_SKIP_ROLE"] = "loss"
+        self.env["PADDLE_INF_NAN_SKIP_VAR"] = "elementwise_add:fc_0.tmp_1"
 
 
 class TestNanInfCheckResult(unittest.TestCase):
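The comment kept in the hunk above records why the wrappers existed at all: on Windows under Python 2, os.environ raised "TypeError: environment can only contain strings" for unicode values, so str() presumably forced byte strings there. Under Python 3 every quoted literal is already a str, so the wrapper does nothing while the type check still holds. A small sketch, assuming CPython 3:

import os

# A quoted literal is already a str; wrapping it in str() changes nothing.
assert isinstance("mul", str)
assert str("mul") == "mul"

# os.environ still enforces string-typed keys and values, so the fixed
# assignments remain safe:
os.environ["PADDLE_INF_NAN_SKIP_OP"] = "mul"  # accepted
# os.environ["PADDLE_INF_NAN_SKIP_OP"] = 1   # TypeError: str expected, not int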
@@ -21,7 +21,7 @@
 
 from launch_function_helper import _find_free_port, wait
 
-os.environ['GLOG_vmodule'] = str("gen_bkcl_id_op*=10,gen_comm_id*=10")
+os.environ['GLOG_vmodule'] = "gen_bkcl_id_op*=10,gen_comm_id*=10"
 
 import paddle
 from paddle.fluid import core
2 changes: 1 addition & 1 deletion python/paddle/incubate/passes/ir.py
@@ -34,7 +34,7 @@
 class RegisterPassHelper:
     _register_helpers = list()
 
-    def __init__(self, pass_pairs, pass_type=str(), input_specs=dict()):
+    def __init__(self, pass_pairs, pass_type='', input_specs=dict()):
         self._pass_type = pass_type
         self._pass_pairs = pass_pairs
         self._input_specs = input_specs
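One detail in this hunk: the autofix rewrites pass_type=str() to pass_type='' but leaves input_specs=dict() untouched, because dict() is not one of the native-literal constructors UP018 covers; rewriting dict() to {} belongs to a different rule (flake8-comprehensions' C408 in Ruff), which this PR does not enable. A quick illustration of the two equivalences (an aside, not taken from the PR):

# Both zero-argument constructor calls are just verbose literals:
assert str() == ''
assert dict() == {}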
2 changes: 1 addition & 1 deletion python/paddle/static/io.py
@@ -994,7 +994,7 @@ def name_has_fc(var):
     for name in sorted(save_var_map.keys()):
         save_var_list.append(save_var_map[name])
 
-    save_path = str()
+    save_path = ''
     if save_to_memory is False:
         save_path = os.path.join(os.path.normpath(dirname), filename)
 