[CodeStyle][Ruff][BUAA][C-[1-10]] Fix Ruff RSE102 diagnostic for 10 files in `python/paddle/` (#67123)
Jeff114514 authored Aug 7, 2024
1 parent 1415c88 commit 296df15
Showing 10 changed files with 17 additions and 17 deletions.
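Context for the change: Ruff's RSE102 rule ("unnecessary parentheses on raised exception") flags `raise SomeError()` when the exception is raised with no arguments, because raising the class itself makes Python instantiate it implicitly, so the two forms produce the same exception object. A minimal sketch of the pattern this commit applies; the function name here is a hypothetical illustration, not code from the commit:

# Before: RSE102 flags the empty call parentheses.
def not_ready():
    raise NotImplementedError()

# After: raise the class directly; Python calls it with no
# arguments at raise time, yielding an identical exception.
def not_ready():
    raise NotImplementedError

Parentheses are still required when the exception takes arguments, e.g. `raise ValueError("unknown level")`, which is why the rule only fires on empty call parentheses.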
2 changes: 1 addition & 1 deletion python/paddle/audio/backends/init_backend.py
@@ -174,7 +174,7 @@ def set_backend(backend_name: str) -> None:
     """
     if backend_name not in list_available_backends():
-        raise NotImplementedError()
+        raise NotImplementedError

     if backend_name == "wave_backend":
         module = wave_backend
4 changes: 2 additions & 2 deletions python/paddle/base/reader.py
@@ -136,10 +136,10 @@ def __call__(self):
         return self

     def __iter__(self):
-        raise NotImplementedError()
+        raise NotImplementedError

     def __next__(self):
-        raise NotImplementedError()
+        raise NotImplementedError

     @classmethod
     def _check_input_array(cls, item):
2 changes: 1 addition & 1 deletion (file path not captured in this view)
@@ -100,7 +100,7 @@ def get_var(self, level, step):
         elif level == 5:
             return self.get_backward_tmp_var()
         else:
-            raise ValueError()
+            raise ValueError

     def set_program(self, program: Program):
         assert isinstance(program, Program)
2 changes: 1 addition & 1 deletion python/paddle/distributed/auto_tuner/tuner.py
@@ -53,7 +53,7 @@ def __init__(self, tuner_cfg):

             self.algo = CustomizeSearch(tuner_cfg)
         else:
-            raise NotImplementedError()
+            raise NotImplementedError

         self.history_cfgs = []
         self.resume_cfgs = []
2 changes: 1 addition & 1 deletion python/paddle/distributed/passes/pass_base.py
@@ -143,7 +143,7 @@ def __init__(self):

     @property
     def cpp_name(self):
-        raise NotImplementedError()
+        raise NotImplementedError

     @property
     def cpp_attr_types(self):
4 changes: 2 additions & 2 deletions (file path not captured in this view)
@@ -851,10 +851,10 @@ def backward(
         no_grad_set=None,
         callbacks=None,
     ):
-        raise NotImplementedError()
+        raise NotImplementedError

     def apply_gradients(self, params_grads):
-        raise NotImplementedError()
+        raise NotImplementedError

     def _build_trainer_programs(self, compiled_config):
         _main = fleet._origin_main_program.clone()
4 changes: 2 additions & 2 deletions (file path not captured in this view)
@@ -1219,7 +1219,7 @@ def backward(
         """
         Currently, backward function can not be called through DistributedOptimizer
         """
-        raise NotImplementedError()
+        raise NotImplementedError

     def _remove_collective_ops(self, program, name):
         """
"""
Expand All @@ -1235,7 +1235,7 @@ def apply_gradients(self, params_grads):
"""
Currently, apply_gradients function can not be called through DistributedOptimizer
"""
raise NotImplementedError()
raise NotImplementedError

def get_dist_env(self):
trainer_id = int(os.getenv('PADDLE_TRAINER_ID', '0'))
4 changes: 2 additions & 2 deletions python/paddle/incubate/nn/attn_bias.py
@@ -30,7 +30,7 @@
 class AttentionBias(ABC):
     @abstractmethod
     def materialize(self, shape, dtype=paddle.float32):
-        raise NotImplementedError()
+        raise NotImplementedError


 class LowerTriangularMask(AttentionBias):
@@ -119,7 +119,7 @@ def from_seqlens_padded(cls, seqlens, padding):
         )

     def split(self, x, batch_sizes=None):
-        raise NotImplementedError()
+        raise NotImplementedError


 @dataclass
4 changes: 2 additions & 2 deletions python/paddle/incubate/nn/layer/fused_transformer.py
@@ -985,10 +985,10 @@ def __init__(
         custom_decoder=None,
     ):
         super().__init__()
-        raise NotImplementedError()
+        raise NotImplementedError

     def forward(self, src, tgt, src_mask=None, tgt_mask=None, memory_mask=None):
-        raise NotImplementedError()
+        raise NotImplementedError


 class FusedMultiTransformer(Layer):
6 changes: 3 additions & 3 deletions python/paddle/jit/dy2static/py_layer.py
@@ -66,15 +66,15 @@ def saved_tensor(self):

     # TODO(MarioLulab): support not_inplace
     def mark_not_inplace(self, *args):
-        raise NotImplementedError()
+        raise NotImplementedError

     # TODO(MarioLulab): support non_differentiable
     def mark_non_differentiable(self, *args):
-        raise NotImplementedError()
+        raise NotImplementedError

     # TODO(MarioLulab): support materialize_grads
     def set_materialize_grads(self, value: bool):
-        raise NotImplementedError()
+        raise NotImplementedError


 class StaticPyLayer:
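A quick sanity check of the equivalence every hunk above relies on; this is an illustrative snippet, not part of the commit:

# Both raise forms yield the same exception type and representation,
# because raising a class instantiates it with no arguments.
for form in ("class", "instance"):
    try:
        if form == "class":
            raise NotImplementedError
        else:
            raise NotImplementedError()
    except NotImplementedError as e:
        print(form, type(e).__name__, repr(e))  # both: NotImplementedError NotImplementedError()

Fixes like these are mechanical: assuming a standard Ruff setup, `ruff check --select RSE102 --fix python/paddle/` would produce the same edits (the exact invocation used is not recorded in the commit).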
