fix: Flake8-BugBear code B-026 for PyTorch (#111362)

Fixes #106571

I have fixed the B026 errors flagged by flake8 across the codebase. Please review and let me know if anything else is needed.
Thanks, I'm excited to make this first contribution to PyTorch.

I also referred to the issue that introduced [B026](https://github.com/PyCQA/flake8-bugbear/issues/286) in `flake8-bugbear`, which discusses the error code.
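
For context, B026 flags star-arg unpacking placed after a keyword argument: the unpacked values are still bound positionally, ahead of the keyword, so the call reads misleadingly and can fail at runtime. A minimal sketch of the pattern (illustrative names, not code from this PR):

```python
def show(a, b):
    print(a, b)

# Flagged by B026: the unpacked tuple binds positionally, before b=2 is applied
show(b=2, *(1,))     # prints "1 2", but reads as if b were passed first
show(b=2, *(1, 3))   # TypeError: show() got multiple values for argument 'b'

# Preferred ordering, which is the mechanical fix applied throughout this PR
show(*(1,), b=2)     # prints "1 2"
```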
Pull Request resolved: https://github.com/pytorch/pytorch/pull/111362
Approved by: https://github.com/Skylion007
Author: Aryan Gupta
Date: 2023-11-07 21:38:13 +00:00
Committed by: PyTorch MergeBot
Parent: 2da062da51
Commit: 8cee0a25bd

8 changed files with 9 additions and 9 deletions

@@ -14,7 +14,7 @@ ignore =
     # to line this up with executable bit
     EXE001,
     # these ignores are from flake8-bugbear; please fix!
-    B007,B008,B017,B019,B023,B026,B028,B903,B904,B905,B906,B907
+    B007,B008,B017,B019,B023,B028,B903,B904,B905,B906,B907
     # these ignores are from flake8-comprehensions; please fix!
     C407,
     # these ignores are from flake8-logging-format; please fix!

@@ -29,7 +29,7 @@ ignore = [
     "B007", "B008", "B017",
     "B018", # Useless expression
     "B019",
-    "B023", "B026",
+    "B023",
     "B028", # No explicit `stacklevel` keyword argument found
     "B904",
     "E402",

@@ -100,7 +100,7 @@ class TestFSDPCheckpoint(FSDPTest):
                 l3 = ckpt_wrapper(l3)
 
             fsdp_wrapper = partial(
-                _maybe_wrap_fsdp, wrap_fsdp=wrap_fsdp, *fsdp_args, **fsdp_kwargs
+                _maybe_wrap_fsdp, *fsdp_args, wrap_fsdp=wrap_fsdp, **fsdp_kwargs
             )
             self.ffn = nn.Sequential(
                 fsdp_wrapper(l1),

@@ -230,8 +230,8 @@ class TestFSDPStateDict(FSDPTest):
                 bn1 = checkpoint_wrapper(bn1)
                 lin2 = checkpoint_wrapper(lin2)
             seq = nn.Sequential(
-                FSDP(lin1, mixed_precision=lin_mp, *fsdp_args, **fsdp_kwargs),
-                FSDP(bn1, mixed_precision=bn_mp, *fsdp_args, **fsdp_kwargs),
+                FSDP(lin1, *fsdp_args, mixed_precision=lin_mp, **fsdp_kwargs),
+                FSDP(bn1, *fsdp_args, mixed_precision=bn_mp, **fsdp_kwargs),
                 lin2,
             )
             if checkpoint_wrap:

@@ -48,7 +48,7 @@ _onnx_dep = True  # flag to import onnx package.
 
 
 def export_to_pbtxt(model, inputs, *args, **kwargs):
     return torch.onnx.export_to_pretty_string(
-        model, inputs, google_printer=True, *args, **kwargs
+        model, inputs, *args, google_printer=True, **kwargs
     )

@@ -986,7 +986,7 @@ class TestCommon(TestCase):
             try:
                 if with_out:
                     out = torch.empty(0, dtype=torch.int32, device=device)
-                    op_to_test(inputs, out=out, *args, **kwargs)
+                    op_to_test(inputs, *args, out=out, **kwargs)
                 else:
                     out = op_to_test(inputs, *args, **kwargs)
                 self.assertFalse(expectFail)

@@ -4716,7 +4716,7 @@ def aot_module(mod: nn.Module, *args, **kwargs) -> nn.Module:
     named_buffers = dict(mod.named_buffers(remove_duplicate=False))
     num_params_buffers = len(named_params) + len(named_buffers)
     compiled_f = aot_function(
-        functional_call, num_params_buffers=num_params_buffers, *args, **kwargs
+        functional_call, *args, num_params_buffers=num_params_buffers, **kwargs
     )
 
     class AOTModule(nn.Module):

@@ -40,7 +40,7 @@ class SamplerIterDataPipe(IterDataPipe[T_co]):
         self.sampler_args = () if sampler_args is None else sampler_args
         self.sampler_kwargs = {} if sampler_kwargs is None else sampler_kwargs
         # https://github.com/python/mypy/pull/9629 will solve
-        self.sampler = sampler(data_source=self.datapipe, *self.sampler_args, **self.sampler_kwargs)  # type: ignore[misc]
+        self.sampler = sampler(*self.sampler_args, data_source=self.datapipe, **self.sampler_kwargs)  # type: ignore[misc]
 
     def __iter__(self) -> Iterator[T_co]:
         return iter(self.sampler)
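
As a sanity check (hypothetical snippet, not part of this commit): the reordering is behavior-preserving, because Python binds positional arguments, including `*`-unpacked ones, before keyword arguments no matter where the unpacking appears in the call; only the left-to-right evaluation order of the argument expressions changes.

```python
def f(x, flag=False, **kw):
    return (x, flag, kw)

args = (1,)
# The old (B026-flagged) spelling and the fixed spelling bind identically
assert f(flag=True, *args, y=2) == f(*args, flag=True, y=2) == (1, True, {"y": 2})
```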