Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
[BE]: Enable a few additional ruff rules (#130700)
Enables a few extra ruff rules. Most of them have no violations left, since earlier PRs already cleaned those up; this change just turns the rules on so they are enforced. Adds one noqa, because we want the suboptimal lambda generation + call kept as a test. Also enables the check in flake8.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/130700
Approved by: https://github.com/justinchuby, https://github.com/ezyang
Committed by: PyTorch MergeBot
Parent: c24c50da92
Commit: d1c4e6b55f
.flake8 (2 changes)
@@ -2,7 +2,7 @@
 # NOTE: **Mirror any changes** to this file the [tool.ruff] config in pyproject.toml
 # before we can fully move to use ruff
 enable-extensions = G
-select = B,C,E,F,G,P,SIM1,T4,W,B9,TOR0,TOR1,TOR2,TOR9
+select = B,C,E,F,G,P,SIM1,SIM911,T4,W,B9,TOR0,TOR1,TOR2,TOR9
 max-line-length = 120
 # C408 ignored because we like the dict keyword argument syntax
 # E501 is not flexible enough, we're using B950 instead
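For context, SIM911 is the flake8-simplify check added to the select line above (zip-dict-keys-and-values). The snippet below is an illustrative sketch, not part of this commit; the scores dict is made up. It shows the pattern the rule flags and the rewrite it suggests:

# Illustrative only: the pattern SIM911 (zip-dict-keys-and-values) reports.
scores = {"train": 0.91, "eval": 0.88}

# Flagged: zip() re-pairs keys and values the dict already stores together.
for split, acc in zip(scores.keys(), scores.values()):
    print(split, acc)

# Suggested fix: iterate over the items directly.
for split, acc in scores.items():
    print(split, acc)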
pyproject.toml
@@ -95,6 +95,7 @@ select = [
     "EXE",
     "F",
     "SIM1",
+    "SIM911",
     "W",
     # Not included in flake8
     "FURB",
@@ -110,6 +111,7 @@ select = [
     "PLC0131", # type bivariance
     "PLC0132", # type param mismatch
     "PLC0205", # string as __slots__
+    "PLC3002", # unnecessary-direct-lambda-call
     "PLE",
     "PLR0133", # constant comparison
     "PLR0206", # property with params
@@ -137,6 +139,7 @@ select = [
     "RUF016", # type error non-integer index
     "RUF017",
     "RUF018", # no assignment in assert
     "RUF019", # unnecessary-key-check
     "RUF024", # from keys mutable
     "RUF026", # default factory kwarg
     "TCH",
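As a side note on one of the rules listed above, RUF019 (unnecessary-key-check) targets a membership test that is immediately followed by a lookup of the same key. A minimal sketch, not taken from the PyTorch code base, using a hypothetical config dict:

# Illustrative only: the pattern RUF019 (unnecessary-key-check) reports.
config = {"verbose": True}

# Flagged: the key is checked and then looked up again.
if "verbose" in config and config["verbose"]:
    print("verbose mode on")

# Suggested fix: a single .get() covers both the check and the lookup.
if config.get("verbose"):
    print("verbose mode on")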
@@ -1482,7 +1482,7 @@ class TestTracer(JitTestCase):
                 return x + 2

             def forward(self, input):
-                return (lambda a: a + 1)(input)
+                return (lambda a: a + 1)(input)  # noqa: PLC3002

         # When tracing Bar as a submodule, we only want to script the
         # exported methods, and we want to keep the forwards still
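The noqa in the hunk above suppresses PLC3002 (unnecessary-direct-lambda-call) on purpose: the test exercises tracing of a lambda that is defined and called in place, so the pattern has to stay. Outside of such tests, the rule asks for the expression to be evaluated inline; a small sketch with made-up values:

# Illustrative only: what PLC3002 (unnecessary-direct-lambda-call) reports.
result = (lambda a: a + 1)(41)  # flagged: lambda defined and called immediately

# The rewrite the rule would normally suggest: evaluate the expression directly.
result = 41 + 1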
@@ -3696,7 +3696,6 @@ class TestLinalg(TestCase):
         with self.assertRaisesRegex(RuntimeError,
                                     "The derivative of linalg.qr depends on Q"):
             b.backward()
-        #
         inp = torch.randn((7, 5), device=device, dtype=dtype, requires_grad=True)
         q, r = torch.linalg.qr(inp, mode='complete')
         b = torch.sum(r)
@@ -281,7 +281,7 @@ manual_torch_name_rule_map = {
     "torch._functorch.deprecated.grad": UserFunctionVariable,
     "torch._functorch.deprecated.grad_and_value": UserFunctionVariable,
     "torch._functorch.deprecated.vjp": UserFunctionVariable,
-    #
+    # everything else
     "torch._constrain_as_size": UserFunctionVariable,
     "torch._tensor._convert": UserFunctionVariable,
     "torch.jit._unwrap_optional": UserFunctionVariable,