Update ruff to 0.13.1 (#163744)

Update ruff to 0.13.1 so that we can remove `UP038` from `pyproject.toml`, because that rule has been removed from ruff's supported rules.
There are some fixes; the most notable one is [(PYI059)](https://docs.astral.sh/ruff/rules/generic-not-last-base-class/#generic-not-last-base-class-pyi059):
```
Checks for classes inheriting from typing.Generic[] where Generic[] is not the last base class in the bases tuple.

```

A BC-breaking change is introduced: the typing of `OrderedSet.storage` is changed from `dict[T, Literal[None]]` to `dict[T, None]`.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/163744
Approved by: https://github.com/Skylion007, https://github.com/jingsh
This commit is contained in:
Yuanyuan Chen
2025-09-26 10:12:21 +00:00
committed by PyTorch MergeBot
parent 6a2bd1f4ee
commit 7441a1b9b1
10 changed files with 15 additions and 16 deletions

View File

@ -1453,7 +1453,7 @@ init_command = [
'--dry-run={{DRYRUN}}', '--dry-run={{DRYRUN}}',
'usort==1.0.8.post1', 'usort==1.0.8.post1',
'isort==6.0.1', 'isort==6.0.1',
'ruff==0.12.9', # sync with RUFF 'ruff==0.13.1', # sync with RUFF
] ]
is_formatter = true is_formatter = true
@ -1587,7 +1587,7 @@ init_command = [
'python3', 'python3',
'tools/linter/adapters/pip_init.py', 'tools/linter/adapters/pip_init.py',
'--dry-run={{DRYRUN}}', '--dry-run={{DRYRUN}}',
'ruff==0.12.9', # sync with PYFMT 'ruff==0.13.1', # sync with PYFMT
] ]
is_formatter = true is_formatter = true

View File

@ -38,7 +38,7 @@ def unroll(num_unrolls, IndexType, InType, OutType):
code = [] code = []
if num_unrolls == 1: if num_unrolls == 1:
code.append(f" // tail loop") code.append(" // tail loop")
code.append(" if (j < end_offset) {") code.append(" if (j < end_offset) {")
else: else:
code.append(f" // unrolling {num_unrolls} times") code.append(f" // unrolling {num_unrolls} times")

View File

@ -182,7 +182,6 @@ ignore = [
"SIM117", "SIM117",
"SIM118", "SIM118",
"UP007", # keep-runtime-typing "UP007", # keep-runtime-typing
"UP038", # Was removed from newer versions, results in slower code
"UP045", # keep-runtime-typing "UP045", # keep-runtime-typing
"TC006", "TC006",
# TODO: Remove Python-3.10 specific suppressions # TODO: Remove Python-3.10 specific suppressions

View File

@ -507,7 +507,7 @@ def autograd_cache_key(
TOut = TypeVar("TOut", bound=OutputCode) TOut = TypeVar("TOut", bound=OutputCode)
class InductorOutput(Generic[TOut], ABC): class InductorOutput(ABC, Generic[TOut]):
""" """
Class representing a single inductor output Class representing a single inductor output
""" """

View File

@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
class MMRankingA100(LearnedHeuristicDecision): class MMRankingA100(LearnedHeuristicDecision):
def __init__(self) -> None: def __init__(self) -> None:
self.choices: List[Choice] = [] self.choices: list[Choice] = []
self.fill_choices() self.fill_choices()
def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool: def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@ -238,7 +238,7 @@ class MMRankingA100(LearnedHeuristicDecision):
def get_name(self) -> str: def get_name(self) -> str:
return 'mm' return 'mm'
def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]: def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
if context.get_value('arith_intensity') <= 52.6245059967041: if context.get_value('arith_intensity') <= 52.6245059967041:
if context.get_value('n') <= 34.0: if context.get_value('n') <= 34.0:
if context.get_value('n') <= 18.0: if context.get_value('n') <= 18.0:

View File

@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
class MMRankingH100(LearnedHeuristicDecision): class MMRankingH100(LearnedHeuristicDecision):
def __init__(self) -> None: def __init__(self) -> None:
self.choices: List[Choice] = [] self.choices: list[Choice] = []
self.fill_choices() self.fill_choices()
def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool: def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@ -242,7 +242,7 @@ class MMRankingH100(LearnedHeuristicDecision):
def get_name(self) -> str: def get_name(self) -> str:
return 'mm' return 'mm'
def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]: def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
if context.get_value('arith_intensity') <= 29.89772129058838: if context.get_value('arith_intensity') <= 29.89772129058838:
if context.get_value('n') <= 34.0: if context.get_value('n') <= 34.0:
if context.get_value('n') <= 18.0: if context.get_value('n') <= 18.0:

View File

@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
class MixedMMA100(LearnedHeuristicDecision): class MixedMMA100(LearnedHeuristicDecision):
def __init__(self) -> None: def __init__(self) -> None:
self.choices: List[Choice] = [] self.choices: list[Choice] = []
self.fill_choices() self.fill_choices()
def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool: def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@ -62,7 +62,7 @@ class MixedMMA100(LearnedHeuristicDecision):
def get_name(self) -> str: def get_name(self) -> str:
return 'mixed_mm' return 'mixed_mm'
def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]: def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
if str(context.get_value('1LEQmLEQ16')) != 'True': if str(context.get_value('1LEQmLEQ16')) != 'True':
if context.get_value('m') <= 32.5: if context.get_value('m') <= 32.5:
if context.get_value('n') <= 6976.0: if context.get_value('n') <= 6976.0:

View File

@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
class MixedMMH100(LearnedHeuristicDecision): class MixedMMH100(LearnedHeuristicDecision):
def __init__(self) -> None: def __init__(self) -> None:
self.choices: List[Choice] = [] self.choices: list[Choice] = []
self.fill_choices() self.fill_choices()
def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool: def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@ -61,7 +61,7 @@ class MixedMMH100(LearnedHeuristicDecision):
def get_name(self) -> str: def get_name(self) -> str:
return 'mixed_mm' return 'mixed_mm'
def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]: def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
if context.get_value('arith_intensity') <= 15.988086223602295: if context.get_value('arith_intensity') <= 15.988086223602295:
if context.get_value('n') <= 25280.0: if context.get_value('n') <= 25280.0:
if context.get_value('n') <= 1344.0: if context.get_value('n') <= 1344.0:

View File

@ -2058,7 +2058,7 @@ _T1 = TypeVar("_T1")
@dataclass(frozen=True) @dataclass(frozen=True)
class StatelessSymbolicContext(Generic[_P1, _T1], SymbolicContext): class StatelessSymbolicContext(SymbolicContext, Generic[_P1, _T1]):
""" """
Create symbols in ``create_symbolic_sizes_strides_storage_offset`` via Create symbols in ``create_symbolic_sizes_strides_storage_offset`` via
a symbolic_context determination as given by ``DimDynamic`` and ``DimConstraint``. a symbolic_context determination as given by ``DimDynamic`` and ``DimConstraint``.

View File

@ -11,7 +11,7 @@ from dataclasses import is_dataclass
from enum import auto, Enum from enum import auto, Enum
from pathlib import Path from pathlib import Path
from pprint import pformat from pprint import pformat
from typing import Any, Callable, Generic, Literal, NoReturn, TYPE_CHECKING, TypeVar from typing import Any, Callable, Generic, NoReturn, TYPE_CHECKING, TypeVar
from typing_extensions import assert_never, deprecated, Self from typing_extensions import assert_never, deprecated, Self
from torchgen.code_template import CodeTemplate from torchgen.code_template import CodeTemplate
@ -482,7 +482,7 @@ class NamespaceHelper:
class OrderedSet(Generic[T]): class OrderedSet(Generic[T]):
storage: dict[T, Literal[None]] storage: dict[T, None]
def __init__(self, iterable: Iterable[T] | None = None) -> None: def __init__(self, iterable: Iterable[T] | None = None) -> None:
if iterable is None: if iterable is None: