diff --git a/.lintrunner.toml b/.lintrunner.toml
index 679a04981b07..cd015f3e97a4 100644
--- a/.lintrunner.toml
+++ b/.lintrunner.toml
@@ -1453,7 +1453,7 @@ init_command = [
     '--dry-run={{DRYRUN}}',
     'usort==1.0.8.post1',
     'isort==6.0.1',
-    'ruff==0.12.9',  # sync with RUFF
+    'ruff==0.13.1',  # sync with RUFF
 ]
 is_formatter = true
 
@@ -1587,7 +1587,7 @@ init_command = [
     'python3',
     'tools/linter/adapters/pip_init.py',
     '--dry-run={{DRYRUN}}',
-    'ruff==0.12.9',  # sync with PYFMT
+    'ruff==0.13.1',  # sync with PYFMT
 ]
 is_formatter = true
 
diff --git a/caffe2/perfkernels/sve_emblookup_codegen.py b/caffe2/perfkernels/sve_emblookup_codegen.py
index 4c5ad01bdc10..6a63920cc8bb 100644
--- a/caffe2/perfkernels/sve_emblookup_codegen.py
+++ b/caffe2/perfkernels/sve_emblookup_codegen.py
@@ -38,7 +38,7 @@ def unroll(num_unrolls, IndexType, InType, OutType):
 
     code = []
     if num_unrolls == 1:
-        code.append(f"  // tail loop")
+        code.append("  // tail loop")
         code.append("  if (j < end_offset) {")
     else:
         code.append(f"  // unrolling {num_unrolls} times")
diff --git a/pyproject.toml b/pyproject.toml
index 321af034f854..69ece31e0a98 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -182,7 +182,6 @@ ignore = [
     "SIM117",
     "SIM118",
     "UP007",  # keep-runtime-typing
-    "UP038",  # Was removed from newer versions, results in slower code
     "UP045",  # keep-runtime-typing
     "TC006",
     # TODO: Remove Python-3.10 specific suppressions
diff --git a/torch/_functorch/_aot_autograd/autograd_cache.py b/torch/_functorch/_aot_autograd/autograd_cache.py
index 4d6a881b2a45..54ad74da8f73 100644
--- a/torch/_functorch/_aot_autograd/autograd_cache.py
+++ b/torch/_functorch/_aot_autograd/autograd_cache.py
@@ -507,7 +507,7 @@ def autograd_cache_key(
 TOut = TypeVar("TOut", bound=OutputCode)
 
 
-class InductorOutput(Generic[TOut], ABC):
+class InductorOutput(ABC, Generic[TOut]):
     """
     Class representing a single inductor output
     """
diff --git a/torch/_inductor/autoheuristic/artifacts/_MMRankingA100.py b/torch/_inductor/autoheuristic/artifacts/_MMRankingA100.py
index 6a8cce6f870b..7ebf134c83d7 100644
--- a/torch/_inductor/autoheuristic/artifacts/_MMRankingA100.py
+++ b/torch/_inductor/autoheuristic/artifacts/_MMRankingA100.py
@@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
 class MMRankingA100(LearnedHeuristicDecision):
 
     def __init__(self) -> None:
-        self.choices: List[Choice] = []
+        self.choices: list[Choice] = []
         self.fill_choices()
 
     def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@@ -238,7 +238,7 @@ class MMRankingA100(LearnedHeuristicDecision):
     def get_name(self) -> str:
         return 'mm'
 
-    def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]:
+    def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
         if context.get_value('arith_intensity') <= 52.6245059967041:
             if context.get_value('n') <= 34.0:
                 if context.get_value('n') <= 18.0:
diff --git a/torch/_inductor/autoheuristic/artifacts/_MMRankingH100.py b/torch/_inductor/autoheuristic/artifacts/_MMRankingH100.py
index e794b8e646f3..6201acc4213a 100644
--- a/torch/_inductor/autoheuristic/artifacts/_MMRankingH100.py
+++ b/torch/_inductor/autoheuristic/artifacts/_MMRankingH100.py
@@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
 class MMRankingH100(LearnedHeuristicDecision):
 
     def __init__(self) -> None:
-        self.choices: List[Choice] = []
+        self.choices: list[Choice] = []
         self.fill_choices()
 
     def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@@ -242,7 +242,7 @@ class MMRankingH100(LearnedHeuristicDecision):
     def get_name(self) -> str:
         return 'mm'
 
-    def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]:
+    def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
         if context.get_value('arith_intensity') <= 29.89772129058838:
             if context.get_value('n') <= 34.0:
                 if context.get_value('n') <= 18.0:
diff --git a/torch/_inductor/autoheuristic/artifacts/_MixedMMA100.py b/torch/_inductor/autoheuristic/artifacts/_MixedMMA100.py
index 9a9ea693a96d..1ba7cbaf9027 100644
--- a/torch/_inductor/autoheuristic/artifacts/_MixedMMA100.py
+++ b/torch/_inductor/autoheuristic/artifacts/_MixedMMA100.py
@@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
 class MixedMMA100(LearnedHeuristicDecision):
 
     def __init__(self) -> None:
-        self.choices: List[Choice] = []
+        self.choices: list[Choice] = []
         self.fill_choices()
 
     def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@@ -62,7 +62,7 @@ class MixedMMA100(LearnedHeuristicDecision):
     def get_name(self) -> str:
         return 'mixed_mm'
 
-    def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]:
+    def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
         if str(context.get_value('1LEQmLEQ16')) != 'True':
             if context.get_value('m') <= 32.5:
                 if context.get_value('n') <= 6976.0:
diff --git a/torch/_inductor/autoheuristic/artifacts/_MixedMMH100.py b/torch/_inductor/autoheuristic/artifacts/_MixedMMH100.py
index b4552c5257e7..c21579077042 100644
--- a/torch/_inductor/autoheuristic/artifacts/_MixedMMH100.py
+++ b/torch/_inductor/autoheuristic/artifacts/_MixedMMH100.py
@@ -17,7 +17,7 @@ from torch._inductor.autoheuristic.learnedheuristic_interface import (
 class MixedMMH100(LearnedHeuristicDecision):
 
     def __init__(self) -> None:
-        self.choices: List[Choice] = []
+        self.choices: list[Choice] = []
         self.fill_choices()
 
     def check_precondition(self, metadata: AHMetadata, context: AHContext,) -> bool:
@@ -61,7 +61,7 @@ class MixedMMH100(LearnedHeuristicDecision):
     def get_name(self) -> str:
         return 'mixed_mm'
 
-    def get_best_choices(self, context: AHContext) -> Optional[List[tuple[float, int]]]:
+    def get_best_choices(self, context: AHContext) -> Optional[list[tuple[float, int]]]:
         if context.get_value('arith_intensity') <= 15.988086223602295:
             if context.get_value('n') <= 25280.0:
                 if context.get_value('n') <= 1344.0:
diff --git a/torch/fx/experimental/symbolic_shapes.py b/torch/fx/experimental/symbolic_shapes.py
index d972714ae243..58f240746f9a 100644
--- a/torch/fx/experimental/symbolic_shapes.py
+++ b/torch/fx/experimental/symbolic_shapes.py
@@ -2058,7 +2058,7 @@ _T1 = TypeVar("_T1")
 
 
 @dataclass(frozen=True)
-class StatelessSymbolicContext(Generic[_P1, _T1], SymbolicContext):
+class StatelessSymbolicContext(SymbolicContext, Generic[_P1, _T1]):
     """
     Create symbols in ``create_symbolic_sizes_strides_storage_offset`` via
     a symbolic_context determination as given by ``DimDynamic`` and ``DimConstraint``.
diff --git a/torchgen/utils.py b/torchgen/utils.py
index ced051e176f7..f6777912a8f4 100644
--- a/torchgen/utils.py
+++ b/torchgen/utils.py
@@ -11,7 +11,7 @@ from dataclasses import is_dataclass
 from enum import auto, Enum
 from pathlib import Path
 from pprint import pformat
-from typing import Any, Callable, Generic, Literal, NoReturn, TYPE_CHECKING, TypeVar
+from typing import Any, Callable, Generic, NoReturn, TYPE_CHECKING, TypeVar
 from typing_extensions import assert_never, deprecated, Self
 
 from torchgen.code_template import CodeTemplate
@@ -482,7 +482,7 @@ class NamespaceHelper:
 
 
 class OrderedSet(Generic[T]):
-    storage: dict[T, Literal[None]]
+    storage: dict[T, None]
 
     def __init__(self, iterable: Iterable[T] | None = None) -> None:
         if iterable is None: