From b11593c31bd84845e1573de0c15692387c572a2f Mon Sep 17 00:00:00 2001
From: Yuanyuan Chen
Date: Wed, 15 Oct 2025 03:18:57 +0000
Subject: [PATCH] [8/N] Apply ruff UP035 rule (#165214)

This is a follow-up to #164653 that continues applying `UP035` fixes. The purpose is to finally enable this rule.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165214
Approved by: https://github.com/ezyang
---
 test/fx/test_matcher_utils.py | 2 +-
 test/test_fx.py | 3 ++-
 test/test_fx_experimental.py | 3 ++-
 torch/distributed/pipelining/microbatch.py | 3 ++-
 torch/fx/_compatibility.py | 3 ++-
 torch/fx/_graph_pickler.py | 3 ++-
 torch/fx/_pytree.py | 3 ++-
 torch/fx/_symbolic_trace.py | 4 ++--
 torch/fx/experimental/_dynamism.py | 3 ++-
 torch/fx/experimental/const_fold.py | 3 ++-
 torch/fx/experimental/graph_gradual_typechecker.py | 3 ++-
 torch/fx/experimental/meta_tracer.py | 3 ++-
 .../migrate_gradual_types/constraint_generator.py | 4 ++--
 .../migrate_gradual_types/constraint_transformation.py | 2 +-
 torch/fx/experimental/normalize.py | 3 ++-
 torch/fx/experimental/proxy_tensor.py | 6 +++---
 torch/fx/experimental/recording.py | 3 ++-
 torch/fx/experimental/rewriter.py | 3 ++-
 torch/fx/experimental/symbolic_shapes.py | 7 ++++---
 torch/fx/experimental/unification/multipledispatch/core.py | 3 ++-
 torch/fx/experimental/validator.py | 3 ++-
 torch/fx/graph.py | 4 ++--
 torch/fx/graph_module.py | 3 ++-
 torch/fx/node.py | 6 +++---
 torch/fx/operator_schemas.py | 3 ++-
 torch/fx/passes/graph_transform_observer.py | 3 ++-
 torch/fx/passes/infra/pass_manager.py | 2 +-
 torch/fx/passes/net_min_base.py | 3 ++-
 torch/fx/passes/param_fetch.py | 3 ++-
 torch/fx/passes/pass_manager.py | 3 ++-
 torch/fx/passes/reinplace.py | 3 ++-
 torch/fx/passes/split_module.py | 3 ++-
 torch/fx/passes/utils/source_matcher_utils.py | 3 ++-
 torch/fx/proxy.py | 4 ++--
 torch/fx/subgraph_rewriter.py | 3 ++-
 35 files changed, 71 insertions(+), 45 deletions(-)

diff --git a/test/fx/test_matcher_utils.py b/test/fx/test_matcher_utils.py
index d046fccf1f50..6354fec2c6ed 100644
--- a/test/fx/test_matcher_utils.py
+++ b/test/fx/test_matcher_utils.py
@@ -2,7 +2,7 @@
 
 import os
 import sys
-from typing import Callable
+from collections.abc import Callable
 
 import torch
 import torch.nn.functional as F
diff --git a/test/test_fx.py b/test/test_fx.py
index e3cd61432d08..1f6296a509fc 100644
--- a/test/test_fx.py
+++ b/test/test_fx.py
@@ -35,7 +35,8 @@ from torch.fx.experimental.rewriter import RewritingTracer
 from torch.fx.operator_schemas import get_signature_for_torch_op
 from copy import deepcopy
 from collections import namedtuple
-from typing import Any, Callable, NamedTuple, Optional, Union
+from typing import Any, NamedTuple, Optional, Union
+from collections.abc import Callable
 
 import torch
 
diff --git a/test/test_fx_experimental.py b/test/test_fx_experimental.py
index 72d770e6d3f0..d74a3febf171 100644
--- a/test/test_fx_experimental.py
+++ b/test/test_fx_experimental.py
@@ -12,7 +12,8 @@ import tempfile
 import typing
 import unittest
 from types import BuiltinFunctionType
-from typing import Callable, NamedTuple, Optional, Union
+from typing import NamedTuple, Optional, Union
+from collections.abc import Callable
 
 import torch
 import torch.fx.experimental.meta_tracer
diff --git a/torch/distributed/pipelining/microbatch.py b/torch/distributed/pipelining/microbatch.py
index e99bf9bce25e..06c4edb9b3d3 100644
--- a/torch/distributed/pipelining/microbatch.py
+++ b/torch/distributed/pipelining/microbatch.py
@@ -2,7 +2,8 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates
 import logging
 import operator
-from typing import Any, Optional, Sequence
+from collections.abc import Sequence
+from typing import Any, Optional
 
 import torch
 from torch.fx.node import map_aggregate
diff --git a/torch/fx/_compatibility.py b/torch/fx/_compatibility.py
index 26bb3ff3b772..c07dd1b51bc0 100644
--- a/torch/fx/_compatibility.py
+++ b/torch/fx/_compatibility.py
@@ -1,5 +1,6 @@
 import textwrap
-from typing import Any, Callable, TypeVar
+from collections.abc import Callable
+from typing import Any, TypeVar
 
 
 _BACK_COMPAT_OBJECTS: dict[Any, None] = {}
diff --git a/torch/fx/_graph_pickler.py b/torch/fx/_graph_pickler.py
index 0d27b3fc390d..8138e476b416 100644
--- a/torch/fx/_graph_pickler.py
+++ b/torch/fx/_graph_pickler.py
@@ -3,7 +3,8 @@ import importlib
 import io
 import pickle
 from abc import abstractmethod
-from typing import Any, Callable, NewType, Optional, TypeVar, Union
+from collections.abc import Callable
+from typing import Any, NewType, Optional, TypeVar, Union
 from typing_extensions import override, Self
 
 import torch
diff --git a/torch/fx/_pytree.py b/torch/fx/_pytree.py
index 7a31e4ef3cfa..2f608816c49b 100644
--- a/torch/fx/_pytree.py
+++ b/torch/fx/_pytree.py
@@ -1,5 +1,6 @@
 from collections import namedtuple
-from typing import Any, Callable, Optional, TypeVar
+from collections.abc import Callable
+from typing import Any, Optional, TypeVar
 from typing_extensions import NamedTuple
 
 import torch.return_types
diff --git a/torch/fx/_symbolic_trace.py b/torch/fx/_symbolic_trace.py
index 07f2f0bf983a..ddce85e21d22 100644
--- a/torch/fx/_symbolic_trace.py
+++ b/torch/fx/_symbolic_trace.py
@@ -9,10 +9,10 @@ import logging
 import math
 import os
 import warnings
+from collections.abc import Callable
 from itertools import chain
 from types import CodeType, FunctionType, ModuleType
-from typing import Any, Callable, get_args, NamedTuple, Optional, Union
-from typing_extensions import TypeAlias
+from typing import Any, get_args, NamedTuple, Optional, TypeAlias, Union
 
 import torch
 import torch.utils._pytree as pytree
diff --git a/torch/fx/experimental/_dynamism.py b/torch/fx/experimental/_dynamism.py
index 4828b6f458eb..f6f30779ecc2 100644
--- a/torch/fx/experimental/_dynamism.py
+++ b/torch/fx/experimental/_dynamism.py
@@ -1,5 +1,6 @@
 import re
-from typing import Any, Callable, Union
+from collections.abc import Callable
+from typing import Any, Union
 
 import torch
 from torch.utils._pytree import tree_flatten_with_path, tree_map
diff --git a/torch/fx/experimental/const_fold.py b/torch/fx/experimental/const_fold.py
index 3e53cb908fbf..d4a56a808bc1 100644
--- a/torch/fx/experimental/const_fold.py
+++ b/torch/fx/experimental/const_fold.py
@@ -1,6 +1,7 @@
 # mypy: allow-untyped-defs
 import re
-from typing import Callable, Optional, Union
+from collections.abc import Callable
+from typing import Optional, Union
 
 import torch.fx
 from torch.fx.node import map_arg
diff --git a/torch/fx/experimental/graph_gradual_typechecker.py b/torch/fx/experimental/graph_gradual_typechecker.py
index b5ddeb3fffe3..d1ca9bc0c880 100644
--- a/torch/fx/experimental/graph_gradual_typechecker.py
+++ b/torch/fx/experimental/graph_gradual_typechecker.py
@@ -1,8 +1,9 @@
 # mypy: allow-untyped-defs
 import itertools
 import operator
+from collections.abc import Callable
 from functools import reduce
-from typing import Callable, TypeVar
+from typing import TypeVar
 from typing_extensions import ParamSpec
 
 import sympy
diff --git a/torch/fx/experimental/meta_tracer.py b/torch/fx/experimental/meta_tracer.py
index 5f437cc0a686..040521a28455 100644
--- a/torch/fx/experimental/meta_tracer.py
+++ b/torch/fx/experimental/meta_tracer.py
@@ -2,7 +2,8 @@
 import builtins
 import functools
 import warnings
-from typing import Any, Callable, Optional, Union
+from collections.abc import Callable
+from typing import Any, Optional, Union
 
 import torch
 import torch.fx
diff --git a/torch/fx/experimental/migrate_gradual_types/constraint_generator.py b/torch/fx/experimental/migrate_gradual_types/constraint_generator.py
index 9e0f8f98768c..381cdf18d19b 100644
--- a/torch/fx/experimental/migrate_gradual_types/constraint_generator.py
+++ b/torch/fx/experimental/migrate_gradual_types/constraint_generator.py
@@ -1,8 +1,8 @@
 # mypy: allow-untyped-defs
 import operator
 import warnings
-from collections.abc import Iterable
-from typing import Callable, TypeVar
+from collections.abc import Callable, Iterable
+from typing import TypeVar
 from typing_extensions import ParamSpec
 
 import torch
diff --git a/torch/fx/experimental/migrate_gradual_types/constraint_transformation.py b/torch/fx/experimental/migrate_gradual_types/constraint_transformation.py
index 9b84c12127f0..0782ba5affc9 100644
--- a/torch/fx/experimental/migrate_gradual_types/constraint_transformation.py
+++ b/torch/fx/experimental/migrate_gradual_types/constraint_transformation.py
@@ -1,7 +1,7 @@
 # mypy: ignore-errors
 import copy
 import itertools
-from typing import Callable
+from collections.abc import Callable
 
 from torch.fx.experimental.migrate_gradual_types.constraint import (
     ApplyBroadcasting,
diff --git a/torch/fx/experimental/normalize.py b/torch/fx/experimental/normalize.py
index 4d9cf4e10896..e2dd3c962bbe 100644
--- a/torch/fx/experimental/normalize.py
+++ b/torch/fx/experimental/normalize.py
@@ -1,6 +1,7 @@
 # mypy: allow-untyped-defs
 import operator
-from typing import Any, Callable, Optional
+from collections.abc import Callable
+from typing import Any, Optional
 
 import torch
 import torch.fx
diff --git a/torch/fx/experimental/proxy_tensor.py b/torch/fx/experimental/proxy_tensor.py
index 2e877ff4fa0d..aeb3c374bce6 100644
--- a/torch/fx/experimental/proxy_tensor.py
+++ b/torch/fx/experimental/proxy_tensor.py
@@ -16,12 +16,12 @@ import typing
 import typing_extensions
 import weakref
 from collections import defaultdict, OrderedDict
-from collections.abc import Generator, Mapping, Sequence
+from collections.abc import Callable, Generator, Mapping, Sequence
 from contextlib import _GeneratorContextManager, contextmanager, ExitStack, nullcontext
 from dataclasses import dataclass
 from typing import (
     Any,
-    Callable,
+    Concatenate,
     Optional,
     overload,
     Protocol,
@@ -29,7 +29,7 @@ from typing import (
     TypeVar,
     Union,
 )
-from typing_extensions import Concatenate, ParamSpec, Self, TypeVarTuple, Unpack
+from typing_extensions import ParamSpec, Self, TypeVarTuple, Unpack
 from weakref import WeakKeyDictionary
 
 import torch
diff --git a/torch/fx/experimental/recording.py b/torch/fx/experimental/recording.py
index a9025fc54ebe..4ec092898cd6 100644
--- a/torch/fx/experimental/recording.py
+++ b/torch/fx/experimental/recording.py
@@ -3,8 +3,9 @@ import functools
 import inspect
 import itertools
 import logging
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Callable, Optional, Union
+from typing import Any, Optional, Union
 
 import torch
 import torch.utils._pytree as pytree
diff --git a/torch/fx/experimental/rewriter.py b/torch/fx/experimental/rewriter.py
index 8e635a525f6f..2cc902599aeb 100644
--- a/torch/fx/experimental/rewriter.py
+++ b/torch/fx/experimental/rewriter.py
@@ -5,8 +5,9 @@ import copy
 import functools
 import inspect
 import textwrap
+from collections.abc import Callable
 from types import FunctionType
-from typing import Any, Callable, cast, Optional, Union
+from typing import Any, cast, Optional, Union
 
 import torch
 from torch._sources import normalize_source_lines
diff --git a/torch/fx/experimental/symbolic_shapes.py b/torch/fx/experimental/symbolic_shapes.py
index 4a4744939502..bbe84a2e4141 100644
--- a/torch/fx/experimental/symbolic_shapes.py
+++ b/torch/fx/experimental/symbolic_shapes.py
@@ -31,23 +31,24 @@ import sys
 import threading
 import traceback
 from collections import Counter, defaultdict
-from collections.abc import Generator, Iterator, Mapping, Sequence
+from collections.abc import Callable, Generator, Iterator, Mapping, Sequence
 from contextlib import _GeneratorContextManager, contextmanager
 from dataclasses import asdict, dataclass, field
 from enum import Enum
 from typing import (
     Any,
-    Callable,
     cast,
     Generic,
     NamedTuple,
     NoReturn,
     Optional,
     TYPE_CHECKING,
+    TypeAlias,
+    TypeGuard,
     TypeVar,
     Union,
 )
-from typing_extensions import deprecated, ParamSpec, TypeAlias, TypeGuard
+from typing_extensions import deprecated, ParamSpec
 
 import torch
 import torch.fx
diff --git a/torch/fx/experimental/unification/multipledispatch/core.py b/torch/fx/experimental/unification/multipledispatch/core.py
index cd00a9028d55..69b9f3b2b5a2 100644
--- a/torch/fx/experimental/unification/multipledispatch/core.py
+++ b/torch/fx/experimental/unification/multipledispatch/core.py
@@ -1,6 +1,7 @@
 # mypy: allow-untyped-defs
 import inspect
-from typing import Any, Callable, TypeVar
+from collections.abc import Callable
+from typing import Any, TypeVar
 from typing_extensions import TypeVarTuple, Unpack
 
 from .dispatcher import Dispatcher, MethodDispatcher
diff --git a/torch/fx/experimental/validator.py b/torch/fx/experimental/validator.py
index db0095251206..eb55b6c2050c 100644
--- a/torch/fx/experimental/validator.py
+++ b/torch/fx/experimental/validator.py
@@ -4,8 +4,9 @@ import functools
 import logging
 import math
 import operator
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Callable, Optional, Union
+from typing import Any, Optional, Union
 
 import sympy
 
diff --git a/torch/fx/graph.py b/torch/fx/graph.py
index a6a365578a50..940737e7e3a6 100644
--- a/torch/fx/graph.py
+++ b/torch/fx/graph.py
@@ -12,10 +12,10 @@ import re
 import typing
 import warnings
 from collections import defaultdict
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
 from contextlib import contextmanager
 from dataclasses import dataclass
-from typing import Any, Callable, Literal, NamedTuple, Optional, TYPE_CHECKING
+from typing import Any, Literal, NamedTuple, Optional, TYPE_CHECKING
 
 import torch
 import torch.utils._pytree as pytree
diff --git a/torch/fx/graph_module.py b/torch/fx/graph_module.py
index 338190c7a5e9..dbe2467b1b89 100644
--- a/torch/fx/graph_module.py
+++ b/torch/fx/graph_module.py
@@ -7,8 +7,9 @@ import os
 import sys
 import traceback
 import warnings
+from collections.abc import Callable
 from pathlib import Path
-from typing import Any, Callable, Optional, Union
+from typing import Any, Optional, Union
 
 import torch
 import torch.nn as nn
diff --git a/torch/fx/node.py b/torch/fx/node.py
index 321cbfbf2f3b..b267b01a7c50 100644
--- a/torch/fx/node.py
+++ b/torch/fx/node.py
@@ -4,9 +4,9 @@ import inspect
 import logging
 import operator
 import types
-from collections.abc import Iterable, Mapping, Sequence
-from typing import Any, Callable, Optional, TYPE_CHECKING, Union
-from typing_extensions import ParamSpec, TypeAlias, TypeVar
+from collections.abc import Callable, Iterable, Mapping, Sequence
+from typing import Any, Optional, TYPE_CHECKING, TypeAlias, Union
+from typing_extensions import ParamSpec, TypeVar
 
 import torch
 from torch._C import _fx_map_aggregate, _fx_map_arg, _NodeBase
diff --git a/torch/fx/operator_schemas.py b/torch/fx/operator_schemas.py
index 284078b2371f..1234d13b3b11 100644
--- a/torch/fx/operator_schemas.py
+++ b/torch/fx/operator_schemas.py
@@ -5,7 +5,8 @@ import numbers
 import types
 import typing
 import warnings
-from typing import Any, Callable, cast, NamedTuple, Optional, TYPE_CHECKING
+from collections.abc import Callable
+from typing import Any, cast, NamedTuple, Optional, TYPE_CHECKING
 
 import torch
 from torch._jit_internal import boolean_dispatched
diff --git a/torch/fx/passes/graph_transform_observer.py b/torch/fx/passes/graph_transform_observer.py
index 6479af665895..e762b8a60d10 100644
--- a/torch/fx/passes/graph_transform_observer.py
+++ b/torch/fx/passes/graph_transform_observer.py
@@ -1,6 +1,7 @@
 # mypy: allow-untyped-defs
 import os
-from typing import Callable, Optional, TypeVar
+from collections.abc import Callable
+from typing import Optional, TypeVar
 
 from torch.fx import Graph, Node
 from torch.fx._compatibility import compatibility
diff --git a/torch/fx/passes/infra/pass_manager.py b/torch/fx/passes/infra/pass_manager.py
index 8fed76cc3893..e13ca72fd240 100644
--- a/torch/fx/passes/infra/pass_manager.py
+++ b/torch/fx/passes/infra/pass_manager.py
@@ -1,9 +1,9 @@
 # mypy: allow-untyped-defs
 import inspect
 import logging
+from collections.abc import Callable
 from functools import wraps
 from queue import Queue
-from typing import Callable
 
 import torch.nn as nn
 from torch.fx._compatibility import compatibility
diff --git a/torch/fx/passes/net_min_base.py b/torch/fx/passes/net_min_base.py
index 8a147f3e0b00..b4a82f10177d 100644
--- a/torch/fx/passes/net_min_base.py
+++ b/torch/fx/passes/net_min_base.py
@@ -1,7 +1,8 @@
 # mypy: allow-untyped-defs
 import logging
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Callable, cast, Optional
+from typing import Any, cast, Optional
 
 import torch
 import torch.fx
diff --git a/torch/fx/passes/param_fetch.py b/torch/fx/passes/param_fetch.py
index 02904b8e403e..5e17a8040e6a 100644
--- a/torch/fx/passes/param_fetch.py
+++ b/torch/fx/passes/param_fetch.py
@@ -1,4 +1,5 @@
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any
 
 import torch
 import torch.nn as nn
diff --git a/torch/fx/passes/pass_manager.py b/torch/fx/passes/pass_manager.py
index 48dfe702fedb..297d50a68f47 100644
--- a/torch/fx/passes/pass_manager.py
+++ b/torch/fx/passes/pass_manager.py
@@ -1,8 +1,9 @@
 # mypy: allow-untyped-defs
 import logging
+from collections.abc import Callable
 from functools import wraps
 from inspect import unwrap
-from typing import Callable, Optional
+from typing import Optional
 
 
 logger = logging.getLogger(__name__)
diff --git a/torch/fx/passes/reinplace.py b/torch/fx/passes/reinplace.py
index 41e831327b41..30f154938961 100644
--- a/torch/fx/passes/reinplace.py
+++ b/torch/fx/passes/reinplace.py
@@ -2,8 +2,9 @@
 import _operator
 import itertools
 from collections import defaultdict
+from collections.abc import Callable
 from enum import Enum
-from typing import Any, Callable
+from typing import Any
 
 import torch
 from torch._subclasses.fake_tensor import FakeTensor, FakeTensorMode
diff --git a/torch/fx/passes/split_module.py b/torch/fx/passes/split_module.py
index fb8bcb835ede..095aea9c1644 100644
--- a/torch/fx/passes/split_module.py
+++ b/torch/fx/passes/split_module.py
@@ -2,7 +2,8 @@
 import inspect
 import logging
 from collections import OrderedDict
-from typing import Any, Callable, Optional
+from collections.abc import Callable
+from typing import Any, Optional
 
 import torch
 from torch.fx._compatibility import compatibility
diff --git a/torch/fx/passes/utils/source_matcher_utils.py b/torch/fx/passes/utils/source_matcher_utils.py
index 0a07da522113..d504ce56fd66 100644
--- a/torch/fx/passes/utils/source_matcher_utils.py
+++ b/torch/fx/passes/utils/source_matcher_utils.py
@@ -1,7 +1,8 @@
 import logging
 import os
+from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import Any, Callable, Optional
+from typing import Any, Optional
 
 from torch.fx._compatibility import compatibility
 from torch.fx.graph import Graph
diff --git a/torch/fx/proxy.py b/torch/fx/proxy.py
index 6bbd252b6d0b..8979dcbabaff 100644
--- a/torch/fx/proxy.py
+++ b/torch/fx/proxy.py
@@ -10,9 +10,9 @@ import operator
 import sys
 import traceback
 from collections import OrderedDict
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
 from dataclasses import fields, is_dataclass
-from typing import Any, Callable, Optional
+from typing import Any, Optional
 
 import torch
 import torch.fx.traceback as fx_traceback
diff --git a/torch/fx/subgraph_rewriter.py b/torch/fx/subgraph_rewriter.py
index 686b33f44085..2253da19d364 100644
--- a/torch/fx/subgraph_rewriter.py
+++ b/torch/fx/subgraph_rewriter.py
@@ -1,6 +1,7 @@
 import copy
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Callable, NamedTuple, Optional, TYPE_CHECKING, Union
+from typing import Any, NamedTuple, Optional, TYPE_CHECKING, Union
 
 import torch
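
For readers unfamiliar with the rule: ruff's `UP035` flags imports from deprecated locations. As the diff above shows, that means importing `Callable` from `collections.abc` rather than `typing`, and importing names such as `TypeAlias`, `TypeGuard`, and `Concatenate` from `typing` rather than `typing_extensions` where the target Python version provides them. A minimal before/after sketch of the pattern (hypothetical code, not taken from this patch):

    # Before: UP035 flags Callable being imported from typing.
    #     from typing import Callable, Optional
    # After: Callable comes from collections.abc; the rest stays in typing.
    from collections.abc import Callable
    from typing import Optional

    def apply_twice(fn: Callable[[int], int], x: int) -> Optional[int]:
        # Purely illustrative helper, not part of the patch.
        return fn(fn(x))

Finally enabling the rule would presumably amount to selecting `"UP035"` in the repository's ruff lint configuration (for example, `extend-select = ["UP035"]` under `[tool.ruff.lint]`, assuming the config lives in pyproject.toml).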