Remove unused imported names in Python files (#134438)

Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/134438
Approved by: https://github.com/zou3519
Author: cyy
Date: 2024-08-27 20:44:01 +00:00
Committed by: PyTorch MergeBot
Parent: d23c0150f3
Commit: b567ca0f51
7 changed files with 5 additions and 15 deletions
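
The commit message does not name the linter used; removals like these correspond to the pyflakes codes F401 (unused import) and F841 (unused local variable, which covers the storage assignments deleted below). As a minimal illustrative sketch, not the actual tooling behind this PR, the standard-library ast module can find imported names that a module never reads:

# Illustrative only: a rough F401-style check using the standard library.
# Real linters (pyflakes, ruff) also handle strings, __all__, and re-exports.
import ast
import sys


def unused_imports(source: str) -> list[tuple[int, str]]:
    tree = ast.parse(source)
    imported: dict[str, int] = {}  # bound name -> line of the import
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # "import torch._C._lazy" binds the top-level name "torch"
                imported[alias.asname or alias.name.split(".")[0]] = node.lineno
        elif isinstance(node, ast.ImportFrom):
            if node.module == "__future__":
                continue  # future imports are directives, not names to check
            for alias in node.names:
                if alias.name != "*":
                    imported[alias.asname or alias.name] = node.lineno
    # Any name read anywhere in the module counts as a use.
    used = {
        n.id
        for n in ast.walk(tree)
        if isinstance(n, ast.Name) and isinstance(n.ctx, ast.Load)
    }
    return sorted((line, name) for name, line in imported.items() if name not in used)


if __name__ == "__main__":
    for path in sys.argv[1:]:
        with open(path) as f:
            for line, name in unused_imports(f.read()):
                print(f"{path}:{line}: unused import '{name}'")

Run against the pre-commit versions of the files below, a check of this kind is what flags names such as cast, TypedStorage, and NumberType.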

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import cast, Callable, Generic, Type, TypeVar
+from typing import Generic, TypeVar

import torch

View File

@@ -24,10 +24,8 @@ def _save_storages(importer, obj):
if isinstance(obj, torch.storage.TypedStorage):
    # TODO: Once we decide to break serialization FC, we can
    # remove this case
-   storage = obj._untyped_storage
    dtype = obj.dtype
else:
-   storage = obj
    dtype = torch.uint8

serialized_storages.append(obj)

View File

@@ -1,5 +1,4 @@
# mypy: allow-untyped-defs
-import threading

import torch._C._lazy
from torch.utils._pytree import tree_flatten, tree_unflatten

View File

@@ -1,16 +1,13 @@
# mypy: allow-untyped-defs
import contextlib
import itertools
import operator
import weakref
from enum import Enum
from functools import partial, reduce
-from typing import Any, Callable, List, Optional, Sequence, Tuple, Type, Union
+from typing import Callable, List, Optional, Sequence, Tuple, Type, Union

import torch
import torch._prims_common as utils
import torch.library
-from torch import sym_float, Tensor, TypedStorage
+from torch import sym_float, Tensor
from torch._C import _get_default_device
from torch._higher_order_ops.effects import new_token_tensor
from torch._library.utils import is_functional_schema

View File

@@ -3,10 +3,9 @@ from __future__ import annotations
import operator
import warnings
import weakref
from contextlib import nullcontext
from enum import Enum
-from functools import cmp_to_key, reduce
+from functools import reduce
from typing import (
    Any,
    Callable,

View File

@@ -1,6 +1,6 @@
# mypy: allow-untyped-defs
from functools import partial
-from typing import List, Optional, Tuple, Union
+from typing import Optional, Tuple, Union

import torch
import torch._prims as prims
@@ -15,7 +15,6 @@ from torch._prims_common import (
    DimsType,
    ELEMENTWISE_TYPE_PROMOTION_KIND,
    IntLike,
-   NumberType,
    TensorLikeType,
)
from torch._prims_common.wrappers import (

View File

@@ -1,5 +1,3 @@
-import sys
-
import torch
from torch._C import _add_docstr, _linalg  # type: ignore[attr-defined]