[BE] better type annotation for torch.types (#129559)

Closes #129525

- #129525

Pull Request resolved: https://github.com/pytorch/pytorch/pull/129559
Approved by: https://github.com/ezyang
This commit is contained in:
Xuehai Pan
2024-09-02 00:43:16 +08:00
committed by PyTorch MergeBot
parent 76710d4f95
commit 4c1dd13ba3

View File

@@ -1,7 +1,5 @@
 # mypy: allow-untyped-defs
-import builtins
 # In some cases, these basic types are shadowed by corresponding
 # top-level values. The underscore variants let us refer to these
 # types. See https://github.com/python/mypy/issues/4146 for why these
@@ -14,100 +12,114 @@ from builtins import (  # noqa: F401
     int as _int,
     str as _str,
 )
-from typing import Any, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
-import torch
-from torch import SymBool, SymFloat, SymInt
+from typing import Any, Dict, List, Sequence, Tuple, TYPE_CHECKING, Union
+from typing_extensions import TypeAlias
+
+# `as` imports have better static analysis support than assignment `ExposedType: TypeAlias = HiddenType`
+from torch import (  # noqa: F401
+    device as _device,
+    DispatchKey as DispatchKey,
+    dtype as _dtype,
+    layout as _layout,
+    qscheme as _qscheme,
+    Size as Size,
+    SymBool as SymBool,
+    SymFloat as SymFloat,
+    SymInt as SymInt,
+    Tensor as Tensor,
+)
 if TYPE_CHECKING:
     from torch.autograd.graph import GradientEdge

 __all__ = ["Number", "Device", "Storage"]

 # Convenience aliases for common composite types that we need
 # to talk about in PyTorch
-_TensorOrTensors = Union[torch.Tensor, Sequence[torch.Tensor]]
-_TensorOrTensorsOrGradEdge = Union[
-    torch.Tensor,
-    Sequence[torch.Tensor],
+_TensorOrTensors: TypeAlias = Union[Tensor, Sequence[Tensor]]  # noqa: PYI047
+_TensorOrTensorsOrGradEdge: TypeAlias = Union[  # noqa: PYI047
+    Tensor,
+    Sequence[Tensor],
     "GradientEdge",
     Sequence["GradientEdge"],
 ]

-_dtype = torch.dtype
-_device = torch.device
-_qscheme = torch.qscheme
-_layout = torch.layout
-_size = Union[torch.Size, List[builtins.int], Tuple[builtins.int, ...]]
-_symsize = Union[torch.Size, Sequence[Union[_int, SymInt]]]
-_dispatchkey = Union[builtins.str, torch._C.DispatchKey]
+_size: TypeAlias = Union[Size, List[int], Tuple[int, ...]]  # noqa: PYI042,PYI047
+_symsize: TypeAlias = Union[Size, Sequence[Union[int, SymInt]]]  # noqa: PYI042,PYI047
+_dispatchkey: TypeAlias = Union[str, DispatchKey]  # noqa: PYI042,PYI047

 # int or SymInt
-IntLikeType = Union[_int, torch.SymInt]
+IntLikeType: TypeAlias = Union[int, SymInt]
+# float or SymFloat
+FloatLikeType: TypeAlias = Union[float, SymFloat]
+# bool or SymBool
+BoolLikeType: TypeAlias = Union[bool, SymBool]
 py_sym_types = (SymInt, SymFloat, SymBool)
-PySymType = Union[SymInt, SymFloat, SymBool]
+PySymType: TypeAlias = Union[SymInt, SymFloat, SymBool]

 # Meta-type for "numeric" things; matches our docs
-Number = Union[builtins.int, builtins.float, builtins.bool]
+Number: TypeAlias = Union[int, float, bool]

 # Meta-type for "device-like" things. Not to be confused with 'device' (a
 # literal device object). This nomenclature is consistent with PythonArgParser.
 # None means use the default device (typically CPU)
-Device = Optional[Union[_device, builtins.str, builtins.int]]
-del Optional
+Device: TypeAlias = Union[_device, str, int, None]
 # Storage protocol implemented by ${Type}StorageBase classes
 class Storage:
-    _cdata: _int
-    device: torch.device
-    dtype: torch.dtype
-    _torch_load_uninitialized: _bool
+    _cdata: int
+    device: _device
+    dtype: _dtype
+    _torch_load_uninitialized: bool

-    def __deepcopy__(self, memo: dict) -> "Storage":
+    def __deepcopy__(self, memo: Dict[int, Any]) -> "Storage":
         raise NotImplementedError

-    def _new_shared(self, size: _int) -> "Storage":
+    def _new_shared(self, size: int) -> "Storage":
         raise NotImplementedError

     def _write_file(
         self,
         f: Any,
-        is_real_file: _bool,
-        save_size: _bool,
-        element_size: _int,
+        is_real_file: bool,
+        save_size: bool,
+        element_size: int,
     ) -> None:
         raise NotImplementedError

-    def element_size(self) -> _int:
+    def element_size(self) -> int:
         raise NotImplementedError

-    def is_shared(self) -> _bool:
+    def is_shared(self) -> bool:
         raise NotImplementedError

     def share_memory_(self) -> "Storage":
         raise NotImplementedError

-    def nbytes(self) -> _int:
+    def nbytes(self) -> int:
         raise NotImplementedError

     def cpu(self) -> "Storage":
         raise NotImplementedError

-    def data_ptr(self) -> _int:
+    def data_ptr(self) -> int:
         raise NotImplementedError

     def from_file(
         self,
-        filename: _str,
-        shared: _bool = False,
-        nbytes: _int = 0,
+        filename: str,
+        shared: bool = False,
+        nbytes: int = 0,
     ) -> "Storage":
         raise NotImplementedError

-    def _new_with_file(self, f: Any, element_size: _int) -> "Storage":
+    def _new_with_file(
+        self,
+        f: Any,
+        element_size: int,
+    ) -> "Storage":
         raise NotImplementedError