[BE][2/16] fix typos in torch/ (torch/_*/) (#156312)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/156312
Approved by: https://github.com/albanD
Xuehai Pan
2025-07-11 22:11:04 +08:00
committed by PyTorch MergeBot
parent 8b97e4dd8c
commit 7a92b51196
68 changed files with 121 additions and 122 deletions

View File

@@ -1,6 +1,6 @@
# mypy: ignore-errors
"""Dtypes/scalar type implementaions with torch dtypes.
"""Dtypes/scalar type implementations with torch dtypes.
Here `dtype` is always a torch.dtype, this module knows nothing about
scalar types, wrapper dtypes or anything like that. PyTorch only.

View File

@@ -96,7 +96,7 @@ def _concat_cast_helper(tensors, out=None, dtype=None, casting="same_kind"):
    else:
        out_dtype = _dtypes_impl.result_type_impl(*tensors)
-    # cast input arrays if necessary; do not broadcast them agains `out`
+    # cast input arrays if necessary; do not broadcast them against `out`
    tensors = _util.typecast_tensors(tensors, out_dtype, casting)
    return tensors
@@ -1290,7 +1290,7 @@ def cross(a: ArrayLike, b: ArrayLike, axisa=-1, axisb=-1, axisc=-1, axis=None):
def einsum(*operands, out=None, dtype=None, order="K", casting="safe", optimize=False):
    # Have to manually normalize *operands and **kwargs, following the NumPy signature
-    # We have a local import to avoid poluting the global space, as it will be then
+    # We have a local import to avoid polluting the global space, as it will be then
    # exported in funcs.py
    from ._ndarray import ndarray
    from ._normalizations import (

View File

@@ -204,7 +204,7 @@ def _coerce_to_tensor(obj, dtype=None, copy=False, ndmin=0):
    Notes
    -----
-    This is almost a "tensor_like" coersion function. Does not handle wrapper
+    This is almost a "tensor_like" coercion function. Does not handle wrapper
    ndarrays (those should be handled in the ndarray-aware layer prior to
    invoking this function).
    """