Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-21 05:34:18 +08:00
Summary: Following up on this: https://github.com/pytorch/pytorch/pull/35851 cross dtype storage copy is not being used internally, so I have not included cross dtype copy for complex. Pull Request resolved: https://github.com/pytorch/pytorch/pull/35771 Differential Revision: D21319650 Pulled By: anjali411 fbshipit-source-id: 07c72996ee598eba0cf401ad61534494d6f5b5b3
49 lines
1.3 KiB
Python
"""Adds docstrings to Storage functions"""
|
|
|
|
import torch._C
|
|
from torch._C import _add_docstr as add_docstr
|
|
|
|
|
|
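# Names of the C-extension storage base classes defined on torch._C; the
# public torch.*Storage classes derive from these, so docstrings attached
# here surface on the public classes as well.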
storage_classes = [
    'DoubleStorageBase',
    'FloatStorageBase',
    'LongStorageBase',
    'IntStorageBase',
    'ShortStorageBase',
    'CharStorageBase',
    'ByteStorageBase',
    'BoolStorageBase',
    'BFloat16StorageBase',
    'ComplexDoubleStorageBase',
    'ComplexFloatStorageBase',
]


def add_docstr_all(method, docstr):
    for cls_name in storage_classes:
        cls = getattr(torch._C, cls_name)
        try:
            add_docstr(getattr(cls, method), docstr)
        except AttributeError:
            # Not every storage class defines the method being documented.
            pass


add_docstr_all('from_file',
               """
from_file(filename, shared=False, size=0) -> Storage

If `shared` is `True`, then memory is shared between all processes.
All changes are written to the file. If `shared` is `False`, then the changes on
the storage do not affect the file.

`size` is the number of elements in the storage. If `shared` is `False`,
then the file must contain at least `size * sizeof(Type)` bytes
(`Type` is the type of storage). If `shared` is `True`, the file will be
created if needed.

Args:
    filename (str): file name to map
    shared (bool): whether to share memory
    size (int): number of elements in the storage
""")
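For illustration, a minimal usage sketch of the from_file API documented above, exercised through the public torch.FloatStorage class, which inherits from FloatStorageBase; the file name example.bin is a hypothetical placeholder.

import torch

# Back the non-shared mapping with 4 float32 elements (16 zero bytes);
# example.bin is a hypothetical scratch file.
with open('example.bin', 'wb') as f:
    f.write(bytes(4 * 4))

# shared=False: contents are read into memory; writes to the storage
# do not propagate back to the file.
s = torch.FloatStorage.from_file('example.bin', shared=False, size=4)
s[0] = 1.0

# shared=True: the storage is memory-mapped, so writes go through to the
# file, and the file is created if it does not exist.
t = torch.FloatStorage.from_file('example.bin', shared=True, size=4)
t[0] = 2.0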