Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
Revert "(MTIA) Move "empty_cache" API (#143402)"
This reverts commit c7d9f298072a3f59b39517e367c7d3d2ea30e6d9. Reverted https://github.com/pytorch/pytorch/pull/143402 on behalf of https://github.com/huydhn due to The internal diff D67148738 has been reverted ([comment](https://github.com/pytorch/pytorch/pull/143402#issuecomment-2557982597))
@@ -10,6 +10,5 @@ The MTIA backend is implemented out of the tree, only interfaces are be defined
     :toctree: generated
     :nosignatures:

-    empty_cache
     memory_stats
     max_memory_allocated
@@ -187,6 +187,11 @@ def get_device_capability(device: Optional[_device_t] = None) -> Tuple[int, int]
     return torch._C._mtia_getDeviceCapability(_get_device_index(device, optional=True))


+def empty_cache() -> None:
+    r"""Empty the MTIA device cache."""
+    return torch._C._mtia_emptyCache()
+
+
 def set_stream(stream: Stream):
     r"""Set the current stream.This is a wrapper API to set the stream.
         Usage of this function is discouraged in favor of the ``stream``
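The hunk above restores empty_cache as a top-level torch.mtia function backed by torch._C._mtia_emptyCache. A minimal usage sketch, assuming a PyTorch build with MTIA support and an attached MTIA device; the is_available guard is an assumption and not part of this diff:

import torch

# Sketch only: guard on device availability before touching the allocator.
# torch.mtia.is_available() is assumed here; it does not appear in this diff.
if torch.mtia.is_available():
    # Per the restored docstring, this empties the MTIA device cache.
    torch.mtia.empty_cache()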
@@ -10,11 +10,6 @@ from . import _device_t, is_initialized
 from ._utils import _get_device_index


-def empty_cache() -> None:
-    r"""Empty the MTIA device cache."""
-    return torch._C._mtia_emptyCache()
-
-
 def max_memory_allocated(device: Optional[_device_t] = None) -> int:
     r"""Return the maximum memory allocated in bytes for a given device.

@@ -42,7 +37,6 @@ def memory_stats(device: Optional[_device_t] = None) -> Dict[str, Any]:


 __all__ = [
-    "empty_cache",
     "memory_stats",
     "max_memory_allocated",
 ]
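For contrast, the memory introspection helpers shown in the last two hunks stay under torch.mtia.memory after this revert, while empty_cache moves back to the top-level module. A hedged sketch, again assuming MTIA support is present and that torch.mtia.memory is importable as the submodule these hunks edit:

import torch

# Sketch under the same MTIA-availability assumption as above.
if torch.mtia.is_available():
    stats = torch.mtia.memory.memory_stats()         # Dict[str, Any] of allocator statistics
    peak = torch.mtia.memory.max_memory_allocated()  # peak allocation in bytes
    print(f"MTIA peak allocation: {peak} bytes ({len(stats)} stat entries)")
    torch.mtia.empty_cache()                         # top-level again after the revert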