Revert "[Reland] Refactor caching device allocator utils (#130923)"

This reverts commit 9809080b9ed657a8c0ea0383be7cbdce3a26e05e.

Reverted https://github.com/pytorch/pytorch/pull/130923 on behalf of https://github.com/kit1980 due to breaking internal builds - Error: Relocation overflow has occurred ([comment](https://github.com/pytorch/pytorch/pull/130923#issuecomment-2332640961))
Author: PyTorch MergeBot
Date:   2024-09-05 21:16:14 +00:00
Parent: a4cf9653ee
Commit: e55c0f59e5

7 changed files with 190 additions and 202 deletions


@@ -565,10 +565,10 @@ PyObject* THCPModule_memoryStats(PyObject* _unused, PyObject* arg) {
   TORCH_CHECK(THPUtils_checkLong(arg), "invalid argument to memory_allocated");
   const auto device_index = THPUtils_unpackDeviceIndex(arg);
-  using c10::CachingDeviceAllocator::DeviceStats;
-  using c10::CachingDeviceAllocator::Stat;
-  using c10::CachingDeviceAllocator::StatArray;
-  using c10::CachingDeviceAllocator::StatType;
+  using c10::cuda::CUDACachingAllocator::DeviceStats;
+  using c10::cuda::CUDACachingAllocator::Stat;
+  using c10::cuda::CUDACachingAllocator::StatArray;
+  using c10::cuda::CUDACachingAllocator::StatType;
   const auto statToDict = [](const Stat& stat) {
     py::dict dict;
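
The hunk is cut off just after the opening of the statToDict lambda. For orientation only, the following is a minimal sketch (not taken from the diff) of how such a Stat-to-dict conversion typically looks, assuming the Stat struct exposes current/peak/allocated/freed counters as in the CUDA caching allocator:

// Illustrative sketch only, not part of the commit. Assumes a Stat struct
// with current/peak/allocated/freed counters, mirroring
// c10::cuda::CUDACachingAllocator::Stat.
#include <cstdint>
#include <pybind11/pybind11.h>
namespace py = pybind11;

struct Stat {
  int64_t current = 0;   // amount currently in use
  int64_t peak = 0;      // high-water mark since the last reset
  int64_t allocated = 0; // cumulative amount allocated
  int64_t freed = 0;     // cumulative amount freed
};

// Flatten one Stat into a Python dict, as the truncated lambda presumably does.
py::dict statToDict(const Stat& stat) {
  py::dict dict;
  dict["current"] = stat.current;
  dict["peak"] = stat.peak;
  dict["allocated"] = stat.allocated;
  dict["freed"] = stat.freed;
  return dict;
}

On the Python side, these per-pool stats ultimately surface through torch.cuda.memory_stats().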