Revert D23752058: [pytorch][PR] Don't split oversize cached blocks

Test Plan: revert-hammer

Differential Revision:
D23752058 (67dcd62310)

Original commit changeset: ccb7c13e3cf8

fbshipit-source-id: 12ae9702135ea510e9714ed97fb75ca3b9f97c27
This commit is contained in:
Natalia Gimelshein
2021-04-14 09:22:57 -07:00
committed by Facebook GitHub Bot
parent e7e164f9e6
commit f94c95a2dd
6 changed files with 35 additions and 234 deletions

View File

@@ -356,7 +356,6 @@ PyObject * THCPModule_memoryStats(PyObject *_unused, PyObject *arg)
py::dict result;
result["num_alloc_retries"] = stats.num_alloc_retries;
result["num_ooms"] = stats.num_ooms;
result["max_split_size"] = stats.max_split_size;
result["allocation"] = statArrayToDict(stats.allocation);
result["segment"] = statArrayToDict(stats.segment);
result["active"] = statArrayToDict(stats.active);
@@ -365,8 +364,6 @@ PyObject * THCPModule_memoryStats(PyObject *_unused, PyObject *arg)
result["reserved_bytes"] = statArrayToDict(stats.reserved_bytes);
result["active_bytes"] = statArrayToDict(stats.active_bytes);
result["inactive_split_bytes"] = statArrayToDict(stats.inactive_split_bytes);
result["oversize_allocations"] = statToDict(stats.oversize_allocations);
result["oversize_segments"] = statToDict(stats.oversize_segments);
return result.release().ptr();
END_HANDLE_TH_ERRORS