Revert "raw_alloc ignores PYTORCH_NO_CUDA_MEMORY_CACHING (#131114)"

This reverts commit 70019074806920f95976fedad775d7570294f635.

Reverted https://github.com/pytorch/pytorch/pull/131114 on behalf of https://github.com/PaliC due to failing internal builds ([comment](https://github.com/pytorch/pytorch/pull/131114#issuecomment-2390615007))
This commit is contained in:
PyTorch MergeBot
2024-10-03 06:22:53 +00:00
parent 87bf2a8428
commit 0d1701f310
12 changed files with 21 additions and 148 deletions

View File

@@ -428,19 +428,6 @@ PyObject* THCPModule_cudaCachingAllocator_raw_delete(
END_HANDLE_TH_ERRORS
}
// Python binding (exposed as `_cuda_cudaCachingAllocator_enable`, METH_O):
// enables or disables the CUDA caching allocator based on a single Python
// bool argument. Returns None on success.
PyObject* THCPModule_cudaCachingAllocator_enable(
PyObject* _unused,
PyObject* arg) {
HANDLE_TH_ERRORS
// Validate the argument up front: reject anything that is not a Python
// bool with a descriptive error that names the offending type.
TORCH_CHECK(
THPUtils_checkBool(arg),
"cudaCachingAllocator_enable expects a bool, but got ",
THPUtils_typename(arg));
// Forward the unpacked flag to the C++ allocator toggle.
c10::cuda::CUDACachingAllocator::enable(THPUtils_unpackBool(arg));
Py_RETURN_NONE;
END_HANDLE_TH_ERRORS
}
PyObject* THCPModule_cudaCachingAllocator_set_allocator_settings(
PyObject* _unused,
PyObject* env) {
@@ -1869,10 +1856,6 @@ static struct PyMethodDef _THCPModule_methods[] = {
THCPModule_cudaCachingAllocator_raw_delete,
METH_O,
nullptr},
{"_cuda_cudaCachingAllocator_enable",
THCPModule_cudaCachingAllocator_enable,
METH_O,
nullptr},
{"_cuda_cudaCachingAllocator_set_allocator_settings",
THCPModule_cudaCachingAllocator_set_allocator_settings,
METH_O,