mirror of https://github.com/pytorch/pytorch.git
synced 2025-10-21 13:44:15 +08:00
Revert "raw_alloc ignores PYTORCH_NO_CUDA_MEMORY_CACHING (#131114)"
This reverts commit 70019074806920f95976fedad775d7570294f635. Reverted https://github.com/pytorch/pytorch/pull/131114 on behalf of https://github.com/PaliC due to failing internal builds ([comment](https://github.com/pytorch/pytorch/pull/131114#issuecomment-2390615007))
@@ -428,19 +428,6 @@ PyObject* THCPModule_cudaCachingAllocator_raw_delete(
   END_HANDLE_TH_ERRORS
 }
 
-PyObject* THCPModule_cudaCachingAllocator_enable(
-    PyObject* _unused,
-    PyObject* arg) {
-  HANDLE_TH_ERRORS
-  TORCH_CHECK(
-      THPUtils_checkBool(arg),
-      "cudaCachingAllocator_enable expects a bool, but got ",
-      THPUtils_typename(arg));
-  c10::cuda::CUDACachingAllocator::enable(THPUtils_unpackBool(arg));
-  Py_RETURN_NONE;
-  END_HANDLE_TH_ERRORS
-}
-
 PyObject* THCPModule_cudaCachingAllocator_set_allocator_settings(
     PyObject* _unused,
     PyObject* env) {
@@ -1869,10 +1856,6 @@ static struct PyMethodDef _THCPModule_methods[] = {
      THCPModule_cudaCachingAllocator_raw_delete,
      METH_O,
      nullptr},
-    {"_cuda_cudaCachingAllocator_enable",
-     THCPModule_cudaCachingAllocator_enable,
-     METH_O,
-     nullptr},
     {"_cuda_cudaCachingAllocator_set_allocator_settings",
      THCPModule_cudaCachingAllocator_set_allocator_settings,
      METH_O,
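For context, the handler removed here was registered in the `_THCPModule_methods` table under the name `_cuda_cudaCachingAllocator_enable`, so builds containing #131114 expose it on `torch._C`. The sketch below is illustrative only, not part of this change: it assumes such a build and guards the call with `hasattr`, since after this revert the attribute is absent. Per the removed handler, the argument must be a Python bool and is forwarded to `c10::cuda::CUDACachingAllocator::enable`.

```python
import torch

# Illustrative sketch: the binding only exists in builds that include
# https://github.com/pytorch/pytorch/pull/131114; this revert removes it.
if hasattr(torch._C, "_cuda_cudaCachingAllocator_enable"):
    # The removed handler checks the argument with THPUtils_checkBool and
    # passes it to c10::cuda::CUDACachingAllocator::enable().
    torch._C._cuda_cudaCachingAllocator_enable(False)  # turn the caching allocator off
    torch._C._cuda_cudaCachingAllocator_enable(True)   # turn it back on
else:
    # Without the binding, caching can only be disabled via the environment
    # variable named in the PR title.
    print("Binding not present; set PYTORCH_NO_CUDA_MEMORY_CACHING=1 instead.")
```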