Revert "Support for expandable segments with cuda graph trees (#128068)"

This reverts commit fdc83610f272610ce50d1a6f5b6354f2df1baabb.

Reverted https://github.com/pytorch/pytorch/pull/128068 on behalf of https://github.com/janeyx99: the change broke ROCm tests on trunk, and the tests likely need to be qualified with @onlyCUDA ([comment](https://github.com/pytorch/pytorch/pull/128068#issuecomment-2223672381))
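
For context, "qualified with @onlyCUDA" refers to PyTorch's device-generic test decorators. The sketch below only illustrates that idea and is not code from this commit or from test_cuda.py: the class and test names are hypothetical, and it assumes the test lives in a device-generic TestCase instantiated via instantiate_device_type_tests (onlyCUDA and instantiate_device_type_tests come from torch.testing._internal.common_device_type).

# Hypothetical sketch only -- not part of this commit.
import torch
from torch.testing._internal.common_device_type import (
    instantiate_device_type_tests,
    onlyCUDA,
)
from torch.testing._internal.common_utils import TestCase, run_tests


class TestExpandableSegmentsExample(TestCase):  # hypothetical test class
    @onlyCUDA  # restrict the instantiated test to the CUDA device type
    def test_with_expandable_segments(self, device):  # hypothetical test body
        torch.cuda.memory._set_allocator_settings("expandable_segments:True")
        x = torch.ones(8, device=device)
        self.assertEqual(x.sum().item(), 8.0)


# Generates per-device copies of the class (e.g. ...CUDA); the onlyCUDA
# decorator keeps the test from running on any other device type.
instantiate_device_type_tests(TestExpandableSegmentsExample, globals())

if __name__ == "__main__":
    run_tests()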
Author: PyTorch MergeBot
Date:   2024-07-11 18:58:13 +00:00
Parent: 1cae60a87e
Commit: 578388bed8

7 changed files with 16 additions and 196 deletions


@@ -1,34 +1,15 @@
 # Owner(s): ["module: cuda"]
 # run time cuda tests, but with the allocator using expandable segments
 
-import pathlib
-import sys
-
-from test_cuda import (  # noqa: F401
-    TestBlockStateAbsorption,
-    TestCuda,
-    TestCudaMallocAsync,
-)
+import os
 
 import torch
-from torch.testing._internal.common_cuda import IS_JETSON, IS_WINDOWS
-from torch.testing._internal.common_utils import run_tests
+from torch.testing._internal.common_cuda import IS_JETSON
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+if torch.cuda.is_available() and not IS_JETSON:
+    torch.cuda.memory._set_allocator_settings("expandable_segments:True")
 
-sys.path.insert(0, str(REPO_ROOT))
-from tools.stats.import_test_stats import get_disabled_tests
-
-# Make sure to remove REPO_ROOT after import is done
-sys.path.remove(str(REPO_ROOT))
-
-if __name__ == "__main__":
-    if torch.cuda.is_available() and not IS_JETSON and not IS_WINDOWS:
-        get_disabled_tests(".")
-
-        torch.cuda.memory._set_allocator_settings("expandable_segments:True")
-
-        TestCuda.expandable_segments = lambda _: True
-        TestBlockStateAbsorption.expandable_segments = lambda _: True
-
-        run_tests()
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    filepath = os.path.join(current_dir, "test_cuda.py")
+    exec(compile(open(filepath).read(), filepath, mode="exec"))
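
As an aside on the mechanism in the restored file: it flips the allocator into expandable-segments mode through the private torch.cuda.memory._set_allocator_settings hook and then re-executes test_cuda.py in the same process. Outside of this harness, the documented way to request the same allocator mode is the PYTORCH_CUDA_ALLOC_CONF environment variable. A minimal sketch of that, assuming a CUDA build (the tensor shape is arbitrary, and the setting has to be in place before the caching allocator is first used):

# Minimal sketch: enabling expandable segments via the environment variable.
import os

# Must be set before the first CUDA allocation initializes the allocator.
os.environ.setdefault("PYTORCH_CUDA_ALLOC_CONF", "expandable_segments:True")

import torch

if torch.cuda.is_available():
    # Allocations made after this point should be served from expandable segments.
    x = torch.empty(1024, 1024, device="cuda")
    print(torch.cuda.memory_allocated(), "bytes allocated")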