Files
pytorch/test/test_cuda_expandable_segments.py
PyTorch MergeBot 99f2491af9 Revert "Use absolute path path.resolve() -> path.absolute() (#129409)"
This reverts commit 45411d1fc9a2b6d2f891b6ab0ae16409719e09fc.

Reverted https://github.com/pytorch/pytorch/pull/129409 on behalf of https://github.com/jeanschmidt due to Breaking internal CI, @albanD please help get this PR merged ([comment](https://github.com/pytorch/pytorch/pull/129409#issuecomment-2571316444))
2025-01-04 14:17:20 +00:00

41 lines
1.0 KiB
Python

# Owner(s): ["module: cuda"]
# run time cuda tests, but with the allocator using expandable segments
import pathlib
import sys
from test_cuda import ( # noqa: F401
TestBlockStateAbsorption,
TestCuda,
TestCudaMallocAsync,
)
import torch
from torch.testing._internal.common_cuda import IS_JETSON, IS_WINDOWS
from torch.testing._internal.common_utils import run_tests, TEST_WITH_ROCM
# Repo root is two levels up from this file (test/ -> checkout root); we need
# it on sys.path temporarily so the in-repo `tools` package can be imported.
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
sys.path.insert(0, str(REPO_ROOT))
from tools.stats.import_test_stats import get_disabled_tests
# Make sure to remove REPO_ROOT after import is done, so the checkout root
# does not shadow installed packages for the rest of the test run.
sys.path.remove(str(REPO_ROOT))
if __name__ == "__main__":
    # Expandable-segments allocator tests need a real CUDA device and are not
    # supported on Jetson, Windows, or ROCm builds.
    platform_excluded = IS_JETSON or IS_WINDOWS or TEST_WITH_ROCM
    if torch.cuda.is_available() and not platform_excluded:
        get_disabled_tests(".")

        # Flip the caching allocator into expandable-segments mode before any
        # test allocates CUDA memory.
        torch.cuda.memory._set_allocator_settings("expandable_segments:True")

        # Tell the imported test classes which allocator mode is active.
        TestCuda.expandable_segments = lambda _: True
        TestBlockStateAbsorption.expandable_segments = lambda _: True

        run_tests()