Disable inductor/test_flex_attention.py (#161450)

Currently inductor/test_flex_attention.py is causing the ROCm PyTorch MI250 shard 1 job to exceed the timeout limit. This PR disables that test on ROCm by adding it to the ROCm blocklist.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/161450
Approved by: https://github.com/jeffdaily

Co-authored-by: Jeff Daily <jeff.daily@amd.com>
This commit is contained in:
amdfaa
2025-08-26 01:28:51 +00:00
committed by PyTorch MergeBot
parent 74c4c758af
commit 85adf80cf1

View File

@ -174,6 +174,7 @@ ROCM_BLOCKLIST = [
"test_jit_legacy",
"test_cuda_nvml_based_avail",
"test_jit_cuda_fuser",
"inductor/test_flex_attention",
]
S390X_BLOCKLIST = [