mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Disable inductor/test_flex_attention.py (#161450)
Currently inductor/test_flex_attention.py is causing rocm pytorch mi250 shard 1 to go over the timeout limit. This PR is for disabling that test. Pull Request resolved: https://github.com/pytorch/pytorch/pull/161450 Approved by: https://github.com/jeffdaily Co-authored-by: Jeff Daily <jeff.daily@amd.com>
This commit is contained in:
@@ -174,6 +174,7 @@ ROCM_BLOCKLIST = [
     "test_jit_legacy",
     "test_cuda_nvml_based_avail",
     "test_jit_cuda_fuser",
+    "inductor/test_flex_attention",
 ]

 S390X_BLOCKLIST = [
Reference in New Issue
Block a user