[ez] Disable some failing periodic tests (#156731)

test_torch.py::TestTorchDeviceTypeCUDA::test_storage_use_count_cuda:
Added in https://github.com/pytorch/pytorch/pull/150059
Fails in debug mode [GH job link](https://github.com/pytorch/pytorch/actions/runs/15856606665/job/44706020831) [HUD commit link](4491326fb0)

inductor/test_inductor_freezing.py::FreezingGpuTests::test_cpp_wrapper_cuda:
[GH job link](https://github.com/pytorch/pytorch/actions/runs/15856606665/job/44707119967) [HUD commit link](4491326fb0)
Started failing after the move to a new CUDA version in https://github.com/pytorch/pytorch/pull/155234
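
For context, a minimal sketch (not part of this change) for checking locally whether a build hits the two new skip conditions; it assumes a checkout where `torch.testing._internal.common_utils` is importable:

```python
# Sketch only: mirrors the skip conditions added in this PR.
import torch
from torch.testing._internal.common_utils import TEST_WITH_SLOW_GRADCHECK

# test_storage_use_count is skipped on debug (RelWithAssert) builds.
print("debug build:", "RelWithAssert" in torch.__config__.show())

# test_cpp_wrapper (CUDA) is skipped under slow-gradcheck runs.
print("slow gradcheck:", TEST_WITH_SLOW_GRADCHECK)
```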

I'll ping people if this gets merged

Pull Request resolved: https://github.com/pytorch/pytorch/pull/156731
Approved by: https://github.com/huydhn
Author: Catherine Lee
Date: 2025-06-24 23:02:18 +00:00
Committed by: PyTorch MergeBot
Parent: d8bb5ac260
Commit: 2ff3280c77
2 changed files with 14 additions and 1 deletion

inductor/test_inductor_freezing.py

@@ -17,7 +17,12 @@ from torch._inductor.test_case import TestCase as InductorTestCase
 from torch._inductor.utils import override_lowering, run_and_get_code
 from torch.testing import FileCheck
 from torch.testing._internal.common_cuda import SM80OrLater, tf32_on_and_off
-from torch.testing._internal.common_utils import IS_FBCODE, skipIfRocm, skipIfXpu
+from torch.testing._internal.common_utils import (
+    IS_FBCODE,
+    skipIfRocm,
+    skipIfXpu,
+    TEST_WITH_SLOW_GRADCHECK,
+)
 # Make the helper files in test/ importable
@@ -785,6 +790,10 @@ class OptimizeForInferenceTemplate(TestCase):
     @skipIfXpu
     @unittest.skipIf(IS_FBCODE, "Not yet runnable in fbcode")
+    @unittest.skipIf(
+        TEST_WITH_SLOW_GRADCHECK,
+        "Failing in slow gradcheck on cuda12.8, see https://github.com/pytorch/pytorch/pull/156731 for example",
+    )
     def test_cpp_wrapper(self):
         mod = ConvBN(3, 32, kernel_size=3, stride=2).eval().to(self.device)

test_torch.py

@@ -250,6 +250,10 @@ class TestTorchDeviceType(TestCase):
     @skipIfTorchDynamo("Not a suitable test for TorchDynamo")
     @onlyNativeDeviceTypes
+    @unittest.skipIf(
+        "RelWithAssert" in torch.__config__.show(),
+        "failing in debug build, see https://github.com/pytorch/pytorch/pull/156731 for example",
+    )
     def test_storage_use_count(self, device):
         a = torch.randn(10, device=device)
         prev_cf = torch._C._storage_Use_Count(a.untyped_storage()._cdata)