Revert "Skip test_memory_format_nn_BatchNorm2d in inductor (#125970)" (#126594)

This reverts commit 0a9c6e92f8d1a35f33042c8dab39f23b7f39d6e7.

Re-enable the test since it's fixed.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/126594
Approved by: https://github.com/huydhn
ghstack dependencies: #126593
Author: Shunting Zhang
Date: 2024-05-24 13:35:21 -07:00
Committed by: PyTorch MergeBot
Parent: b03dc3d167
Commit: db9c6aeec6


@@ -25,7 +25,7 @@ from torch.testing._internal.common_nn import (
     nllloss_reference, nlllossNd_reference, smoothl1loss_reference, softmarginloss_reference, get_reduction)
 from torch.testing._internal.common_utils import (
     freeze_rng_state, set_single_threaded_if_parallel_tbb, skipIfMps, GRADCHECK_NONDET_TOL, TEST_WITH_ROCM, IS_WINDOWS,
-    skipIfTorchDynamo, TEST_WITH_TORCHINDUCTOR)
+    skipIfTorchDynamo)
 from types import ModuleType
 from typing import List, Tuple, Type, Set, Dict
 import operator
@@ -127,7 +127,7 @@ class modules(_TestParametrizer):
             def test_wrapper(*args, **kwargs):
                 return test(*args, **kwargs)
 
-            if self.skip_if_dynamo and not TEST_WITH_TORCHINDUCTOR:
+            if self.skip_if_dynamo and not torch.testing._internal.common_utils.TEST_WITH_TORCHINDUCTOR:
                 test_wrapper = skipIfTorchDynamo("Policy: we don't run ModuleInfo tests w/ Dynamo")(test_wrapper)
 
             decorator_fn = partial(module_info.get_decorators, generic_cls.__name__,
@@ -3469,12 +3469,7 @@ module_db: List[ModuleInfo] = [
                        unittest.expectedFailure, 'TestEagerFusionModuleInfo',
                        'test_aot_autograd_module_exhaustive',
                        active_if=operator.itemgetter('training')
-                   ),
-                   # test fails if run alone in inductor https://github.com/pytorch/pytorch/issues/125967
-                   DecorateInfo(
-                       unittest.skip("Skipped https://github.com/pytorch/pytorch/issues/125967"),
-                       'TestModule', 'test_memory_format', device_type='cuda',
-                       active_if=(TEST_WITH_TORCHINDUCTOR)),)
+                   ),)
               ),
     ModuleInfo(torch.nn.BatchNorm3d,
                train_and_eval_differ=True,
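
Note on the second hunk: the revert restores the fully qualified lookup torch.testing._internal.common_utils.TEST_WITH_TORCHINDUCTOR in place of the name imported at module load. The commit does not state the rationale, but a plausible one is standard Python import semantics: `from module import NAME` copies the value once at import time, so later patching of the module attribute is invisible to the copy, while an attribute lookup always reads the current value. A minimal self-contained sketch, using a hypothetical stand-in module rather than the real common_utils:

import sys
import types

# Hypothetical stand-in for torch.testing._internal.common_utils.
common_utils = types.ModuleType("common_utils")
common_utils.TEST_WITH_TORCHINDUCTOR = False
sys.modules["common_utils"] = common_utils

# `from ... import NAME` binds the value the flag has right now,
# at import time.
from common_utils import TEST_WITH_TORCHINDUCTOR

# A harness later flips the flag on the module (e.g. when the suite
# runs under inductor).
common_utils.TEST_WITH_TORCHINDUCTOR = True

print(TEST_WITH_TORCHINDUCTOR)               # False -- stale, import-time copy
print(common_utils.TEST_WITH_TORCHINDUCTOR)  # True  -- live attribute lookup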
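
The DecorateInfo removed in the third hunk conditionally skipped TestModule.test_memory_format on CUDA whenever the suite ran under inductor. A simplified sketch of that gating pattern, where apply_if is a hypothetical helper and not PyTorch's actual DecorateInfo machinery:

import unittest

TEST_WITH_TORCHINDUCTOR = True  # stand-in for the real flag

def apply_if(decorator, active_if):
    # Hypothetical helper: apply `decorator` only when `active_if` is
    # truthy, mirroring DecorateInfo's `active_if=` gating.
    return decorator if active_if else (lambda test: test)

class TestModule(unittest.TestCase):
    @apply_if(unittest.skip("Skipped https://github.com/pytorch/pytorch/issues/125967"),
              TEST_WITH_TORCHINDUCTOR)
    def test_memory_format(self):
        self.assertTrue(True)  # placeholder body

if __name__ == "__main__":
    unittest.main()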