pytorch/torch/testing/_internal/dynamo_test_failures.py
Shunting Zhang ca8153ae67 BE: reset dynamo before each test in test_ops_gradients.py (#131397)
https://github.com/pytorch/pytorch/pull/126586 tried to reset dynamo before each unit test. That PR got reverted a couple of times because we saw post-land test failures that we didn't see before merge. This PR only resets dynamo before each test in `test_ops_gradients.py`, to make it easier to land.

Eventually, after we reset dynamo in each individual test file, we can move the change to the base class (TestCase) and remove the per-file changes.
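As a rough, hypothetical sketch (not the actual diff from this PR), resetting dynamo before each test usually amounts to calling torch._dynamo.reset() in the test's setUp:

# Hypothetical example class; the real change touches test_ops_gradients.py.
import unittest

import torch


class ExampleGradientsTest(unittest.TestCase):
    def setUp(self):
        super().setUp()
        torch._dynamo.reset()  # clear dynamo's compilation caches between tests

    def test_double(self):
        fn = torch.compile(lambda x: x * 2)
        self.assertTrue(torch.equal(fn(torch.ones(3)), torch.full((3,), 2.0)))


if __name__ == "__main__":
    unittest.main()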

Pull Request resolved: https://github.com/pytorch/pytorch/pull/131397
Approved by: https://github.com/zou3519
ghstack dependencies: #131551, #131388, #131372
2024-07-29 17:39:23 +00:00


# mypy: allow-untyped-defs
import logging
import os
import sys
# NOTE: [dynamo_test_failures.py]
#
# We generate xFailIfTorchDynamo* for all tests in `dynamo_expected_failures`
# We generate skipIfTorchDynamo* for all tests in `dynamo_skips`
#
# For an easier-than-manual way of generating and updating these lists,
# see scripts/compile_tests/update_failures.py
#
# If you're adding a new test and it fails under PYTORCH_TEST_WITH_DYNAMO=1,
# either add the appropriate decorators to your test or add an entry for it
# under test/dynamo_skips or test/dynamo_expected_failures.
#
# *These are not exactly unittest.expectedFailure and unittest.skip. We'll
# always execute the test and then suppress the signal, if necessary.
# If your test crashes or is slow, please use @skipIfTorchDynamo instead.
#
# The expected failure and skip files are located in test/dynamo_skips and
# test/dynamo_expected_failures. They're individual files rather than a single
# list so that git can merge changes more easily.
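#
# For example (hypothetical test name): to mark TestFoo.test_bar as an expected
# failure under PYTORCH_TEST_WITH_DYNAMO=1, add a file named
# test/dynamo_expected_failures/TestFoo.test_bar; to skip it instead, add
# test/dynamo_skips/TestFoo.test_bar. Only the file names matter here - this
# module reads the directories with os.listdir.
#
# The helper below is only a rough, hypothetical sketch of the "always execute
# the test and then suppress the signal" behavior described above. It is NOT
# PyTorch's actual decorator, and nothing in this file uses it.
import functools


def _xfail_under_dynamo_sketch(test_fn):  # hypothetical, illustrative only
    @functools.wraps(test_fn)
    def wrapper(self, *args, **kwargs):
        try:
            test_fn(self, *args, **kwargs)  # always run the test body
        except Exception:
            return  # the failure was expected, so suppress the signal
        # the test passed even though it was expected to fail
        raise AssertionError("unexpected success under PYTORCH_TEST_WITH_DYNAMO=1")

    return wrapper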


def find_test_dir():
    # Find the path to the dynamo expected failure and skip files.
    from os.path import abspath, basename, dirname, exists, join, normpath

    if sys.platform == "win32":
        return None

    # Check relative to this file (local build):
    test_dir = normpath(join(dirname(abspath(__file__)), "../../../test"))
    if exists(join(test_dir, "dynamo_expected_failures")):
        return test_dir

    # Check relative to __main__ (installed builds relative to test file):
    main = sys.modules["__main__"]
    file = getattr(main, "__file__", None)
    if file is None:
        # Generated files do not have a module.__file__
        return None
    test_dir = dirname(abspath(file))
    while dirname(test_dir) != test_dir:
        if basename(test_dir) == "test" and exists(
            join(test_dir, "dynamo_expected_failures")
        ):
            return test_dir
        test_dir = dirname(test_dir)

    # Not found
    return None


test_dir = find_test_dir()
if not test_dir:
    logger = logging.getLogger(__name__)
    logger.warning(
        "test/dynamo_expected_failures directory not found - known dynamo errors won't be skipped."
    )

# Tests that run without strict mode in PYTORCH_TEST_WITH_INDUCTOR=1.
# Please don't add anything to this list.
FIXME_inductor_non_strict = {
    "test_modules",
    "test_ops",
    "test_ops_gradients",
    "test_torch",
}

# Tests that run without resetting dynamo in PYTORCH_TEST_WITH_INDUCTOR=1.
# Please don't add anything to this list.
#
# Instead we will gradually remove items from this list. Once the list is empty,
# we will remove the list.
FIXME_inductor_dont_reset_dynamo = {
    "test_ops",
}

# We generate unittest.expectedFailure for all of the following tests
# when run under PYTORCH_TEST_WITH_DYNAMO=1.
# See NOTE [dynamo_test_failures.py] for more details.
#
# This list exists so we can more easily add large numbers of failing tests.
if test_dir is None:
    dynamo_expected_failures = set()
    dynamo_skips = set()
else:
    failures_directory = os.path.join(test_dir, "dynamo_expected_failures")
    skips_directory = os.path.join(test_dir, "dynamo_skips")

    dynamo_expected_failures = set(os.listdir(failures_directory))
    dynamo_skips = set(os.listdir(skips_directory))

    # TODO: due to case sensitivity problems, for now list these files by hand
    extra_dynamo_skips = {
        "TestProxyTensorOpInfoCPU.test_make_fx_exhaustive_T_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_exhaustive_t_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_fake_exhaustive_T_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_fake_exhaustive_t_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_T_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_t_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_inplace_T_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_inplace_t_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_out_T_cpu_float32",
        "TestProxyTensorOpInfoCPU.test_make_fx_symbolic_exhaustive_out_t_cpu_float32",
    }
    dynamo_skips = dynamo_skips.union(extra_dynamo_skips)


# verify some invariants
for test in dynamo_expected_failures.union(dynamo_skips):
    if len(test.split(".")) != 2:
        raise AssertionError(f'Invalid test name: "{test}"')

intersection = dynamo_expected_failures.intersection(dynamo_skips)
if len(intersection) > 0:
    raise AssertionError(
        "there should be no overlap between dynamo_expected_failures "
        "and dynamo_skips, got " + str(intersection)
    )