Move test_utils.py back to MYPY (#113745)
Since MYPYNOFOLLOW is about to turn on import following, there's no reason to keep test_utils.py in the MYPYNOFOLLOW config. Moreover, I'm not sure it still takes 10 minutes to typecheck this file; adding it to the MYPY config takes `lintrunner --take MYPY --all-files` from 53s to 57s on my machine, which is substantial but not horrible. I guess we'll see how it fares on CI.

(Note that we cannot simply merge MYPY and MYPYNOFOLLOW because the latter config turns on `disallow_any_generics` and so is in that sense stricter than the MYPY config.)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/113745
Approved by: https://github.com/clee2000
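For context on the strictness gap mentioned above, here is a small hypothetical Python sketch (the function names are made up and are not part of this PR). The bare generic annotation only becomes an error once `disallow_any_generics` is on, which is why code accepted by the MYPY config can still fail under MYPYNOFOLLOW:

from typing import List

def first_bare(xs: list) -> int:
    # Bare `list`: not flagged without disallow_any_generics (the MYPY config),
    # but the MYPYNOFOLLOW config reports something like
    # 'Missing type parameters for generic type "list"'.
    return xs[0]

def first_typed(xs: List[int]) -> int:
    # Fully parameterized annotation: acceptable to both configs.
    return xs[0]

print(first_bare([1]), first_typed([2, 3]))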
@@ -112,6 +112,7 @@ include_patterns = [
     'test/test_torch.py',
     'test/test_type_hints.py',
     'test/test_type_info.py',
+    'test/test_utils.py',
 ]
 exclude_patterns = [
     '**/fb/**',
@@ -182,7 +183,6 @@ include_patterns = [
     'torch/_dynamo/**/*.py',
     'torch/_inductor/**/*.py',
     'torch/_C/_dynamo/**/*.py',
-    'test/test_utils.py', # used to by in MYPY but after importing op_db it took 10+ minutes
 ]
 exclude_patterns = [
     '**/fb/**',
@@ -17,7 +17,7 @@ disallow_any_generics = True

 files =
     torch/_dynamo,
-    test/test_utils.py
+    torch/_inductor

 # Minimum version supported - variable annotations were introduced
 # in Python 3.8
mypy.ini
@@ -37,6 +37,7 @@ files =
     test/test_torch.py,
     test/test_type_hints.py,
     test/test_type_info.py
+    test/test_utils.py

 #
 # `exclude` is a regex, not a list of paths like `files` (sigh)
@@ -30,7 +30,7 @@ from torch.utils._traceback import report_compile_source_on_error, format_traceb
 import torch.utils.cpp_extension
 from torch.autograd._functions.utils import check_onnx_broadcast
 from torch.onnx.symbolic_opset9 import _prepare_onnx_paddings
-from torch.testing._internal.common_utils import load_tests, IS_FBCODE, IS_SANDCASTLE, IS_WINDOWS
+from torch.testing._internal.common_utils import load_tests, IS_FBCODE, IS_SANDCASTLE, IS_WINDOWS  # type: ignore[attr-defined]

 # load_tests from torch.testing._internal.common_utils is used to automatically filter tests for
 # sharding on sandcastle. This line silences flake warnings
@@ -842,14 +842,14 @@ class TestExtensionUtils(TestCase):
         self.assertEqual(custom_backend_name, 'foo')

         with self.assertRaises(AttributeError):
-            torch.foo.is_available()
+            torch.foo.is_available()  # type: ignore[attr-defined]

         with self.assertRaisesRegex(AssertionError, "Tried to use AMP with the"):
             with torch.autocast(device_type=custom_backend_name):
                 pass
         torch._register_device_module('foo', DummyXPUModule)

-        torch.foo.is_available()
+        torch.foo.is_available()  # type: ignore[attr-defined]

         with torch.autocast(device_type=custom_backend_name):
             pass
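A note on the `# type: ignore[attr-defined]` added to the torch.foo.is_available() calls: torch.foo is attached to the torch module only at runtime by torch._register_device_module, so mypy's static view of torch has no such attribute once this file is checked under the import-following MYPY config. A minimal hypothetical sketch of the same pattern (the class and attribute names are invented and not from this PR):

class Backends:
    """No attributes are declared statically, so mypy knows of none."""

hub = Backends()
# The attribute exists only after this runtime call, mirroring how
# torch._register_device_module creates torch.foo on the fly.
setattr(hub, "foo_is_available", lambda: True)
# Without the ignore, mypy reports: '"Backends" has no attribute
# "foo_is_available"  [attr-defined]'.
assert hub.foo_is_available()  # type: ignore[attr-defined]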