Remove code for Python < 3.9 (#147097)

Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/147097
Approved by: https://github.com/albanD
Author: cyy
Date: 2025-02-14 03:22:46 +00:00
Committed by: PyTorch MergeBot
Parent: 880e176544
Commit: d473c212fd
7 changed files with 4 additions and 322 deletions

View File

@@ -233,24 +233,11 @@ def stacksize_analysis(instructions) -> Union[int, float]:
for inst, next_inst in zip(instructions, instructions[1:] + [None]):
stack_size = stack_sizes[inst]
# CALL_FINALLY in Python 3.8 is handled differently when determining stack depth.
# See https://github.com/python/cpython/blob/3.8/Python/compile.c#L5450.
# Essentially, the stack effect of CALL_FINALLY is computed with jump=True,
# but the resulting stack depth is propagated to the next instruction, not the
# jump target.
is_call_finally = (
sys.version_info < (3, 9) and inst.opcode == dis.opmap["CALL_FINALLY"]
)
if inst.opcode not in TERMINAL_OPCODES:
assert next_inst is not None, f"missing next inst: {inst}"
# total stack effect of CALL_FINALLY and END_FINALLY in 3.8 is 0
eff = (
0
if is_call_finally
else stack_effect(inst.opcode, inst.arg, jump=False)
)
eff = stack_effect(inst.opcode, inst.arg, jump=False)
stack_sizes[next_inst].offset_of(stack_size, eff)
if inst.opcode in JUMP_OPCODES and not is_call_finally:
if inst.opcode in JUMP_OPCODES:
stack_sizes[inst.target].offset_of(
stack_size, stack_effect(inst.opcode, inst.arg, jump=True)
)
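
Aside (not part of the diff): with the 3.8-only CALL_FINALLY special case gone, the fall-through effect comes directly from stack_effect, whose call shape matches CPython's dis.stack_effect. A minimal, self-contained sketch of that bookkeeping on straight-line code; the helper name and the compiled snippet are illustrative, and the real analysis also propagates depths to jump targets:

    import dis

    def straight_line_peak_depth(code) -> int:
        # Sum per-instruction stack effects along fall-through edges only.
        depth = 0
        peak = 0
        for inst in dis.get_instructions(code):
            depth += dis.stack_effect(inst.opcode, inst.arg, jump=False)
            peak = max(peak, depth)
        return peak

    print(straight_line_peak_depth(compile("x = a + b * c", "<demo>", "exec")))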

View File

@@ -234,8 +234,6 @@ def create_rot_n(n) -> list[Instruction]:
return [create_instruction("SWAP", arg=i) for i in range(n, 1, -1)]
# ensure desired rotate function exists
if sys.version_info < (3, 8) and n >= 4:
raise AttributeError(f"rotate {n} not supported for Python < 3.8")
if sys.version_info < (3, 10) and n >= 5:
raise AttributeError(f"rotate {n} not supported for Python < 3.10")

View File

@@ -47,7 +47,6 @@ from torch._sources import fake_range, get_source_lines_and_file, parse_def
from torch.futures import Future
IS_PY39_PLUS: Final[bool] = sys.version_info >= (3, 9)
IS_PY310_PLUS: Final[bool] = sys.version_info >= (3, 10)
BuiltinUnionType: Union[type, tuple[type, ...]]
@@ -466,7 +465,7 @@ def get_annotation_str(annotation):
return ".".join([get_annotation_str(annotation.value), annotation.attr])
elif isinstance(annotation, ast.Subscript):
# In Python3.9+ subscript indicies are not wrapped in ast.Index
subscript_slice = annotation.slice if IS_PY39_PLUS else annotation.slice.value # type: ignore[attr-defined]
subscript_slice = annotation.slice
return f"{get_annotation_str(annotation.value)}[{get_annotation_str(subscript_slice)}]"
elif isinstance(annotation, ast.Tuple):
return ",".join([get_annotation_str(elt) for elt in annotation.elts])

View File

@@ -1,6 +1,5 @@
# mypy: allow-untyped-defs
import inspect
import sys
from .dispatcher import Dispatcher, MethodDispatcher
@@ -81,8 +80,5 @@ def ismethod(func):
signature = inspect.signature(func)
return signature.parameters.get("self", None) is not None
else:
if sys.version_info.major < 3:
spec = inspect.getargspec(func) # type: ignore[attr-defined]
else:
spec = inspect.getfullargspec(func) # type: ignore[union-attr, assignment]
spec = inspect.getfullargspec(func) # type: ignore[union-attr, assignment]
return spec and spec.args and spec.args[0] == "self"
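
Aside (not part of the diff): with the Python 2 getargspec branch gone, the fallback path uses inspect.getfullargspec to look for a leading "self" parameter, while the other branch shown above reads inspect.signature. A minimal illustration using a hypothetical class, not taken from the patch:

    import inspect

    class Widget:  # hypothetical example class
        def resize(self, width, height):
            return (width, height)

    spec = inspect.getfullargspec(Widget.resize)
    print(bool(spec.args and spec.args[0] == "self"))  # True

    params = inspect.signature(Widget.resize).parameters
    print(params.get("self") is not None)  # True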

View File

@@ -18,8 +18,6 @@ def is_stdlib_module(module: str) -> bool:
def _get_stdlib_modules():
if sys.version_info.major == 3:
if sys.version_info.minor == 8:
return stdlib3_8
if sys.version_info.minor == 9:
return stdlib3_9
if sys.version_info.minor >= 10:
@@ -30,223 +28,6 @@ def _get_stdlib_modules():
raise RuntimeError(f"Unsupported Python version: {sys.version_info}")
stdlib3_8 = {
"_dummy_thread",
"_thread",
"abc",
"aifc",
"argparse",
"array",
"ast",
"asynchat",
"asyncio",
"asyncore",
"atexit",
"audioop",
"base64",
"bdb",
"binascii",
"binhex",
"bisect",
"builtins",
"bz2",
"cProfile",
"calendar",
"cgi",
"cgitb",
"chunk",
"cmath",
"cmd",
"code",
"codecs",
"codeop",
"collections",
"colorsys",
"compileall",
"concurrent",
"configparser",
"contextlib",
"contextvars",
"copy",
"copyreg",
"crypt",
"csv",
"ctypes",
"curses",
"dataclasses",
"datetime",
"dbm",
"decimal",
"difflib",
"dis",
"distutils",
"doctest",
"dummy_threading",
"email",
"encodings",
"ensurepip",
"enum",
"errno",
"faulthandler",
"fcntl",
"filecmp",
"fileinput",
"fnmatch",
"formatter",
"fractions",
"ftplib",
"functools",
"gc",
"getopt",
"getpass",
"gettext",
"glob",
"grp",
"gzip",
"hashlib",
"heapq",
"hmac",
"html",
"http",
"imaplib",
"imghdr",
"imp",
"importlib",
"inspect",
"io",
"ipaddress",
"itertools",
"json",
"keyword",
"lib2to3",
"linecache",
"locale",
"logging",
"lzma",
"mailbox",
"mailcap",
"marshal",
"math",
"mimetypes",
"mmap",
"modulefinder",
"msilib",
"msvcrt",
"multiprocessing",
"netrc",
"nis",
"nntplib",
"ntpath",
"numbers",
"operator",
"optparse",
"os",
"ossaudiodev",
"parser",
"pathlib",
"pdb",
"pickle",
"pickletools",
"pipes",
"pkgutil",
"platform",
"plistlib",
"poplib",
"posix",
"posixpath",
"pprint",
"profile",
"pstats",
"pty",
"pwd",
"py_compile",
"pyclbr",
"pydoc",
"queue",
"quopri",
"random",
"re",
"readline",
"reprlib",
"resource",
"rlcompleter",
"runpy",
"sched",
"secrets",
"select",
"selectors",
"shelve",
"shlex",
"shutil",
"signal",
"site",
"smtpd",
"smtplib",
"sndhdr",
"socket",
"socketserver",
"spwd",
"sqlite3",
"sre",
"sre_compile",
"sre_constants",
"sre_parse",
"ssl",
"stat",
"statistics",
"string",
"stringprep",
"struct",
"subprocess",
"sunau",
"symbol",
"symtable",
"sys",
"sysconfig",
"syslog",
"tabnanny",
"tarfile",
"telnetlib",
"tempfile",
"termios",
"test",
"textwrap",
"threading",
"time",
"timeit",
"tkinter",
"token",
"tokenize",
"trace",
"traceback",
"tracemalloc",
"tty",
"turtle",
"turtledemo",
"types",
"typing",
"unicodedata",
"unittest",
"urllib",
"uu",
"uuid",
"venv",
"warnings",
"wave",
"weakref",
"webbrowser",
"winreg",
"winsound",
"wsgiref",
"xdrlib",
"xml",
"xmlrpc",
"zipapp",
"zipfile",
"zipimport",
"zlib",
}
stdlib3_9 = {
"_thread",
"abc",

View File

@@ -1750,13 +1750,6 @@ def _legacy_load(f, map_location, pickle_module, **pickle_load_args):
# if not a tarfile, reset file offset and proceed
f.seek(0)
if not hasattr(f, "readinto") and (3, 8, 0) <= sys.version_info < (3, 8, 2):
raise RuntimeError(
"torch.load does not work with file-like objects that do not implement readinto on Python 3.8.0 and 3.8.1. "
f'Received object of type "{type(f)}". Please update to Python 3.8.2 or newer to restore this '
"functionality."
)
magic_number = pickle_module.load(f, **pickle_load_args)
if magic_number != MAGIC_NUMBER:
raise RuntimeError("Invalid magic number; corrupt file?")

View File

@@ -1320,12 +1320,6 @@ optim_db: list[OptimizerInfo] = [
supported_impls=("foreach", "differentiable"),
has_capturable_arg=True,
skips=(
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo("See #116028"),
"TestOptimRenewed",
@@ -1555,12 +1549,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo("See #116028"),
"TestOptimRenewed",
@@ -1658,12 +1646,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"
@@ -1701,12 +1683,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo("See #116028"),
"TestOptimRenewed",
@@ -1794,12 +1770,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"
@@ -1830,12 +1800,6 @@ optim_db: list[OptimizerInfo] = [
supported_impls=("foreach", "differentiable"),
has_capturable_arg=True,
skips=(
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"
@@ -1923,12 +1887,6 @@ optim_db: list[OptimizerInfo] = [
"TestOptimRenewed",
"test_param_group_with_lrscheduler_goes_right_direction",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
# https://github.com/pytorch/pytorch/issues/131398
DecorateInfo(
unittest.expectedFailure,
@@ -1953,12 +1911,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"
@@ -1996,12 +1948,6 @@ optim_db: list[OptimizerInfo] = [
supported_impls=("foreach", "differentiable"),
has_capturable_arg=True,
skips=(
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"
@@ -2049,12 +1995,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo("See #116028"),
"TestOptimRenewed",
@@ -2100,12 +2040,6 @@ optim_db: list[OptimizerInfo] = [
active_if=lambda kwargs: not kwargs["contiguous"],
device_type="mps",
),
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo("See #116028"),
"TestOptimRenewed",
@@ -2181,12 +2115,6 @@ optim_db: list[OptimizerInfo] = [
"mps",
),
skips=(
DecorateInfo(
skipIfTorchDynamo("Fails fix point assertion on 3.8, see #97811"),
"TestOptimRenewed",
"test_tensor_lr",
active_if=sys.version_info < (3, 9) and sys.version_info > (3, 7),
),
DecorateInfo(
skipIfTorchDynamo(
"Errors w/ Global state changed, see https://github.com/pytorch/pytorch/issues/116028"