diff --git a/test/dynamo/cpython/3_13/data/README b/test/dynamo/cpython/3_13/data/README new file mode 100644 index 000000000000..bd05984e4390 --- /dev/null +++ b/test/dynamo/cpython/3_13/data/README @@ -0,0 +1,2 @@ +This empty directory serves as destination for temporary files +created by some tests, in particular, the test_codecmaps_* tests. diff --git a/test/dynamo/cpython/3_13/test_contextlib.diff b/test/dynamo/cpython/3_13/test_contextlib.diff new file mode 100644 index 000000000000..f3314f590c10 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_contextlib.diff @@ -0,0 +1,195 @@ +diff --git a/test/dynamo/cpython/3_13/test_contextlib.py b/test/dynamo/cpython/3_13/test_contextlib.py +index cf651959803..6a17bc719eb 100644 +--- a/test/dynamo/cpython/3_13/test_contextlib.py ++++ b/test/dynamo/cpython/3_13/test_contextlib.py +@@ -1,3 +1,54 @@ ++# ======= BEGIN Dynamo patch ======= ++# Owner(s): ["module: dynamo"] ++ ++# ruff: noqa ++# flake8: noqa ++ ++import sys ++import torch ++import torch._dynamo.test_case ++import unittest ++from torch._dynamo.test_case import CPythonTestCase ++from torch.testing._internal.common_utils import run_tests ++ ++__TestCase = CPythonTestCase ++ ++ ++# redirect import statements ++import sys ++import importlib.abc ++ ++redirect_imports = ( ++ "test.mapping_tests", ++ "test.typinganndata", ++ "test.test_grammar", ++ "test.test_math", ++ "test.test_iter", ++ "test.typinganndata.ann_module", ++) ++ ++class RedirectImportFinder(importlib.abc.MetaPathFinder): ++ def find_spec(self, fullname, path, target=None): ++ # Check if the import is the problematic one ++ if fullname in redirect_imports: ++ try: ++ # Attempt to import the standalone module ++ name = fullname.removeprefix("test.") ++ r = importlib.import_module(name) ++ # Redirect the module in sys.modules ++ sys.modules[fullname] = r ++ # Return a module spec from the found module ++ return importlib.util.find_spec(name) ++ except ImportError: ++ return None ++ return None ++ ++# Add the custom finder to sys.meta_path ++sys.meta_path.insert(0, RedirectImportFinder()) ++ ++ ++# ======= END DYNAMO PATCH ======= ++ + """Unit tests for contextlib.py, and other context managers.""" + + import io +@@ -14,7 +65,7 @@ from test.support.testcase import ExceptionIsLikeMixin + import weakref + + +-class TestAbstractContextManager(unittest.TestCase): ++class TestAbstractContextManager(__TestCase): + + def test_enter(self): + class DefaultEnter(AbstractContextManager): +@@ -67,7 +118,7 @@ class TestAbstractContextManager(unittest.TestCase): + self.assertFalse(issubclass(NoExit, AbstractContextManager)) + + +-class ContextManagerTestCase(unittest.TestCase): ++class ContextManagerTestCase(__TestCase): + + def test_contextmanager_plain(self): + state = [] +@@ -396,7 +447,7 @@ def woohoo(): + self.assertEqual(depth, 0) + + +-class ClosingTestCase(unittest.TestCase): ++class ClosingTestCase(__TestCase): + + @support.requires_docstrings + def test_instance_docs(self): +@@ -430,7 +481,7 @@ class ClosingTestCase(unittest.TestCase): + self.assertEqual(state, [1]) + + +-class NullcontextTestCase(unittest.TestCase): ++class NullcontextTestCase(__TestCase): + def test_nullcontext(self): + class C: + pass +@@ -439,7 +490,7 @@ class NullcontextTestCase(unittest.TestCase): + self.assertIs(c_in, c) + + +-class FileContextTestCase(unittest.TestCase): ++class FileContextTestCase(__TestCase): + + def testWithOpen(self): + tfn = tempfile.mktemp() +@@ -457,7 +508,7 @@ class FileContextTestCase(unittest.TestCase): + finally: + 
os_helper.unlink(tfn) + +-class LockContextTestCase(unittest.TestCase): ++class LockContextTestCase(__TestCase): + + def boilerPlate(self, lock, locked): + self.assertFalse(locked()) +@@ -520,7 +571,7 @@ class mycontext(ContextDecorator): + return self.catch + + +-class TestContextDecorator(unittest.TestCase): ++class TestContextDecorator(__TestCase): + + @support.requires_docstrings + def test_instance_docs(self): +@@ -680,7 +731,7 @@ class TestContextDecorator(unittest.TestCase): + self.assertEqual(state, [1, 'something else', 999]) + + +-class TestBaseExitStack: ++class _TestBaseExitStack: + exit_stack = None + + @support.requires_docstrings +@@ -1141,7 +1192,7 @@ class TestBaseExitStack: + self.assertIs(exc.__cause__, exc.__context__) + + +-class TestExitStack(TestBaseExitStack, unittest.TestCase): ++class TestExitStack(_TestBaseExitStack, __TestCase): + exit_stack = ExitStack + callback_error_internal_frames = [ + ('__exit__', 'raise exc'), +@@ -1149,7 +1200,7 @@ class TestExitStack(TestBaseExitStack, unittest.TestCase): + ] + + +-class TestRedirectStream: ++class _TestRedirectStream: + + redirect_stream = None + orig_stream = None +@@ -1206,19 +1257,19 @@ class TestRedirectStream: + self.assertEqual(s, "Hello World!\n") + + +-class TestRedirectStdout(TestRedirectStream, unittest.TestCase): ++class TestRedirectStdout(_TestRedirectStream, __TestCase): + + redirect_stream = redirect_stdout + orig_stream = "stdout" + + +-class TestRedirectStderr(TestRedirectStream, unittest.TestCase): ++class TestRedirectStderr(_TestRedirectStream, __TestCase): + + redirect_stream = redirect_stderr + orig_stream = "stderr" + + +-class TestSuppress(ExceptionIsLikeMixin, unittest.TestCase): ++class TestSuppress(ExceptionIsLikeMixin, __TestCase): + + @support.requires_docstrings + def test_instance_docs(self): +@@ -1315,7 +1366,7 @@ class TestSuppress(ExceptionIsLikeMixin, unittest.TestCase): + ) + + +-class TestChdir(unittest.TestCase): ++class TestChdir(__TestCase): + def make_relative_path(self, *parts): + return os.path.join( + os.path.dirname(os.path.realpath(__file__)), +@@ -1331,6 +1382,7 @@ class TestChdir(unittest.TestCase): + self.assertEqual(os.getcwd(), target) + self.assertEqual(os.getcwd(), old_cwd) + ++ @unittest.skip("Missing archivetestdata") + def test_reentrant(self): + old_cwd = os.getcwd() + target1 = self.make_relative_path('data') +@@ -1363,4 +1415,4 @@ class TestChdir(unittest.TestCase): + + + if __name__ == "__main__": +- unittest.main() ++ run_tests() diff --git a/test/dynamo/cpython/3_13/test_contextlib.py b/test/dynamo/cpython/3_13/test_contextlib.py new file mode 100644 index 000000000000..6a17bc719eb9 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_contextlib.py @@ -0,0 +1,1418 @@ +# ======= BEGIN Dynamo patch ======= +# Owner(s): ["module: dynamo"] + +# ruff: noqa +# flake8: noqa + +import sys +import torch +import torch._dynamo.test_case +import unittest +from torch._dynamo.test_case import CPythonTestCase +from torch.testing._internal.common_utils import run_tests + +__TestCase = CPythonTestCase + + +# redirect import statements +import sys +import importlib.abc + +redirect_imports = ( + "test.mapping_tests", + "test.typinganndata", + "test.test_grammar", + "test.test_math", + "test.test_iter", + "test.typinganndata.ann_module", +) + +class RedirectImportFinder(importlib.abc.MetaPathFinder): + def find_spec(self, fullname, path, target=None): + # Check if the import is the problematic one + if fullname in redirect_imports: + try: + # Attempt to import the standalone module 
+ name = fullname.removeprefix("test.") + r = importlib.import_module(name) + # Redirect the module in sys.modules + sys.modules[fullname] = r + # Return a module spec from the found module + return importlib.util.find_spec(name) + except ImportError: + return None + return None + +# Add the custom finder to sys.meta_path +sys.meta_path.insert(0, RedirectImportFinder()) + + +# ======= END DYNAMO PATCH ======= + +"""Unit tests for contextlib.py, and other context managers.""" + +import io +import os +import sys +import tempfile +import threading +import traceback +import unittest +from contextlib import * # Tests __all__ +from test import support +from test.support import os_helper +from test.support.testcase import ExceptionIsLikeMixin +import weakref + + +class TestAbstractContextManager(__TestCase): + + def test_enter(self): + class DefaultEnter(AbstractContextManager): + def __exit__(self, *args): + super().__exit__(*args) + + manager = DefaultEnter() + self.assertIs(manager.__enter__(), manager) + + def test_slots(self): + class DefaultContextManager(AbstractContextManager): + __slots__ = () + + def __exit__(self, *args): + super().__exit__(*args) + + with self.assertRaises(AttributeError): + DefaultContextManager().var = 42 + + def test_exit_is_abstract(self): + class MissingExit(AbstractContextManager): + pass + + with self.assertRaises(TypeError): + MissingExit() + + def test_structural_subclassing(self): + class ManagerFromScratch: + def __enter__(self): + return self + def __exit__(self, exc_type, exc_value, traceback): + return None + + self.assertTrue(issubclass(ManagerFromScratch, AbstractContextManager)) + + class DefaultEnter(AbstractContextManager): + def __exit__(self, *args): + super().__exit__(*args) + + self.assertTrue(issubclass(DefaultEnter, AbstractContextManager)) + + class NoEnter(ManagerFromScratch): + __enter__ = None + + self.assertFalse(issubclass(NoEnter, AbstractContextManager)) + + class NoExit(ManagerFromScratch): + __exit__ = None + + self.assertFalse(issubclass(NoExit, AbstractContextManager)) + + +class ContextManagerTestCase(__TestCase): + + def test_contextmanager_plain(self): + state = [] + @contextmanager + def woohoo(): + state.append(1) + yield 42 + state.append(999) + with woohoo() as x: + self.assertEqual(state, [1]) + self.assertEqual(x, 42) + state.append(x) + self.assertEqual(state, [1, 42, 999]) + + def test_contextmanager_finally(self): + state = [] + @contextmanager + def woohoo(): + state.append(1) + try: + yield 42 + finally: + state.append(999) + with self.assertRaises(ZeroDivisionError): + with woohoo() as x: + self.assertEqual(state, [1]) + self.assertEqual(x, 42) + state.append(x) + raise ZeroDivisionError() + self.assertEqual(state, [1, 42, 999]) + + def test_contextmanager_traceback(self): + @contextmanager + def f(): + yield + + try: + with f(): + 1/0 + except ZeroDivisionError as e: + frames = traceback.extract_tb(e.__traceback__) + + self.assertEqual(len(frames), 1) + self.assertEqual(frames[0].name, 'test_contextmanager_traceback') + self.assertEqual(frames[0].line, '1/0') + + # Repeat with RuntimeError (which goes through a different code path) + class RuntimeErrorSubclass(RuntimeError): + pass + + try: + with f(): + raise RuntimeErrorSubclass(42) + except RuntimeErrorSubclass as e: + frames = traceback.extract_tb(e.__traceback__) + + self.assertEqual(len(frames), 1) + self.assertEqual(frames[0].name, 'test_contextmanager_traceback') + self.assertEqual(frames[0].line, 'raise RuntimeErrorSubclass(42)') + + class 
StopIterationSubclass(StopIteration): + pass + + for stop_exc in ( + StopIteration('spam'), + StopIterationSubclass('spam'), + ): + with self.subTest(type=type(stop_exc)): + try: + with f(): + raise stop_exc + except type(stop_exc) as e: + self.assertIs(e, stop_exc) + frames = traceback.extract_tb(e.__traceback__) + else: + self.fail(f'{stop_exc} was suppressed') + + self.assertEqual(len(frames), 1) + self.assertEqual(frames[0].name, 'test_contextmanager_traceback') + self.assertEqual(frames[0].line, 'raise stop_exc') + + def test_contextmanager_no_reraise(self): + @contextmanager + def whee(): + yield + ctx = whee() + ctx.__enter__() + # Calling __exit__ should not result in an exception + self.assertFalse(ctx.__exit__(TypeError, TypeError("foo"), None)) + + def test_contextmanager_trap_yield_after_throw(self): + @contextmanager + def whoo(): + try: + yield + except: + yield + ctx = whoo() + ctx.__enter__() + with self.assertRaises(RuntimeError): + ctx.__exit__(TypeError, TypeError("foo"), None) + if support.check_impl_detail(cpython=True): + # The "gen" attribute is an implementation detail. + self.assertFalse(ctx.gen.gi_suspended) + + def test_contextmanager_trap_no_yield(self): + @contextmanager + def whoo(): + if False: + yield + ctx = whoo() + with self.assertRaises(RuntimeError): + ctx.__enter__() + + def test_contextmanager_trap_second_yield(self): + @contextmanager + def whoo(): + yield + yield + ctx = whoo() + ctx.__enter__() + with self.assertRaises(RuntimeError): + ctx.__exit__(None, None, None) + if support.check_impl_detail(cpython=True): + # The "gen" attribute is an implementation detail. + self.assertFalse(ctx.gen.gi_suspended) + + def test_contextmanager_non_normalised(self): + @contextmanager + def whoo(): + try: + yield + except RuntimeError: + raise SyntaxError + + ctx = whoo() + ctx.__enter__() + with self.assertRaises(SyntaxError): + ctx.__exit__(RuntimeError, None, None) + + def test_contextmanager_except(self): + state = [] + @contextmanager + def woohoo(): + state.append(1) + try: + yield 42 + except ZeroDivisionError as e: + state.append(e.args[0]) + self.assertEqual(state, [1, 42, 999]) + with woohoo() as x: + self.assertEqual(state, [1]) + self.assertEqual(x, 42) + state.append(x) + raise ZeroDivisionError(999) + self.assertEqual(state, [1, 42, 999]) + + def test_contextmanager_except_stopiter(self): + @contextmanager + def woohoo(): + yield + + class StopIterationSubclass(StopIteration): + pass + + for stop_exc in (StopIteration('spam'), StopIterationSubclass('spam')): + with self.subTest(type=type(stop_exc)): + try: + with woohoo(): + raise stop_exc + except Exception as ex: + self.assertIs(ex, stop_exc) + else: + self.fail(f'{stop_exc} was suppressed') + + def test_contextmanager_except_pep479(self): + code = """\ +from __future__ import generator_stop +from contextlib import contextmanager +@contextmanager +def woohoo(): + yield +""" + locals = {} + exec(code, locals, locals) + woohoo = locals['woohoo'] + + stop_exc = StopIteration('spam') + try: + with woohoo(): + raise stop_exc + except Exception as ex: + self.assertIs(ex, stop_exc) + else: + self.fail('StopIteration was suppressed') + + def test_contextmanager_do_not_unchain_non_stopiteration_exceptions(self): + @contextmanager + def test_issue29692(): + try: + yield + except Exception as exc: + raise RuntimeError('issue29692:Chained') from exc + try: + with test_issue29692(): + raise ZeroDivisionError + except Exception as ex: + self.assertIs(type(ex), RuntimeError) + self.assertEqual(ex.args[0], 
'issue29692:Chained') + self.assertIsInstance(ex.__cause__, ZeroDivisionError) + + try: + with test_issue29692(): + raise StopIteration('issue29692:Unchained') + except Exception as ex: + self.assertIs(type(ex), StopIteration) + self.assertEqual(ex.args[0], 'issue29692:Unchained') + self.assertIsNone(ex.__cause__) + + def test_contextmanager_wrap_runtimeerror(self): + @contextmanager + def woohoo(): + try: + yield + except Exception as exc: + raise RuntimeError(f'caught {exc}') from exc + + with self.assertRaises(RuntimeError): + with woohoo(): + 1 / 0 + + # If the context manager wrapped StopIteration in a RuntimeError, + # we also unwrap it, because we can't tell whether the wrapping was + # done by the generator machinery or by the generator itself. + with self.assertRaises(StopIteration): + with woohoo(): + raise StopIteration + + def _create_contextmanager_attribs(self): + def attribs(**kw): + def decorate(func): + for k,v in kw.items(): + setattr(func,k,v) + return func + return decorate + @contextmanager + @attribs(foo='bar') + def baz(spam): + """Whee!""" + yield + return baz + + def test_contextmanager_attribs(self): + baz = self._create_contextmanager_attribs() + self.assertEqual(baz.__name__,'baz') + self.assertEqual(baz.foo, 'bar') + + @support.requires_docstrings + def test_contextmanager_doc_attrib(self): + baz = self._create_contextmanager_attribs() + self.assertEqual(baz.__doc__, "Whee!") + + @support.requires_docstrings + def test_instance_docstring_given_cm_docstring(self): + baz = self._create_contextmanager_attribs()(None) + self.assertEqual(baz.__doc__, "Whee!") + + def test_keywords(self): + # Ensure no keyword arguments are inhibited + @contextmanager + def woohoo(self, func, args, kwds): + yield (self, func, args, kwds) + with woohoo(self=11, func=22, args=33, kwds=44) as target: + self.assertEqual(target, (11, 22, 33, 44)) + + def test_nokeepref(self): + class A: + pass + + @contextmanager + def woohoo(a, b): + a = weakref.ref(a) + b = weakref.ref(b) + # Allow test to work with a non-refcounted GC + support.gc_collect() + self.assertIsNone(a()) + self.assertIsNone(b()) + yield + + with woohoo(A(), b=A()): + pass + + def test_param_errors(self): + @contextmanager + def woohoo(a, *, b): + yield + + with self.assertRaises(TypeError): + woohoo() + with self.assertRaises(TypeError): + woohoo(3, 5) + with self.assertRaises(TypeError): + woohoo(b=3) + + def test_recursive(self): + depth = 0 + ncols = 0 + @contextmanager + def woohoo(): + nonlocal ncols + ncols += 1 + nonlocal depth + before = depth + depth += 1 + yield + depth -= 1 + self.assertEqual(depth, before) + + @woohoo() + def recursive(): + if depth < 10: + recursive() + + recursive() + self.assertEqual(ncols, 10) + self.assertEqual(depth, 0) + + +class ClosingTestCase(__TestCase): + + @support.requires_docstrings + def test_instance_docs(self): + # Issue 19330: ensure context manager instances have good docstrings + cm_docstring = closing.__doc__ + obj = closing(None) + self.assertEqual(obj.__doc__, cm_docstring) + + def test_closing(self): + state = [] + class C: + def close(self): + state.append(1) + x = C() + self.assertEqual(state, []) + with closing(x) as y: + self.assertEqual(x, y) + self.assertEqual(state, [1]) + + def test_closing_error(self): + state = [] + class C: + def close(self): + state.append(1) + x = C() + self.assertEqual(state, []) + with self.assertRaises(ZeroDivisionError): + with closing(x) as y: + self.assertEqual(x, y) + 1 / 0 + self.assertEqual(state, [1]) + + +class 
NullcontextTestCase(__TestCase): + def test_nullcontext(self): + class C: + pass + c = C() + with nullcontext(c) as c_in: + self.assertIs(c_in, c) + + +class FileContextTestCase(__TestCase): + + def testWithOpen(self): + tfn = tempfile.mktemp() + try: + with open(tfn, "w", encoding="utf-8") as f: + self.assertFalse(f.closed) + f.write("Booh\n") + self.assertTrue(f.closed) + with self.assertRaises(ZeroDivisionError): + with open(tfn, "r", encoding="utf-8") as f: + self.assertFalse(f.closed) + self.assertEqual(f.read(), "Booh\n") + 1 / 0 + self.assertTrue(f.closed) + finally: + os_helper.unlink(tfn) + +class LockContextTestCase(__TestCase): + + def boilerPlate(self, lock, locked): + self.assertFalse(locked()) + with lock: + self.assertTrue(locked()) + self.assertFalse(locked()) + with self.assertRaises(ZeroDivisionError): + with lock: + self.assertTrue(locked()) + 1 / 0 + self.assertFalse(locked()) + + def testWithLock(self): + lock = threading.Lock() + self.boilerPlate(lock, lock.locked) + + def testWithRLock(self): + lock = threading.RLock() + self.boilerPlate(lock, lock._is_owned) + + def testWithCondition(self): + lock = threading.Condition() + def locked(): + return lock._is_owned() + self.boilerPlate(lock, locked) + + def testWithSemaphore(self): + lock = threading.Semaphore() + def locked(): + if lock.acquire(False): + lock.release() + return False + else: + return True + self.boilerPlate(lock, locked) + + def testWithBoundedSemaphore(self): + lock = threading.BoundedSemaphore() + def locked(): + if lock.acquire(False): + lock.release() + return False + else: + return True + self.boilerPlate(lock, locked) + + +class mycontext(ContextDecorator): + """Example decoration-compatible context manager for testing""" + started = False + exc = None + catch = False + + def __enter__(self): + self.started = True + return self + + def __exit__(self, *exc): + self.exc = exc + return self.catch + + +class TestContextDecorator(__TestCase): + + @support.requires_docstrings + def test_instance_docs(self): + # Issue 19330: ensure context manager instances have good docstrings + cm_docstring = mycontext.__doc__ + obj = mycontext() + self.assertEqual(obj.__doc__, cm_docstring) + + def test_contextdecorator(self): + context = mycontext() + with context as result: + self.assertIs(result, context) + self.assertTrue(context.started) + + self.assertEqual(context.exc, (None, None, None)) + + + def test_contextdecorator_with_exception(self): + context = mycontext() + + with self.assertRaisesRegex(NameError, 'foo'): + with context: + raise NameError('foo') + self.assertIsNotNone(context.exc) + self.assertIs(context.exc[0], NameError) + + context = mycontext() + context.catch = True + with context: + raise NameError('foo') + self.assertIsNotNone(context.exc) + self.assertIs(context.exc[0], NameError) + + + def test_decorator(self): + context = mycontext() + + @context + def test(): + self.assertIsNone(context.exc) + self.assertTrue(context.started) + test() + self.assertEqual(context.exc, (None, None, None)) + + + def test_decorator_with_exception(self): + context = mycontext() + + @context + def test(): + self.assertIsNone(context.exc) + self.assertTrue(context.started) + raise NameError('foo') + + with self.assertRaisesRegex(NameError, 'foo'): + test() + self.assertIsNotNone(context.exc) + self.assertIs(context.exc[0], NameError) + + + def test_decorating_method(self): + context = mycontext() + + class Test(object): + + @context + def method(self, a, b, c=None): + self.a = a + self.b = b + self.c = c + + # 
these tests are for argument passing when used as a decorator + test = Test() + test.method(1, 2) + self.assertEqual(test.a, 1) + self.assertEqual(test.b, 2) + self.assertEqual(test.c, None) + + test = Test() + test.method('a', 'b', 'c') + self.assertEqual(test.a, 'a') + self.assertEqual(test.b, 'b') + self.assertEqual(test.c, 'c') + + test = Test() + test.method(a=1, b=2) + self.assertEqual(test.a, 1) + self.assertEqual(test.b, 2) + + + def test_typo_enter(self): + class mycontext(ContextDecorator): + def __unter__(self): + pass + def __exit__(self, *exc): + pass + + with self.assertRaisesRegex(TypeError, 'the context manager'): + with mycontext(): + pass + + + def test_typo_exit(self): + class mycontext(ContextDecorator): + def __enter__(self): + pass + def __uxit__(self, *exc): + pass + + with self.assertRaisesRegex(TypeError, 'the context manager.*__exit__'): + with mycontext(): + pass + + + def test_contextdecorator_as_mixin(self): + class somecontext(object): + started = False + exc = None + + def __enter__(self): + self.started = True + return self + + def __exit__(self, *exc): + self.exc = exc + + class mycontext(somecontext, ContextDecorator): + pass + + context = mycontext() + @context + def test(): + self.assertIsNone(context.exc) + self.assertTrue(context.started) + test() + self.assertEqual(context.exc, (None, None, None)) + + + def test_contextmanager_as_decorator(self): + @contextmanager + def woohoo(y): + state.append(y) + yield + state.append(999) + + state = [] + @woohoo(1) + def test(x): + self.assertEqual(state, [1]) + state.append(x) + test('something') + self.assertEqual(state, [1, 'something', 999]) + + # Issue #11647: Ensure the decorated function is 'reusable' + state = [] + test('something else') + self.assertEqual(state, [1, 'something else', 999]) + + +class _TestBaseExitStack: + exit_stack = None + + @support.requires_docstrings + def test_instance_docs(self): + # Issue 19330: ensure context manager instances have good docstrings + cm_docstring = self.exit_stack.__doc__ + obj = self.exit_stack() + self.assertEqual(obj.__doc__, cm_docstring) + + def test_no_resources(self): + with self.exit_stack(): + pass + + def test_callback(self): + expected = [ + ((), {}), + ((1,), {}), + ((1,2), {}), + ((), dict(example=1)), + ((1,), dict(example=1)), + ((1,2), dict(example=1)), + ((1,2), dict(self=3, callback=4)), + ] + result = [] + def _exit(*args, **kwds): + """Test metadata propagation""" + result.append((args, kwds)) + with self.exit_stack() as stack: + for args, kwds in reversed(expected): + if args and kwds: + f = stack.callback(_exit, *args, **kwds) + elif args: + f = stack.callback(_exit, *args) + elif kwds: + f = stack.callback(_exit, **kwds) + else: + f = stack.callback(_exit) + self.assertIs(f, _exit) + for wrapper in stack._exit_callbacks: + self.assertIs(wrapper[1].__wrapped__, _exit) + self.assertNotEqual(wrapper[1].__name__, _exit.__name__) + self.assertIsNone(wrapper[1].__doc__, _exit.__doc__) + self.assertEqual(result, expected) + + result = [] + with self.exit_stack() as stack: + with self.assertRaises(TypeError): + stack.callback(arg=1) + with self.assertRaises(TypeError): + self.exit_stack.callback(arg=2) + with self.assertRaises(TypeError): + stack.callback(callback=_exit, arg=3) + self.assertEqual(result, []) + + def test_push(self): + exc_raised = ZeroDivisionError + def _expect_exc(exc_type, exc, exc_tb): + self.assertIs(exc_type, exc_raised) + def _suppress_exc(*exc_details): + return True + def _expect_ok(exc_type, exc, exc_tb): + 
self.assertIsNone(exc_type) + self.assertIsNone(exc) + self.assertIsNone(exc_tb) + class ExitCM(object): + def __init__(self, check_exc): + self.check_exc = check_exc + def __enter__(self): + self.fail("Should not be called!") + def __exit__(self, *exc_details): + self.check_exc(*exc_details) + with self.exit_stack() as stack: + stack.push(_expect_ok) + self.assertIs(stack._exit_callbacks[-1][1], _expect_ok) + cm = ExitCM(_expect_ok) + stack.push(cm) + self.assertIs(stack._exit_callbacks[-1][1].__self__, cm) + stack.push(_suppress_exc) + self.assertIs(stack._exit_callbacks[-1][1], _suppress_exc) + cm = ExitCM(_expect_exc) + stack.push(cm) + self.assertIs(stack._exit_callbacks[-1][1].__self__, cm) + stack.push(_expect_exc) + self.assertIs(stack._exit_callbacks[-1][1], _expect_exc) + stack.push(_expect_exc) + self.assertIs(stack._exit_callbacks[-1][1], _expect_exc) + 1/0 + + def test_enter_context(self): + class TestCM(object): + def __enter__(self): + result.append(1) + def __exit__(self, *exc_details): + result.append(3) + + result = [] + cm = TestCM() + with self.exit_stack() as stack: + @stack.callback # Registered first => cleaned up last + def _exit(): + result.append(4) + self.assertIsNotNone(_exit) + stack.enter_context(cm) + self.assertIs(stack._exit_callbacks[-1][1].__self__, cm) + result.append(2) + self.assertEqual(result, [1, 2, 3, 4]) + + def test_enter_context_errors(self): + class LacksEnterAndExit: + pass + class LacksEnter: + def __exit__(self, *exc_info): + pass + class LacksExit: + def __enter__(self): + pass + + with self.exit_stack() as stack: + with self.assertRaisesRegex(TypeError, 'the context manager'): + stack.enter_context(LacksEnterAndExit()) + with self.assertRaisesRegex(TypeError, 'the context manager'): + stack.enter_context(LacksEnter()) + with self.assertRaisesRegex(TypeError, 'the context manager'): + stack.enter_context(LacksExit()) + self.assertFalse(stack._exit_callbacks) + + def test_close(self): + result = [] + with self.exit_stack() as stack: + @stack.callback + def _exit(): + result.append(1) + self.assertIsNotNone(_exit) + stack.close() + result.append(2) + self.assertEqual(result, [1, 2]) + + def test_pop_all(self): + result = [] + with self.exit_stack() as stack: + @stack.callback + def _exit(): + result.append(3) + self.assertIsNotNone(_exit) + new_stack = stack.pop_all() + result.append(1) + result.append(2) + new_stack.close() + self.assertEqual(result, [1, 2, 3]) + + def test_exit_raise(self): + with self.assertRaises(ZeroDivisionError): + with self.exit_stack() as stack: + stack.push(lambda *exc: False) + 1/0 + + def test_exit_suppress(self): + with self.exit_stack() as stack: + stack.push(lambda *exc: True) + 1/0 + + def test_exit_exception_traceback(self): + # This test captures the current behavior of ExitStack so that we know + # if we ever unintendedly change it. It is not a statement of what the + # desired behavior is (for instance, we may want to remove some of the + # internal contextlib frames). 
+ + def raise_exc(exc): + raise exc + + try: + with self.exit_stack() as stack: + stack.callback(raise_exc, ValueError) + 1/0 + except ValueError as e: + exc = e + + self.assertIsInstance(exc, ValueError) + ve_frames = traceback.extract_tb(exc.__traceback__) + expected = \ + [('test_exit_exception_traceback', 'with self.exit_stack() as stack:')] + \ + self.callback_error_internal_frames + \ + [('_exit_wrapper', 'callback(*args, **kwds)'), + ('raise_exc', 'raise exc')] + + self.assertEqual( + [(f.name, f.line) for f in ve_frames], expected) + + self.assertIsInstance(exc.__context__, ZeroDivisionError) + zde_frames = traceback.extract_tb(exc.__context__.__traceback__) + self.assertEqual([(f.name, f.line) for f in zde_frames], + [('test_exit_exception_traceback', '1/0')]) + + def test_exit_exception_chaining_reference(self): + # Sanity check to make sure that ExitStack chaining matches + # actual nested with statements + class RaiseExc: + def __init__(self, exc): + self.exc = exc + def __enter__(self): + return self + def __exit__(self, *exc_details): + raise self.exc + + class RaiseExcWithContext: + def __init__(self, outer, inner): + self.outer = outer + self.inner = inner + def __enter__(self): + return self + def __exit__(self, *exc_details): + try: + raise self.inner + except: + raise self.outer + + class SuppressExc: + def __enter__(self): + return self + def __exit__(self, *exc_details): + type(self).saved_details = exc_details + return True + + try: + with RaiseExc(IndexError): + with RaiseExcWithContext(KeyError, AttributeError): + with SuppressExc(): + with RaiseExc(ValueError): + 1 / 0 + except IndexError as exc: + self.assertIsInstance(exc.__context__, KeyError) + self.assertIsInstance(exc.__context__.__context__, AttributeError) + # Inner exceptions were suppressed + self.assertIsNone(exc.__context__.__context__.__context__) + else: + self.fail("Expected IndexError, but no exception was raised") + # Check the inner exceptions + inner_exc = SuppressExc.saved_details[1] + self.assertIsInstance(inner_exc, ValueError) + self.assertIsInstance(inner_exc.__context__, ZeroDivisionError) + + def test_exit_exception_chaining(self): + # Ensure exception chaining matches the reference behaviour + def raise_exc(exc): + raise exc + + saved_details = None + def suppress_exc(*exc_details): + nonlocal saved_details + saved_details = exc_details + return True + + try: + with self.exit_stack() as stack: + stack.callback(raise_exc, IndexError) + stack.callback(raise_exc, KeyError) + stack.callback(raise_exc, AttributeError) + stack.push(suppress_exc) + stack.callback(raise_exc, ValueError) + 1 / 0 + except IndexError as exc: + self.assertIsInstance(exc.__context__, KeyError) + self.assertIsInstance(exc.__context__.__context__, AttributeError) + # Inner exceptions were suppressed + self.assertIsNone(exc.__context__.__context__.__context__) + else: + self.fail("Expected IndexError, but no exception was raised") + # Check the inner exceptions + inner_exc = saved_details[1] + self.assertIsInstance(inner_exc, ValueError) + self.assertIsInstance(inner_exc.__context__, ZeroDivisionError) + + def test_exit_exception_explicit_none_context(self): + # Ensure ExitStack chaining matches actual nested `with` statements + # regarding explicit __context__ = None. 
+ + class MyException(Exception): + pass + + @contextmanager + def my_cm(): + try: + yield + except BaseException: + exc = MyException() + try: + raise exc + finally: + exc.__context__ = None + + @contextmanager + def my_cm_with_exit_stack(): + with self.exit_stack() as stack: + stack.enter_context(my_cm()) + yield stack + + for cm in (my_cm, my_cm_with_exit_stack): + with self.subTest(): + try: + with cm(): + raise IndexError() + except MyException as exc: + self.assertIsNone(exc.__context__) + else: + self.fail("Expected IndexError, but no exception was raised") + + def test_exit_exception_non_suppressing(self): + # http://bugs.python.org/issue19092 + def raise_exc(exc): + raise exc + + def suppress_exc(*exc_details): + return True + + try: + with self.exit_stack() as stack: + stack.callback(lambda: None) + stack.callback(raise_exc, IndexError) + except Exception as exc: + self.assertIsInstance(exc, IndexError) + else: + self.fail("Expected IndexError, but no exception was raised") + + try: + with self.exit_stack() as stack: + stack.callback(raise_exc, KeyError) + stack.push(suppress_exc) + stack.callback(raise_exc, IndexError) + except Exception as exc: + self.assertIsInstance(exc, KeyError) + else: + self.fail("Expected KeyError, but no exception was raised") + + def test_exit_exception_with_correct_context(self): + # http://bugs.python.org/issue20317 + @contextmanager + def gets_the_context_right(exc): + try: + yield + finally: + raise exc + + exc1 = Exception(1) + exc2 = Exception(2) + exc3 = Exception(3) + exc4 = Exception(4) + + # The contextmanager already fixes the context, so prior to the + # fix, ExitStack would try to fix it *again* and get into an + # infinite self-referential loop + try: + with self.exit_stack() as stack: + stack.enter_context(gets_the_context_right(exc4)) + stack.enter_context(gets_the_context_right(exc3)) + stack.enter_context(gets_the_context_right(exc2)) + raise exc1 + except Exception as exc: + self.assertIs(exc, exc4) + self.assertIs(exc.__context__, exc3) + self.assertIs(exc.__context__.__context__, exc2) + self.assertIs(exc.__context__.__context__.__context__, exc1) + self.assertIsNone( + exc.__context__.__context__.__context__.__context__) + + def test_exit_exception_with_existing_context(self): + # Addresses a lack of test coverage discovered after checking in a + # fix for issue 20317 that still contained debugging code. 
+ def raise_nested(inner_exc, outer_exc): + try: + raise inner_exc + finally: + raise outer_exc + exc1 = Exception(1) + exc2 = Exception(2) + exc3 = Exception(3) + exc4 = Exception(4) + exc5 = Exception(5) + try: + with self.exit_stack() as stack: + stack.callback(raise_nested, exc4, exc5) + stack.callback(raise_nested, exc2, exc3) + raise exc1 + except Exception as exc: + self.assertIs(exc, exc5) + self.assertIs(exc.__context__, exc4) + self.assertIs(exc.__context__.__context__, exc3) + self.assertIs(exc.__context__.__context__.__context__, exc2) + self.assertIs( + exc.__context__.__context__.__context__.__context__, exc1) + self.assertIsNone( + exc.__context__.__context__.__context__.__context__.__context__) + + def test_body_exception_suppress(self): + def suppress_exc(*exc_details): + return True + try: + with self.exit_stack() as stack: + stack.push(suppress_exc) + 1/0 + except IndexError as exc: + self.fail("Expected no exception, got IndexError") + + def test_exit_exception_chaining_suppress(self): + with self.exit_stack() as stack: + stack.push(lambda *exc: True) + stack.push(lambda *exc: 1/0) + stack.push(lambda *exc: {}[1]) + + def test_excessive_nesting(self): + # The original implementation would die with RecursionError here + with self.exit_stack() as stack: + for i in range(10000): + stack.callback(int) + + def test_instance_bypass(self): + class Example(object): pass + cm = Example() + cm.__enter__ = object() + cm.__exit__ = object() + stack = self.exit_stack() + with self.assertRaisesRegex(TypeError, 'the context manager'): + stack.enter_context(cm) + stack.push(cm) + self.assertIs(stack._exit_callbacks[-1][1], cm) + + def test_dont_reraise_RuntimeError(self): + # https://bugs.python.org/issue27122 + class UniqueException(Exception): pass + class UniqueRuntimeError(RuntimeError): pass + + @contextmanager + def second(): + try: + yield 1 + except Exception as exc: + raise UniqueException("new exception") from exc + + @contextmanager + def first(): + try: + yield 1 + except Exception as exc: + raise exc + + # The UniqueRuntimeError should be caught by second()'s exception + # handler which chain raised a new UniqueException. 
+ with self.assertRaises(UniqueException) as err_ctx: + with self.exit_stack() as es_ctx: + es_ctx.enter_context(second()) + es_ctx.enter_context(first()) + raise UniqueRuntimeError("please no infinite loop.") + + exc = err_ctx.exception + self.assertIsInstance(exc, UniqueException) + self.assertIsInstance(exc.__context__, UniqueRuntimeError) + self.assertIsNone(exc.__context__.__context__) + self.assertIsNone(exc.__context__.__cause__) + self.assertIs(exc.__cause__, exc.__context__) + + +class TestExitStack(_TestBaseExitStack, __TestCase): + exit_stack = ExitStack + callback_error_internal_frames = [ + ('__exit__', 'raise exc'), + ('__exit__', 'if cb(*exc_details):'), + ] + + +class _TestRedirectStream: + + redirect_stream = None + orig_stream = None + + @support.requires_docstrings + def test_instance_docs(self): + # Issue 19330: ensure context manager instances have good docstrings + cm_docstring = self.redirect_stream.__doc__ + obj = self.redirect_stream(None) + self.assertEqual(obj.__doc__, cm_docstring) + + def test_no_redirect_in_init(self): + orig_stdout = getattr(sys, self.orig_stream) + self.redirect_stream(None) + self.assertIs(getattr(sys, self.orig_stream), orig_stdout) + + def test_redirect_to_string_io(self): + f = io.StringIO() + msg = "Consider an API like help(), which prints directly to stdout" + orig_stdout = getattr(sys, self.orig_stream) + with self.redirect_stream(f): + print(msg, file=getattr(sys, self.orig_stream)) + self.assertIs(getattr(sys, self.orig_stream), orig_stdout) + s = f.getvalue().strip() + self.assertEqual(s, msg) + + def test_enter_result_is_target(self): + f = io.StringIO() + with self.redirect_stream(f) as enter_result: + self.assertIs(enter_result, f) + + def test_cm_is_reusable(self): + f = io.StringIO() + write_to_f = self.redirect_stream(f) + orig_stdout = getattr(sys, self.orig_stream) + with write_to_f: + print("Hello", end=" ", file=getattr(sys, self.orig_stream)) + with write_to_f: + print("World!", file=getattr(sys, self.orig_stream)) + self.assertIs(getattr(sys, self.orig_stream), orig_stdout) + s = f.getvalue() + self.assertEqual(s, "Hello World!\n") + + def test_cm_is_reentrant(self): + f = io.StringIO() + write_to_f = self.redirect_stream(f) + orig_stdout = getattr(sys, self.orig_stream) + with write_to_f: + print("Hello", end=" ", file=getattr(sys, self.orig_stream)) + with write_to_f: + print("World!", file=getattr(sys, self.orig_stream)) + self.assertIs(getattr(sys, self.orig_stream), orig_stdout) + s = f.getvalue() + self.assertEqual(s, "Hello World!\n") + + +class TestRedirectStdout(_TestRedirectStream, __TestCase): + + redirect_stream = redirect_stdout + orig_stream = "stdout" + + +class TestRedirectStderr(_TestRedirectStream, __TestCase): + + redirect_stream = redirect_stderr + orig_stream = "stderr" + + +class TestSuppress(ExceptionIsLikeMixin, __TestCase): + + @support.requires_docstrings + def test_instance_docs(self): + # Issue 19330: ensure context manager instances have good docstrings + cm_docstring = suppress.__doc__ + obj = suppress() + self.assertEqual(obj.__doc__, cm_docstring) + + def test_no_result_from_enter(self): + with suppress(ValueError) as enter_result: + self.assertIsNone(enter_result) + + def test_no_exception(self): + with suppress(ValueError): + self.assertEqual(pow(2, 5), 32) + + def test_exact_exception(self): + with suppress(TypeError): + len(5) + + def test_exception_hierarchy(self): + with suppress(LookupError): + 'Hello'[50] + + def test_other_exception(self): + with 
self.assertRaises(ZeroDivisionError): + with suppress(TypeError): + 1/0 + + def test_no_args(self): + with self.assertRaises(ZeroDivisionError): + with suppress(): + 1/0 + + def test_multiple_exception_args(self): + with suppress(ZeroDivisionError, TypeError): + 1/0 + with suppress(ZeroDivisionError, TypeError): + len(5) + + def test_cm_is_reentrant(self): + ignore_exceptions = suppress(Exception) + with ignore_exceptions: + pass + with ignore_exceptions: + len(5) + with ignore_exceptions: + with ignore_exceptions: # Check nested usage + len(5) + outer_continued = True + 1/0 + self.assertTrue(outer_continued) + + def test_exception_groups(self): + eg_ve = lambda: ExceptionGroup( + "EG with ValueErrors only", + [ValueError("ve1"), ValueError("ve2"), ValueError("ve3")], + ) + eg_all = lambda: ExceptionGroup( + "EG with many types of exceptions", + [ValueError("ve1"), KeyError("ke1"), ValueError("ve2"), KeyError("ke2")], + ) + with suppress(ValueError): + raise eg_ve() + with suppress(ValueError, KeyError): + raise eg_all() + with self.assertRaises(ExceptionGroup) as eg1: + with suppress(ValueError): + raise eg_all() + self.assertExceptionIsLike( + eg1.exception, + ExceptionGroup( + "EG with many types of exceptions", + [KeyError("ke1"), KeyError("ke2")], + ), + ) + # Check handling of BaseExceptionGroup, using GeneratorExit so that + # we don't accidentally discard a ctrl-c with KeyboardInterrupt. + with suppress(GeneratorExit): + raise BaseExceptionGroup("message", [GeneratorExit()]) + # If we raise a BaseException group, we can still suppress parts + with self.assertRaises(BaseExceptionGroup) as eg1: + with suppress(KeyError): + raise BaseExceptionGroup("message", [GeneratorExit("g"), KeyError("k")]) + self.assertExceptionIsLike( + eg1.exception, BaseExceptionGroup("message", [GeneratorExit("g")]), + ) + # If we suppress all the leaf BaseExceptions, we get a non-base ExceptionGroup + with self.assertRaises(ExceptionGroup) as eg1: + with suppress(GeneratorExit): + raise BaseExceptionGroup("message", [GeneratorExit("g"), KeyError("k")]) + self.assertExceptionIsLike( + eg1.exception, ExceptionGroup("message", [KeyError("k")]), + ) + + +class TestChdir(__TestCase): + def make_relative_path(self, *parts): + return os.path.join( + os.path.dirname(os.path.realpath(__file__)), + *parts, + ) + + def test_simple(self): + old_cwd = os.getcwd() + target = self.make_relative_path('data') + self.assertNotEqual(old_cwd, target) + + with chdir(target): + self.assertEqual(os.getcwd(), target) + self.assertEqual(os.getcwd(), old_cwd) + + @unittest.skip("Missing archivetestdata") + def test_reentrant(self): + old_cwd = os.getcwd() + target1 = self.make_relative_path('data') + target2 = self.make_relative_path('archivetestdata') + self.assertNotIn(old_cwd, (target1, target2)) + chdir1, chdir2 = chdir(target1), chdir(target2) + + with chdir1: + self.assertEqual(os.getcwd(), target1) + with chdir2: + self.assertEqual(os.getcwd(), target2) + with chdir1: + self.assertEqual(os.getcwd(), target1) + self.assertEqual(os.getcwd(), target2) + self.assertEqual(os.getcwd(), target1) + self.assertEqual(os.getcwd(), old_cwd) + + def test_exception(self): + old_cwd = os.getcwd() + target = self.make_relative_path('data') + self.assertNotEqual(old_cwd, target) + + try: + with chdir(target): + self.assertEqual(os.getcwd(), target) + raise RuntimeError("boom") + except RuntimeError as re: + self.assertEqual(str(re), "boom") + self.assertEqual(os.getcwd(), old_cwd) + + +if __name__ == "__main__": + run_tests() diff --git 
a/test/dynamo/cpython/3_13/test_generator_stop.diff b/test/dynamo/cpython/3_13/test_generator_stop.diff new file mode 100644 index 000000000000..4f6450a86e56 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_generator_stop.diff @@ -0,0 +1,74 @@ +diff --git a/test/dynamo/cpython/3_13/test_generator_stop.py b/test/dynamo/cpython/3_13/test_generator_stop.py +index bc235ceb00e..cb2a85255cb 100644 +--- a/test/dynamo/cpython/3_13/test_generator_stop.py ++++ b/test/dynamo/cpython/3_13/test_generator_stop.py +@@ -1,9 +1,60 @@ + from __future__ import generator_stop + ++# ======= BEGIN Dynamo patch ======= ++# Owner(s): ["module: dynamo"] ++ ++# ruff: noqa ++# flake8: noqa ++ ++import sys ++import torch ++import torch._dynamo.test_case ++import unittest ++from torch._dynamo.test_case import CPythonTestCase ++from torch.testing._internal.common_utils import run_tests ++ ++__TestCase = CPythonTestCase ++ ++ ++# redirect import statements ++import sys ++import importlib.abc ++ ++redirect_imports = ( ++ "test.mapping_tests", ++ "test.typinganndata", ++ "test.test_grammar", ++ "test.test_math", ++ "test.test_iter", ++ "test.typinganndata.ann_module", ++) ++ ++class RedirectImportFinder(importlib.abc.MetaPathFinder): ++ def find_spec(self, fullname, path, target=None): ++ # Check if the import is the problematic one ++ if fullname in redirect_imports: ++ try: ++ # Attempt to import the standalone module ++ name = fullname.removeprefix("test.") ++ r = importlib.import_module(name) ++ # Redirect the module in sys.modules ++ sys.modules[fullname] = r ++ # Return a module spec from the found module ++ return importlib.util.find_spec(name) ++ except ImportError: ++ return None ++ return None ++ ++# Add the custom finder to sys.meta_path ++sys.meta_path.insert(0, RedirectImportFinder()) ++ ++ ++# ======= END DYNAMO PATCH ======= ++ + import unittest + + +-class TestPEP479(unittest.TestCase): ++class TestPEP479(__TestCase): + def test_stopiteration_wrapping(self): + def f(): + raise StopIteration +@@ -30,5 +81,5 @@ class TestPEP479(unittest.TestCase): + 'were not properly set') + + +-if __name__ == '__main__': +- unittest.main() ++if __name__ == "__main__": ++ run_tests() diff --git a/test/dynamo/cpython/3_13/test_generator_stop.py b/test/dynamo/cpython/3_13/test_generator_stop.py new file mode 100644 index 000000000000..cb2a85255cb4 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_generator_stop.py @@ -0,0 +1,85 @@ +from __future__ import generator_stop + +# ======= BEGIN Dynamo patch ======= +# Owner(s): ["module: dynamo"] + +# ruff: noqa +# flake8: noqa + +import sys +import torch +import torch._dynamo.test_case +import unittest +from torch._dynamo.test_case import CPythonTestCase +from torch.testing._internal.common_utils import run_tests + +__TestCase = CPythonTestCase + + +# redirect import statements +import sys +import importlib.abc + +redirect_imports = ( + "test.mapping_tests", + "test.typinganndata", + "test.test_grammar", + "test.test_math", + "test.test_iter", + "test.typinganndata.ann_module", +) + +class RedirectImportFinder(importlib.abc.MetaPathFinder): + def find_spec(self, fullname, path, target=None): + # Check if the import is the problematic one + if fullname in redirect_imports: + try: + # Attempt to import the standalone module + name = fullname.removeprefix("test.") + r = importlib.import_module(name) + # Redirect the module in sys.modules + sys.modules[fullname] = r + # Return a module spec from the found module + return importlib.util.find_spec(name) + except ImportError: + return 
None + return None + +# Add the custom finder to sys.meta_path +sys.meta_path.insert(0, RedirectImportFinder()) + + +# ======= END DYNAMO PATCH ======= + +import unittest + + +class TestPEP479(__TestCase): + def test_stopiteration_wrapping(self): + def f(): + raise StopIteration + def g(): + yield f() + with self.assertRaisesRegex(RuntimeError, + "generator raised StopIteration"): + next(g()) + + def test_stopiteration_wrapping_context(self): + def f(): + raise StopIteration + def g(): + yield f() + + try: + next(g()) + except RuntimeError as exc: + self.assertIs(type(exc.__cause__), StopIteration) + self.assertIs(type(exc.__context__), StopIteration) + self.assertTrue(exc.__suppress_context__) + else: + self.fail('__cause__, __context__, or __suppress_context__ ' + 'were not properly set') + + +if __name__ == "__main__": + run_tests() diff --git a/test/dynamo/cpython/3_13/test_generators.diff b/test/dynamo/cpython/3_13/test_generators.diff new file mode 100644 index 000000000000..49a2d664cf17 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_generators.diff @@ -0,0 +1,289 @@ +diff --git a/test/dynamo/cpython/3_13/test_generators.py b/test/dynamo/cpython/3_13/test_generators.py +index e48d79d34f4..40a02d644a9 100644 +--- a/test/dynamo/cpython/3_13/test_generators.py ++++ b/test/dynamo/cpython/3_13/test_generators.py +@@ -1,3 +1,53 @@ ++# ======= BEGIN Dynamo patch ======= ++# Owner(s): ["module: dynamo"] ++ ++# ruff: noqa ++# flake8: noqa ++ ++import sys ++import torch ++import torch._dynamo.test_case ++import unittest ++from torch._dynamo.test_case import CPythonTestCase ++from torch.testing._internal.common_utils import run_tests ++ ++__TestCase = CPythonTestCase ++ ++# redirect import statements ++import sys ++import importlib.abc ++ ++redirect_imports = ( ++ "test.mapping_tests", ++ "test.typinganndata", ++ "test.test_grammar", ++ "test.test_math", ++ "test.test_iter", ++ "test.typinganndata.ann_module", ++) ++ ++class RedirectImportFinder(importlib.abc.MetaPathFinder): ++ def find_spec(self, fullname, path, target=None): ++ # Check if the import is the problematic one ++ if fullname in redirect_imports: ++ try: ++ # Attempt to import the standalone module ++ name = fullname.removeprefix("test.") ++ r = importlib.import_module(name) ++ # Redirect the module in sys.modules ++ sys.modules[fullname] = r ++ # Return a module spec from the found module ++ return importlib.util.find_spec(name) ++ except ImportError: ++ return None ++ return None ++ ++# Add the custom finder to sys.meta_path ++sys.meta_path.insert(0, RedirectImportFinder()) ++ ++ ++# ======= END DYNAMO PATCH ======= ++ + import copy + import gc + import pickle +@@ -22,7 +72,7 @@ except ImportError: + @unittest.skipUnless(_testcapi is not None and + hasattr(_testcapi, "raise_SIGINT_then_send_None"), + "needs _testcapi.raise_SIGINT_then_send_None") +-class SignalAndYieldFromTest(unittest.TestCase): ++class SignalAndYieldFromTest(__TestCase): + + def generator1(self): + return (yield from self.generator2()) +@@ -46,7 +96,7 @@ class SignalAndYieldFromTest(unittest.TestCase): + self.assertEqual(exc.value, "PASSED") + + +-class FinalizationTest(unittest.TestCase): ++class FinalizationTest(__TestCase): + + def test_frame_resurrect(self): + # A generator frame can be resurrected by a generator's finalization. 
+@@ -113,7 +163,7 @@ class FinalizationTest(unittest.TestCase): + self.assertEqual(cm.exception.value, 2) + + +-class GeneratorTest(unittest.TestCase): ++class GeneratorTest(__TestCase): + + def test_name(self): + def func(): +@@ -246,8 +296,31 @@ class GeneratorTest(unittest.TestCase): + #This should not raise + loop() + ++ @unittest.expectedFailure ++ def test_genexpr_only_calls_dunder_iter_once(self): ++ ++ class Iterator: ++ ++ def __init__(self): ++ self.val = 0 ++ ++ def __next__(self): ++ if self.val == 2: ++ raise StopIteration ++ self.val += 1 ++ return self.val ++ ++ # No __iter__ method ++ ++ class C: ++ ++ def __iter__(self): ++ return Iterator() ++ ++ self.assertEqual([1,2], list(i for i in C())) ++ + +-class ModifyUnderlyingIterableTest(unittest.TestCase): ++class ModifyUnderlyingIterableTest(__TestCase): + iterables = [ + range(0), + range(20), +@@ -319,7 +392,7 @@ class ModifyUnderlyingIterableTest(unittest.TestCase): + self.process_tests(get_generator_genfunc) + + +-class ExceptionTest(unittest.TestCase): ++class ExceptionTest(__TestCase): + # Tests for the issue #23353: check that the currently handled exception + # is correctly saved/restored in PyEval_EvalFrameEx(). + +@@ -528,7 +601,7 @@ class ExceptionTest(unittest.TestCase): + self.assertEqual(cm.exception.value.value, 2) + + +-class GeneratorCloseTest(unittest.TestCase): ++class GeneratorCloseTest(__TestCase): + + def test_close_no_return_value(self): + def f(): +@@ -630,90 +703,7 @@ class GeneratorCloseTest(unittest.TestCase): + self.assertIsNone(f_wr()) + + +-# See https://github.com/python/cpython/issues/125723 +-class GeneratorDeallocTest(unittest.TestCase): +- def test_frame_outlives_generator(self): +- def g1(): +- a = 42 +- yield sys._getframe() +- +- def g2(): +- a = 42 +- yield +- +- def g3(obj): +- a = 42 +- obj.frame = sys._getframe() +- yield +- +- class ObjectWithFrame(): +- def __init__(self): +- self.frame = None +- +- def get_frame(index): +- if index == 1: +- return next(g1()) +- elif index == 2: +- gen = g2() +- next(gen) +- return gen.gi_frame +- elif index == 3: +- obj = ObjectWithFrame() +- next(g3(obj)) +- return obj.frame +- else: +- return None +- +- for index in (1, 2, 3): +- with self.subTest(index=index): +- frame = get_frame(index) +- frame_locals = frame.f_locals +- self.assertIn('a', frame_locals) +- self.assertEqual(frame_locals['a'], 42) +- +- def test_frame_locals_outlive_generator(self): +- frame_locals1 = None +- +- def g1(): +- nonlocal frame_locals1 +- frame_locals1 = sys._getframe().f_locals +- a = 42 +- yield +- +- def g2(): +- a = 42 +- yield sys._getframe().f_locals +- +- def get_frame_locals(index): +- if index == 1: +- nonlocal frame_locals1 +- next(g1()) +- return frame_locals1 +- if index == 2: +- return next(g2()) +- else: +- return None +- +- for index in (1, 2): +- with self.subTest(index=index): +- frame_locals = get_frame_locals(index) +- self.assertIn('a', frame_locals) +- self.assertEqual(frame_locals['a'], 42) +- +- def test_frame_locals_outlive_generator_with_exec(self): +- def g(): +- a = 42 +- yield locals(), sys._getframe().f_locals +- +- locals_ = {'g': g} +- for i in range(10): +- exec("snapshot, live_locals = next(g())", locals=locals_) +- for l in (locals_['snapshot'], locals_['live_locals']): +- self.assertIn('a', l) +- self.assertEqual(l['a'], 42) +- +- +-class GeneratorThrowTest(unittest.TestCase): ++class GeneratorThrowTest(__TestCase): + + def test_exception_context_with_yield(self): + def f(): +@@ -812,7 +802,7 @@ class 
GeneratorThrowTest(unittest.TestCase): + gen.throw(ValueError) + + +-class GeneratorStackTraceTest(unittest.TestCase): ++class GeneratorStackTraceTest(__TestCase): + + def check_stack_names(self, frame, expected): + names = [] +@@ -861,7 +851,7 @@ class GeneratorStackTraceTest(unittest.TestCase): + self.check_yield_from_example(call_throw) + + +-class YieldFromTests(unittest.TestCase): ++class YieldFromTests(__TestCase): + def test_generator_gi_yieldfrom(self): + def a(): + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING) +@@ -2752,21 +2742,27 @@ test_generators just happened to be the test that drew these out. + + """ + +-__test__ = {"tut": tutorial_tests, +- "pep": pep_tests, +- "email": email_tests, +- "fun": fun_tests, +- "syntax": syntax_tests, +- "conjoin": conjoin_tests, +- "weakref": weakref_tests, +- "coroutine": coroutine_tests, +- "refleaks": refleaks_tests, +- } +- +-def load_tests(loader, tests, pattern): +- tests.addTest(doctest.DocTestSuite()) +- return tests ++# __test__ = {"tut": tutorial_tests, ++# "pep": pep_tests, ++# "email": email_tests, ++# "fun": fun_tests, ++# "syntax": syntax_tests, ++# "conjoin": conjoin_tests, ++# "weakref": weakref_tests, ++# "coroutine": coroutine_tests, ++# "refleaks": refleaks_tests, ++# } ++ ++# def load_tests(loader, tests, pattern): ++# # ======= BEGIN Dynamo patch ======= ++# suite = doctest.DocTestSuite() ++# for test in suite: ++# # Dynamically change base class ++# test.__class__ = type(test.__class__.__name__, (__TestCase, test.__class__), {}) ++# tests.addTests(suite) ++# # ======= END DYNAMO PATCH ======= ++# return tests + + + if __name__ == "__main__": +- unittest.main() ++ run_tests() diff --git a/test/dynamo/cpython/3_13/test_generators.py b/test/dynamo/cpython/3_13/test_generators.py new file mode 100644 index 000000000000..40a02d644a99 --- /dev/null +++ b/test/dynamo/cpython/3_13/test_generators.py @@ -0,0 +1,2768 @@ +# ======= BEGIN Dynamo patch ======= +# Owner(s): ["module: dynamo"] + +# ruff: noqa +# flake8: noqa + +import sys +import torch +import torch._dynamo.test_case +import unittest +from torch._dynamo.test_case import CPythonTestCase +from torch.testing._internal.common_utils import run_tests + +__TestCase = CPythonTestCase + +# redirect import statements +import sys +import importlib.abc + +redirect_imports = ( + "test.mapping_tests", + "test.typinganndata", + "test.test_grammar", + "test.test_math", + "test.test_iter", + "test.typinganndata.ann_module", +) + +class RedirectImportFinder(importlib.abc.MetaPathFinder): + def find_spec(self, fullname, path, target=None): + # Check if the import is the problematic one + if fullname in redirect_imports: + try: + # Attempt to import the standalone module + name = fullname.removeprefix("test.") + r = importlib.import_module(name) + # Redirect the module in sys.modules + sys.modules[fullname] = r + # Return a module spec from the found module + return importlib.util.find_spec(name) + except ImportError: + return None + return None + +# Add the custom finder to sys.meta_path +sys.meta_path.insert(0, RedirectImportFinder()) + + +# ======= END DYNAMO PATCH ======= + +import copy +import gc +import pickle +import sys +import doctest +import unittest +import weakref +import inspect +import types + +from test import support + +try: + import _testcapi +except ImportError: + _testcapi = None + + +# This tests to make sure that if a SIGINT arrives just before we send into a +# yield from chain, the KeyboardInterrupt is raised in the innermost +# generator 
(see bpo-30039). +@unittest.skipUnless(_testcapi is not None and + hasattr(_testcapi, "raise_SIGINT_then_send_None"), + "needs _testcapi.raise_SIGINT_then_send_None") +class SignalAndYieldFromTest(__TestCase): + + def generator1(self): + return (yield from self.generator2()) + + def generator2(self): + try: + yield + except KeyboardInterrupt: + return "PASSED" + else: + return "FAILED" + + def test_raise_and_yield_from(self): + gen = self.generator1() + gen.send(None) + try: + _testcapi.raise_SIGINT_then_send_None(gen) + except BaseException as _exc: + exc = _exc + self.assertIs(type(exc), StopIteration) + self.assertEqual(exc.value, "PASSED") + + +class FinalizationTest(__TestCase): + + def test_frame_resurrect(self): + # A generator frame can be resurrected by a generator's finalization. + def gen(): + nonlocal frame + try: + yield + finally: + frame = sys._getframe() + + g = gen() + wr = weakref.ref(g) + next(g) + del g + support.gc_collect() + self.assertIs(wr(), None) + self.assertTrue(frame) + del frame + support.gc_collect() + + def test_refcycle(self): + # A generator caught in a refcycle gets finalized anyway. + old_garbage = gc.garbage[:] + finalized = False + def gen(): + nonlocal finalized + try: + g = yield + yield 1 + finally: + finalized = True + + g = gen() + next(g) + g.send(g) + self.assertGreater(sys.getrefcount(g), 2) + self.assertFalse(finalized) + del g + support.gc_collect() + self.assertTrue(finalized) + self.assertEqual(gc.garbage, old_garbage) + + def test_lambda_generator(self): + # bpo-23192, gh-119897: Test that a lambda returning a generator behaves + # like the equivalent function + f = lambda: (yield 1) + self.assertIsInstance(f(), types.GeneratorType) + self.assertEqual(next(f()), 1) + + def g(): return (yield 1) + + # test 'yield from' + f2 = lambda: (yield from g()) + def g2(): return (yield from g()) + + f3 = lambda: (yield from f()) + def g3(): return (yield from f()) + + for gen_fun in (f, g, f2, g2, f3, g3): + gen = gen_fun() + self.assertEqual(next(gen), 1) + with self.assertRaises(StopIteration) as cm: + gen.send(2) + self.assertEqual(cm.exception.value, 2) + + +class GeneratorTest(__TestCase): + + def test_name(self): + def func(): + yield 1 + + # check generator names + gen = func() + self.assertEqual(gen.__name__, "func") + self.assertEqual(gen.__qualname__, + "GeneratorTest.test_name..func") + + # modify generator names + gen.__name__ = "name" + gen.__qualname__ = "qualname" + self.assertEqual(gen.__name__, "name") + self.assertEqual(gen.__qualname__, "qualname") + + # generator names must be a string and cannot be deleted + self.assertRaises(TypeError, setattr, gen, '__name__', 123) + self.assertRaises(TypeError, setattr, gen, '__qualname__', 123) + self.assertRaises(TypeError, delattr, gen, '__name__') + self.assertRaises(TypeError, delattr, gen, '__qualname__') + + # modify names of the function creating the generator + func.__qualname__ = "func_qualname" + func.__name__ = "func_name" + gen = func() + self.assertEqual(gen.__name__, "func_name") + self.assertEqual(gen.__qualname__, "func_qualname") + + # unnamed generator + gen = (x for x in range(10)) + self.assertEqual(gen.__name__, + "") + self.assertEqual(gen.__qualname__, + "GeneratorTest.test_name..") + + def test_copy(self): + def f(): + yield 1 + g = f() + with self.assertRaises(TypeError): + copy.copy(g) + + def test_pickle(self): + def f(): + yield 1 + g = f() + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.assertRaises((TypeError, pickle.PicklingError)): + 
pickle.dumps(g, proto) + + def test_send_non_none_to_new_gen(self): + def f(): + yield 1 + g = f() + with self.assertRaises(TypeError): + g.send(0) + self.assertEqual(next(g), 1) + + def test_handle_frame_object_in_creation(self): + + #Attempt to expose partially constructed frames + #See https://github.com/python/cpython/issues/94262 + + def cb(*args): + inspect.stack() + + def gen(): + yield 1 + + thresholds = gc.get_threshold() + + gc.callbacks.append(cb) + gc.set_threshold(1, 0, 0) + try: + gen() + finally: + gc.set_threshold(*thresholds) + gc.callbacks.pop() + + class Sneaky: + def __del__(self): + inspect.stack() + + sneaky = Sneaky() + sneaky._s = Sneaky() + sneaky._s._s = sneaky + + gc.set_threshold(1, 0, 0) + try: + del sneaky + gen() + finally: + gc.set_threshold(*thresholds) + + def test_ag_frame_f_back(self): + async def f(): + yield + ag = f() + self.assertIsNone(ag.ag_frame.f_back) + + def test_cr_frame_f_back(self): + async def f(): + pass + cr = f() + self.assertIsNone(cr.cr_frame.f_back) + cr.close() # Suppress RuntimeWarning. + + def test_gi_frame_f_back(self): + def f(): + yield + gi = f() + self.assertIsNone(gi.gi_frame.f_back) + + def test_issue103488(self): + + def gen_raises(): + yield + raise ValueError() + + def loop(): + try: + for _ in gen_raises(): + if True is False: + return + except ValueError: + pass + + #This should not raise + loop() + + @unittest.expectedFailure + def test_genexpr_only_calls_dunder_iter_once(self): + + class Iterator: + + def __init__(self): + self.val = 0 + + def __next__(self): + if self.val == 2: + raise StopIteration + self.val += 1 + return self.val + + # No __iter__ method + + class C: + + def __iter__(self): + return Iterator() + + self.assertEqual([1,2], list(i for i in C())) + + +class ModifyUnderlyingIterableTest(__TestCase): + iterables = [ + range(0), + range(20), + [1, 2, 3], + (2,), + {13, 48, 211}, + frozenset((15, 8, 6)), + {1: 2, 3: 4}, + ] + + non_iterables = [ + None, + 42, + 3.0, + 2j, + ] + + def genexpr(self): + return (x for x in range(10)) + + def genfunc(self): + def gen(it): + for x in it: + yield x + return gen(range(10)) + + def process_tests(self, get_generator): + for obj in self.iterables: + g_obj = get_generator(obj) + with self.subTest(g_obj=g_obj, obj=obj): + self.assertListEqual(list(g_obj), list(obj)) + + g_iter = get_generator(iter(obj)) + with self.subTest(g_iter=g_iter, obj=obj): + self.assertListEqual(list(g_iter), list(obj)) + + err_regex = "'.*' object is not iterable" + for obj in self.non_iterables: + g_obj = get_generator(obj) + with self.subTest(g_obj=g_obj): + self.assertRaisesRegex(TypeError, err_regex, list, g_obj) + + def test_modify_f_locals(self): + def modify_f_locals(g, local, obj): + g.gi_frame.f_locals[local] = obj + return g + + def get_generator_genexpr(obj): + return modify_f_locals(self.genexpr(), '.0', obj) + + def get_generator_genfunc(obj): + return modify_f_locals(self.genfunc(), 'it', obj) + + self.process_tests(get_generator_genexpr) + self.process_tests(get_generator_genfunc) + + def test_new_gen_from_gi_code(self): + def new_gen_from_gi_code(g, obj): + generator_func = types.FunctionType(g.gi_code, {}) + return generator_func(obj) + + def get_generator_genexpr(obj): + return new_gen_from_gi_code(self.genexpr(), obj) + + def get_generator_genfunc(obj): + return new_gen_from_gi_code(self.genfunc(), obj) + + self.process_tests(get_generator_genexpr) + self.process_tests(get_generator_genfunc) + + +class ExceptionTest(__TestCase): + # Tests for the issue #23353: check that 
the currently handled exception + # is correctly saved/restored in PyEval_EvalFrameEx(). + + def test_except_throw(self): + def store_raise_exc_generator(): + try: + self.assertIsNone(sys.exception()) + yield + except Exception as exc: + # exception raised by gen.throw(exc) + self.assertIsInstance(sys.exception(), ValueError) + self.assertIsNone(exc.__context__) + yield + + # ensure that the exception is not lost + self.assertIsInstance(sys.exception(), ValueError) + yield + + # we should be able to raise back the ValueError + raise + + make = store_raise_exc_generator() + next(make) + + try: + raise ValueError() + except Exception as exc: + try: + make.throw(exc) + except Exception: + pass + + next(make) + with self.assertRaises(ValueError) as cm: + next(make) + self.assertIsNone(cm.exception.__context__) + + self.assertIsNone(sys.exception()) + + def test_except_next(self): + def gen(): + self.assertIsInstance(sys.exception(), ValueError) + yield "done" + + g = gen() + try: + raise ValueError + except Exception: + self.assertEqual(next(g), "done") + self.assertIsNone(sys.exception()) + + def test_except_gen_except(self): + def gen(): + try: + self.assertIsNone(sys.exception()) + yield + # we are called from "except ValueError:", TypeError must + # inherit ValueError in its context + raise TypeError() + except TypeError as exc: + self.assertIsInstance(sys.exception(), TypeError) + self.assertEqual(type(exc.__context__), ValueError) + # here we are still called from the "except ValueError:" + self.assertIsInstance(sys.exception(), ValueError) + yield + self.assertIsNone(sys.exception()) + yield "done" + + g = gen() + next(g) + try: + raise ValueError + except Exception: + next(g) + + self.assertEqual(next(g), "done") + self.assertIsNone(sys.exception()) + + def test_nested_gen_except_loop(self): + def gen(): + for i in range(100): + self.assertIsInstance(sys.exception(), TypeError) + yield "doing" + + def outer(): + try: + raise TypeError + except: + for x in gen(): + yield x + + try: + raise ValueError + except Exception: + for x in outer(): + self.assertEqual(x, "doing") + self.assertEqual(sys.exception(), None) + + def test_except_throw_exception_context(self): + def gen(): + try: + try: + self.assertIsNone(sys.exception()) + yield + except ValueError: + # we are called from "except ValueError:" + self.assertIsInstance(sys.exception(), ValueError) + raise TypeError() + except Exception as exc: + self.assertIsInstance(sys.exception(), TypeError) + self.assertEqual(type(exc.__context__), ValueError) + # we are still called from "except ValueError:" + self.assertIsInstance(sys.exception(), ValueError) + yield + self.assertIsNone(sys.exception()) + yield "done" + + g = gen() + next(g) + try: + raise ValueError + except Exception as exc: + g.throw(exc) + + self.assertEqual(next(g), "done") + self.assertIsNone(sys.exception()) + + def test_except_throw_bad_exception(self): + class E(Exception): + def __new__(cls, *args, **kwargs): + return cls + + def boring_generator(): + yield + + gen = boring_generator() + + err_msg = 'should have returned an instance of BaseException' + + with self.assertRaisesRegex(TypeError, err_msg): + gen.throw(E) + + self.assertRaises(StopIteration, next, gen) + + def generator(): + with self.assertRaisesRegex(TypeError, err_msg): + yield + + gen = generator() + next(gen) + with self.assertRaises(StopIteration): + gen.throw(E) + + def test_gen_3_arg_deprecation_warning(self): + def g(): + yield 42 + + gen = g() + with self.assertWarns(DeprecationWarning): + with 
self.assertRaises(TypeError): + gen.throw(TypeError, TypeError(24), None) + + def test_stopiteration_error(self): + # See also PEP 479. + + def gen(): + raise StopIteration + yield + + with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'): + next(gen()) + + def test_tutorial_stopiteration(self): + # Raise StopIteration" stops the generator too: + + def f(): + yield 1 + raise StopIteration + yield 2 # never reached + + g = f() + self.assertEqual(next(g), 1) + + with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'): + next(g) + + def test_return_tuple(self): + def g(): + return (yield 1) + + gen = g() + self.assertEqual(next(gen), 1) + with self.assertRaises(StopIteration) as cm: + gen.send((2,)) + self.assertEqual(cm.exception.value, (2,)) + + def test_return_stopiteration(self): + def g(): + return (yield 1) + + gen = g() + self.assertEqual(next(gen), 1) + with self.assertRaises(StopIteration) as cm: + gen.send(StopIteration(2)) + self.assertIsInstance(cm.exception.value, StopIteration) + self.assertEqual(cm.exception.value.value, 2) + + +class GeneratorCloseTest(__TestCase): + + def test_close_no_return_value(self): + def f(): + yield + + gen = f() + gen.send(None) + self.assertIsNone(gen.close()) + + def test_close_return_value(self): + def f(): + try: + yield + # close() raises GeneratorExit here, which is caught + except GeneratorExit: + return 0 + + gen = f() + gen.send(None) + self.assertEqual(gen.close(), 0) + + def test_close_not_catching_exit(self): + def f(): + yield + # close() raises GeneratorExit here, which isn't caught and + # therefore propagates -- no return value + return 0 + + gen = f() + gen.send(None) + self.assertIsNone(gen.close()) + + def test_close_not_started(self): + def f(): + try: + yield + except GeneratorExit: + return 0 + + gen = f() + self.assertIsNone(gen.close()) + + def test_close_exhausted(self): + def f(): + try: + yield + except GeneratorExit: + return 0 + + gen = f() + next(gen) + with self.assertRaises(StopIteration): + next(gen) + self.assertIsNone(gen.close()) + + def test_close_closed(self): + def f(): + try: + yield + except GeneratorExit: + return 0 + + gen = f() + gen.send(None) + self.assertEqual(gen.close(), 0) + self.assertIsNone(gen.close()) + + def test_close_raises(self): + def f(): + try: + yield + except GeneratorExit: + pass + raise RuntimeError + + gen = f() + gen.send(None) + with self.assertRaises(RuntimeError): + gen.close() + + def test_close_releases_frame_locals(self): + # See gh-118272 + + class Foo: + pass + + f = Foo() + f_wr = weakref.ref(f) + + def genfn(): + a = f + yield + + g = genfn() + next(g) + del f + g.close() + support.gc_collect() + self.assertIsNone(f_wr()) + + +class GeneratorThrowTest(__TestCase): + + def test_exception_context_with_yield(self): + def f(): + try: + raise KeyError('a') + except Exception: + yield + + gen = f() + gen.send(None) + with self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), context.args), (KeyError, ('a',))) + + def test_exception_context_with_yield_inside_generator(self): + # Check that the context is also available from inside the generator + # with yield, as opposed to outside. 
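        # Added note (not in the upstream CPython test): gen.throw(ValueError)
        # resumes the generator at its suspended yield, and because that yield
        # sits inside the "except Exception:" block where KeyError('a') is the
        # exception being handled, implicit exception chaining records the
        # KeyError as the ValueError's __context__.  The previous test observed
        # that chain from the caller; this one observes it inside the generator.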
+ def f(): + try: + raise KeyError('a') + except Exception: + try: + yield + except Exception as exc: + self.assertEqual(type(exc), ValueError) + context = exc.__context__ + self.assertEqual((type(context), context.args), + (KeyError, ('a',))) + yield 'b' + + gen = f() + gen.send(None) + actual = gen.throw(ValueError) + # This ensures that the assertions inside were executed. + self.assertEqual(actual, 'b') + + def test_exception_context_with_yield_from(self): + def f(): + yield + + def g(): + try: + raise KeyError('a') + except Exception: + yield from f() + + gen = g() + gen.send(None) + with self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), context.args), (KeyError, ('a',))) + + def test_exception_context_with_yield_from_with_context_cycle(self): + # Check trying to create an exception context cycle: + # https://bugs.python.org/issue40696 + has_cycle = None + + def f(): + yield + + def g(exc): + nonlocal has_cycle + try: + raise exc + except Exception: + try: + yield from f() + except Exception as exc: + has_cycle = (exc is exc.__context__) + yield + + exc = KeyError('a') + gen = g(exc) + gen.send(None) + gen.throw(exc) + # This also distinguishes from the initial has_cycle=None. + self.assertEqual(has_cycle, False) + + def test_throw_after_none_exc_type(self): + def g(): + try: + raise KeyError + except KeyError: + pass + + try: + yield + except Exception: + raise RuntimeError + + gen = g() + gen.send(None) + with self.assertRaises(RuntimeError) as cm: + gen.throw(ValueError) + + +class GeneratorStackTraceTest(__TestCase): + + def check_stack_names(self, frame, expected): + names = [] + while frame: + name = frame.f_code.co_name + # Stop checking frames when we get to our test helper. 
+ if name.startswith('check_') or name.startswith('call_'): + break + + names.append(name) + frame = frame.f_back + + self.assertEqual(names, expected) + + def check_yield_from_example(self, call_method): + def f(): + self.check_stack_names(sys._getframe(), ['f', 'g']) + try: + yield + except Exception: + pass + self.check_stack_names(sys._getframe(), ['f', 'g']) + + def g(): + self.check_stack_names(sys._getframe(), ['g']) + yield from f() + self.check_stack_names(sys._getframe(), ['g']) + + gen = g() + gen.send(None) + try: + call_method(gen) + except StopIteration: + pass + + def test_send_with_yield_from(self): + def call_send(gen): + gen.send(None) + + self.check_yield_from_example(call_send) + + def test_throw_with_yield_from(self): + def call_throw(gen): + gen.throw(RuntimeError) + + self.check_yield_from_example(call_throw) + + +class YieldFromTests(__TestCase): + def test_generator_gi_yieldfrom(self): + def a(): + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING) + self.assertIsNone(gen_b.gi_yieldfrom) + yield + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING) + self.assertIsNone(gen_b.gi_yieldfrom) + + def b(): + self.assertIsNone(gen_b.gi_yieldfrom) + yield from a() + self.assertIsNone(gen_b.gi_yieldfrom) + yield + self.assertIsNone(gen_b.gi_yieldfrom) + + gen_b = b() + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CREATED) + self.assertIsNone(gen_b.gi_yieldfrom) + + gen_b.send(None) + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED) + self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a') + + gen_b.send(None) + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED) + self.assertIsNone(gen_b.gi_yieldfrom) + + [] = gen_b # Exhaust generator + self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CLOSED) + self.assertIsNone(gen_b.gi_yieldfrom) + + +tutorial_tests = """ +Let's try a simple generator: + + >>> def f(): + ... yield 1 + ... yield 2 + + >>> for i in f(): + ... print(i) + 1 + 2 + >>> g = f() + >>> next(g) + 1 + >>> next(g) + 2 + +"Falling off the end" stops the generator: + + >>> next(g) + Traceback (most recent call last): + File "", line 1, in ? + File "", line 2, in g + StopIteration + +"return" also stops the generator: + + >>> def f(): + ... yield 1 + ... return + ... yield 2 # never reached + ... + >>> g = f() + >>> next(g) + 1 + >>> next(g) + Traceback (most recent call last): + File "", line 1, in ? + File "", line 3, in f + StopIteration + >>> next(g) # once stopped, can't be resumed + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + +However, "return" and StopIteration are not exactly equivalent: + + >>> def g1(): + ... try: + ... return + ... except: + ... yield 1 + ... + >>> list(g1()) + [] + + >>> def g2(): + ... try: + ... raise StopIteration + ... except: + ... yield 42 + >>> print(list(g2())) + [42] + +This may be surprising at first: + + >>> def g3(): + ... try: + ... return + ... finally: + ... yield 1 + ... + >>> list(g3()) + [1] + +Let's create an alternate range() function implemented as a generator: + + >>> def yrange(n): + ... for i in range(n): + ... yield i + ... + >>> list(yrange(5)) + [0, 1, 2, 3, 4] + +Generators always return to the most recent caller: + + >>> def creator(): + ... r = yrange(5) + ... print("creator", next(r)) + ... return r + ... + >>> def caller(): + ... r = creator() + ... for i in r: + ... print("caller", i) + ... 
+ >>> caller() + creator 0 + caller 1 + caller 2 + caller 3 + caller 4 + +Generators can call other generators: + + >>> def zrange(n): + ... for i in yrange(n): + ... yield i + ... + >>> list(zrange(5)) + [0, 1, 2, 3, 4] + +""" + +# The examples from PEP 255. + +pep_tests = """ + +Specification: Yield + + Restriction: A generator cannot be resumed while it is actively + running: + + >>> def g(): + ... i = next(me) + ... yield i + >>> me = g() + >>> next(me) + Traceback (most recent call last): + ... + File "", line 2, in g + ValueError: generator already executing + +Specification: Return + + Note that return isn't always equivalent to raising StopIteration: the + difference lies in how enclosing try/except constructs are treated. + For example, + + >>> def f1(): + ... try: + ... return + ... except: + ... yield 1 + >>> print(list(f1())) + [] + + because, as in any function, return simply exits, but + + >>> def f2(): + ... try: + ... raise StopIteration + ... except: + ... yield 42 + >>> print(list(f2())) + [42] + + because StopIteration is captured by a bare "except", as is any + exception. + +Specification: Generators and Exception Propagation + + >>> def f(): + ... return 1//0 + >>> def g(): + ... yield f() # the zero division exception propagates + ... yield 42 # and we'll never get here + >>> k = g() + >>> next(k) + Traceback (most recent call last): + File "", line 1, in ? + File "", line 2, in g + File "", line 2, in f + ZeroDivisionError: integer division or modulo by zero + >>> next(k) # and the generator cannot be resumed + Traceback (most recent call last): + File "", line 1, in ? + StopIteration + >>> + +Specification: Try/Except/Finally + + >>> def f(): + ... try: + ... yield 1 + ... try: + ... yield 2 + ... 1//0 + ... yield 3 # never get here + ... except ZeroDivisionError: + ... yield 4 + ... yield 5 + ... raise + ... except: + ... yield 6 + ... yield 7 # the "raise" above stops this + ... except: + ... yield 8 + ... yield 9 + ... try: + ... x = 12 + ... finally: + ... yield 10 + ... yield 11 + >>> print(list(f())) + [1, 2, 4, 5, 8, 9, 10, 11] + >>> + +Guido's binary tree example. + + >>> # A binary tree class. + >>> class Tree: + ... + ... def __init__(self, label, left=None, right=None): + ... self.label = label + ... self.left = left + ... self.right = right + ... + ... def __repr__(self, level=0, indent=" "): + ... s = level*indent + repr(self.label) + ... if self.left: + ... s = s + "\\n" + self.left.__repr__(level+1, indent) + ... if self.right: + ... s = s + "\\n" + self.right.__repr__(level+1, indent) + ... return s + ... + ... def __iter__(self): + ... return inorder(self) + + >>> # Create a Tree from a list. + >>> def tree(list): + ... n = len(list) + ... if n == 0: + ... return [] + ... i = n // 2 + ... return Tree(list[i], tree(list[:i]), tree(list[i+1:])) + + >>> # Show it off: create a tree. + >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + + >>> # A recursive generator that generates Tree labels in in-order. + >>> def inorder(t): + ... if t: + ... for x in inorder(t.left): + ... yield x + ... yield t.label + ... for x in inorder(t.right): + ... yield x + + >>> # Show it off: create a tree. + >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + >>> # Print the nodes of the tree in in-order. + >>> for x in t: + ... print(' '+x, end='') + A B C D E F G H I J K L M N O P Q R S T U V W X Y Z + + >>> # A non-recursive generator. + >>> def inorder(node): + ... stack = [] + ... while node: + ... while node.left: + ... stack.append(node) + ... node = node.left + ... 
yield node.label + ... while not node.right: + ... try: + ... node = stack.pop() + ... except IndexError: + ... return + ... yield node.label + ... node = node.right + + >>> # Exercise the non-recursive generator. + >>> for x in t: + ... print(' '+x, end='') + A B C D E F G H I J K L M N O P Q R S T U V W X Y Z + +""" + +# Examples from Iterator-List and Python-Dev and c.l.py. + +email_tests = """ + +The difference between yielding None and returning it. + +>>> def g(): +... for i in range(3): +... yield None +... yield None +... return +>>> list(g()) +[None, None, None, None] + +Ensure that explicitly raising StopIteration acts like any other exception +in try/except, not like a return. + +>>> def g(): +... yield 1 +... try: +... raise StopIteration +... except: +... yield 2 +... yield 3 +>>> list(g()) +[1, 2, 3] + +Next one was posted to c.l.py. + +>>> def gcomb(x, k): +... "Generate all combinations of k elements from list x." +... +... if k > len(x): +... return +... if k == 0: +... yield [] +... else: +... first, rest = x[0], x[1:] +... # A combination does or doesn't contain first. +... # If it does, the remainder is a k-1 comb of rest. +... for c in gcomb(rest, k-1): +... c.insert(0, first) +... yield c +... # If it doesn't contain first, it's a k comb of rest. +... for c in gcomb(rest, k): +... yield c + +>>> seq = list(range(1, 5)) +>>> for k in range(len(seq) + 2): +... print("%d-combs of %s:" % (k, seq)) +... for c in gcomb(seq, k): +... print(" ", c) +0-combs of [1, 2, 3, 4]: + [] +1-combs of [1, 2, 3, 4]: + [1] + [2] + [3] + [4] +2-combs of [1, 2, 3, 4]: + [1, 2] + [1, 3] + [1, 4] + [2, 3] + [2, 4] + [3, 4] +3-combs of [1, 2, 3, 4]: + [1, 2, 3] + [1, 2, 4] + [1, 3, 4] + [2, 3, 4] +4-combs of [1, 2, 3, 4]: + [1, 2, 3, 4] +5-combs of [1, 2, 3, 4]: + +From the Iterators list, about the types of these things. + +>>> def g(): +... yield 1 +... +>>> type(g) + +>>> i = g() +>>> type(i) + +>>> [s for s in dir(i) if not s.startswith('_')] +['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_suspended', 'gi_yieldfrom', 'send', 'throw'] +>>> from test.support import HAVE_DOCSTRINGS +>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).') +Implement next(self). +>>> iter(i) is i +True +>>> import types +>>> isinstance(i, types.GeneratorType) +True + +And more, added later. + +>>> i.gi_running +0 +>>> type(i.gi_frame) + +>>> i.gi_running = 42 +Traceback (most recent call last): + ... +AttributeError: attribute 'gi_running' of 'generator' objects is not writable +>>> def g(): +... yield me.gi_running +>>> me = g() +>>> me.gi_running +0 +>>> next(me) +1 +>>> me.gi_running +0 + +A clever union-find implementation from c.l.py, due to David Eppstein. +Sent: Friday, June 29, 2001 12:16 PM +To: python-list@python.org +Subject: Re: PEP 255: Simple Generators + +>>> class disjointSet: +... def __init__(self, name): +... self.name = name +... self.parent = None +... self.generator = self.generate() +... +... def generate(self): +... while not self.parent: +... yield self +... for x in self.parent.generator: +... yield x +... +... def find(self): +... return next(self.generator) +... +... def union(self, parent): +... if self.parent: +... raise ValueError("Sorry, I'm not a root!") +... self.parent = parent +... +... def __str__(self): +... return self.name + +>>> names = "ABCDEFGHIJKLM" +>>> sets = [disjointSet(name) for name in names] +>>> roots = sets[:] + +>>> import random +>>> gen = random.Random(42) +>>> while 1: +... for s in sets: +... 
print(" %s->%s" % (s, s.find()), end='') +... print() +... if len(roots) > 1: +... s1 = gen.choice(roots) +... roots.remove(s1) +... s2 = gen.choice(roots) +... s1.union(s2) +... print("merged", s1, "into", s2) +... else: +... break + A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M +merged K into B + A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M +merged A into F + A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M +merged E into F + A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M +merged D into C + A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M +merged M into C + A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C +merged J into B + A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C +merged B into C + A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C +merged F into G + A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C +merged L into C + A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C +merged G into I + A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C +merged I into H + A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C +merged C into H + A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H + +""" +# Emacs turd ' + +# Fun tests (for sufficiently warped notions of "fun"). + +fun_tests = """ + +Build up to a recursive Sieve of Eratosthenes generator. + +>>> def firstn(g, n): +... return [next(g) for i in range(n)] + +>>> def intsfrom(i): +... while 1: +... yield i +... i += 1 + +>>> firstn(intsfrom(5), 7) +[5, 6, 7, 8, 9, 10, 11] + +>>> def exclude_multiples(n, ints): +... for i in ints: +... if i % n: +... yield i + +>>> firstn(exclude_multiples(3, intsfrom(1)), 6) +[1, 2, 4, 5, 7, 8] + +>>> def sieve(ints): +... prime = next(ints) +... yield prime +... not_divisible_by_prime = exclude_multiples(prime, ints) +... for p in sieve(not_divisible_by_prime): +... yield p + +>>> primes = sieve(intsfrom(2)) +>>> firstn(primes, 20) +[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71] + + +Another famous problem: generate all integers of the form + 2**i * 3**j * 5**k +in increasing order, where i,j,k >= 0. Trickier than it may look at first! +Try writing it without generators, and correctly, and without generating +3 internal results for each result output. + +>>> def times(n, g): +... for i in g: +... yield n * i +>>> firstn(times(10, intsfrom(1)), 10) +[10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + +>>> def merge(g, h): +... ng = next(g) +... nh = next(h) +... while 1: +... if ng < nh: +... yield ng +... ng = next(g) +... elif ng > nh: +... yield nh +... nh = next(h) +... else: +... yield ng +... ng = next(g) +... nh = next(h) + +The following works, but is doing a whale of a lot of redundant work -- +it's not clear how to get the internal uses of m235 to share a single +generator. Note that me_times2 (etc) each need to see every element in the +result sequence. So this is an example where lazy lists are more natural +(you can look at the head of a lazy list any number of times). + +>>> def m235(): +... yield 1 +... me_times2 = times(2, m235()) +... me_times3 = times(3, m235()) +... me_times5 = times(5, m235()) +... for i in merge(merge(me_times2, +... me_times3), +... me_times5): +... 
yield i + +Don't print "too many" of these -- the implementation above is extremely +inefficient: each call of m235() leads to 3 recursive calls, and in +turn each of those 3 more, and so on, and so on, until we've descended +enough levels to satisfy the print stmts. Very odd: when I printed 5 +lines of results below, this managed to screw up Win98's malloc in "the +usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting +address space, and it *looked* like a very slow leak. + +>>> result = m235() +>>> for i in range(3): +... print(firstn(result, 15)) +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] + +Heh. Here's one way to get a shared list, complete with an excruciating +namespace renaming trick. The *pretty* part is that the times() and merge() +functions can be reused as-is, because they only assume their stream +arguments are iterable -- a LazyList is the same as a generator to times(). + +>>> class LazyList: +... def __init__(self, g): +... self.sofar = [] +... self.fetch = g.__next__ +... +... def __getitem__(self, i): +... sofar, fetch = self.sofar, self.fetch +... while i >= len(sofar): +... sofar.append(fetch()) +... return sofar[i] + +>>> def m235(): +... yield 1 +... # Gack: m235 below actually refers to a LazyList. +... me_times2 = times(2, m235) +... me_times3 = times(3, m235) +... me_times5 = times(5, m235) +... for i in merge(merge(me_times2, +... me_times3), +... me_times5): +... yield i + +Print as many of these as you like -- *this* implementation is memory- +efficient. + +>>> m235 = LazyList(m235()) +>>> for i in range(5): +... print([m235[j] for j in range(15*i, 15*(i+1))]) +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] +[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] +[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] + +Ye olde Fibonacci generator, LazyList style. + +>>> def fibgen(a, b): +... +... def sum(g, h): +... while 1: +... yield next(g) + next(h) +... +... def tail(g): +... next(g) # throw first away +... for x in g: +... yield x +... +... yield a +... yield b +... for s in sum(iter(fib), +... tail(iter(fib))): +... yield s + +>>> fib = LazyList(fibgen(1, 2)) +>>> firstn(iter(fib), 17) +[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] + + +Running after your tail with itertools.tee (new in version 2.4) + +The algorithms "m235" (Hamming) and Fibonacci presented above are both +examples of a whole family of FP (functional programming) algorithms +where a function produces and returns a list while the production algorithm +suppose the list as already produced by recursively calling itself. +For these algorithms to work, they must: + +- produce at least a first element without presupposing the existence of + the rest of the list +- produce their elements in a lazy manner + +To work efficiently, the beginning of the list must not be recomputed over +and over again. This is ensured in most FP languages as a built-in feature. +In python, we have to explicitly maintain a list of already computed results +and abandon genuine recursivity. + +This is what had been attempted above with the LazyList class. 
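A small added illustration (not part of the original tutorial text): LazyList
memoizes every element it has fetched in its sofar list, so repeated indexing
is served from that cache instead of re-running the generator.

>>> squares = LazyList(i*i for i in range(10))
>>> squares[3], squares[3], len(squares.sofar)
(9, 9, 4)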
One problem +with that class is that it keeps a list of all of the generated results and +therefore continually grows. This partially defeats the goal of the generator +concept, viz. produce the results only as needed instead of producing them +all and thereby wasting memory. + +Thanks to itertools.tee, it is now clear "how to get the internal uses of +m235 to share a single generator". + +>>> from itertools import tee +>>> def m235(): +... def _m235(): +... yield 1 +... for n in merge(times(2, m2), +... merge(times(3, m3), +... times(5, m5))): +... yield n +... m1 = _m235() +... m2, m3, m5, mRes = tee(m1, 4) +... return mRes + +>>> it = m235() +>>> for i in range(5): +... print(firstn(it, 15)) +[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24] +[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80] +[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192] +[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384] +[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675] + +The "tee" function does just what we want. It internally keeps a generated +result for as long as it has not been "consumed" from all of the duplicated +iterators, whereupon it is deleted. You can therefore print the hamming +sequence during hours without increasing memory usage, or very little. + +The beauty of it is that recursive running-after-their-tail FP algorithms +are quite straightforwardly expressed with this Python idiom. + +Ye olde Fibonacci generator, tee style. + +>>> def fib(): +... +... def _isum(g, h): +... while 1: +... yield next(g) + next(h) +... +... def _fib(): +... yield 1 +... yield 2 +... next(fibTail) # throw first away +... for res in _isum(fibHead, fibTail): +... yield res +... +... realfib = _fib() +... fibHead, fibTail, fibRes = tee(realfib, 3) +... return fibRes + +>>> firstn(fib(), 17) +[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584] + +""" + +# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0 +# hackery. + +syntax_tests = """ + +These are fine: + +>>> def f(): +... yield 1 +... return + +>>> def f(): +... try: +... yield 1 +... finally: +... pass + +>>> def f(): +... try: +... try: +... 1//0 +... except ZeroDivisionError: +... yield 666 +... except: +... pass +... finally: +... pass + +>>> def f(): +... try: +... try: +... yield 12 +... 1//0 +... except ZeroDivisionError: +... yield 666 +... except: +... try: +... x = 12 +... finally: +... yield 12 +... except: +... return +>>> list(f()) +[12, 666] + +>>> def f(): +... yield +>>> type(f()) + + + +>>> def f(): +... if 0: +... yield +>>> type(f()) + + + +>>> def f(): +... if 0: +... yield 1 +>>> type(f()) + + +>>> def f(): +... if "": +... yield None +>>> type(f()) + + +>>> def f(): +... return +... try: +... if x==4: +... pass +... elif 0: +... try: +... 1//0 +... except SyntaxError: +... pass +... else: +... if 0: +... while 12: +... x += 1 +... yield 2 # don't blink +... f(a, b, c, d, e) +... else: +... pass +... except: +... x = 1 +... return +>>> type(f()) + + +>>> def f(): +... if 0: +... def g(): +... yield 1 +... +>>> type(f()) + + +>>> def f(): +... if 0: +... class C: +... def __init__(self): +... yield 1 +... def f(self): +... yield 2 +>>> type(f()) + + +>>> def f(): +... if 0: +... return +... if 0: +... yield 2 +>>> type(f()) + + +This one caused a crash (see SF bug 567538): + +>>> def f(): +... for i in range(3): +... try: +... continue +... finally: +... yield i +... 
+>>> g = f() +>>> print(next(g)) +0 +>>> print(next(g)) +1 +>>> print(next(g)) +2 +>>> print(next(g)) +Traceback (most recent call last): +StopIteration + + +Test the gi_code attribute + +>>> def f(): +... yield 5 +... +>>> g = f() +>>> g.gi_code is f.__code__ +True +>>> next(g) +5 +>>> next(g) +Traceback (most recent call last): +StopIteration +>>> g.gi_code is f.__code__ +True + + +Test the __name__ attribute and the repr() + +>>> def f(): +... yield 5 +... +>>> g = f() +>>> g.__name__ +'f' +>>> repr(g) # doctest: +ELLIPSIS +'' + +Lambdas shouldn't have their usual return behavior. + +>>> x = lambda: (yield 1) +>>> list(x()) +[1] + +>>> x = lambda: ((yield 1), (yield 2)) +>>> list(x()) +[1, 2] +""" + +# conjoin is a simple backtracking generator, named in honor of Icon's +# "conjunction" control structure. Pass a list of no-argument functions +# that return iterable objects. Easiest to explain by example: assume the +# function list [x, y, z] is passed. Then conjoin acts like: +# +# def g(): +# values = [None] * 3 +# for values[0] in x(): +# for values[1] in y(): +# for values[2] in z(): +# yield values +# +# So some 3-lists of values *may* be generated, each time we successfully +# get into the innermost loop. If an iterator fails (is exhausted) before +# then, it "backtracks" to get the next value from the nearest enclosing +# iterator (the one "to the left"), and starts all over again at the next +# slot (pumps a fresh iterator). Of course this is most useful when the +# iterators have side-effects, so that which values *can* be generated at +# each slot depend on the values iterated at previous slots. + +def simple_conjoin(gs): + + values = [None] * len(gs) + + def gen(i): + if i >= len(gs): + yield values + else: + for values[i] in gs[i](): + for x in gen(i+1): + yield x + + for x in gen(0): + yield x + +# That works fine, but recursing a level and checking i against len(gs) for +# each item produced is inefficient. By doing manual loop unrolling across +# generator boundaries, it's possible to eliminate most of that overhead. +# This isn't worth the bother *in general* for generators, but conjoin() is +# a core building block for some CPU-intensive generator applications. + +def conjoin(gs): + + n = len(gs) + values = [None] * n + + # Do one loop nest at time recursively, until the # of loop nests + # remaining is divisible by 3. + + def gen(i): + if i >= n: + yield values + + elif (n-i) % 3: + ip1 = i+1 + for values[i] in gs[i](): + for x in gen(ip1): + yield x + + else: + for x in _gen3(i): + yield x + + # Do three loop nests at a time, recursing only if at least three more + # remain. Don't call directly: this is an internal optimization for + # gen's use. + + def _gen3(i): + assert i < n and (n-i) % 3 == 0 + ip1, ip2, ip3 = i+1, i+2, i+3 + g, g1, g2 = gs[i : ip3] + + if ip3 >= n: + # These are the last three, so we can yield values directly. + for values[i] in g(): + for values[ip1] in g1(): + for values[ip2] in g2(): + yield values + + else: + # At least 6 loop nests remain; peel off 3 and recurse for the + # rest. + for values[i] in g(): + for values[ip1] in g1(): + for values[ip2] in g2(): + for x in _gen3(ip3): + yield x + + for x in gen(0): + yield x + +# And one more approach: For backtracking apps like the Knight's Tour +# solver below, the number of backtracking levels can be enormous (one +# level per square, for the Knight's Tour, so that e.g. a 100x100 board +# needs 10,000 levels). In such cases Python is likely to run out of +# stack space due to recursion. 
So here's a recursion-free version of +# conjoin too. +# NOTE WELL: This allows large problems to be solved with only trivial +# demands on stack space. Without explicitly resumable generators, this is +# much harder to achieve. OTOH, this is much slower (up to a factor of 2) +# than the fancy unrolled recursive conjoin. + +def flat_conjoin(gs): # rename to conjoin to run tests with this instead + n = len(gs) + values = [None] * n + iters = [None] * n + _StopIteration = StopIteration # make local because caught a *lot* + i = 0 + while 1: + # Descend. + try: + while i < n: + it = iters[i] = gs[i]().__next__ + values[i] = it() + i += 1 + except _StopIteration: + pass + else: + assert i == n + yield values + + # Backtrack until an older iterator can be resumed. + i -= 1 + while i >= 0: + try: + values[i] = iters[i]() + # Success! Start fresh at next level. + i += 1 + break + except _StopIteration: + # Continue backtracking. + i -= 1 + else: + assert i < 0 + break + +# A conjoin-based N-Queens solver. + +class Queens: + def __init__(self, n): + self.n = n + rangen = range(n) + + # Assign a unique int to each column and diagonal. + # columns: n of those, range(n). + # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along + # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0- + # based. + # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along + # each, smallest i+j is 0, largest is 2n-2. + + # For each square, compute a bit vector of the columns and + # diagonals it covers, and for each row compute a function that + # generates the possibilities for the columns in that row. + self.rowgenerators = [] + for i in rangen: + rowuses = [(1 << j) | # column ordinal + (1 << (n + i-j + n-1)) | # NW-SE ordinal + (1 << (n + 2*n-1 + i+j)) # NE-SW ordinal + for j in rangen] + + def rowgen(rowuses=rowuses): + for j in rangen: + uses = rowuses[j] + if uses & self.used == 0: + self.used |= uses + yield j + self.used &= ~uses + + self.rowgenerators.append(rowgen) + + # Generate solutions. + def solve(self): + self.used = 0 + for row2col in conjoin(self.rowgenerators): + yield row2col + + def printsolution(self, row2col): + n = self.n + assert n == len(row2col) + sep = "+" + "-+" * n + print(sep) + for i in range(n): + squares = [" " for j in range(n)] + squares[row2col[i]] = "Q" + print("|" + "|".join(squares) + "|") + print(sep) + +# A conjoin-based Knight's Tour solver. This is pretty sophisticated +# (e.g., when used with flat_conjoin above, and passing hard=1 to the +# constructor, a 200x200 Knight's Tour was found quickly -- note that we're +# creating 10s of thousands of generators then!), and is lengthy. + +class Knights: + def __init__(self, m, n, hard=0): + self.m, self.n = m, n + + # solve() will set up succs[i] to be a list of square #i's + # successors. + succs = self.succs = [] + + # Remove i0 from each of its successor's successor lists, i.e. + # successors can't go back to i0 again. Return 0 if we can + # detect this makes a solution impossible, else return 1. + + def remove_from_successors(i0, len=len): + # If we remove all exits from a free square, we're dead: + # even if we move to it next, we can't leave it again. + # If we create a square with one exit, we must visit it next; + # else somebody else will have to visit it, and since there's + # only one adjacent, there won't be a way to leave it again. + # Finally, if we create more than one free square with a + # single exit, we can only move to one of them next, leaving + # the other one a dead end. 
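            # Added illustration (not in the upstream test): on a 3x3 board the
            # corner square 0 has exactly two knight exits, squares 5 and 7
            # (coords (1, 2) and (2, 1)).  Removing square 0 leaves each of them
            # with a single exit, so the counters below end up with ne0 == 0 and
            # ne1 == 2, and the helper reports the position as hopeless.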
+ ne0 = ne1 = 0 + for i in succs[i0]: + s = succs[i] + s.remove(i0) + e = len(s) + if e == 0: + ne0 += 1 + elif e == 1: + ne1 += 1 + return ne0 == 0 and ne1 < 2 + + # Put i0 back in each of its successor's successor lists. + + def add_to_successors(i0): + for i in succs[i0]: + succs[i].append(i0) + + # Generate the first move. + def first(): + if m < 1 or n < 1: + return + + # Since we're looking for a cycle, it doesn't matter where we + # start. Starting in a corner makes the 2nd move easy. + corner = self.coords2index(0, 0) + remove_from_successors(corner) + self.lastij = corner + yield corner + add_to_successors(corner) + + # Generate the second moves. + def second(): + corner = self.coords2index(0, 0) + assert self.lastij == corner # i.e., we started in the corner + if m < 3 or n < 3: + return + assert len(succs[corner]) == 2 + assert self.coords2index(1, 2) in succs[corner] + assert self.coords2index(2, 1) in succs[corner] + # Only two choices. Whichever we pick, the other must be the + # square picked on move m*n, as it's the only way to get back + # to (0, 0). Save its index in self.final so that moves before + # the last know it must be kept free. + for i, j in (1, 2), (2, 1): + this = self.coords2index(i, j) + final = self.coords2index(3-i, 3-j) + self.final = final + + remove_from_successors(this) + succs[final].append(corner) + self.lastij = this + yield this + succs[final].remove(corner) + add_to_successors(this) + + # Generate moves 3 through m*n-1. + def advance(len=len): + # If some successor has only one exit, must take it. + # Else favor successors with fewer exits. + candidates = [] + for i in succs[self.lastij]: + e = len(succs[i]) + assert e > 0, "else remove_from_successors() pruning flawed" + if e == 1: + candidates = [(e, i)] + break + candidates.append((e, i)) + else: + candidates.sort() + + for e, i in candidates: + if i != self.final: + if remove_from_successors(i): + self.lastij = i + yield i + add_to_successors(i) + + # Generate moves 3 through m*n-1. Alternative version using a + # stronger (but more expensive) heuristic to order successors. + # Since the # of backtracking levels is m*n, a poor move early on + # can take eons to undo. Smallest square board for which this + # matters a lot is 52x52. + def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len): + # If some successor has only one exit, must take it. + # Else favor successors with fewer exits. + # Break ties via max distance from board centerpoint (favor + # corners and edges whenever possible). + candidates = [] + for i in succs[self.lastij]: + e = len(succs[i]) + assert e > 0, "else remove_from_successors() pruning flawed" + if e == 1: + candidates = [(e, 0, i)] + break + i1, j1 = self.index2coords(i) + d = (i1 - vmid)**2 + (j1 - hmid)**2 + candidates.append((e, -d, i)) + else: + candidates.sort() + + for e, d, i in candidates: + if i != self.final: + if remove_from_successors(i): + self.lastij = i + yield i + add_to_successors(i) + + # Generate the last move. 
+ def last(): + assert self.final in succs[self.lastij] + yield self.final + + if m*n < 4: + self.squaregenerators = [first] + else: + self.squaregenerators = [first, second] + \ + [hard and advance_hard or advance] * (m*n - 3) + \ + [last] + + def coords2index(self, i, j): + assert 0 <= i < self.m + assert 0 <= j < self.n + return i * self.n + j + + def index2coords(self, index): + assert 0 <= index < self.m * self.n + return divmod(index, self.n) + + def _init_board(self): + succs = self.succs + del succs[:] + m, n = self.m, self.n + c2i = self.coords2index + + offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2), + (-1, -2), (-2, -1), (-2, 1), (-1, 2)] + rangen = range(n) + for i in range(m): + for j in rangen: + s = [c2i(i+io, j+jo) for io, jo in offsets + if 0 <= i+io < m and + 0 <= j+jo < n] + succs.append(s) + + # Generate solutions. + def solve(self): + self._init_board() + for x in conjoin(self.squaregenerators): + yield x + + def printsolution(self, x): + m, n = self.m, self.n + assert len(x) == m*n + w = len(str(m*n)) + format = "%" + str(w) + "d" + + squares = [[None] * n for i in range(m)] + k = 1 + for i in x: + i1, j1 = self.index2coords(i) + squares[i1][j1] = format % k + k += 1 + + sep = "+" + ("-" * w + "+") * n + print(sep) + for i in range(m): + row = squares[i] + print("|" + "|".join(row) + "|") + print(sep) + +conjoin_tests = """ + +Generate the 3-bit binary numbers in order. This illustrates dumbest- +possible use of conjoin, just to generate the full cross-product. + +>>> for c in conjoin([lambda: iter((0, 1))] * 3): +... print(c) +[0, 0, 0] +[0, 0, 1] +[0, 1, 0] +[0, 1, 1] +[1, 0, 0] +[1, 0, 1] +[1, 1, 0] +[1, 1, 1] + +For efficiency in typical backtracking apps, conjoin() yields the same list +object each time. So if you want to save away a full account of its +generated sequence, you need to copy its results. + +>>> def gencopy(iterator): +... for x in iterator: +... yield x[:] + +>>> for n in range(10): +... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n))) +... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n) +0 1 True True +1 2 True True +2 4 True True +3 8 True True +4 16 True True +5 32 True True +6 64 True True +7 128 True True +8 256 True True +9 512 True True + +And run an 8-queens solver. + +>>> q = Queens(8) +>>> LIMIT = 2 +>>> count = 0 +>>> for row2col in q.solve(): +... count += 1 +... if count <= LIMIT: +... print("Solution", count) +... q.printsolution(row2col) +Solution 1 ++-+-+-+-+-+-+-+-+ +|Q| | | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | |Q| | | | ++-+-+-+-+-+-+-+-+ +| | | | | | | |Q| ++-+-+-+-+-+-+-+-+ +| | | | | |Q| | | ++-+-+-+-+-+-+-+-+ +| | |Q| | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | | |Q| | ++-+-+-+-+-+-+-+-+ +| |Q| | | | | | | ++-+-+-+-+-+-+-+-+ +| | | |Q| | | | | ++-+-+-+-+-+-+-+-+ +Solution 2 ++-+-+-+-+-+-+-+-+ +|Q| | | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | |Q| | | ++-+-+-+-+-+-+-+-+ +| | | | | | | |Q| ++-+-+-+-+-+-+-+-+ +| | |Q| | | | | | ++-+-+-+-+-+-+-+-+ +| | | | | | |Q| | ++-+-+-+-+-+-+-+-+ +| | | |Q| | | | | ++-+-+-+-+-+-+-+-+ +| |Q| | | | | | | ++-+-+-+-+-+-+-+-+ +| | | | |Q| | | | ++-+-+-+-+-+-+-+-+ + +>>> print(count, "solutions in all.") +92 solutions in all. + +And run a Knight's Tour on a 10x10 board. Note that there are about +20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion. + +>>> k = Knights(10, 10) +>>> LIMIT = 2 +>>> count = 0 +>>> for x in k.solve(): +... count += 1 +... if count <= LIMIT: +... print("Solution", count) +... k.printsolution(x) +... else: +... 
break +Solution 1 ++---+---+---+---+---+---+---+---+---+---+ +| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8| ++---+---+---+---+---+---+---+---+---+---+ +| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11| ++---+---+---+---+---+---+---+---+---+---+ +| 59|100| 73| 36| 41| 56| 39| 32| 9| 6| ++---+---+---+---+---+---+---+---+---+---+ +| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31| ++---+---+---+---+---+---+---+---+---+---+ +| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50| ++---+---+---+---+---+---+---+---+---+---+ +| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13| ++---+---+---+---+---+---+---+---+---+---+ +| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44| ++---+---+---+---+---+---+---+---+---+---+ +| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17| ++---+---+---+---+---+---+---+---+---+---+ +| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66| ++---+---+---+---+---+---+---+---+---+---+ +| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15| ++---+---+---+---+---+---+---+---+---+---+ +Solution 2 ++---+---+---+---+---+---+---+---+---+---+ +| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8| ++---+---+---+---+---+---+---+---+---+---+ +| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11| ++---+---+---+---+---+---+---+---+---+---+ +| 59|100| 73| 36| 41| 56| 39| 32| 9| 6| ++---+---+---+---+---+---+---+---+---+---+ +| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31| ++---+---+---+---+---+---+---+---+---+---+ +| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50| ++---+---+---+---+---+---+---+---+---+---+ +| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13| ++---+---+---+---+---+---+---+---+---+---+ +| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44| ++---+---+---+---+---+---+---+---+---+---+ +| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17| ++---+---+---+---+---+---+---+---+---+---+ +| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66| ++---+---+---+---+---+---+---+---+---+---+ +| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15| ++---+---+---+---+---+---+---+---+---+---+ +""" + +weakref_tests = """\ +Generators are weakly referencable: + +>>> import weakref +>>> def gen(): +... yield 'foo!' +... +>>> wr = weakref.ref(gen) +>>> wr() is gen +True +>>> p = weakref.proxy(gen) + +Generator-iterators are weakly referencable as well: + +>>> gi = gen() +>>> wr = weakref.ref(gi) +>>> wr() is gi +True +>>> p = weakref.proxy(gi) +>>> list(p) +['foo!'] + +""" + +coroutine_tests = """\ +>>> from test.support import gc_collect + +Sending a value into a started generator: + +>>> def f(): +... print((yield 1)) +... yield 2 +>>> g = f() +>>> next(g) +1 +>>> g.send(42) +42 +2 + +Sending a value into a new generator produces a TypeError: + +>>> f().send("foo") +Traceback (most recent call last): +... +TypeError: can't send non-None value to a just-started generator + + +Yield by itself yields None: + +>>> def f(): yield +>>> list(f()) +[None] + + +Yield is allowed only in the outermost iterable in generator expression: + +>>> def f(): list(i for i in [(yield 26)]) +>>> type(f()) + + + +A yield expression with augmented assignment. + +>>> def coroutine(seq): +... count = 0 +... while count < 200: +... count += yield +... seq.append(count) +>>> seq = [] +>>> c = coroutine(seq) +>>> next(c) +>>> print(seq) +[] +>>> c.send(10) +>>> print(seq) +[10] +>>> c.send(10) +>>> print(seq) +[10, 20] +>>> c.send(10) +>>> print(seq) +[10, 20, 30] + + +Check some syntax errors for yield expressions: + +>>> f=lambda: (yield 1),(yield 2) +Traceback (most recent call last): + ... +SyntaxError: 'yield' outside function + +>>> f=lambda: (yield from (1,2)), (yield from (3,4)) +Traceback (most recent call last): + ... 
+SyntaxError: 'yield from' outside function + +>>> yield from [1,2] +Traceback (most recent call last): + ... +SyntaxError: 'yield from' outside function + +>>> def f(): x = yield = y +Traceback (most recent call last): + ... +SyntaxError: assignment to yield expression not possible + +>>> def f(): (yield bar) = y +Traceback (most recent call last): + ... +SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='? + +>>> def f(): (yield bar) += y +Traceback (most recent call last): + ... +SyntaxError: 'yield expression' is an illegal expression for augmented assignment + + +Now check some throw() conditions: + +>>> def f(): +... while True: +... try: +... print((yield)) +... except ValueError as v: +... print("caught ValueError (%s)" % (v)) +>>> import sys +>>> g = f() +>>> next(g) + +>>> g.throw(ValueError) # type only +caught ValueError () + +>>> g.throw(ValueError("xyz")) # value only +caught ValueError (xyz) + +>>> import warnings +>>> old_filters = warnings.filters.copy() +>>> warnings.filterwarnings("ignore", category=DeprecationWarning) + +# Filter DeprecationWarning: regarding the (type, val, tb) signature of throw(). +# Deprecation warnings are re-enabled below. + +>>> g.throw(ValueError, ValueError(1)) # value+matching type +caught ValueError (1) + +>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped +caught ValueError (1) + +>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback +caught ValueError (1) + +>>> g.throw(ValueError(1), "foo") # bad args +Traceback (most recent call last): + ... +TypeError: instance exception may not have a separate value + +>>> g.throw(ValueError, "foo", 23) # bad args +Traceback (most recent call last): + ... +TypeError: throw() third argument must be a traceback object + +>>> g.throw("abc") +Traceback (most recent call last): + ... +TypeError: exceptions must be classes or instances deriving from BaseException, not str + +>>> g.throw(0) +Traceback (most recent call last): + ... +TypeError: exceptions must be classes or instances deriving from BaseException, not int + +>>> g.throw(list) +Traceback (most recent call last): + ... +TypeError: exceptions must be classes or instances deriving from BaseException, not type + +>>> def throw(g,exc): +... try: +... raise exc +... except: +... g.throw(*sys.exc_info()) +>>> throw(g,ValueError) # do it with traceback included +caught ValueError () + +>>> g.send(1) +1 + +>>> throw(g,TypeError) # terminate the generator +Traceback (most recent call last): + ... +TypeError + +>>> print(g.gi_frame) +None + +>>> g.send(2) +Traceback (most recent call last): + ... +StopIteration + +>>> g.throw(ValueError,6) # throw on closed generator +Traceback (most recent call last): + ... +ValueError: 6 + +>>> f().throw(ValueError,7) # throw on just-opened generator +Traceback (most recent call last): + ... +ValueError: 7 + +>>> warnings.filters[:] = old_filters + +# Re-enable DeprecationWarning: the (type, val, tb) exception representation is deprecated, +# and may be removed in a future version of Python. + +Plain "raise" inside a generator should preserve the traceback (#13188). +The traceback should have 3 levels: +- g.throw() +- f() +- 1/0 + +>>> def f(): +... try: +... yield +... except: +... raise +>>> g = f() +>>> try: +... 1/0 +... except ZeroDivisionError as v: +... try: +... g.throw(v) +... except Exception as w: +... tb = w.__traceback__ +>>> levels = 0 +>>> while tb: +... levels += 1 +... 
tb = tb.tb_next +>>> levels +3 + +Now let's try closing a generator: + +>>> def f(): +... try: yield +... except GeneratorExit: +... print("exiting") + +>>> g = f() +>>> next(g) +>>> g.close() +exiting +>>> g.close() # should be no-op now + +>>> f().close() # close on just-opened generator should be fine + +>>> def f(): yield # an even simpler generator +>>> f().close() # close before opening +>>> g = f() +>>> next(g) +>>> g.close() # close normally + +And finalization: + +>>> def f(): +... try: yield +... finally: +... print("exiting") + +>>> g = f() +>>> next(g) +>>> del g; gc_collect() # For PyPy or other GCs. +exiting + + +GeneratorExit is not caught by except Exception: + +>>> def f(): +... try: yield +... except Exception: +... print('except') +... finally: +... print('finally') + +>>> g = f() +>>> next(g) +>>> del g; gc_collect() # For PyPy or other GCs. +finally + + +Now let's try some ill-behaved generators: + +>>> def f(): +... try: yield +... except GeneratorExit: +... yield "foo!" +>>> g = f() +>>> next(g) +>>> g.close() +Traceback (most recent call last): + ... +RuntimeError: generator ignored GeneratorExit +>>> g.close() + + +Our ill-behaved code should be invoked during GC: + +>>> with support.catch_unraisable_exception() as cm: +... g = f() +... next(g) +... del g +... +... cm.unraisable.exc_type == RuntimeError +... "generator ignored GeneratorExit" in str(cm.unraisable.exc_value) +... cm.unraisable.exc_traceback is not None +True +True +True + +And errors thrown during closing should propagate: + +>>> def f(): +... try: yield +... except GeneratorExit: +... raise TypeError("fie!") +>>> g = f() +>>> next(g) +>>> g.close() +Traceback (most recent call last): + ... +TypeError: fie! + + +Ensure that various yield expression constructs make their +enclosing function a generator: + +>>> def f(): x += yield +>>> type(f()) + + +>>> def f(): x = yield +>>> type(f()) + + +>>> def f(): lambda x=(yield): 1 +>>> type(f()) + + +>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27 +>>> data = [1,2] +>>> g = f(data) +>>> type(g) + +>>> g.send(None) +'a' +>>> data +[1, 2] +>>> g.send(0) +'b' +>>> data +[27, 2] +>>> try: g.send(1) +... except StopIteration: pass +>>> data +[27, 27] + +""" + +refleaks_tests = """ +Prior to adding cycle-GC support to itertools.tee, this code would leak +references. We add it to the standard suite so the routine refleak-tests +would trigger if it starts being uncleanable again. + +>>> import itertools +>>> def leak(): +... class gen: +... def __iter__(self): +... return self +... def __next__(self): +... return self.item +... g = gen() +... head, tail = itertools.tee(g) +... g.item = head +... return head +>>> it = leak() + +Make sure to also test the involvement of the tee-internal teedataobject, +which stores returned items. + +>>> item = next(it) + + + +This test leaked at one point due to generator finalization/destruction. +It was copied from Lib/test/leakers/test_generator_cycle.py before the file +was removed. + +>>> def leak(): +... def gen(): +... while True: +... yield g +... g = gen() + +>>> leak() + + + +This test isn't really generator related, but rather exception-in-cleanup +related. The coroutine tests (above) just happen to cause an exception in +the generator's __del__ (tp_del) method. We can also test for this +explicitly, without generators. We do have to redirect stderr to avoid +printing warnings and to doublecheck that we actually tested what we wanted +to test. + +>>> from test import support +>>> class Leaker: +... 
def __del__(self): +... def invoke(message): +... raise RuntimeError(message) +... invoke("del failed") +... +>>> with support.catch_unraisable_exception() as cm: +... l = Leaker() +... del l +... +... cm.unraisable.object == Leaker.__del__ +... cm.unraisable.exc_type == RuntimeError +... str(cm.unraisable.exc_value) == "del failed" +... cm.unraisable.exc_traceback is not None +True +True +True +True + + +These refleak tests should perhaps be in a testfile of their own, +test_generators just happened to be the test that drew these out. + +""" + +# __test__ = {"tut": tutorial_tests, +# "pep": pep_tests, +# "email": email_tests, +# "fun": fun_tests, +# "syntax": syntax_tests, +# "conjoin": conjoin_tests, +# "weakref": weakref_tests, +# "coroutine": coroutine_tests, +# "refleaks": refleaks_tests, +# } + +# def load_tests(loader, tests, pattern): +# # ======= BEGIN Dynamo patch ======= +# suite = doctest.DocTestSuite() +# for test in suite: +# # Dynamically change base class +# test.__class__ = type(test.__class__.__name__, (__TestCase, test.__class__), {}) +# tests.addTests(suite) +# # ======= END DYNAMO PATCH ======= +# return tests + + +if __name__ == "__main__": + run_tests() diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_closing b/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_closing new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_closing_error b/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_closing_error new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_instance_docs b/test/dynamo_expected_failures/CPython313-test_contextlib-ClosingTestCase.test_instance_docs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_attribs b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_attribs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_do_not_unchain_non_stopiteration_exceptions b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_do_not_unchain_non_stopiteration_exceptions new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_doc_attrib b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_doc_attrib new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except_pep479 b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except_pep479 new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except_stopiter b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_except_stopiter new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_finally b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_finally new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_plain b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_plain new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_traceback b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_traceback new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_trap_second_yield b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_trap_second_yield new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_trap_yield_after_throw b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_trap_yield_after_throw new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_wrap_runtimeerror b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_contextmanager_wrap_runtimeerror new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_instance_docstring_given_cm_docstring b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_instance_docstring_given_cm_docstring new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_keywords b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_keywords new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_nokeepref b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_nokeepref new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_param_errors b/test/dynamo_expected_failures/CPython313-test_contextlib-ContextManagerTestCase.test_param_errors new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-FileContextTestCase.testWithOpen b/test/dynamo_expected_failures/CPython313-test_contextlib-FileContextTestCase.testWithOpen new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithBoundedSemaphore 
b/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithBoundedSemaphore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithCondition b/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithCondition new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithLock b/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithLock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithRLock b/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithRLock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithSemaphore b/test/dynamo_expected_failures/CPython313-test_contextlib-LockContextTestCase.testWithSemaphore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-NullcontextTestCase.test_nullcontext b/test/dynamo_expected_failures/CPython313-test_contextlib-NullcontextTestCase.test_nullcontext new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_enter b/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_enter new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_exit_is_abstract b/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_exit_is_abstract new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_slots b/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_slots new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_structural_subclassing b/test/dynamo_expected_failures/CPython313-test_contextlib-TestAbstractContextManager.test_structural_subclassing new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_reentrant b/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_reentrant new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_simple b/test/dynamo_expected_failures/CPython313-test_contextlib-TestChdir.test_simple new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator_as_mixin b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator_as_mixin new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator_with_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextdecorator_with_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextmanager_as_decorator b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_contextmanager_as_decorator new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorating_method b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorating_method new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorator b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorator new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorator_with_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_decorator_with_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_typo_enter b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_typo_enter new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_typo_exit b/test/dynamo_expected_failures/CPython313-test_contextlib-TestContextDecorator.test_typo_exit new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_body_exception_suppress b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_body_exception_suppress new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_callback b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_callback new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_close b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_close new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_dont_reraise_RuntimeError b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_dont_reraise_RuntimeError new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_enter_context b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_enter_context new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_enter_context_errors b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_enter_context_errors new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_excessive_nesting b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_excessive_nesting new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining_reference b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining_reference new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining_suppress b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_chaining_suppress new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_explicit_none_context b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_explicit_none_context new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_non_suppressing b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_non_suppressing new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_traceback b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_traceback new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_with_correct_context b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_with_correct_context new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_with_existing_context b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_exception_with_existing_context new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_raise b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_raise new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_suppress b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_exit_suppress new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_instance_bypass b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_instance_bypass new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_instance_docs b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_instance_docs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_no_resources b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_no_resources new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_pop_all b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_pop_all new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_push b/test/dynamo_expected_failures/CPython313-test_contextlib-TestExitStack.test_push new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_cm_is_reentrant b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_cm_is_reentrant new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_cm_is_reusable b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_cm_is_reusable new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_enter_result_is_target b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_enter_result_is_target new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_instance_docs b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_instance_docs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_no_redirect_in_init b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_no_redirect_in_init new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_redirect_to_string_io b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStderr.test_redirect_to_string_io new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_cm_is_reentrant b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_cm_is_reentrant new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_cm_is_reusable b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_cm_is_reusable new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_enter_result_is_target b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_enter_result_is_target new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_instance_docs 
b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_instance_docs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_no_redirect_in_init b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_no_redirect_in_init new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_redirect_to_string_io b/test/dynamo_expected_failures/CPython313-test_contextlib-TestRedirectStdout.test_redirect_to_string_io new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_cm_is_reentrant b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_cm_is_reentrant new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exact_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exact_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exception_groups b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exception_groups new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exception_hierarchy b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_exception_hierarchy new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_instance_docs b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_instance_docs new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_multiple_exception_args b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_multiple_exception_args new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_args b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_args new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_result_from_enter b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_no_result_from_enter new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_other_exception b/test/dynamo_expected_failures/CPython313-test_contextlib-TestSuppress.test_other_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generator_stop-TestPEP479.test_stopiteration_wrapping b/test/dynamo_expected_failures/CPython313-test_generator_stop-TestPEP479.test_stopiteration_wrapping new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw_bad_exception b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw_bad_exception new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw_exception_context b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_except_throw_exception_context new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_gen_3_arg_deprecation_warning b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_gen_3_arg_deprecation_warning new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_return_stopiteration b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_return_stopiteration new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_return_tuple b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_return_tuple new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_stopiteration_error b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_stopiteration_error new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_tutorial_stopiteration b/test/dynamo_expected_failures/CPython313-test_generators-ExceptionTest.test_tutorial_stopiteration new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_frame_resurrect b/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_frame_resurrect new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_lambda_generator b/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_lambda_generator new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_refcycle b/test/dynamo_expected_failures/CPython313-test_generators-FinalizationTest.test_refcycle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorCloseTest.test_close_releases_frame_locals b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorCloseTest.test_close_releases_frame_locals new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorStackTraceTest.test_send_with_yield_from b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorStackTraceTest.test_send_with_yield_from new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorStackTraceTest.test_throw_with_yield_from b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorStackTraceTest.test_throw_with_yield_from new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_ag_frame_f_back b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_ag_frame_f_back new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_copy b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_copy new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_cr_frame_f_back b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_cr_frame_f_back new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_gi_frame_f_back b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_gi_frame_f_back new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_handle_frame_object_in_creation b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_handle_frame_object_in_creation new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_name b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_name new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_pickle b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorTest.test_pickle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield_from b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield_from new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield_inside_generator b/test/dynamo_expected_failures/CPython313-test_generators-GeneratorThrowTest.test_exception_context_with_yield_inside_generator new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ModifyUnderlyingIterableTest.test_modify_f_locals b/test/dynamo_expected_failures/CPython313-test_generators-ModifyUnderlyingIterableTest.test_modify_f_locals new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-ModifyUnderlyingIterableTest.test_new_gen_from_gi_code b/test/dynamo_expected_failures/CPython313-test_generators-ModifyUnderlyingIterableTest.test_new_gen_from_gi_code new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/test/dynamo_expected_failures/CPython313-test_generators-SignalAndYieldFromTest.test_raise_and_yield_from b/test/dynamo_expected_failures/CPython313-test_generators-SignalAndYieldFromTest.test_raise_and_yield_from new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/dynamo_expected_failures/CPython313-test_generators-YieldFromTests.test_generator_gi_yieldfrom b/test/dynamo_expected_failures/CPython313-test_generators-YieldFromTests.test_generator_gi_yieldfrom new file mode 100644 index 000000000000..e69de29bb2d1
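The empty files added above are consumed purely by name: each filename encodes a CPython version tag, the originating test module, and the TestClass.test_method that is currently expected to fail under Dynamo; the file bodies stay empty. Below is a minimal sketch of how such a marker directory could be read, assuming the harness keys expected failures off filenames alone. The helper names (load_expected_failures, is_expected_failure) and the MARKER_DIR constant are illustrative, not PyTorch's actual loader API.

# Hypothetical sketch: reading test/dynamo_expected_failures/ marker files.
# Names below are illustrative; they are not PyTorch's real loader.
import os

MARKER_DIR = "test/dynamo_expected_failures"

def load_expected_failures(marker_dir: str = MARKER_DIR) -> set[str]:
    """Collect marker filenames, e.g.
    'CPython313-test_contextlib-ClosingTestCase.test_closing'.
    The files are empty; only their names carry information."""
    try:
        return set(os.listdir(marker_dir))
    except FileNotFoundError:
        return set()

def is_expected_failure(test_module: str, test_class: str, test_name: str,
                        prefix: str = "CPython313") -> bool:
    """True if the given test has a marker file and should be treated as an
    expected failure (xfail) rather than a hard failure."""
    key = f"{prefix}-{test_module}-{test_class}.{test_name}"
    return key in load_expected_failures()

if __name__ == "__main__":
    # Matches one of the empty files added in this diff.
    print(is_expected_failure("test_contextlib", "ClosingTestCase", "test_closing"))

Keeping the expected-failure list as one empty file per test (rather than a single list in source) means a test can be promoted to passing by deleting its marker file, which shows up as a one-file removal in review.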