1import copy
2import gc
3import pickle
4import sys
5import doctest
6import unittest
7import weakref
8import inspect
9
10from test import support
11
12try:
13    import _testcapi
14except ImportError:
15    _testcapi = None
16
17
# This tests to make sure that if a SIGINT arrives just before we send into a
# yield from chain, the KeyboardInterrupt is raised in the innermost
# generator (see bpo-30039).
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):

    def generator1(self):
        # Outer generator: pure delegation; forwards generator2's return
        # value as its own.
        return (yield from self.generator2())

    def generator2(self):
        # Innermost generator: reports whether the KeyboardInterrupt was
        # delivered to it while it was suspended at the bare yield.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)
        # The helper raises SIGINT and immediately resumes `gen`; the
        # pending KeyboardInterrupt must surface inside generator2, which
        # turns it into the "PASSED" return value, which in turn reaches
        # us as StopIteration.value.
        try:
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as _exc:
            exc = _exc
        # NOTE(review): if no exception is raised at all, `exc` stays
        # unbound and the next line fails with NameError rather than a
        # clean assertion failure.
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")
46
47
class FinalizationTest(unittest.TestCase):

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Runs while the generator is being finalized (collected)
                # and stashes the otherwise-dying frame in the enclosing
                # test's local.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself must be gone...
        self.assertIs(wr(), None)
        # ...but its frame survived through the `frame` nonlocal.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                # Receiving a reference to ourselves (sent below) creates
                # the cycle: generator -> frame -> local g -> generator.
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        g.send(g)
        # More than the usual references: the cycle is in place.
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # The cyclic GC must have run the finally block...
        self.assertTrue(finalized)
        # ...without dumping the generator into gc.garbage.
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            # The value sent into the final yield becomes the generator's
            # return value, delivered via StopIteration.value.
            self.assertEqual(cm.exception.value, 2)
110
111
class GeneratorTest(unittest.TestCase):

    def test_name(self):
        def func():
            yield 1

        # A fresh generator inherits __name__/__qualname__ from the
        # function that created it.
        g = func()
        self.assertEqual(g.__name__, "func")
        self.assertEqual(g.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # Both attributes are writable on the generator object itself.
        g.__name__ = "name"
        g.__qualname__ = "qualname"
        self.assertEqual(g.__name__, "name")
        self.assertEqual(g.__qualname__, "qualname")

        # They must be strings, and deleting them is not allowed.
        for attr in ("__name__", "__qualname__"):
            self.assertRaises(TypeError, setattr, g, attr, 123)
            self.assertRaises(TypeError, delattr, g, attr)

        # Renaming the function is reflected by generators created later.
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        g = func()
        self.assertEqual(g.__name__, "func_name")
        self.assertEqual(g.__qualname__, "func_qualname")

        # A generator expression gets the placeholder name <genexpr>.
        g = (x for x in range(10))
        self.assertEqual(g.__name__, "<genexpr>")
        self.assertEqual(g.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        # Generators do not support the copy protocol.
        def f():
            yield 1
        self.assertRaises(TypeError, copy.copy, f())

    def test_pickle(self):
        # Generators cannot be pickled under any protocol.
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            self.assertRaises((TypeError, pickle.PicklingError),
                              pickle.dumps, g, proto)

    def test_send_non_none_to_new_gen(self):
        # The very first send() must pass None; afterwards iteration
        # proceeds normally.
        def f():
            yield 1
        g = f()
        self.assertRaises(TypeError, g.send, 0)
        self.assertEqual(g.send(None), 1)
172
173
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        def store_raise_exc_generator():
            try:
                # No exception is being handled at the first yield.
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        # Throw a live ValueError into the suspended generator.
        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        # The generator's bare `raise` re-raises the thrown ValueError.
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        # Back in the caller, no exception may remain pending.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        def gen():
            # Resumed from inside an except block, so the caller's
            # handled exception is visible here.
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        # Once the except block is exited, nothing is pending.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            # Resumed from outside any except block: nothing pending.
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            # Throw the currently-handled exception into the generator.
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_bad_exception(self):
        # E.__new__ returns the class itself, not an instance -- throw()
        # must reject that with TypeError.
        class E(Exception):
            def __new__(cls, *args, **kwargs):
                return cls

        def boring_generator():
            yield

        gen = boring_generator()

        err_msg = 'should have returned an instance of BaseException'

        with self.assertRaisesRegex(TypeError, err_msg):
            gen.throw(E)

        # The failed throw still terminates the generator.
        self.assertRaises(StopIteration, next, gen)

        def generator():
            # The TypeError from instantiating E is raised at the yield,
            # inside the generator.
            with self.assertRaisesRegex(TypeError, err_msg):
                yield

        gen = generator()
        next(gen)
        with self.assertRaises(StopIteration):
            gen.throw(E)

    def test_stopiteration_error(self):
        # See also PEP 479.

        def gen():
            raise StopIteration
            yield

        # PEP 479: StopIteration escaping a generator body becomes
        # RuntimeError.
        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # Raising StopIteration stops the generator too (as RuntimeError,
        # per PEP 479):

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        # A tuple return value must arrive intact in StopIteration.value
        # (not be mistaken for StopIteration's args).
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        # Returning a StopIteration *instance* must wrap it: the outer
        # StopIteration carries the inner one as its value.
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
352
353
class GeneratorThrowTest(unittest.TestCase):

    def test_exception_context_with_yield(self):
        # An exception thrown into a generator suspended inside an
        # "except" block must chain to the exception handled there.
        def source():
            try:
                raise KeyError('a')
            except Exception:
                yield

        it = source()
        it.send(None)
        with self.assertRaises(ValueError) as caught:
            it.throw(ValueError)
        ctx = caught.exception.__context__
        self.assertEqual((type(ctx), ctx.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_inside_generator(self):
        # The chained context must also be visible from inside the
        # generator, not only from the caller.
        def source():
            try:
                raise KeyError('a')
            except Exception:
                try:
                    yield
                except Exception as err:
                    self.assertEqual(type(err), ValueError)
                    ctx = err.__context__
                    self.assertEqual((type(ctx), ctx.args),
                        (KeyError, ('a',)))
                    yield 'b'

        it = source()
        it.send(None)
        # Getting 'b' back proves the in-generator assertions ran.
        self.assertEqual(it.throw(ValueError), 'b')

    def test_exception_context_with_yield_from(self):
        # Same chaining rule when the suspension point is a "yield from".
        def leaf():
            yield

        def source():
            try:
                raise KeyError('a')
            except Exception:
                yield from leaf()

        it = source()
        it.send(None)
        with self.assertRaises(ValueError) as caught:
            it.throw(ValueError)
        ctx = caught.exception.__context__
        self.assertEqual((type(ctx), ctx.args), (KeyError, ('a',)))

    def test_exception_context_with_yield_from_with_context_cycle(self):
        # Throwing the exception currently being handled back into the
        # generator must not create an exception-context cycle:
        # https://bugs.python.org/issue40696
        has_cycle = None

        def leaf():
            yield

        def source(err):
            nonlocal has_cycle
            try:
                raise err
            except Exception:
                try:
                    yield from leaf()
                except Exception as inner:
                    has_cycle = inner is inner.__context__
            yield

        err = KeyError('a')
        it = source(err)
        it.send(None)
        it.throw(err)
        # assertEqual (not assertFalse) also rules out the initial None.
        self.assertEqual(has_cycle, False)

    def test_throw_after_none_exc_type(self):
        # A fully handled exception earlier in the generator must not
        # disturb the handling of a later thrown exception.
        def source():
            try:
                raise KeyError
            except KeyError:
                pass

            try:
                yield
            except Exception:
                raise RuntimeError

        it = source()
        it.send(None)
        with self.assertRaises(RuntimeError):
            it.throw(ValueError)
451
452
class GeneratorStackTraceTest(unittest.TestCase):

    def check_stack_names(self, frame, expected):
        # Walk outward from `frame`, collecting code-object names, until
        # we reach one of our own helpers (named check_* or call_*).
        collected = []
        while frame is not None:
            label = frame.f_code.co_name
            if label.startswith(('check_', 'call_')):
                break
            collected.append(label)
            frame = frame.f_back

        self.assertEqual(collected, expected)

    def check_yield_from_example(self, call_method):
        # While f() runs under "yield from", both 'f' and 'g' must be on
        # the stack; outside the delegation only 'g' appears.  The inner
        # functions must keep these exact names -- they are asserted.
        def f():
            self.check_stack_names(sys._getframe(), ['f', 'g'])
            try:
                yield
            except Exception:
                pass
            self.check_stack_names(sys._getframe(), ['f', 'g'])

        def g():
            self.check_stack_names(sys._getframe(), ['g'])
            yield from f()
            self.check_stack_names(sys._getframe(), ['g'])

        task = g()
        task.send(None)
        try:
            call_method(task)
        except StopIteration:
            pass

    def test_send_with_yield_from(self):
        # Resume via send(); the driver's name must start with 'call_'.
        def call_send(gen):
            gen.send(None)

        self.check_yield_from_example(call_send)

    def test_throw_with_yield_from(self):
        # Resume via throw(); f() swallows the RuntimeError and finishes.
        def call_throw(gen):
            gen.throw(RuntimeError)

        self.check_yield_from_example(call_throw)
500
501
class YieldFromTests(unittest.TestCase):
    def test_generator_gi_yieldfrom(self):
        # NOTE: the delegate below must keep the name 'a' -- its
        # co_name is asserted through gi_yieldfrom further down.
        def a():
            # While the delegate's body executes, the outer generator is
            # RUNNING and reports no gi_yieldfrom.
            self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_RUNNING)
            self.assertIsNone(outer.gi_yieldfrom)
            yield
            self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_RUNNING)
            self.assertIsNone(outer.gi_yieldfrom)

        def b():
            self.assertIsNone(outer.gi_yieldfrom)
            yield from a()
            self.assertIsNone(outer.gi_yieldfrom)
            yield
            self.assertIsNone(outer.gi_yieldfrom)

        outer = b()
        # Before the first send(): CREATED, no delegation in progress.
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_CREATED)
        self.assertIsNone(outer.gi_yieldfrom)

        # Suspended inside "yield from a()": gi_yieldfrom exposes a().
        outer.send(None)
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_SUSPENDED)
        self.assertEqual(outer.gi_yieldfrom.gi_code.co_name, 'a')

        # Suspended at b()'s own bare yield: delegation is over.
        outer.send(None)
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_SUSPENDED)
        self.assertIsNone(outer.gi_yieldfrom)

        # Exhaust it (it must yield nothing more); CLOSED generators also
        # report no gi_yieldfrom.
        [] = outer
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_CLOSED)
        self.assertIsNone(outer.gi_yieldfrom)
533
534
# Doctest suite: a beginner-level tour of generator behaviour (yield,
# falling off the end, "return" vs StopIteration, recursive generators).
# The string content is executable test data for doctest -- do not
# reformat or "fix" the examples.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
648
# The examples from PEP 255.

# Doctest suite: the worked examples from PEP 255 (Simple Generators):
# resumption restrictions, return vs StopIteration, exception
# propagation, try/except/finally interaction, and Guido's binary tree.
# The string content is executable test data for doctest -- do not edit
# the examples or their expected output.
pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
813
# Examples from Iterator-List and Python-Dev and c.l.py.

# Doctest suite collected from mailing-list discussions: yielding vs
# returning None, StopIteration inside try/except, a combinations
# generator, generator introspection attributes, and David Eppstein's
# union-find.  The union-find output is pinned to random.Random(42), so
# the expected transcript is exact.  The string content is executable
# test data for doctest -- do not edit the examples or their output.
email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
...     print("%d-combs of %s:" % (k, seq))
...     for c in gcomb(seq, k):
...         print("   ", c)
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
AttributeError: attribute 'gi_running' of 'generator' objects is not writable
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return next(self.generator)
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.Random(42)
>>> while 1:
...     for s in sets:
...         print(" %s->%s" % (s, s.find()), end='')
...     print()
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print("merged", s1, "into", s2)
...     else:
...         break
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H

"""
# Emacs turd '
1004
1005# Fun tests (for sufficiently warped notions of "fun").
1006
1007fun_tests = """
1008
1009Build up to a recursive Sieve of Eratosthenes generator.
1010
1011>>> def firstn(g, n):
1012...     return [next(g) for i in range(n)]
1013
1014>>> def intsfrom(i):
1015...     while 1:
1016...         yield i
1017...         i += 1
1018
1019>>> firstn(intsfrom(5), 7)
1020[5, 6, 7, 8, 9, 10, 11]
1021
1022>>> def exclude_multiples(n, ints):
1023...     for i in ints:
1024...         if i % n:
1025...             yield i
1026
1027>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
1028[1, 2, 4, 5, 7, 8]
1029
1030>>> def sieve(ints):
1031...     prime = next(ints)
1032...     yield prime
1033...     not_divisible_by_prime = exclude_multiples(prime, ints)
1034...     for p in sieve(not_divisible_by_prime):
1035...         yield p
1036
1037>>> primes = sieve(intsfrom(2))
1038>>> firstn(primes, 20)
1039[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
1040
1041
1042Another famous problem:  generate all integers of the form
1043    2**i * 3**j  * 5**k
1044in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
1045Try writing it without generators, and correctly, and without generating
10463 internal results for each result output.
1047
1048>>> def times(n, g):
1049...     for i in g:
1050...         yield n * i
1051>>> firstn(times(10, intsfrom(1)), 10)
1052[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
1053
1054>>> def merge(g, h):
1055...     ng = next(g)
1056...     nh = next(h)
1057...     while 1:
1058...         if ng < nh:
1059...             yield ng
1060...             ng = next(g)
1061...         elif ng > nh:
1062...             yield nh
1063...             nh = next(h)
1064...         else:
1065...             yield ng
1066...             ng = next(g)
1067...             nh = next(h)
1068
1069The following works, but is doing a whale of a lot of redundant work --
1070it's not clear how to get the internal uses of m235 to share a single
1071generator.  Note that me_times2 (etc) each need to see every element in the
1072result sequence.  So this is an example where lazy lists are more natural
1073(you can look at the head of a lazy list any number of times).
1074
1075>>> def m235():
1076...     yield 1
1077...     me_times2 = times(2, m235())
1078...     me_times3 = times(3, m235())
1079...     me_times5 = times(5, m235())
1080...     for i in merge(merge(me_times2,
1081...                          me_times3),
1082...                    me_times5):
1083...         yield i
1084
1085Don't print "too many" of these -- the implementation above is extremely
1086inefficient:  each call of m235() leads to 3 recursive calls, and in
1087turn each of those 3 more, and so on, and so on, until we've descended
1088enough levels to satisfy the print stmts.  Very odd:  when I printed 5
1089lines of results below, this managed to screw up Win98's malloc in "the
1090usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
1091address space, and it *looked* like a very slow leak.
1092
1093>>> result = m235()
1094>>> for i in range(3):
1095...     print(firstn(result, 15))
1096[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1097[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1098[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1099
1100Heh.  Here's one way to get a shared list, complete with an excruciating
1101namespace renaming trick.  The *pretty* part is that the times() and merge()
1102functions can be reused as-is, because they only assume their stream
1103arguments are iterable -- a LazyList is the same as a generator to times().
1104
1105>>> class LazyList:
1106...     def __init__(self, g):
1107...         self.sofar = []
1108...         self.fetch = g.__next__
1109...
1110...     def __getitem__(self, i):
1111...         sofar, fetch = self.sofar, self.fetch
1112...         while i >= len(sofar):
1113...             sofar.append(fetch())
1114...         return sofar[i]
1115
1116>>> def m235():
1117...     yield 1
1118...     # Gack:  m235 below actually refers to a LazyList.
1119...     me_times2 = times(2, m235)
1120...     me_times3 = times(3, m235)
1121...     me_times5 = times(5, m235)
1122...     for i in merge(merge(me_times2,
1123...                          me_times3),
1124...                    me_times5):
1125...         yield i
1126
1127Print as many of these as you like -- *this* implementation is memory-
1128efficient.
1129
1130>>> m235 = LazyList(m235())
1131>>> for i in range(5):
1132...     print([m235[j] for j in range(15*i, 15*(i+1))])
1133[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1134[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1135[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1136[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1137[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1138
1139Ye olde Fibonacci generator, LazyList style.
1140
1141>>> def fibgen(a, b):
1142...
1143...     def sum(g, h):
1144...         while 1:
1145...             yield next(g) + next(h)
1146...
1147...     def tail(g):
1148...         next(g)    # throw first away
1149...         for x in g:
1150...             yield x
1151...
1152...     yield a
1153...     yield b
1154...     for s in sum(iter(fib),
1155...                  tail(iter(fib))):
1156...         yield s
1157
1158>>> fib = LazyList(fibgen(1, 2))
1159>>> firstn(iter(fib), 17)
1160[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1161
1162
1163Running after your tail with itertools.tee (new in version 2.4)
1164
1165The algorithms "m235" (Hamming) and Fibonacci presented above are both
1166examples of a whole family of FP (functional programming) algorithms
1167where a function produces and returns a list while the production algorithm
1168suppose the list as already produced by recursively calling itself.
1169For these algorithms to work, they must:
1170
1171- produce at least a first element without presupposing the existence of
1172  the rest of the list
1173- produce their elements in a lazy manner
1174
1175To work efficiently, the beginning of the list must not be recomputed over
1176and over again. This is ensured in most FP languages as a built-in feature.
1177In python, we have to explicitly maintain a list of already computed results
1178and abandon genuine recursivity.
1179
1180This is what had been attempted above with the LazyList class. One problem
1181with that class is that it keeps a list of all of the generated results and
1182therefore continually grows. This partially defeats the goal of the generator
1183concept, viz. produce the results only as needed instead of producing them
1184all and thereby wasting memory.
1185
1186Thanks to itertools.tee, it is now clear "how to get the internal uses of
1187m235 to share a single generator".
1188
1189>>> from itertools import tee
1190>>> def m235():
1191...     def _m235():
1192...         yield 1
1193...         for n in merge(times(2, m2),
1194...                        merge(times(3, m3),
1195...                              times(5, m5))):
1196...             yield n
1197...     m1 = _m235()
1198...     m2, m3, m5, mRes = tee(m1, 4)
1199...     return mRes
1200
1201>>> it = m235()
1202>>> for i in range(5):
1203...     print(firstn(it, 15))
1204[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1205[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1206[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1207[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1208[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1209
1210The "tee" function does just what we want. It internally keeps a generated
1211result for as long as it has not been "consumed" from all of the duplicated
1212iterators, whereupon it is deleted. You can therefore print the hamming
1213sequence during hours without increasing memory usage, or very little.
1214
1215The beauty of it is that recursive running-after-their-tail FP algorithms
1216are quite straightforwardly expressed with this Python idiom.
1217
1218Ye olde Fibonacci generator, tee style.
1219
1220>>> def fib():
1221...
1222...     def _isum(g, h):
1223...         while 1:
1224...             yield next(g) + next(h)
1225...
1226...     def _fib():
1227...         yield 1
1228...         yield 2
1229...         next(fibTail) # throw first away
1230...         for res in _isum(fibHead, fibTail):
1231...             yield res
1232...
1233...     realfib = _fib()
1234...     fibHead, fibTail, fibRes = tee(realfib, 3)
1235...     return fibRes
1236
1237>>> firstn(fib(), 17)
1238[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1239
1240"""
1241
1242# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1243# hackery.
1244
1245syntax_tests = """
1246
1247These are fine:
1248
1249>>> def f():
1250...     yield 1
1251...     return
1252
1253>>> def f():
1254...     try:
1255...         yield 1
1256...     finally:
1257...         pass
1258
1259>>> def f():
1260...     try:
1261...         try:
1262...             1//0
1263...         except ZeroDivisionError:
1264...             yield 666
1265...         except:
1266...             pass
1267...     finally:
1268...         pass
1269
1270>>> def f():
1271...     try:
1272...         try:
1273...             yield 12
1274...             1//0
1275...         except ZeroDivisionError:
1276...             yield 666
1277...         except:
1278...             try:
1279...                 x = 12
1280...             finally:
1281...                 yield 12
1282...     except:
1283...         return
1284>>> list(f())
1285[12, 666]
1286
1287>>> def f():
1288...    yield
1289>>> type(f())
1290<class 'generator'>
1291
1292
1293>>> def f():
1294...    if 0:
1295...        yield
1296>>> type(f())
1297<class 'generator'>
1298
1299
1300>>> def f():
1301...     if 0:
1302...         yield 1
1303>>> type(f())
1304<class 'generator'>
1305
1306>>> def f():
1307...    if "":
1308...        yield None
1309>>> type(f())
1310<class 'generator'>
1311
1312>>> def f():
1313...     return
1314...     try:
1315...         if x==4:
1316...             pass
1317...         elif 0:
1318...             try:
1319...                 1//0
1320...             except SyntaxError:
1321...                 pass
1322...             else:
1323...                 if 0:
1324...                     while 12:
1325...                         x += 1
1326...                         yield 2 # don't blink
1327...                         f(a, b, c, d, e)
1328...         else:
1329...             pass
1330...     except:
1331...         x = 1
1332...     return
1333>>> type(f())
1334<class 'generator'>
1335
1336>>> def f():
1337...     if 0:
1338...         def g():
1339...             yield 1
1340...
1341>>> type(f())
1342<class 'NoneType'>
1343
1344>>> def f():
1345...     if 0:
1346...         class C:
1347...             def __init__(self):
1348...                 yield 1
1349...             def f(self):
1350...                 yield 2
1351>>> type(f())
1352<class 'NoneType'>
1353
1354>>> def f():
1355...     if 0:
1356...         return
1357...     if 0:
1358...         yield 2
1359>>> type(f())
1360<class 'generator'>
1361
1362This one caused a crash (see SF bug 567538):
1363
1364>>> def f():
1365...     for i in range(3):
1366...         try:
1367...             continue
1368...         finally:
1369...             yield i
1370...
1371>>> g = f()
1372>>> print(next(g))
13730
1374>>> print(next(g))
13751
1376>>> print(next(g))
13772
1378>>> print(next(g))
1379Traceback (most recent call last):
1380StopIteration
1381
1382
1383Test the gi_code attribute
1384
1385>>> def f():
1386...     yield 5
1387...
1388>>> g = f()
1389>>> g.gi_code is f.__code__
1390True
1391>>> next(g)
13925
1393>>> next(g)
1394Traceback (most recent call last):
1395StopIteration
1396>>> g.gi_code is f.__code__
1397True
1398
1399
1400Test the __name__ attribute and the repr()
1401
1402>>> def f():
1403...    yield 5
1404...
1405>>> g = f()
1406>>> g.__name__
1407'f'
1408>>> repr(g)  # doctest: +ELLIPSIS
1409'<generator object f at ...>'
1410
1411Lambdas shouldn't have their usual return behavior.
1412
1413>>> x = lambda: (yield 1)
1414>>> list(x())
1415[1]
1416
1417>>> x = lambda: ((yield 1), (yield 2))
1418>>> list(x())
1419[1, 2]
1420"""
1421
1422# conjoin is a simple backtracking generator, named in honor of Icon's
1423# "conjunction" control structure.  Pass a list of no-argument functions
1424# that return iterable objects.  Easiest to explain by example:  assume the
1425# function list [x, y, z] is passed.  Then conjoin acts like:
1426#
1427# def g():
1428#     values = [None] * 3
1429#     for values[0] in x():
1430#         for values[1] in y():
1431#             for values[2] in z():
1432#                 yield values
1433#
1434# So some 3-lists of values *may* be generated, each time we successfully
1435# get into the innermost loop.  If an iterator fails (is exhausted) before
1436# then, it "backtracks" to get the next value from the nearest enclosing
1437# iterator (the one "to the left"), and starts all over again at the next
1438# slot (pumps a fresh iterator).  Of course this is most useful when the
1439# iterators have side-effects, so that which values *can* be generated at
1440# each slot depend on the values iterated at previous slots.
1441
def simple_conjoin(gs):
    """Backtracking cross-product generator (simple recursive form).

    gs is a list of no-argument callables, each returning an iterable.
    Yields one shared list holding the current value from each iterable,
    for every combination reachable by nesting the iterators left to
    right.  The *same* list object is yielded every time, so callers
    that want to keep a result must copy it.
    """

    slots = [None] * len(gs)

    def descend(depth):
        if depth >= len(gs):
            # Every slot is filled: a complete combination is ready.
            yield slots
        else:
            # Pump a fresh iterator for this slot, recursing for the rest.
            for slots[depth] in gs[depth]():
                yield from descend(depth + 1)

    yield from descend(0)
1456
1457# That works fine, but recursing a level and checking i against len(gs) for
1458# each item produced is inefficient.  By doing manual loop unrolling across
1459# generator boundaries, it's possible to eliminate most of that overhead.
1460# This isn't worth the bother *in general* for generators, but conjoin() is
1461# a core building block for some CPU-intensive generator applications.
1462
def conjoin(gs):
    """Backtracking cross-product generator, unrolled three loops deep.

    Same contract as simple_conjoin(gs): yields one *shared* list of the
    current slot values for every combination, nesting gs left to right.
    The manual three-way loop unrolling only cuts per-result generator
    overhead; the sequence of results is identical.
    """

    n = len(gs)
    values = [None] * n

    # Peel one slot at a time until the number of remaining slots is a
    # multiple of 3, then hand the rest to the unrolled helper.
    def gen(i):
        if i >= n:
            yield values
        elif (n - i) % 3:
            for values[i] in gs[i]():
                yield from gen(i + 1)
        else:
            yield from _gen3(i)

    # Drive three consecutive slots with literal nested loops.  Internal
    # optimization for gen's use only; requires (n - i) divisible by 3.
    def _gen3(i):
        assert i < n and (n - i) % 3 == 0
        j, k, stop = i + 1, i + 2, i + 3
        a, b, c = gs[i:stop]

        if stop >= n:
            # These are the last three slots: yield the shared list.
            for values[i] in a():
                for values[j] in b():
                    for values[k] in c():
                        yield values
        else:
            # Six or more slots remain: peel three, recurse for the rest.
            for values[i] in a():
                for values[j] in b():
                    for values[k] in c():
                        yield from _gen3(stop)

    yield from gen(0)
1512
1513# And one more approach:  For backtracking apps like the Knight's Tour
1514# solver below, the number of backtracking levels can be enormous (one
1515# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1516# needs 10,000 levels).  In such cases Python is likely to run out of
1517# stack space due to recursion.  So here's a recursion-free version of
1518# conjoin too.
1519# NOTE WELL:  This allows large problems to be solved with only trivial
1520# demands on stack space.  Without explicitly resumable generators, this is
1521# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1522# than the fancy unrolled recursive conjoin.
1523
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Non-recursive version of conjoin(), for very deep backtracking.

    Same contract as conjoin(gs): yields one shared list of the current
    slot values per combination.  Keeps explicit value/iterator stacks
    instead of recursing, so Python stack depth is constant no matter
    how large len(gs) is (e.g. 10,000 levels for a 100x100 Knight's
    Tour).  Slower than the unrolled recursive conjoin, but frugal with
    stack space.
    """
    n = len(gs)
    values = [None] * n
    iters  = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            assert i < 0
            break
1557
1558# A conjoin-based N-Queens solver.
1559
class Queens:
    """Generator-based N-Queens solver built on conjoin()."""

    def __init__(self, n):
        self.n = n
        cols = range(n)

        # Encode each square as a bit vector over three disjoint ranges:
        #   bits [0, n):        column ordinal j
        #   next 2n-1 bits:     NW-SE diagonal (i-j is invariant along
        #                       each; shifted by n-1 to make it 0-based)
        #   next 2n-1 bits:     NE-SW diagonal (i+j invariant, 0..2n-2)
        # self.used is the union of the bits of all occupied squares; a
        # square is legal exactly when it shares no bit with self.used.
        self.rowgenerators = []
        for i in cols:
            rowuses = [(1 << j) |                  # column ordinal
                       (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                            for j in cols]

            # Bind rowuses as a default arg so each generator keeps its
            # own row's table (closure vars would all see the last row).
            def rowgen(rowuses=rowuses):
                # Yield each legal column for this row, claiming the
                # square's bits while the rows below are explored.
                for j in cols:
                    uses = rowuses[j]
                    if not uses & self.used:
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    def solve(self):
        """Generate solutions; each is a shared list mapping row -> column."""
        self.used = 0
        yield from conjoin(self.rowgenerators)

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII chessboard."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for row in range(n):
            line = [" "] * n
            line[row2col[row]] = "Q"
            print("|" + "|".join(line) + "|")
            print(sep)
1608            print(sep)
1609
1610# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1611# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1612# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1613# creating 10s of thousands of generators then!), and is lengthy.
1614
class Knights:
    """Generator-based Knight's Tour solver.

    Builds one backtracking generator per move (square) and feeds the
    list to conjoin(), so each closed tour comes out as a lazily
    generated sequence of square indices.  Pass hard=1 to use the
    stronger (but more expensive) move-ordering heuristic for the middle
    moves -- worth it on large boards.
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Return the square index of 0-based board coordinates (i, j)."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Return the (row, column) coordinates of square #index."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        # Rebuild succs so succs[i] lists every square a knight can
        # reach from square #i on an empty board.
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Yield closed knight's tours as (shared) lists of square indices."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Pretty-print tour x as an ASCII board of move numbers."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
1805
1806conjoin_tests = """
1807
1808Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1809possible use of conjoin, just to generate the full cross-product.
1810
1811>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1812...     print(c)
1813[0, 0, 0]
1814[0, 0, 1]
1815[0, 1, 0]
1816[0, 1, 1]
1817[1, 0, 0]
1818[1, 0, 1]
1819[1, 1, 0]
1820[1, 1, 1]
1821
1822For efficiency in typical backtracking apps, conjoin() yields the same list
1823object each time.  So if you want to save away a full account of its
1824generated sequence, you need to copy its results.
1825
1826>>> def gencopy(iterator):
1827...     for x in iterator:
1828...         yield x[:]
1829
1830>>> for n in range(10):
1831...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1832...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
18330 1 True True
18341 2 True True
18352 4 True True
18363 8 True True
18374 16 True True
18385 32 True True
18396 64 True True
18407 128 True True
18418 256 True True
18429 512 True True
1843
1844And run an 8-queens solver.
1845
1846>>> q = Queens(8)
1847>>> LIMIT = 2
1848>>> count = 0
1849>>> for row2col in q.solve():
1850...     count += 1
1851...     if count <= LIMIT:
1852...         print("Solution", count)
1853...         q.printsolution(row2col)
1854Solution 1
1855+-+-+-+-+-+-+-+-+
1856|Q| | | | | | | |
1857+-+-+-+-+-+-+-+-+
1858| | | | |Q| | | |
1859+-+-+-+-+-+-+-+-+
1860| | | | | | | |Q|
1861+-+-+-+-+-+-+-+-+
1862| | | | | |Q| | |
1863+-+-+-+-+-+-+-+-+
1864| | |Q| | | | | |
1865+-+-+-+-+-+-+-+-+
1866| | | | | | |Q| |
1867+-+-+-+-+-+-+-+-+
1868| |Q| | | | | | |
1869+-+-+-+-+-+-+-+-+
1870| | | |Q| | | | |
1871+-+-+-+-+-+-+-+-+
1872Solution 2
1873+-+-+-+-+-+-+-+-+
1874|Q| | | | | | | |
1875+-+-+-+-+-+-+-+-+
1876| | | | | |Q| | |
1877+-+-+-+-+-+-+-+-+
1878| | | | | | | |Q|
1879+-+-+-+-+-+-+-+-+
1880| | |Q| | | | | |
1881+-+-+-+-+-+-+-+-+
1882| | | | | | |Q| |
1883+-+-+-+-+-+-+-+-+
1884| | | |Q| | | | |
1885+-+-+-+-+-+-+-+-+
1886| |Q| | | | | | |
1887+-+-+-+-+-+-+-+-+
1888| | | | |Q| | | |
1889+-+-+-+-+-+-+-+-+
1890
1891>>> print(count, "solutions in all.")
189292 solutions in all.
1893
1894And run a Knight's Tour on a 10x10 board.  Note that there are about
189520,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1896
1897>>> k = Knights(10, 10)
1898>>> LIMIT = 2
1899>>> count = 0
1900>>> for x in k.solve():
1901...     count += 1
1902...     if count <= LIMIT:
1903...         print("Solution", count)
1904...         k.printsolution(x)
1905...     else:
1906...         break
1907Solution 1
1908+---+---+---+---+---+---+---+---+---+---+
1909|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1910+---+---+---+---+---+---+---+---+---+---+
1911| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1912+---+---+---+---+---+---+---+---+---+---+
1913| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1914+---+---+---+---+---+---+---+---+---+---+
1915| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1916+---+---+---+---+---+---+---+---+---+---+
1917| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1918+---+---+---+---+---+---+---+---+---+---+
1919| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1920+---+---+---+---+---+---+---+---+---+---+
1921| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1922+---+---+---+---+---+---+---+---+---+---+
1923| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1924+---+---+---+---+---+---+---+---+---+---+
1925| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1926+---+---+---+---+---+---+---+---+---+---+
1927| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1928+---+---+---+---+---+---+---+---+---+---+
1929Solution 2
1930+---+---+---+---+---+---+---+---+---+---+
1931|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1932+---+---+---+---+---+---+---+---+---+---+
1933| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1934+---+---+---+---+---+---+---+---+---+---+
1935| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1936+---+---+---+---+---+---+---+---+---+---+
1937| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1938+---+---+---+---+---+---+---+---+---+---+
1939| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1940+---+---+---+---+---+---+---+---+---+---+
1941| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1942+---+---+---+---+---+---+---+---+---+---+
1943| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1944+---+---+---+---+---+---+---+---+---+---+
1945| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1946+---+---+---+---+---+---+---+---+---+---+
1947| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1948+---+---+---+---+---+---+---+---+---+---+
1949| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1950+---+---+---+---+---+---+---+---+---+---+
1951"""
1952
1953weakref_tests = """\
1954Generators are weakly referencable:
1955
1956>>> import weakref
1957>>> def gen():
1958...     yield 'foo!'
1959...
1960>>> wr = weakref.ref(gen)
1961>>> wr() is gen
1962True
1963>>> p = weakref.proxy(gen)
1964
1965Generator-iterators are weakly referencable as well:
1966
1967>>> gi = gen()
1968>>> wr = weakref.ref(gi)
1969>>> wr() is gi
1970True
1971>>> p = weakref.proxy(gi)
1972>>> list(p)
1973['foo!']
1974
1975"""
1976
# Doctest suite: coroutine-style generator behaviour — send(), throw() argument
# validation and traceback handling, close()/GeneratorExit semantics, and which
# yield-expression constructs turn a function into a generator. Registered under
# __test__["coroutine"]. (The string content is executed verbatim by doctest,
# so it must not be edited except to change the tests themselves.)
coroutine_tests = """\
>>> from test.support import gc_collect

Sending a value into a started generator:

>>> def f():
...     print((yield 1))
...     yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2

Sending a value into a new generator produces a TypeError:

>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator


Yield by itself yields None:

>>> def f(): yield
>>> list(f())
[None]


Yield is allowed only in the outermost iterable in generator expression:

>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>


A yield expression with augmented assignment.

>>> def coroutine(seq):
...     count = 0
...     while count < 200:
...         count += yield
...         seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]


Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
  ...
SyntaxError: 'yield' outside function

# Pegen does not produce this error message yet
# >>> def f(): x = yield = y
# Traceback (most recent call last):
#   ...
# SyntaxError: assignment to yield expression not possible

>>> def f(): (yield bar) = y
Traceback (most recent call last):
  ...
SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='?

>>> def f(): (yield bar) += y
Traceback (most recent call last):
  ...
SyntaxError: 'yield expression' is an illegal expression for augmented assignment


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print((yield))
...         except ValueError as v:
...             print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)

>>> g.throw(ValueError) # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))  # value only
caught ValueError (xyz)

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")       # bad args
Traceback (most recent call last):
  ...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)      # bad args
Traceback (most recent call last):
  ...
TypeError: throw() third argument must be a traceback object

>>> g.throw("abc")
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not str

>>> g.throw(0)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not int

>>> g.throw(list)
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes or instances deriving from BaseException, not type

>>> def throw(g,exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g,TypeError)  # terminate the generator
Traceback (most recent call last):
  ...
TypeError

>>> print(g.gi_frame)
None

>>> g.send(2)
Traceback (most recent call last):
  ...
StopIteration

>>> g.throw(ValueError,6)       # throw on closed generator
Traceback (most recent call last):
  ...
ValueError: 6

>>> f().throw(ValueError,7)     # throw on just-opened generator
Traceback (most recent call last):
  ...
ValueError: 7

Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0

>>> def f():
...     try:
...         yield
...     except:
...         raise
>>> g = f()
>>> try:
...     1/0
... except ZeroDivisionError as v:
...     try:
...         g.throw(v)
...     except Exception as w:
...         tb = w.__traceback__
>>> levels = 0
>>> while tb:
...     levels += 1
...     tb = tb.tb_next
>>> levels
3

Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close()  # should be no-op now

>>> f().close()  # close on just-opened generator should be fine

>>> def f(): yield      # an even simpler generator
>>> f().close()         # close before opening
>>> g = f()
>>> next(g)
>>> g.close()           # close normally

And finalization:

>>> def f():
...     try: yield
...     finally:
...         print("exiting")

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception:
...         print('except')
...     finally:
...         print('finally')

>>> g = f()
>>> next(g)
>>> del g; gc_collect()  # For PyPy or other GCs.
finally


Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> with support.catch_unraisable_exception() as cm:
...     g = f()
...     next(g)
...     del g
...
...     cm.unraisable.exc_type == RuntimeError
...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
...     cm.unraisable.exc_traceback is not None
True
True
True

And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
  ...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<class 'generator'>

>>> def f(): x = yield
>>> type(f())
<class 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>

>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""
2299
# Doctest suite guarding against historical reference leaks: itertools.tee
# reference cycles, a generator finalization cycle, and unraisable exceptions
# raised from __del__. Registered under __test__["refleaks"]; exercised mainly
# when the test suite runs with refleak checking enabled.
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> from test import support
>>> class Leaker:
...     def __del__(self):
...         def invoke(message):
...             raise RuntimeError(message)
...         invoke("del failed")
...
>>> with support.catch_unraisable_exception() as cm:
...     l = Leaker()
...     del l
...
...     cm.unraisable.object == Leaker.__del__
...     cm.unraisable.exc_type == RuntimeError
...     str(cm.unraisable.exc_value) == "del failed"
...     cm.unraisable.exc_traceback is not None
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2371
# Registry of named doctest suites for this module. doctest's collection
# machinery (DocTestSuite / testmod) picks this mapping up via the standard
# module-level __test__ protocol; each key becomes the suite's display name.
__test__ = dict(
    tut=tutorial_tests,
    pep=pep_tests,
    email=email_tests,
    fun=fun_tests,
    syntax=syntax_tests,
    conjoin=conjoin_tests,
    weakref=weakref_tests,
    coroutine=coroutine_tests,
    refleaks=refleaks_tests,
)
2382
def load_tests(loader, tests, pattern):
    """unittest ``load_tests`` protocol hook.

    Folds this module's doctests (docstrings plus the ``__test__`` mapping)
    into the suite that unittest discovery collected, then returns it.
    """
    doctests = doctest.DocTestSuite()
    tests.addTest(doctests)
    return tests
2386
2387
# Script entry point: running this file directly executes the unittest suite
# (unittest.main() also honours the load_tests hook defined above, which adds
# the doctest suites).
if __name__ == "__main__":
    unittest.main()