1import copy
2import gc
3import pickle
4import sys
5import unittest
6import weakref
7import inspect
8
9from test import support
10
11try:
12    import _testcapi
13except ImportError:
14    _testcapi = None
15
16
@unittest.skipUnless(_testcapi is not None and
                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
                     "needs _testcapi.raise_SIGINT_then_send_None")
class SignalAndYieldFromTest(unittest.TestCase):
    """Check that a SIGINT arriving just before a send into a ``yield from``
    chain raises KeyboardInterrupt in the innermost generator (bpo-30039).
    """

    def generator1(self):
        # Outermost generator: delegates and forwards the inner result.
        result = yield from self.generator2()
        return result

    def generator2(self):
        # Innermost generator: reports whether KeyboardInterrupt reached it.
        try:
            yield
        except KeyboardInterrupt:
            return "PASSED"
        else:
            return "FAILED"

    def test_raise_and_yield_from(self):
        gen = self.generator1()
        gen.send(None)  # advance the chain to the innermost yield
        try:
            _testcapi.raise_SIGINT_then_send_None(gen)
        except BaseException as caught:
            exc = caught
        # The chain must finish normally, carrying the innermost return value.
        self.assertIs(type(exc), StopIteration)
        self.assertEqual(exc.value, "PASSED")
45
46
class FinalizationTest(unittest.TestCase):
    """Behaviour of generators during finalization / garbage collection."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                frame = sys._getframe()

        obj = gen()
        ref = weakref.ref(obj)
        next(obj)
        del obj
        support.gc_collect()
        # The generator itself is gone, but its frame was captured alive.
        self.assertIs(ref(), None)
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        garbage_before = gc.garbage[:]
        was_finalized = False
        def gen():
            nonlocal was_finalized
            try:
                me = yield  # holds a reference back to the generator itself
                yield 1
            finally:
                was_finalized = True

        cycle = gen()
        next(cycle)
        cycle.send(cycle)  # close the cycle: the generator now holds itself
        self.assertGreater(sys.getrefcount(cycle), 2)
        self.assertFalse(was_finalized)
        del cycle
        support.gc_collect()
        self.assertTrue(was_finalized)
        self.assertEqual(gc.garbage, garbage_before)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for factory in (f, g, f2, g2, f3, g3):
            it = factory()
            self.assertEqual(next(it), 1)
            with self.assertRaises(StopIteration) as cm:
                it.send(2)
            self.assertEqual(cm.exception.value, 2)
109
110
class GeneratorTest(unittest.TestCase):
    """Miscellaneous generator-object behaviour."""

    def test_name(self):
        def func():
            yield 1

        # A fresh generator inherits both names from its function.
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # Both names are writable on the generator itself.
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # Generator names must be strings and cannot be deleted.
        for attr in ('__name__', '__qualname__'):
            self.assertRaises(TypeError, setattr, gen, attr, 123)
            self.assertRaises(TypeError, delattr, gen, attr)

        # Renaming the function is picked up by generators created afterwards.
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # A generator expression gets the conventional placeholder name.
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__, "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        # Generators cannot be copied.
        def f():
            yield 1
        with self.assertRaises(TypeError):
            copy.copy(f())

    def test_pickle(self):
        # Generators cannot be pickled, whatever the protocol.
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(g, proto)
163
164
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().
    #
    # NOTE(review): these tests pin the interpreter's swapping of the
    # "currently handled exception" on entry to / exit from a generator
    # frame, so the exact statement order below is significant.

    def test_except_throw(self):
        # An exception passed in via gen.throw() must become the currently
        # handled exception inside the generator and survive across yields.
        def store_raise_exc_generator():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        # resuming re-raises the stored ValueError, without picking up a
        # spurious __context__ from the caller's exception state
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        # the caller's exception state must be left untouched
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        # A generator resumed from inside an except block sees the caller's
        # currently handled exception.
        def gen():
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        # An exception raised inside the generator chains to the caller's
        # handled exception, and the caller's state is restored on each yield.
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        # As above, but the outer exception enters the generator via throw()
        # rather than via a plain resume.
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_stopiteration_error(self):
        # See also PEP 479.
        # A StopIteration raised inside a generator body must surface as
        # RuntimeError, not terminate the generator silently.

        def gen():
            raise StopIteration
            yield

        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(gen())

    def test_tutorial_stopiteration(self):
        # Raise StopIteration" stops the generator too:

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        # under PEP 479 the explicit StopIteration becomes a RuntimeError
        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
            next(g)

    def test_return_tuple(self):
        # A tuple sent through send() must arrive intact as the return value
        # (i.e. not be mistaken for multiple StopIteration args).
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        # Returning a StopIteration *instance* must wrap it as the value of
        # the terminating StopIteration, not replace it.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
317
318
class YieldFromTests(unittest.TestCase):
    """Tracking of ``gi_yieldfrom`` across a ``yield from`` delegation."""

    def test_generator_gi_yieldfrom(self):
        def a():
            # While the subgenerator executes, the delegating generator is
            # reported as running and exposes no gi_yieldfrom of its own.
            self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_RUNNING)
            self.assertIsNone(outer.gi_yieldfrom)
            yield
            self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_RUNNING)
            self.assertIsNone(outer.gi_yieldfrom)

        def b():
            self.assertIsNone(outer.gi_yieldfrom)
            yield from a()
            self.assertIsNone(outer.gi_yieldfrom)
            yield
            self.assertIsNone(outer.gi_yieldfrom)

        outer = b()
        # Not started yet: created, not delegating.
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_CREATED)
        self.assertIsNone(outer.gi_yieldfrom)

        # First resume suspends inside a(); the delegation is now visible.
        outer.send(None)
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_SUSPENDED)
        self.assertEqual(outer.gi_yieldfrom.gi_code.co_name, 'a')

        # Second resume finishes a(); now suspended at b()'s own yield.
        outer.send(None)
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_SUSPENDED)
        self.assertIsNone(outer.gi_yieldfrom)

        [] = outer  # Exhaust generator (also asserts it yields nothing more)
        self.assertEqual(inspect.getgeneratorstate(outer), inspect.GEN_CLOSED)
        self.assertIsNone(outer.gi_yieldfrom)
350
351
# Doctest suite with the generator examples from the Python tutorial.
# NOTE: doctest compares output textually, so this string must stay
# byte-for-byte accurate.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
465
# The examples from PEP 255 ("Simple Generators"), kept verbatim and run as
# doctests; the expected output must stay byte-for-byte accurate.

pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
630
# Examples from Iterator-List and Python-Dev and c.l.py.
# Run as doctests; the expected output must stay byte-for-byte accurate.

email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
...     print("%d-combs of %s:" % (k, seq))
...     for c in gcomb(seq, k):
...         print("   ", c)
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
AttributeError: readonly attribute
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return next(self.generator)
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.Random(42)
>>> while 1:
...     for s in sets:
...         print(" %s->%s" % (s, s.find()), end='')
...     print()
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print("merged", s1, "into", s2)
...     else:
...         break
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H

"""
# Emacs turd '  (balances the apostrophes above for syntax highlighting)
821
822# Fun tests (for sufficiently warped notions of "fun").
823
824fun_tests = """
825
826Build up to a recursive Sieve of Eratosthenes generator.
827
828>>> def firstn(g, n):
829...     return [next(g) for i in range(n)]
830
831>>> def intsfrom(i):
832...     while 1:
833...         yield i
834...         i += 1
835
836>>> firstn(intsfrom(5), 7)
837[5, 6, 7, 8, 9, 10, 11]
838
839>>> def exclude_multiples(n, ints):
840...     for i in ints:
841...         if i % n:
842...             yield i
843
844>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
845[1, 2, 4, 5, 7, 8]
846
847>>> def sieve(ints):
848...     prime = next(ints)
849...     yield prime
850...     not_divisible_by_prime = exclude_multiples(prime, ints)
851...     for p in sieve(not_divisible_by_prime):
852...         yield p
853
854>>> primes = sieve(intsfrom(2))
855>>> firstn(primes, 20)
856[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
857
858
859Another famous problem:  generate all integers of the form
860    2**i * 3**j  * 5**k
861in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
862Try writing it without generators, and correctly, and without generating
8633 internal results for each result output.
864
865>>> def times(n, g):
866...     for i in g:
867...         yield n * i
868>>> firstn(times(10, intsfrom(1)), 10)
869[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
870
871>>> def merge(g, h):
872...     ng = next(g)
873...     nh = next(h)
874...     while 1:
875...         if ng < nh:
876...             yield ng
877...             ng = next(g)
878...         elif ng > nh:
879...             yield nh
880...             nh = next(h)
881...         else:
882...             yield ng
883...             ng = next(g)
884...             nh = next(h)
885
886The following works, but is doing a whale of a lot of redundant work --
887it's not clear how to get the internal uses of m235 to share a single
888generator.  Note that me_times2 (etc) each need to see every element in the
889result sequence.  So this is an example where lazy lists are more natural
890(you can look at the head of a lazy list any number of times).
891
892>>> def m235():
893...     yield 1
894...     me_times2 = times(2, m235())
895...     me_times3 = times(3, m235())
896...     me_times5 = times(5, m235())
897...     for i in merge(merge(me_times2,
898...                          me_times3),
899...                    me_times5):
900...         yield i
901
902Don't print "too many" of these -- the implementation above is extremely
903inefficient:  each call of m235() leads to 3 recursive calls, and in
904turn each of those 3 more, and so on, and so on, until we've descended
905enough levels to satisfy the print stmts.  Very odd:  when I printed 5
906lines of results below, this managed to screw up Win98's malloc in "the
907usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
908address space, and it *looked* like a very slow leak.
909
910>>> result = m235()
911>>> for i in range(3):
912...     print(firstn(result, 15))
913[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
914[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
915[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
916
917Heh.  Here's one way to get a shared list, complete with an excruciating
918namespace renaming trick.  The *pretty* part is that the times() and merge()
919functions can be reused as-is, because they only assume their stream
920arguments are iterable -- a LazyList is the same as a generator to times().
921
922>>> class LazyList:
923...     def __init__(self, g):
924...         self.sofar = []
925...         self.fetch = g.__next__
926...
927...     def __getitem__(self, i):
928...         sofar, fetch = self.sofar, self.fetch
929...         while i >= len(sofar):
930...             sofar.append(fetch())
931...         return sofar[i]
932
933>>> def m235():
934...     yield 1
935...     # Gack:  m235 below actually refers to a LazyList.
936...     me_times2 = times(2, m235)
937...     me_times3 = times(3, m235)
938...     me_times5 = times(5, m235)
939...     for i in merge(merge(me_times2,
940...                          me_times3),
941...                    me_times5):
942...         yield i
943
944Print as many of these as you like -- *this* implementation is memory-
945efficient.
946
947>>> m235 = LazyList(m235())
948>>> for i in range(5):
949...     print([m235[j] for j in range(15*i, 15*(i+1))])
950[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
951[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
952[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
953[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
954[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
955
956Ye olde Fibonacci generator, LazyList style.
957
958>>> def fibgen(a, b):
959...
960...     def sum(g, h):
961...         while 1:
962...             yield next(g) + next(h)
963...
964...     def tail(g):
965...         next(g)    # throw first away
966...         for x in g:
967...             yield x
968...
969...     yield a
970...     yield b
971...     for s in sum(iter(fib),
972...                  tail(iter(fib))):
973...         yield s
974
975>>> fib = LazyList(fibgen(1, 2))
976>>> firstn(iter(fib), 17)
977[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
978
979
980Running after your tail with itertools.tee (new in version 2.4)
981
982The algorithms "m235" (Hamming) and Fibonacci presented above are both
983examples of a whole family of FP (functional programming) algorithms
984where a function produces and returns a list while the production algorithm
985suppose the list as already produced by recursively calling itself.
986For these algorithms to work, they must:
987
988- produce at least a first element without presupposing the existence of
989  the rest of the list
990- produce their elements in a lazy manner
991
992To work efficiently, the beginning of the list must not be recomputed over
993and over again. This is ensured in most FP languages as a built-in feature.
994In python, we have to explicitly maintain a list of already computed results
995and abandon genuine recursivity.
996
997This is what had been attempted above with the LazyList class. One problem
998with that class is that it keeps a list of all of the generated results and
999therefore continually grows. This partially defeats the goal of the generator
1000concept, viz. produce the results only as needed instead of producing them
1001all and thereby wasting memory.
1002
1003Thanks to itertools.tee, it is now clear "how to get the internal uses of
1004m235 to share a single generator".
1005
1006>>> from itertools import tee
1007>>> def m235():
1008...     def _m235():
1009...         yield 1
1010...         for n in merge(times(2, m2),
1011...                        merge(times(3, m3),
1012...                              times(5, m5))):
1013...             yield n
1014...     m1 = _m235()
1015...     m2, m3, m5, mRes = tee(m1, 4)
1016...     return mRes
1017
1018>>> it = m235()
1019>>> for i in range(5):
1020...     print(firstn(it, 15))
1021[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1022[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1023[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1024[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1025[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1026
1027The "tee" function does just what we want. It internally keeps a generated
1028result for as long as it has not been "consumed" from all of the duplicated
1029iterators, whereupon it is deleted. You can therefore print the hamming
sequence for hours with little or no increase in memory usage.
1031
1032The beauty of it is that recursive running-after-their-tail FP algorithms
1033are quite straightforwardly expressed with this Python idiom.
1034
1035Ye olde Fibonacci generator, tee style.
1036
1037>>> def fib():
1038...
1039...     def _isum(g, h):
1040...         while 1:
1041...             yield next(g) + next(h)
1042...
1043...     def _fib():
1044...         yield 1
1045...         yield 2
1046...         next(fibTail) # throw first away
1047...         for res in _isum(fibHead, fibTail):
1048...             yield res
1049...
1050...     realfib = _fib()
1051...     fibHead, fibTail, fibRes = tee(realfib, 3)
1052...     return fibRes
1053
1054>>> firstn(fib(), 17)
1055[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1056
1057"""
1058
1059# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1060# hackery.
1061
1062syntax_tests = """
1063
1064These are fine:
1065
1066>>> def f():
1067...     yield 1
1068...     return
1069
1070>>> def f():
1071...     try:
1072...         yield 1
1073...     finally:
1074...         pass
1075
1076>>> def f():
1077...     try:
1078...         try:
1079...             1//0
1080...         except ZeroDivisionError:
1081...             yield 666
1082...         except:
1083...             pass
1084...     finally:
1085...         pass
1086
1087>>> def f():
1088...     try:
1089...         try:
1090...             yield 12
1091...             1//0
1092...         except ZeroDivisionError:
1093...             yield 666
1094...         except:
1095...             try:
1096...                 x = 12
1097...             finally:
1098...                 yield 12
1099...     except:
1100...         return
1101>>> list(f())
1102[12, 666]
1103
1104>>> def f():
1105...    yield
1106>>> type(f())
1107<class 'generator'>
1108
1109
1110>>> def f():
1111...    if 0:
1112...        yield
1113>>> type(f())
1114<class 'generator'>
1115
1116
1117>>> def f():
1118...     if 0:
1119...         yield 1
1120>>> type(f())
1121<class 'generator'>
1122
1123>>> def f():
1124...    if "":
1125...        yield None
1126>>> type(f())
1127<class 'generator'>
1128
1129>>> def f():
1130...     return
1131...     try:
1132...         if x==4:
1133...             pass
1134...         elif 0:
1135...             try:
1136...                 1//0
1137...             except SyntaxError:
1138...                 pass
1139...             else:
1140...                 if 0:
1141...                     while 12:
1142...                         x += 1
1143...                         yield 2 # don't blink
1144...                         f(a, b, c, d, e)
1145...         else:
1146...             pass
1147...     except:
1148...         x = 1
1149...     return
1150>>> type(f())
1151<class 'generator'>
1152
1153>>> def f():
1154...     if 0:
1155...         def g():
1156...             yield 1
1157...
1158>>> type(f())
1159<class 'NoneType'>
1160
1161>>> def f():
1162...     if 0:
1163...         class C:
1164...             def __init__(self):
1165...                 yield 1
1166...             def f(self):
1167...                 yield 2
1168>>> type(f())
1169<class 'NoneType'>
1170
1171>>> def f():
1172...     if 0:
1173...         return
1174...     if 0:
1175...         yield 2
1176>>> type(f())
1177<class 'generator'>
1178
1179This one caused a crash (see SF bug 567538):
1180
1181>>> def f():
1182...     for i in range(3):
1183...         try:
1184...             continue
1185...         finally:
1186...             yield i
1187...
1188>>> g = f()
1189>>> print(next(g))
11900
1191>>> print(next(g))
11921
1193>>> print(next(g))
11942
1195>>> print(next(g))
1196Traceback (most recent call last):
1197StopIteration
1198
1199
1200Test the gi_code attribute
1201
1202>>> def f():
1203...     yield 5
1204...
1205>>> g = f()
1206>>> g.gi_code is f.__code__
1207True
1208>>> next(g)
12095
1210>>> next(g)
1211Traceback (most recent call last):
1212StopIteration
1213>>> g.gi_code is f.__code__
1214True
1215
1216
1217Test the __name__ attribute and the repr()
1218
1219>>> def f():
1220...    yield 5
1221...
1222>>> g = f()
1223>>> g.__name__
1224'f'
1225>>> repr(g)  # doctest: +ELLIPSIS
1226'<generator object f at ...>'
1227
1228Lambdas shouldn't have their usual return behavior.
1229
1230>>> x = lambda: (yield 1)
1231>>> list(x())
1232[1]
1233
1234>>> x = lambda: ((yield 1), (yield 2))
1235>>> list(x())
1236[1, 2]
1237"""
1238
1239# conjoin is a simple backtracking generator, named in honor of Icon's
1240# "conjunction" control structure.  Pass a list of no-argument functions
1241# that return iterable objects.  Easiest to explain by example:  assume the
1242# function list [x, y, z] is passed.  Then conjoin acts like:
1243#
1244# def g():
1245#     values = [None] * 3
1246#     for values[0] in x():
1247#         for values[1] in y():
1248#             for values[2] in z():
1249#                 yield values
1250#
1251# So some 3-lists of values *may* be generated, each time we successfully
1252# get into the innermost loop.  If an iterator fails (is exhausted) before
1253# then, it "backtracks" to get the next value from the nearest enclosing
1254# iterator (the one "to the left"), and starts all over again at the next
1255# slot (pumps a fresh iterator).  Of course this is most useful when the
1256# iterators have side-effects, so that which values *can* be generated at
1257# each slot depend on the values iterated at previous slots.
1258
def simple_conjoin(gs):
    """Backtracking cross-product generator (straightforward version).

    gs is a list of no-argument callables, each returning an iterable.
    Yield one list per combination, slot k holding a value from gs[k]().
    The *same* list object is yielded every time, so callers that want
    to keep a result must copy it.
    """
    n = len(gs)
    values = [None] * n

    def recurse(slot):
        # Past the last slot: a complete assignment is ready.
        if slot >= n:
            yield values
            return
        # Pump a fresh iterator for this slot, then fill in the rest.
        for values[slot] in gs[slot]():
            yield from recurse(slot + 1)

    yield from recurse(0)
1273
1274# That works fine, but recursing a level and checking i against len(gs) for
1275# each item produced is inefficient.  By doing manual loop unrolling across
1276# generator boundaries, it's possible to eliminate most of that overhead.
1277# This isn't worth the bother *in general* for generators, but conjoin() is
1278# a core building block for some CPU-intensive generator applications.
1279
def conjoin(gs):
    """Backtracking cross-product generator with manual loop unrolling.

    Behaves exactly like simple_conjoin(gs), but peels off three loop
    nests per recursive call to cut per-item generator overhead.  As
    with simple_conjoin, the same list object is yielded for every
    combination, so copy it if you need to keep it.
    """
    n = len(gs)
    values = [None] * n

    # Recurse one level at a time until the number of remaining loop
    # nests is a multiple of 3, then hand off to the unrolled helper.
    def peel(slot):
        if slot >= n:
            yield values
        elif (n - slot) % 3:
            for values[slot] in gs[slot]():
                yield from peel(slot + 1)
        else:
            yield from three_nests(slot)

    # Run three consecutive loop nests per call, recursing only if at
    # least three more remain.  Internal helper for peel's use only:
    # callers must guarantee (n - slot) is a positive multiple of 3.
    def three_nests(slot):
        assert slot < n and (n - slot) % 3 == 0
        g0, g1, g2 = gs[slot], gs[slot + 1], gs[slot + 2]

        if slot + 3 >= n:
            # Innermost three nests: yield completed assignments directly.
            for values[slot] in g0():
                for values[slot + 1] in g1():
                    for values[slot + 2] in g2():
                        yield values
        else:
            # At least 6 nests remain; handle three, recurse for the rest.
            for values[slot] in g0():
                for values[slot + 1] in g1():
                    for values[slot + 2] in g2():
                        yield from three_nests(slot + 3)

    yield from peel(0)
1329
1330# And one more approach:  For backtracking apps like the Knight's Tour
1331# solver below, the number of backtracking levels can be enormous (one
1332# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1333# needs 10,000 levels).  In such cases Python is likely to run out of
1334# stack space due to recursion.  So here's a recursion-free version of
1335# conjoin too.
1336# NOTE WELL:  This allows large problems to be solved with only trivial
1337# demands on stack space.  Without explicitly resumable generators, this is
1338# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1339# than the fancy unrolled recursive conjoin.
1340
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Iterative, recursion-free equivalent of conjoin().

    Keeps an explicit per-level table of bound __next__ methods instead
    of nested generator frames, so the number of backtracking levels is
    limited only by memory, not by the recursion limit.  Slower than
    the recursive versions, but immune to stack exhaustion.
    """
    n = len(gs)
    values = [None] * n
    nexts = [None] * n       # bound __next__ of the live iterator per level
    stop = StopIteration     # local alias because it's caught a *lot*
    level = 0
    while True:
        # Descend: start a fresh iterator at each remaining level and
        # take its first value, until all slots fill or one is empty.
        try:
            while level < n:
                step = nexts[level] = gs[level]().__next__
                values[level] = step()
                level += 1
        except stop:
            pass
        else:
            assert level == n
            yield values

        # Backtrack: resume the nearest enclosing iterator that still
        # has values; everything deeper gets restarted fresh above.
        level -= 1
        while level >= 0:
            try:
                values[level] = nexts[level]()
                level += 1
                break
            except stop:
                level -= 1
        else:
            # Every level is exhausted -- the full product is done.
            assert level < 0
            break
1374
1375# A conjoin-based N-Queens solver.
1376
class Queens:
    """N-Queens solver driven by the conjoin() backtracking generator."""

    def __init__(self, n):
        self.n = n
        cols = range(n)

        # Encode every square as a bit vector marking the column and the
        # two diagonals it occupies, in three disjoint bit ranges:
        #   bits [0, n)       -- columns, ordinal j
        #   bits [n, 3n-1)    -- NW-SE diagonals, invariant i-j along each;
        #                        smallest i-j is 1-n, so shift by n-1
        #   bits [3n-1, 5n-2) -- NE-SW diagonals, invariant i+j along each;
        #                        i+j already runs 0 .. 2n-2
        # Then build, per row, a generator of the columns still legal
        # given the squares currently claimed in self.used.
        self.rowgenerators = []
        for i in cols:
            rowuses = [(1 << j) |                  # column ordinal
                       (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                            for j in cols]

            def rowgen(rowuses=rowuses):
                # Yield each column whose square conflicts with nothing
                # currently placed, claiming its bits while the choice
                # is live and releasing them on backtrack.
                for j in cols:
                    uses = rowuses[j]
                    if not (uses & self.used):
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    def solve(self):
        """Yield solutions; each is a list mapping row -> column."""
        self.used = 0
        yield from conjoin(self.rowgenerators)

    def printsolution(self, row2col):
        """Print one solution as an ASCII board."""
        n = self.n
        assert n == len(row2col)
        divider = "+" + "-+" * n
        print(divider)
        for row in range(n):
            cells = [" "] * n
            cells[row2col[row]] = "Q"
            print("|" + "|".join(cells) + "|")
            print(divider)
1426
1427# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1428# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1429# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1430# creating 10s of thousands of generators then!), and is lengthy.
1431
class Knights:
    """Generate closed Knight's Tours on an m x n board.

    Builds one square-generator per move of the tour and lets conjoin()
    (or flat_conjoin()) backtrack across them.  Passing hard=1 selects
    the stronger but more expensive move-ordering heuristic
    (advance_hard) for the middle moves.
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 through m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                # No forced move found: try fewest-exits successors first.
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 through m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                # No forced move found: sort by exits, then by distance
                # from center (negated, so far-from-center wins ties).
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (i, j) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Map a flat square index back to board coordinates (i, j)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs: for each square, the list of knight-move
        successor squares that stay on the board."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Yield complete tours; each is a list of flat square indices."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Print one tour as an ASCII board showing each move's number."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
1622
1623conjoin_tests = """
1624
1625Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1626possible use of conjoin, just to generate the full cross-product.
1627
1628>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1629...     print(c)
1630[0, 0, 0]
1631[0, 0, 1]
1632[0, 1, 0]
1633[0, 1, 1]
1634[1, 0, 0]
1635[1, 0, 1]
1636[1, 1, 0]
1637[1, 1, 1]
1638
1639For efficiency in typical backtracking apps, conjoin() yields the same list
1640object each time.  So if you want to save away a full account of its
1641generated sequence, you need to copy its results.
1642
1643>>> def gencopy(iterator):
1644...     for x in iterator:
1645...         yield x[:]
1646
1647>>> for n in range(10):
1648...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1649...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
16500 1 True True
16511 2 True True
16522 4 True True
16533 8 True True
16544 16 True True
16555 32 True True
16566 64 True True
16577 128 True True
16588 256 True True
16599 512 True True
1660
1661And run an 8-queens solver.
1662
1663>>> q = Queens(8)
1664>>> LIMIT = 2
1665>>> count = 0
1666>>> for row2col in q.solve():
1667...     count += 1
1668...     if count <= LIMIT:
1669...         print("Solution", count)
1670...         q.printsolution(row2col)
1671Solution 1
1672+-+-+-+-+-+-+-+-+
1673|Q| | | | | | | |
1674+-+-+-+-+-+-+-+-+
1675| | | | |Q| | | |
1676+-+-+-+-+-+-+-+-+
1677| | | | | | | |Q|
1678+-+-+-+-+-+-+-+-+
1679| | | | | |Q| | |
1680+-+-+-+-+-+-+-+-+
1681| | |Q| | | | | |
1682+-+-+-+-+-+-+-+-+
1683| | | | | | |Q| |
1684+-+-+-+-+-+-+-+-+
1685| |Q| | | | | | |
1686+-+-+-+-+-+-+-+-+
1687| | | |Q| | | | |
1688+-+-+-+-+-+-+-+-+
1689Solution 2
1690+-+-+-+-+-+-+-+-+
1691|Q| | | | | | | |
1692+-+-+-+-+-+-+-+-+
1693| | | | | |Q| | |
1694+-+-+-+-+-+-+-+-+
1695| | | | | | | |Q|
1696+-+-+-+-+-+-+-+-+
1697| | |Q| | | | | |
1698+-+-+-+-+-+-+-+-+
1699| | | | | | |Q| |
1700+-+-+-+-+-+-+-+-+
1701| | | |Q| | | | |
1702+-+-+-+-+-+-+-+-+
1703| |Q| | | | | | |
1704+-+-+-+-+-+-+-+-+
1705| | | | |Q| | | |
1706+-+-+-+-+-+-+-+-+
1707
1708>>> print(count, "solutions in all.")
170992 solutions in all.
1710
1711And run a Knight's Tour on a 10x10 board.  Note that there are about
171220,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1713
1714>>> k = Knights(10, 10)
1715>>> LIMIT = 2
1716>>> count = 0
1717>>> for x in k.solve():
1718...     count += 1
1719...     if count <= LIMIT:
1720...         print("Solution", count)
1721...         k.printsolution(x)
1722...     else:
1723...         break
1724Solution 1
1725+---+---+---+---+---+---+---+---+---+---+
1726|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1727+---+---+---+---+---+---+---+---+---+---+
1728| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1729+---+---+---+---+---+---+---+---+---+---+
1730| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1731+---+---+---+---+---+---+---+---+---+---+
1732| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1733+---+---+---+---+---+---+---+---+---+---+
1734| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1735+---+---+---+---+---+---+---+---+---+---+
1736| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1737+---+---+---+---+---+---+---+---+---+---+
1738| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1739+---+---+---+---+---+---+---+---+---+---+
1740| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1741+---+---+---+---+---+---+---+---+---+---+
1742| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1743+---+---+---+---+---+---+---+---+---+---+
1744| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1745+---+---+---+---+---+---+---+---+---+---+
1746Solution 2
1747+---+---+---+---+---+---+---+---+---+---+
1748|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1749+---+---+---+---+---+---+---+---+---+---+
1750| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1751+---+---+---+---+---+---+---+---+---+---+
1752| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1753+---+---+---+---+---+---+---+---+---+---+
1754| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1755+---+---+---+---+---+---+---+---+---+---+
1756| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1757+---+---+---+---+---+---+---+---+---+---+
1758| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1759+---+---+---+---+---+---+---+---+---+---+
1760| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1761+---+---+---+---+---+---+---+---+---+---+
1762| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1763+---+---+---+---+---+---+---+---+---+---+
1764| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1765+---+---+---+---+---+---+---+---+---+---+
1766| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1767+---+---+---+---+---+---+---+---+---+---+
1768"""
1769
1770weakref_tests = """\
1771Generators are weakly referencable:
1772
1773>>> import weakref
1774>>> def gen():
1775...     yield 'foo!'
1776...
1777>>> wr = weakref.ref(gen)
1778>>> wr() is gen
1779True
1780>>> p = weakref.proxy(gen)
1781
1782Generator-iterators are weakly referencable as well:
1783
1784>>> gi = gen()
1785>>> wr = weakref.ref(gi)
1786>>> wr() is gi
1787True
1788>>> p = weakref.proxy(gi)
1789>>> list(p)
1790['foo!']
1791
1792"""
1793
1794coroutine_tests = """\
1795Sending a value into a started generator:
1796
1797>>> def f():
1798...     print((yield 1))
1799...     yield 2
1800>>> g = f()
1801>>> next(g)
18021
1803>>> g.send(42)
180442
18052
1806
1807Sending a value into a new generator produces a TypeError:
1808
1809>>> f().send("foo")
1810Traceback (most recent call last):
1811...
1812TypeError: can't send non-None value to a just-started generator
1813
1814
1815Yield by itself yields None:
1816
1817>>> def f(): yield
1818>>> list(f())
1819[None]
1820
1821
Yield is allowed only in the outermost iterable of a generator expression:
1823
1824>>> def f(): list(i for i in [(yield 26)])
1825>>> type(f())
1826<class 'generator'>
1827
1828
1829A yield expression with augmented assignment.
1830
1831>>> def coroutine(seq):
1832...     count = 0
1833...     while count < 200:
1834...         count += yield
1835...         seq.append(count)
1836>>> seq = []
1837>>> c = coroutine(seq)
1838>>> next(c)
1839>>> print(seq)
1840[]
1841>>> c.send(10)
1842>>> print(seq)
1843[10]
1844>>> c.send(10)
1845>>> print(seq)
1846[10, 20]
1847>>> c.send(10)
1848>>> print(seq)
1849[10, 20, 30]
1850
1851
1852Check some syntax errors for yield expressions:
1853
1854>>> f=lambda: (yield 1),(yield 2)
1855Traceback (most recent call last):
1856  ...
1857SyntaxError: 'yield' outside function
1858
1859>>> def f(): x = yield = y
1860Traceback (most recent call last):
1861  ...
1862SyntaxError: assignment to yield expression not possible
1863
1864>>> def f(): (yield bar) = y
1865Traceback (most recent call last):
1866  ...
1867SyntaxError: cannot assign to yield expression
1868
1869>>> def f(): (yield bar) += y
1870Traceback (most recent call last):
1871  ...
1872SyntaxError: cannot assign to yield expression
1873
1874
1875Now check some throw() conditions:
1876
1877>>> def f():
1878...     while True:
1879...         try:
1880...             print((yield))
1881...         except ValueError as v:
1882...             print("caught ValueError (%s)" % (v))
1883>>> import sys
1884>>> g = f()
1885>>> next(g)
1886
1887>>> g.throw(ValueError) # type only
1888caught ValueError ()
1889
1890>>> g.throw(ValueError("xyz"))  # value only
1891caught ValueError (xyz)
1892
1893>>> g.throw(ValueError, ValueError(1))   # value+matching type
1894caught ValueError (1)
1895
1896>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
1897caught ValueError (1)
1898
1899>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
1900caught ValueError (1)
1901
1902>>> g.throw(ValueError(1), "foo")       # bad args
1903Traceback (most recent call last):
1904  ...
1905TypeError: instance exception may not have a separate value
1906
1907>>> g.throw(ValueError, "foo", 23)      # bad args
1908Traceback (most recent call last):
1909  ...
1910TypeError: throw() third argument must be a traceback object
1911
1912>>> g.throw("abc")
1913Traceback (most recent call last):
1914  ...
1915TypeError: exceptions must be classes or instances deriving from BaseException, not str
1916
1917>>> g.throw(0)
1918Traceback (most recent call last):
1919  ...
1920TypeError: exceptions must be classes or instances deriving from BaseException, not int
1921
1922>>> g.throw(list)
1923Traceback (most recent call last):
1924  ...
1925TypeError: exceptions must be classes or instances deriving from BaseException, not type
1926
1927>>> def throw(g,exc):
1928...     try:
1929...         raise exc
1930...     except:
1931...         g.throw(*sys.exc_info())
1932>>> throw(g,ValueError) # do it with traceback included
1933caught ValueError ()
1934
1935>>> g.send(1)
19361
1937
1938>>> throw(g,TypeError)  # terminate the generator
1939Traceback (most recent call last):
1940  ...
1941TypeError
1942
1943>>> print(g.gi_frame)
1944None
1945
1946>>> g.send(2)
1947Traceback (most recent call last):
1948  ...
1949StopIteration
1950
1951>>> g.throw(ValueError,6)       # throw on closed generator
1952Traceback (most recent call last):
1953  ...
1954ValueError: 6
1955
1956>>> f().throw(ValueError,7)     # throw on just-opened generator
1957Traceback (most recent call last):
1958  ...
1959ValueError: 7
1960
1961Plain "raise" inside a generator should preserve the traceback (#13188).
1962The traceback should have 3 levels:
1963- g.throw()
1964- f()
1965- 1/0
1966
1967>>> def f():
1968...     try:
1969...         yield
1970...     except:
1971...         raise
1972>>> g = f()
1973>>> try:
1974...     1/0
1975... except ZeroDivisionError as v:
1976...     try:
1977...         g.throw(v)
1978...     except Exception as w:
1979...         tb = w.__traceback__
1980>>> levels = 0
1981>>> while tb:
1982...     levels += 1
1983...     tb = tb.tb_next
1984>>> levels
19853
1986
1987Now let's try closing a generator:
1988
1989>>> def f():
1990...     try: yield
1991...     except GeneratorExit:
1992...         print("exiting")
1993
1994>>> g = f()
1995>>> next(g)
1996>>> g.close()
1997exiting
1998>>> g.close()  # should be no-op now
1999
2000>>> f().close()  # close on just-opened generator should be fine
2001
2002>>> def f(): yield      # an even simpler generator
2003>>> f().close()         # close before opening
2004>>> g = f()
2005>>> next(g)
2006>>> g.close()           # close normally
2007
2008And finalization:
2009
2010>>> def f():
2011...     try: yield
2012...     finally:
2013...         print("exiting")
2014
2015>>> g = f()
2016>>> next(g)
2017>>> del g
2018exiting
2019
2020
2021GeneratorExit is not caught by except Exception:
2022
2023>>> def f():
2024...     try: yield
2025...     except Exception:
2026...         print('except')
2027...     finally:
2028...         print('finally')
2029
2030>>> g = f()
2031>>> next(g)
2032>>> del g
2033finally
2034
2035
2036Now let's try some ill-behaved generators:
2037
2038>>> def f():
2039...     try: yield
2040...     except GeneratorExit:
2041...         yield "foo!"
2042>>> g = f()
2043>>> next(g)
2044>>> g.close()
2045Traceback (most recent call last):
2046  ...
2047RuntimeError: generator ignored GeneratorExit
2048>>> g.close()
2049
2050
2051Our ill-behaved code should be invoked during GC:
2052
2053>>> with support.catch_unraisable_exception() as cm:
2054...     g = f()
2055...     next(g)
2056...     del g
2057...
2058...     cm.unraisable.exc_type == RuntimeError
2059...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
2060...     cm.unraisable.exc_traceback is not None
2061True
2062True
2063True
2064
2065And errors thrown during closing should propagate:
2066
2067>>> def f():
2068...     try: yield
2069...     except GeneratorExit:
2070...         raise TypeError("fie!")
2071>>> g = f()
2072>>> next(g)
2073>>> g.close()
2074Traceback (most recent call last):
2075  ...
2076TypeError: fie!
2077
2078
2079Ensure that various yield expression constructs make their
2080enclosing function a generator:
2081
2082>>> def f(): x += yield
2083>>> type(f())
2084<class 'generator'>
2085
2086>>> def f(): x = yield
2087>>> type(f())
2088<class 'generator'>
2089
2090>>> def f(): lambda x=(yield): 1
2091>>> type(f())
2092<class 'generator'>
2093
2094>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
2095>>> data = [1,2]
2096>>> g = f(data)
2097>>> type(g)
2098<class 'generator'>
2099>>> g.send(None)
2100'a'
2101>>> data
2102[1, 2]
2103>>> g.send(0)
2104'b'
2105>>> data
2106[27, 2]
2107>>> try: g.send(1)
2108... except StopIteration: pass
2109>>> data
2110[27, 27]
2111
2112"""
2113
# Doctests that historically exposed reference leaks (itertools.tee cycles,
# generator finalization, and exceptions raised from __del__).  Kept in the
# standard suite so regrtest's refleak hunting (-R) re-triggers if any of
# these become uncollectable again.  NOTE: this text is executed by doctest;
# do not edit it cosmetically.
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> from test import support
>>> class Leaker:
...     def __del__(self):
...         def invoke(message):
...             raise RuntimeError(message)
...         invoke("del failed")
...
>>> with support.catch_unraisable_exception() as cm:
...     l = Leaker()
...     del l
...
...     cm.unraisable.object == Leaker.__del__
...     cm.unraisable.exc_type == RuntimeError
...     str(cm.unraisable.exc_value) == "del failed"
...     cm.unraisable.exc_traceback is not None
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2185
# Doctest discovery hook: doctest runs every suite registered in __test__.
# Keyword form keeps the keys as plain identifiers; insertion order matches
# the original literal.
__test__ = dict(
    tut=tutorial_tests,
    pep=pep_tests,
    email=email_tests,
    fun=fun_tests,
    syntax=syntax_tests,
    conjoin=conjoin_tests,
    weakref=weakref_tests,
    coroutine=coroutine_tests,
    refleaks=refleaks_tests,
)
2196
2197# Magic test name that regrtest.py invokes *after* importing this module.
2198# This worms around a bootstrap problem.
2199# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
2200# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run this module's unittest classes, then its doctest suites.

    Imports are performed lazily inside the function so that regrtest can
    import this module and invoke test_main afterwards (see the bootstrap
    note above).
    """
    from test import support
    from test import test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)
2205
# Support running the file directly; regrtest calls test_main() itself.
if __name__ == "__main__":
    test_main(verbose=1)
2209