Python-3.12.0/Lib/test/test_generators.py
       1  import copy
       2  import gc
       3  import pickle
       4  import sys
       5  import doctest
       6  import unittest
       7  import weakref
       8  import inspect
       9  
      10  from test import support
      11  
      12  try:
      13      import _testcapi
      14  except ImportError:
      15      _testcapi = None
      16  
      17  
      18  # This test makes sure that if a SIGINT arrives just before we send into a
      19  # yield from chain, the KeyboardInterrupt is raised in the innermost
      20  # generator (see bpo-30039).
      21  @unittest.skipUnless(_testcapi is not None and
      22                       hasattr(_testcapi, "raise_SIGINT_then_send_None"),
      23                       "needs _testcapi.raise_SIGINT_then_send_None")
      24  class SignalAndYieldFromTest(unittest.TestCase):
      25  
      26      def generator1(self):
      27          return (yield from self.generator2())
      28  
      29      def generator2(self):
      30          try:
      31              yield
      32          except KeyboardInterrupt:
      33              return "PASSED"
      34          else:
      35              return "FAILED"
      36  
      37      def test_raise_and_yield_from(self):
      38          gen = self.generator1()
      39          gen.send(None)
      40          try:
      41              _testcapi.raise_SIGINT_then_send_None(gen)
      42          except BaseException as _exc:
      43              exc = _exc
      44          self.assertIs(type(exc), StopIteration)
      45          self.assertEqual(exc.value, "PASSED")
      46  
      47  
      48  class FinalizationTest(unittest.TestCase):
      49  
      50      def test_frame_resurrect(self):
      51          # A generator frame can be resurrected by a generator's finalization.
      52          def gen():
      53              nonlocal frame
      54              try:
      55                  yield
      56              finally:
      57                  frame = sys._getframe()
      58  
      59          g = gen()
      60          wr = weakref.ref(g)
      61          next(g)
      62          del g
      63          support.gc_collect()
      64          self.assertIs(wr(), None)
      65          self.assertTrue(frame)
      66          del frame
      67          support.gc_collect()
      68  
      69      def test_refcycle(self):
      70          # A generator caught in a refcycle gets finalized anyway.
      71          old_garbage = gc.garbage[:]
      72          finalized = False
      73          def gen():
      74              nonlocal finalized
      75              try:
      76                  g = yield
      77                  yield 1
      78              finally:
      79                  finalized = True
      80  
      81          g = gen()
      82          next(g)
      83          g.send(g)
      84          self.assertGreater(sys.getrefcount(g), 2)
      85          self.assertFalse(finalized)
      86          del g
      87          support.gc_collect()
      88          self.assertTrue(finalized)
      89          self.assertEqual(gc.garbage, old_garbage)
      90  
      91      def test_lambda_generator(self):
      92          # Issue #23192: Test that a lambda returning a generator behaves
      93          # like the equivalent function
      94          f = lambda: (yield 1)
      95          def g(): return (yield 1)
      96  
      97          # test 'yield from'
      98          f2 = lambda: (yield from g())
      99          def g2(): return (yield from g())
     100  
     101          f3 = lambda: (yield from f())
     102          def g3(): return (yield from f())
     103  
     104          for gen_fun in (f, g, f2, g2, f3, g3):
     105              gen = gen_fun()
     106              self.assertEqual(next(gen), 1)
     107              with self.assertRaises(StopIteration) as cm:
     108                  gen.send(2)
     109              self.assertEqual(cm.exception.value, 2)
     110  
     111  
     112  class GeneratorTest(unittest.TestCase):
     113  
     114      def test_name(self):
     115          def func():
     116              yield 1
     117  
     118          # check generator names
     119          gen = func()
     120          self.assertEqual(gen.__name__, "func")
     121          self.assertEqual(gen.__qualname__,
     122                           "GeneratorTest.test_name.<locals>.func")
     123  
     124          # modify generator names
     125          gen.__name__ = "name"
     126          gen.__qualname__ = "qualname"
     127          self.assertEqual(gen.__name__, "name")
     128          self.assertEqual(gen.__qualname__, "qualname")
     129  
     130          # generator names must be strings and cannot be deleted
     131          self.assertRaises(TypeError, setattr, gen, '__name__', 123)
     132          self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
     133          self.assertRaises(TypeError, delattr, gen, '__name__')
     134          self.assertRaises(TypeError, delattr, gen, '__qualname__')
     135  
     136          # modify names of the function creating the generator
     137          func.__qualname__ = "func_qualname"
     138          func.__name__ = "func_name"
     139          gen = func()
     140          self.assertEqual(gen.__name__, "func_name")
     141          self.assertEqual(gen.__qualname__, "func_qualname")
     142  
     143          # unnamed generator
     144          gen = (x for x in range(10))
     145          self.assertEqual(gen.__name__,
     146                           "<genexpr>")
     147          self.assertEqual(gen.__qualname__,
     148                           "GeneratorTest.test_name.<locals>.<genexpr>")
     149  
     150      def test_copy(self):
     151          def f():
     152              yield 1
     153          g = f()
     154          with self.assertRaises(TypeError):
     155              copy.copy(g)
     156  
     157      def test_pickle(self):
     158          def f():
     159              yield 1
     160          g = f()
     161          for proto in range(pickle.HIGHEST_PROTOCOL + 1):
     162              with self.assertRaises((TypeError, pickle.PicklingError)):
     163                  pickle.dumps(g, proto)
     164  
     165      def test_send_non_none_to_new_gen(self):
     166          def f():
     167              yield 1
     168          g = f()
     169          with self.assertRaises(TypeError):
     170              g.send(0)
     171          self.assertEqual(next(g), 1)
     172  
     173      def test_handle_frame_object_in_creation(self):
     174  
     175          # Attempt to expose partially constructed frames
     176          # See https://github.com/python/cpython/issues/94262
     177  
     178          def cb(*args):
     179              inspect.stack()
     180  
     181          def gen():
     182              yield 1
     183  
     184          thresholds = gc.get_threshold()
     185  
     186          gc.callbacks.append(cb)
     187          gc.set_threshold(1, 0, 0)
     188          try:
     189              gen()
     190          finally:
     191              gc.set_threshold(*thresholds)
     192              gc.callbacks.pop()
     193  
     194          class Sneaky:
     195              def __del__(self):
     196                  inspect.stack()
     197  
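                  # Note: the two Sneaky instances below form a reference cycle, so
                  # they are only reclaimed by the cyclic GC; their __del__ then calls
                  # inspect.stack() while a generator frame may be under construction.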
     198          sneaky = Sneaky()
     199          sneaky._s = Sneaky()
     200          sneaky._s._s = sneaky
     201  
     202          gc.set_threshold(1, 0, 0)
     203          try:
     204              del sneaky
     205              gen()
     206          finally:
     207              gc.set_threshold(*thresholds)
     208  
     209      def test_ag_frame_f_back(self):
     210          async def f():
     211              yield
     212          ag = f()
     213          self.assertIsNone(ag.ag_frame.f_back)
     214  
     215      def test_cr_frame_f_back(self):
     216          async def f():
     217              pass
     218          cr = f()
     219          self.assertIsNone(cr.cr_frame.f_back)
     220          cr.close()  # Suppress RuntimeWarning.
     221  
     222      def test_gi_frame_f_back(self):
     223          def f():
     224              yield
     225          gi = f()
     226          self.assertIsNone(gi.gi_frame.f_back)
     227  
     228      def test_issue103488(self):
     229  
     230          def gen_raises():
     231              yield
     232              raise ValueError()
     233  
     234          def loop():
     235              try:
     236                  for _ in gen_raises():
     237                      if True is False:
     238                          return
     239              except ValueError:
     240                  pass
     241  
     242          # This should not raise
     243          loop()
     244  
     245  class ExceptionTest(unittest.TestCase):
     246      # Tests for issue #23353: check that the currently handled exception
     247      # is correctly saved/restored in PyEval_EvalFrameEx().
     248  
     249      def test_except_throw(self):
     250          def store_raise_exc_generator():
     251              try:
     252                  self.assertIsNone(sys.exception())
     253                  yield
     254              except Exception as exc:
     255                  # exception raised by gen.throw(exc)
     256                  self.assertIsInstance(sys.exception(), ValueError)
     257                  self.assertIsNone(exc.__context__)
     258                  yield
     259  
     260                  # ensure that the exception is not lost
     261                  self.assertIsInstance(sys.exception(), ValueError)
     262                  yield
     263  
     264                  # we should be able to raise back the ValueError
     265                  raise
     266  
     267          make = store_raise_exc_generator()
     268          next(make)
     269  
     270          try:
     271              raise ValueError()
     272          except Exception as exc:
     273              try:
     274                  make.throw(exc)
     275              except Exception:
     276                  pass
     277  
     278          next(make)
     279          with self.assertRaises(ValueError) as cm:
     280              next(make)
     281          self.assertIsNone(cm.exception.__context__)
     282  
     283          self.assertIsNone(sys.exception())
     284  
     285      def test_except_next(self):
     286          def gen():
     287              self.assertIsInstance(sys.exception(), ValueError)
     288              yield "done"
     289  
     290          g = gen()
     291          try:
     292              raise ValueError
     293          except Exception:
     294              self.assertEqual(next(g), "done")
     295          self.assertIsNone(sys.exception())
     296  
     297      def test_except_gen_except(self):
     298          def gen():
     299              try:
     300                  self.assertIsNone(sys.exception())
     301                  yield
     302                  # we are called from "except ValueError:", so the
     303                  # TypeError must get that ValueError as its __context__
     304                  raise TypeError()
     305              except TypeError as exc:
     306                  self.assertIsInstance(sys.exception(), TypeError)
     307                  self.assertEqual(type(exc.__context__), ValueError)
     308              # here we are still called from the "except ValueError:"
     309              self.assertIsInstance(sys.exception(), ValueError)
     310              yield
     311              self.assertIsNone(sys.exception())
     312              yield "done"
     313  
     314          g = gen()
     315          next(g)
     316          try:
     317              raise ValueError
     318          except Exception:
     319              next(g)
     320  
     321          self.assertEqual(next(g), "done")
     322          self.assertIsNone(sys.exception())
     323  
     324      def test_nested_gen_except_loop(self):
     325          def gen():
     326              for i in range(100):
     327                  self.assertIsInstance(sys.exception(), TypeError)
     328                  yield "doing"
     329  
     330          def outer():
     331              try:
     332                  raise TypeError
     333              except:
     334                  for x in gen():
     335                      yield x
     336  
     337          try:
     338              raise ValueError
     339          except Exception:
     340              for x in outer():
     341                  self.assertEqual(x, "doing")
     342          self.assertEqual(sys.exception(), None)
     343  
     344      def test_except_throw_exception_context(self):
     345          def gen():
     346              try:
     347                  try:
     348                      self.assertIsNone(sys.exception())
     349                      yield
     350                  except ValueError:
     351                      # we are called from "except ValueError:"
     352                      self.assertIsInstance(sys.exception(), ValueError)
     353                      raise TypeError()
     354              except Exception as exc:
     355                  self.assertIsInstance(sys.exception(), TypeError)
     356                  self.assertEqual(type(exc.__context__), ValueError)
     357              # we are still called from "except ValueError:"
     358              self.assertIsInstance(sys.exception(), ValueError)
     359              yield
     360              self.assertIsNone(sys.exception())
     361              yield "done"
     362  
     363          g = gen()
     364          next(g)
     365          try:
     366              raise ValueError
     367          except Exception as exc:
     368              g.throw(exc)
     369  
     370          self.assertEqual(next(g), "done")
     371          self.assertIsNone(sys.exception())
     372  
     373      def test_except_throw_bad_exception(self):
     374          class E(Exception):
     375              def __new__(cls, *args, **kwargs):
     376                  return cls
     377  
     378          def boring_generator():
     379              yield
     380  
     381          gen = boring_generator()
     382  
     383          err_msg = 'should have returned an instance of BaseException'
     384  
     385          with self.assertRaisesRegex(TypeError, err_msg):
     386              gen.throw(E)
     387  
     388          self.assertRaises(StopIteration, next, gen)
     389  
     390          def generator():
     391              with self.assertRaisesRegex(TypeError, err_msg):
     392                  yield
     393  
     394          gen = generator()
     395          next(gen)
     396          with self.assertRaises(StopIteration):
     397              gen.throw(E)
     398  
     399      def test_gen_3_arg_deprecation_warning(self):
     400          def g():
     401              yield 42
     402  
     403          gen = g()
     404          with self.assertWarns(DeprecationWarning):
     405              with self.assertRaises(TypeError):
     406                  gen.throw(TypeError, TypeError(24), None)
     407  
     408      def test_stopiteration_error(self):
     409          # See also PEP 479.
     410  
     411          def gen():
     412              raise StopIteration
     413              yield
     414  
     415          with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
     416              next(gen())
     417  
     418      def test_tutorial_stopiteration(self):
     419          # "Raise StopIteration" stops the generator too:
     420  
     421          def f():
     422              yield 1
     423              raise StopIteration
     424              yield 2 # never reached
     425  
     426          g = f()
     427          self.assertEqual(next(g), 1)
     428  
     429          with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
     430              next(g)
     431  
     432      def test_return_tuple(self):
     433          def g():
     434              return (yield 1)
     435  
     436          gen = g()
     437          self.assertEqual(next(gen), 1)
     438          with self.assertRaises(StopIteration) as cm:
     439              gen.send((2,))
     440          self.assertEqual(cm.exception.value, (2,))
     441  
     442      def test_return_stopiteration(self):
     443          def g():
     444              return (yield 1)
     445  
     446          gen = g()
     447          self.assertEqual(next(gen), 1)
     448          with self.assertRaises(StopIteration) as cm:
     449              gen.send(StopIteration(2))
     450          self.assertIsInstance(cm.exception.value, StopIteration)
     451          self.assertEqual(cm.exception.value.value, 2)
     452  
     453  
     454  class GeneratorThrowTest(unittest.TestCase):
     455  
     456      def test_exception_context_with_yield(self):
     457          def f():
     458              try:
     459                  raise KeyError('a')
     460              except Exception:
     461                  yield
     462  
     463          gen = f()
     464          gen.send(None)
     465          with self.assertRaises(ValueError) as cm:
     466              gen.throw(ValueError)
     467          context = cm.exception.__context__
     468          self.assertEqual((type(context), context.args), (KeyError, ('a',)))
     469  
     470      def test_exception_context_with_yield_inside_generator(self):
     471          # Check that the context is also available from inside the generator
     472          # with yield, as opposed to outside.
     473          def f():
     474              try:
     475                  raise KeyError('a')
     476              except Exception:
     477                  try:
     478                      yield
     479                  except Exception as exc:
     480                      self.assertEqual(type(exc), ValueError)
     481                      context = exc.__context__
     482                      self.assertEqual((type(context), context.args),
     483                          (KeyError, ('a',)))
     484                      yield 'b'
     485  
     486          gen = f()
     487          gen.send(None)
     488          actual = gen.throw(ValueError)
     489          # This ensures that the assertions inside were executed.
     490          self.assertEqual(actual, 'b')
     491  
     492      def test_exception_context_with_yield_from(self):
     493          def f():
     494              yield
     495  
     496          def g():
     497              try:
     498                  raise KeyError('a')
     499              except Exception:
     500                  yield from f()
     501  
     502          gen = g()
     503          gen.send(None)
     504          with self.assertRaises(ValueError) as cm:
     505              gen.throw(ValueError)
     506          context = cm.exception.__context__
     507          self.assertEqual((type(context), context.args), (KeyError, ('a',)))
     508  
     509      def test_exception_context_with_yield_from_with_context_cycle(self):
     510          # Check trying to create an exception context cycle:
     511          # https://bugs.python.org/issue40696
     512          has_cycle = None
     513  
     514          def f():
     515              yield
     516  
     517          def g(exc):
     518              nonlocal has_cycle
     519              try:
     520                  raise exc
     521              except Exception:
     522                  try:
     523                      yield from f()
     524                  except Exception as exc:
     525                      has_cycle = (exc is exc.__context__)
     526              yield
     527  
     528          exc = KeyError('a')
     529          gen = g(exc)
     530          gen.send(None)
     531          gen.throw(exc)
     532          # This also distinguishes from the initial has_cycle=None.
     533          self.assertEqual(has_cycle, False)
     534  
     535      def test_throw_after_none_exc_type(self):
     536          def g():
     537              try:
     538                  raise KeyError
     539              except KeyError:
     540                  pass
     541  
     542              try:
     543                  yield
     544              except Exception:
     545                  raise RuntimeError
     546  
     547          gen = g()
     548          gen.send(None)
     549          with self.assertRaises(RuntimeError):
     550              gen.throw(ValueError)
     551  
     552  
     553  class GeneratorStackTraceTest(unittest.TestCase):
     554  
     555      def check_stack_names(self, frame, expected):
     556          names = []
     557          while frame:
     558              name = frame.f_code.co_name
     559              # Stop checking frames when we get to our test helper.
     560              if name.startswith('check_') or name.startswith('call_'):
     561                  break
     562  
     563              names.append(name)
     564              frame = frame.f_back
     565  
     566          self.assertEqual(names, expected)
     567  
     568      def check_yield_from_example(self, call_method):
     569          def f():
     570              self.check_stack_names(sys._getframe(), ['f', 'g'])
     571              try:
     572                  yield
     573              except Exception:
     574                  pass
     575              self.check_stack_names(sys._getframe(), ['f', 'g'])
     576  
     577          def g():
     578              self.check_stack_names(sys._getframe(), ['g'])
     579              yield from f()
     580              self.check_stack_names(sys._getframe(), ['g'])
     581  
     582          gen = g()
     583          gen.send(None)
     584          try:
     585              call_method(gen)
     586          except StopIteration:
     587              pass
     588  
     589      def test_send_with_yield_from(self):
     590          def call_send(gen):
     591              gen.send(None)
     592  
     593          self.check_yield_from_example(call_send)
     594  
     595      def test_throw_with_yield_from(self):
     596          def call_throw(gen):
     597              gen.throw(RuntimeError)
     598  
     599          self.check_yield_from_example(call_throw)
     600  
     601  
     602  class YieldFromTests(unittest.TestCase):
     603      def test_generator_gi_yieldfrom(self):
     604          def a():
     605              self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
     606              self.assertIsNone(gen_b.gi_yieldfrom)
     607              yield
     608              self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
     609              self.assertIsNone(gen_b.gi_yieldfrom)
     610  
     611          def b():
     612              self.assertIsNone(gen_b.gi_yieldfrom)
     613              yield from a()
     614              self.assertIsNone(gen_b.gi_yieldfrom)
     615              yield
     616              self.assertIsNone(gen_b.gi_yieldfrom)
     617  
     618          gen_b = b()
     619          self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CREATED)
     620          self.assertIsNone(gen_b.gi_yieldfrom)
     621  
     622          gen_b.send(None)
     623          self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
     624          self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a')
     625  
     626          gen_b.send(None)
     627          self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
     628          self.assertIsNone(gen_b.gi_yieldfrom)
     629  
     630          [] = gen_b  # Exhaust generator
     631          self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CLOSED)
     632          self.assertIsNone(gen_b.gi_yieldfrom)
     633  
     634  
     635  tutorial_tests = """
     636  Let's try a simple generator:
     637  
     638      >>> def f():
     639      ...    yield 1
     640      ...    yield 2
     641  
     642      >>> for i in f():
     643      ...     print(i)
     644      1
     645      2
     646      >>> g = f()
     647      >>> next(g)
     648      1
     649      >>> next(g)
     650      2
     651  
     652  "Falling off the end" stops the generator:
     653  
     654      >>> next(g)
     655      Traceback (most recent call last):
     656        File "<stdin>", line 1, in ?
     657        File "<stdin>", line 2, in g
     658      StopIteration
     659  
     660  "return" also stops the generator:
     661  
     662      >>> def f():
     663      ...     yield 1
     664      ...     return
     665      ...     yield 2 # never reached
     666      ...
     667      >>> g = f()
     668      >>> next(g)
     669      1
     670      >>> next(g)
     671      Traceback (most recent call last):
     672        File "<stdin>", line 1, in ?
     673        File "<stdin>", line 3, in f
     674      StopIteration
     675      >>> next(g) # once stopped, can't be resumed
     676      Traceback (most recent call last):
     677        File "<stdin>", line 1, in ?
     678      StopIteration
     679  
     680  However, "return" and StopIteration are not exactly equivalent:
     681  
     682      >>> def g1():
     683      ...     try:
     684      ...         return
     685      ...     except:
     686      ...         yield 1
     687      ...
     688      >>> list(g1())
     689      []
     690  
     691      >>> def g2():
     692      ...     try:
     693      ...         raise StopIteration
     694      ...     except:
     695      ...         yield 42
     696      >>> print(list(g2()))
     697      [42]
     698  
     699  This may be surprising at first:
     700  
     701      >>> def g3():
     702      ...     try:
     703      ...         return
     704      ...     finally:
     705      ...         yield 1
     706      ...
     707      >>> list(g3())
     708      [1]
     709  
     710  Let's create an alternate range() function implemented as a generator:
     711  
     712      >>> def yrange(n):
     713      ...     for i in range(n):
     714      ...         yield i
     715      ...
     716      >>> list(yrange(5))
     717      [0, 1, 2, 3, 4]
     718  
     719  Generators always return to the most recent caller:
     720  
     721      >>> def creator():
     722      ...     r = yrange(5)
     723      ...     print("creator", next(r))
     724      ...     return r
     725      ...
     726      >>> def caller():
     727      ...     r = creator()
     728      ...     for i in r:
     729      ...             print("caller", i)
     730      ...
     731      >>> caller()
     732      creator 0
     733      caller 1
     734      caller 2
     735      caller 3
     736      caller 4
     737  
     738  Generators can call other generators:
     739  
     740      >>> def zrange(n):
     741      ...     for i in yrange(n):
     742      ...         yield i
     743      ...
     744      >>> list(zrange(5))
     745      [0, 1, 2, 3, 4]
     746  
     747  """
     748  
     749  # The examples from PEP 255.
     750  
     751  pep_tests = """
     752  
     753  Specification:  Yield
     754  
     755      Restriction:  A generator cannot be resumed while it is actively
     756      running:
     757  
     758      >>> def g():
     759      ...     i = next(me)
     760      ...     yield i
     761      >>> me = g()
     762      >>> next(me)
     763      Traceback (most recent call last):
     764       ...
     765        File "<string>", line 2, in g
     766      ValueError: generator already executing
     767  
     768  Specification: Return
     769  
     770      Note that return isn't always equivalent to raising StopIteration:  the
     771      difference lies in how enclosing try/except constructs are treated.
     772      For example,
     773  
     774          >>> def f1():
     775          ...     try:
     776          ...         return
     777          ...     except:
     778          ...        yield 1
     779          >>> print(list(f1()))
     780          []
     781  
     782      because, as in any function, return simply exits, but
     783  
     784          >>> def f2():
     785          ...     try:
     786          ...         raise StopIteration
     787          ...     except:
     788          ...         yield 42
     789          >>> print(list(f2()))
     790          [42]
     791  
     792      because StopIteration is captured by a bare "except", as is any
     793      exception.
     794  
     795  Specification: Generators and Exception Propagation
     796  
     797      >>> def f():
     798      ...     return 1//0
     799      >>> def g():
     800      ...     yield f()  # the zero division exception propagates
     801      ...     yield 42   # and we'll never get here
     802      >>> k = g()
     803      >>> next(k)
     804      Traceback (most recent call last):
     805        File "<stdin>", line 1, in ?
     806        File "<stdin>", line 2, in g
     807        File "<stdin>", line 2, in f
     808      ZeroDivisionError: integer division or modulo by zero
     809      >>> next(k)  # and the generator cannot be resumed
     810      Traceback (most recent call last):
     811        File "<stdin>", line 1, in ?
     812      StopIteration
     813      >>>
     814  
     815  Specification: Try/Except/Finally
     816  
     817      >>> def f():
     818      ...     try:
     819      ...         yield 1
     820      ...         try:
     821      ...             yield 2
     822      ...             1//0
     823      ...             yield 3  # never get here
     824      ...         except ZeroDivisionError:
     825      ...             yield 4
     826      ...             yield 5
     827      ...             raise
     828      ...         except:
     829      ...             yield 6
     830      ...         yield 7     # the "raise" above stops this
     831      ...     except:
     832      ...         yield 8
     833      ...     yield 9
     834      ...     try:
     835      ...         x = 12
     836      ...     finally:
     837      ...         yield 10
     838      ...     yield 11
     839      >>> print(list(f()))
     840      [1, 2, 4, 5, 8, 9, 10, 11]
     841      >>>
     842  
     843  Guido's binary tree example.
     844  
     845      >>> # A binary tree class.
     846      >>> class Tree:
     847      ...
     848      ...     def __init__(self, label, left=None, right=None):
     849      ...         self.label = label
     850      ...         self.left = left
     851      ...         self.right = right
     852      ...
     853      ...     def __repr__(self, level=0, indent="    "):
     854      ...         s = level*indent + repr(self.label)
     855      ...         if self.left:
     856      ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
     857      ...         if self.right:
     858      ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
     859      ...         return s
     860      ...
     861      ...     def __iter__(self):
     862      ...         return inorder(self)
     863  
     864      >>> # Create a Tree from a list.
     865      >>> def tree(list):
     866      ...     n = len(list)
     867      ...     if n == 0:
     868      ...         return []
     869      ...     i = n // 2
     870      ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
     871  
     872      >>> # Show it off: create a tree.
     873      >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
     874  
     875      >>> # A recursive generator that generates Tree labels in in-order.
     876      >>> def inorder(t):
     877      ...     if t:
     878      ...         for x in inorder(t.left):
     879      ...             yield x
     880      ...         yield t.label
     881      ...         for x in inorder(t.right):
     882      ...             yield x
     883  
     884      >>> # Show it off: create a tree.
     885      >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
     886      >>> # Print the nodes of the tree in in-order.
     887      >>> for x in t:
     888      ...     print(' '+x, end='')
     889       A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
     890  
     891      >>> # A non-recursive generator.
     892      >>> def inorder(node):
     893      ...     stack = []
     894      ...     while node:
     895      ...         while node.left:
     896      ...             stack.append(node)
     897      ...             node = node.left
     898      ...         yield node.label
     899      ...         while not node.right:
     900      ...             try:
     901      ...                 node = stack.pop()
     902      ...             except IndexError:
     903      ...                 return
     904      ...             yield node.label
     905      ...         node = node.right
     906  
     907      >>> # Exercise the non-recursive generator.
     908      >>> for x in t:
     909      ...     print(' '+x, end='')
     910       A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
     911  
     912  """
     913  
     914  # Examples from Iterator-List and Python-Dev and c.l.py.
     915  
     916  email_tests = """
     917  
     918  The difference between yielding None and returning it.
     919  
     920  >>> def g():
     921  ...     for i in range(3):
     922  ...         yield None
     923  ...     yield None
     924  ...     return
     925  >>> list(g())
     926  [None, None, None, None]
     927  
     928  Ensure that explicitly raising StopIteration acts like any other exception
     929  in try/except, not like a return.
     930  
     931  >>> def g():
     932  ...     yield 1
     933  ...     try:
     934  ...         raise StopIteration
     935  ...     except:
     936  ...         yield 2
     937  ...     yield 3
     938  >>> list(g())
     939  [1, 2, 3]
     940  
     941  Next one was posted to c.l.py.
     942  
     943  >>> def gcomb(x, k):
     944  ...     "Generate all combinations of k elements from list x."
     945  ...
     946  ...     if k > len(x):
     947  ...         return
     948  ...     if k == 0:
     949  ...         yield []
     950  ...     else:
     951  ...         first, rest = x[0], x[1:]
     952  ...         # A combination does or doesn't contain first.
     953  ...         # If it does, the remainder is a k-1 comb of rest.
     954  ...         for c in gcomb(rest, k-1):
     955  ...             c.insert(0, first)
     956  ...             yield c
     957  ...         # If it doesn't contain first, it's a k comb of rest.
     958  ...         for c in gcomb(rest, k):
     959  ...             yield c
     960  
     961  >>> seq = list(range(1, 5))
     962  >>> for k in range(len(seq) + 2):
     963  ...     print("%d-combs of %s:" % (k, seq))
     964  ...     for c in gcomb(seq, k):
     965  ...         print("   ", c)
     966  0-combs of [1, 2, 3, 4]:
     967      []
     968  1-combs of [1, 2, 3, 4]:
     969      [1]
     970      [2]
     971      [3]
     972      [4]
     973  2-combs of [1, 2, 3, 4]:
     974      [1, 2]
     975      [1, 3]
     976      [1, 4]
     977      [2, 3]
     978      [2, 4]
     979      [3, 4]
     980  3-combs of [1, 2, 3, 4]:
     981      [1, 2, 3]
     982      [1, 2, 4]
     983      [1, 3, 4]
     984      [2, 3, 4]
     985  4-combs of [1, 2, 3, 4]:
     986      [1, 2, 3, 4]
     987  5-combs of [1, 2, 3, 4]:
     988  
     989  From the Iterators list, about the types of these things.
     990  
     991  >>> def g():
     992  ...     yield 1
     993  ...
     994  >>> type(g)
     995  <class 'function'>
     996  >>> i = g()
     997  >>> type(i)
     998  <class 'generator'>
     999  >>> [s for s in dir(i) if not s.startswith('_')]
    1000  ['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_suspended', 'gi_yieldfrom', 'send', 'throw']
    1001  >>> from test.support import HAVE_DOCSTRINGS
    1002  >>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
    1003  Implement next(self).
    1004  >>> iter(i) is i
    1005  True
    1006  >>> import types
    1007  >>> isinstance(i, types.GeneratorType)
    1008  True
    1009  
    1010  And more, added later.
    1011  
    1012  >>> i.gi_running
    1013  0
    1014  >>> type(i.gi_frame)
    1015  <class 'frame'>
    1016  >>> i.gi_running = 42
    1017  Traceback (most recent call last):
    1018    ...
    1019  AttributeError: attribute 'gi_running' of 'generator' objects is not writable
    1020  >>> def g():
    1021  ...     yield me.gi_running
    1022  >>> me = g()
    1023  >>> me.gi_running
    1024  0
    1025  >>> next(me)
    1026  1
    1027  >>> me.gi_running
    1028  0
    1029  
    1030  A clever union-find implementation from c.l.py, due to David Eppstein.
    1031  Sent: Friday, June 29, 2001 12:16 PM
    1032  To: python-list@python.org
    1033  Subject: Re: PEP 255: Simple Generators
    1034  
    1035  >>> class disjointSet:
    1036  ...     def __init__(self, name):
    1037  ...         self.name = name
    1038  ...         self.parent = None
    1039  ...         self.generator = self.generate()
    1040  ...
    1041  ...     def generate(self):
    1042  ...         while not self.parent:
    1043  ...             yield self
    1044  ...         for x in self.parent.generator:
    1045  ...             yield x
    1046  ...
    1047  ...     def find(self):
    1048  ...         return next(self.generator)
    1049  ...
    1050  ...     def union(self, parent):
    1051  ...         if self.parent:
    1052  ...             raise ValueError("Sorry, I'm not a root!")
    1053  ...         self.parent = parent
    1054  ...
    1055  ...     def __str__(self):
    1056  ...         return self.name
    1057  
    1058  >>> names = "ABCDEFGHIJKLM"
    1059  >>> sets = [disjointSet(name) for name in names]
    1060  >>> roots = sets[:]
    1061  
    1062  >>> import random
    1063  >>> gen = random.Random(42)
    1064  >>> while 1:
    1065  ...     for s in sets:
    1066  ...         print(" %s->%s" % (s, s.find()), end='')
    1067  ...     print()
    1068  ...     if len(roots) > 1:
    1069  ...         s1 = gen.choice(roots)
    1070  ...         roots.remove(s1)
    1071  ...         s2 = gen.choice(roots)
    1072  ...         s1.union(s2)
    1073  ...         print("merged", s1, "into", s2)
    1074  ...     else:
    1075  ...         break
    1076   A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
    1077  merged K into B
    1078   A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
    1079  merged A into F
    1080   A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
    1081  merged E into F
    1082   A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
    1083  merged D into C
    1084   A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
    1085  merged M into C
    1086   A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
    1087  merged J into B
    1088   A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
    1089  merged B into C
    1090   A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
    1091  merged F into G
    1092   A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
    1093  merged L into C
    1094   A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
    1095  merged G into I
    1096   A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
    1097  merged I into H
    1098   A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
    1099  merged C into H
    1100   A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
    1101  
    1102  """
    1103  # Emacs turd '
    1104  
    1105  # Fun tests (for sufficiently warped notions of "fun").
    1106  
    1107  fun_tests = """
    1108  
    1109  Build up to a recursive Sieve of Eratosthenes generator.
    1110  
    1111  >>> def firstn(g, n):
    1112  ...     return [next(g) for i in range(n)]
    1113  
    1114  >>> def intsfrom(i):
    1115  ...     while 1:
    1116  ...         yield i
    1117  ...         i += 1
    1118  
    1119  >>> firstn(intsfrom(5), 7)
    1120  [5, 6, 7, 8, 9, 10, 11]
    1121  
    1122  >>> def exclude_multiples(n, ints):
    1123  ...     for i in ints:
    1124  ...         if i % n:
    1125  ...             yield i
    1126  
    1127  >>> firstn(exclude_multiples(3, intsfrom(1)), 6)
    1128  [1, 2, 4, 5, 7, 8]
    1129  
    1130  >>> def sieve(ints):
    1131  ...     prime = next(ints)
    1132  ...     yield prime
    1133  ...     not_divisible_by_prime = exclude_multiples(prime, ints)
    1134  ...     for p in sieve(not_divisible_by_prime):
    1135  ...         yield p
    1136  
    1137  >>> primes = sieve(intsfrom(2))
    1138  >>> firstn(primes, 20)
    1139  [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
    1140  
    1141  
    1142  Another famous problem:  generate all integers of the form
    1143      2**i * 3**j  * 5**k
    1144  in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
    1145  Try writing it without generators, and correctly, and without generating
    1146  3 internal results for each result output.
    1147  
    1148  >>> def times(n, g):
    1149  ...     for i in g:
    1150  ...         yield n * i
    1151  >>> firstn(times(10, intsfrom(1)), 10)
    1152  [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
    1153  
    1154  >>> def merge(g, h):
    1155  ...     ng = next(g)
    1156  ...     nh = next(h)
    1157  ...     while 1:
    1158  ...         if ng < nh:
    1159  ...             yield ng
    1160  ...             ng = next(g)
    1161  ...         elif ng > nh:
    1162  ...             yield nh
    1163  ...             nh = next(h)
    1164  ...         else:
    1165  ...             yield ng
    1166  ...             ng = next(g)
    1167  ...             nh = next(h)
    1168  
    1169  The following works, but is doing a whale of a lot of redundant work --
    1170  it's not clear how to get the internal uses of m235 to share a single
    1171  generator.  Note that me_times2 (etc) each need to see every element in the
    1172  result sequence.  So this is an example where lazy lists are more natural
    1173  (you can look at the head of a lazy list any number of times).
    1174  
    1175  >>> def m235():
    1176  ...     yield 1
    1177  ...     me_times2 = times(2, m235())
    1178  ...     me_times3 = times(3, m235())
    1179  ...     me_times5 = times(5, m235())
    1180  ...     for i in merge(merge(me_times2,
    1181  ...                          me_times3),
    1182  ...                    me_times5):
    1183  ...         yield i
    1184  
    1185  Don't print "too many" of these -- the implementation above is extremely
    1186  inefficient:  each call of m235() leads to 3 recursive calls, and in
    1187  turn each of those 3 more, and so on, and so on, until we've descended
    1188  enough levels to satisfy the print stmts.  Very odd:  when I printed 5
    1189  lines of results below, this managed to screw up Win98's malloc in "the
    1190  usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
    1191  address space, and it *looked* like a very slow leak.
    1192  
    1193  >>> result = m235()
    1194  >>> for i in range(3):
    1195  ...     print(firstn(result, 15))
    1196  [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    1197  [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    1198  [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    1199  
    1200  Heh.  Here's one way to get a shared list, complete with an excruciating
    1201  namespace renaming trick.  The *pretty* part is that the times() and merge()
    1202  functions can be reused as-is, because they only assume their stream
    1203  arguments are iterable -- a LazyList is the same as a generator to times().
    1204  
    1205  >>> class LazyList:
    1206  ...     def __init__(self, g):
    1207  ...         self.sofar = []
    1208  ...         self.fetch = g.__next__
    1209  ...
    1210  ...     def __getitem__(self, i):
    1211  ...         sofar, fetch = self.sofar, self.fetch
    1212  ...         while i >= len(sofar):
    1213  ...             sofar.append(fetch())
    1214  ...         return sofar[i]
    1215  
    1216  >>> def m235():
    1217  ...     yield 1
    1218  ...     # Gack:  m235 below actually refers to a LazyList.
    1219  ...     me_times2 = times(2, m235)
    1220  ...     me_times3 = times(3, m235)
    1221  ...     me_times5 = times(5, m235)
    1222  ...     for i in merge(merge(me_times2,
    1223  ...                          me_times3),
    1224  ...                    me_times5):
    1225  ...         yield i
    1226  
    1227  Print as many of these as you like -- *this* implementation is memory-
    1228  efficient.
    1229  
    1230  >>> m235 = LazyList(m235())
    1231  >>> for i in range(5):
    1232  ...     print([m235[j] for j in range(15*i, 15*(i+1))])
    1233  [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    1234  [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    1235  [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    1236  [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
    1237  [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
    1238  
    1239  Ye olde Fibonacci generator, LazyList style.
    1240  
    1241  >>> def fibgen(a, b):
    1242  ...
    1243  ...     def sum(g, h):
    1244  ...         while 1:
    1245  ...             yield next(g) + next(h)
    1246  ...
    1247  ...     def tail(g):
    1248  ...         next(g)    # throw first away
    1249  ...         for x in g:
    1250  ...             yield x
    1251  ...
    1252  ...     yield a
    1253  ...     yield b
    1254  ...     for s in sum(iter(fib),
    1255  ...                  tail(iter(fib))):
    1256  ...         yield s
    1257  
    1258  >>> fib = LazyList(fibgen(1, 2))
    1259  >>> firstn(iter(fib), 17)
    1260  [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
    1261  
    1262  
    1263  Running after your tail with itertools.tee (new in version 2.4)
    1264  
    1265  The algorithms "m235" (Hamming) and Fibonacci presented above are both
    1266  examples of a whole family of FP (functional programming) algorithms
    1267  where a function produces and returns a list while the production algorithm
    1268  assumes the list has already been produced by recursively calling itself.
    1269  For these algorithms to work, they must:
    1270  
    1271  - produce at least a first element without presupposing the existence of
    1272    the rest of the list
    1273  - produce their elements in a lazy manner
    1274  
    1275  To work efficiently, the beginning of the list must not be recomputed over
    1276  and over again. This is ensured in most FP languages as a built-in feature.
    1277  In Python, we have to explicitly maintain a list of already computed results
    1278  and abandon genuine recursion.
    1279  
    1280  This is what had been attempted above with the LazyList class. One problem
    1281  with that class is that it keeps a list of all of the generated results and
    1282  therefore continually grows. This partially defeats the goal of the generator
    1283  concept, viz. produce the results only as needed instead of producing them
    1284  all and thereby wasting memory.
    1285  
    1286  Thanks to itertools.tee, it is now clear "how to get the internal uses of
    1287  m235 to share a single generator".
    1288  
    1289  >>> from itertools import tee
    1290  >>> def m235():
    1291  ...     def _m235():
    1292  ...         yield 1
    1293  ...         for n in merge(times(2, m2),
    1294  ...                        merge(times(3, m3),
    1295  ...                              times(5, m5))):
    1296  ...             yield n
    1297  ...     m1 = _m235()
    1298  ...     m2, m3, m5, mRes = tee(m1, 4)
    1299  ...     return mRes
    1300  
    1301  >>> it = m235()
    1302  >>> for i in range(5):
    1303  ...     print(firstn(it, 15))
    1304  [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
    1305  [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
    1306  [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
    1307  [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
    1308  [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
    1309  
    1310  The "tee" function does just what we want. It internally keeps a generated
    1311  result for as long as it has not been "consumed" from all of the duplicated
    1312  iterators, whereupon it is deleted. You can therefore print the Hamming
    1313  sequence for hours with little or no increase in memory usage.
    1314  
    1315  The beauty of it is that recursive running-after-their-tail FP algorithms
    1316  are quite straightforwardly expressed with this Python idiom.
    1317  
    1318  Ye olde Fibonacci generator, tee style.
    1319  
    1320  >>> def fib():
    1321  ...
    1322  ...     def _isum(g, h):
    1323  ...         while 1:
    1324  ...             yield next(g) + next(h)
    1325  ...
    1326  ...     def _fib():
    1327  ...         yield 1
    1328  ...         yield 2
    1329  ...         next(fibTail) # throw first away
    1330  ...         for res in _isum(fibHead, fibTail):
    1331  ...             yield res
    1332  ...
    1333  ...     realfib = _fib()
    1334  ...     fibHead, fibTail, fibRes = tee(realfib, 3)
    1335  ...     return fibRes
    1336  
    1337  >>> firstn(fib(), 17)
    1338  [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
    1339  
    1340  """
    1341  
    1342  # syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
    1343  # hackery.
    1344  
    1345  syntax_tests = """
    1346  
    1347  These are fine:
    1348  
    1349  >>> def f():
    1350  ...     yield 1
    1351  ...     return
    1352  
    1353  >>> def f():
    1354  ...     try:
    1355  ...         yield 1
    1356  ...     finally:
    1357  ...         pass
    1358  
    1359  >>> def f():
    1360  ...     try:
    1361  ...         try:
    1362  ...             1//0
    1363  ...         except ZeroDivisionError:
    1364  ...             yield 666
    1365  ...         except:
    1366  ...             pass
    1367  ...     finally:
    1368  ...         pass
    1369  
    1370  >>> def f():
    1371  ...     try:
    1372  ...         try:
    1373  ...             yield 12
    1374  ...             1//0
    1375  ...         except ZeroDivisionError:
    1376  ...             yield 666
    1377  ...         except:
    1378  ...             try:
    1379  ...                 x = 12
    1380  ...             finally:
    1381  ...                 yield 12
    1382  ...     except:
    1383  ...         return
    1384  >>> list(f())
    1385  [12, 666]
    1386  
    1387  >>> def f():
    1388  ...    yield
    1389  >>> type(f())
    1390  <class 'generator'>
    1391  
    1392  
    1393  >>> def f():
    1394  ...    if 0:
    1395  ...        yield
    1396  >>> type(f())
    1397  <class 'generator'>
    1398  
    1399  
    1400  >>> def f():
    1401  ...     if 0:
    1402  ...         yield 1
    1403  >>> type(f())
    1404  <class 'generator'>
    1405  
    1406  >>> def f():
    1407  ...    if "":
    1408  ...        yield None
    1409  >>> type(f())
    1410  <class 'generator'>
    1411  
    1412  >>> def f():
    1413  ...     return
    1414  ...     try:
    1415  ...         if x==4:
    1416  ...             pass
    1417  ...         elif 0:
    1418  ...             try:
    1419  ...                 1//0
    1420  ...             except SyntaxError:
    1421  ...                 pass
    1422  ...             else:
    1423  ...                 if 0:
    1424  ...                     while 12:
    1425  ...                         x += 1
    1426  ...                         yield 2 # don't blink
    1427  ...                         f(a, b, c, d, e)
    1428  ...         else:
    1429  ...             pass
    1430  ...     except:
    1431  ...         x = 1
    1432  ...     return
    1433  >>> type(f())
    1434  <class 'generator'>
    1435  
    1436  >>> def f():
    1437  ...     if 0:
    1438  ...         def g():
    1439  ...             yield 1
    1440  ...
    1441  >>> type(f())
    1442  <class 'NoneType'>
    1443  
    1444  >>> def f():
    1445  ...     if 0:
    1446  ...         class C:
    1447  ...             def __init__(self):
    1448  ...                 yield 1
    1449  ...             def f(self):
    1450  ...                 yield 2
    1451  >>> type(f())
    1452  <class 'NoneType'>
    1453  
    1454  >>> def f():
    1455  ...     if 0:
    1456  ...         return
    1457  ...     if 0:
    1458  ...         yield 2
    1459  >>> type(f())
    1460  <class 'generator'>
    1461  
    1462  This one caused a crash (see SF bug 567538):
    1463  
    1464  >>> def f():
    1465  ...     for i in range(3):
    1466  ...         try:
    1467  ...             continue
    1468  ...         finally:
    1469  ...             yield i
    1470  ...
    1471  >>> g = f()
    1472  >>> print(next(g))
    1473  0
    1474  >>> print(next(g))
    1475  1
    1476  >>> print(next(g))
    1477  2
    1478  >>> print(next(g))
    1479  Traceback (most recent call last):
    1480  StopIteration
    1481  
    1482  
    1483  Test the gi_code attribute
    1484  
    1485  >>> def f():
    1486  ...     yield 5
    1487  ...
    1488  >>> g = f()
    1489  >>> g.gi_code is f.__code__
    1490  True
    1491  >>> next(g)
    1492  5
    1493  >>> next(g)
    1494  Traceback (most recent call last):
    1495  StopIteration
    1496  >>> g.gi_code is f.__code__
    1497  True
    1498  
    1499  
    1500  Test the __name__ attribute and the repr()
    1501  
    1502  >>> def f():
    1503  ...    yield 5
    1504  ...
    1505  >>> g = f()
    1506  >>> g.__name__
    1507  'f'
    1508  >>> repr(g)  # doctest: +ELLIPSIS
    1509  '<generator object f at ...>'
    1510  
    1511  Lambdas shouldn't have their usual return behavior.
    1512  
    1513  >>> x = lambda: (yield 1)
    1514  >>> list(x())
    1515  [1]
    1516  
    1517  >>> x = lambda: ((yield 1), (yield 2))
    1518  >>> list(x())
    1519  [1, 2]
    1520  """
    1521  
    1522  # conjoin is a simple backtracking generator, named in honor of Icon's
    1523  # "conjunction" control structure.  Pass a list of no-argument functions
    1524  # that return iterable objects.  Easiest to explain by example:  assume the
    1525  # function list [x, y, z] is passed.  Then conjoin acts like:
    1526  #
    1527  # def g():
    1528  #     values = [None] * 3
    1529  #     for values[0] in x():
    1530  #         for values[1] in y():
    1531  #             for values[2] in z():
    1532  #                 yield values
    1533  #
    1534  # So some 3-lists of values *may* be generated, each time we successfully
    1535  # get into the innermost loop.  If an iterator fails (is exhausted) before
    1536  # then, it "backtracks" to get the next value from the nearest enclosing
    1537  # iterator (the one "to the left"), and starts all over again at the next
    1538  # slot (pumps a fresh iterator).  Of course this is most useful when the
    1539  # iterators have side-effects, so that which values *can* be generated at
    1540  # each slot depends on the values iterated at previous slots.
    1541  
    1542  def simple_conjoin(gs):
    1543  
    1544      values = [None] * len(gs)
    1545  
    1546      def gen(i):
    1547          if i >= len(gs):
    1548              yield values
    1549          else:
    1550              for values[i] in gs[i]():
    1551                  for x in gen(i+1):
    1552                      yield x
    1553  
    1554      for x in gen(0):
    1555          yield x
    1556  
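          # A minimal usage sketch (illustration only; the helper name
          # _simple_conjoin_example is purely illustrative and is never called by
          # the tests -- the conjoin_tests doctests below exercise conjoin()
          # itself).  With side-effect-free factories this is just a plain
          # cross-product; copy each yielded list, because the same list object
          # is mutated in place on every yield.
          def _simple_conjoin_example():
              def bits():
                  return iter((0, 1))
              # Returns [[0, 0], [0, 1], [1, 0], [1, 1]].
              return [v[:] for v in simple_conjoin([bits] * 2)]
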
    1557  # That works fine, but recursing a level and checking i against len(gs) for
    1558  # each item produced is inefficient.  By doing manual loop unrolling across
    1559  # generator boundaries, it's possible to eliminate most of that overhead.
    1560  # This isn't worth the bother *in general* for generators, but conjoin() is
    1561  # a core building block for some CPU-intensive generator applications.
    1562  
    1563  def conjoin(gs):
    1564  
    1565      n = len(gs)
    1566      values = [None] * n
    1567  
    1568      # Do one loop nest at a time recursively, until the # of loop nests
    1569      # remaining is divisible by 3.
    1570  
    1571      def gen(i):
    1572          if i >= n:
    1573              yield values
    1574  
    1575          elif (n-i) % 3:
    1576              ip1 = i+1
    1577              for values[i] in gs[i]():
    1578                  for x in gen(ip1):
    1579                      yield x
    1580  
    1581          else:
    1582              for x in _gen3(i):
    1583                  yield x
    1584  
    1585      # Do three loop nests at a time, recursing only if at least three more
    1586      # remain.  Don't call directly:  this is an internal optimization for
    1587      # gen's use.
    1588  
    1589      def _gen3(i):
    1590          assert i < n and (n-i) % 3 == 0
    1591          ip1, ip2, ip3 = i+1, i+2, i+3
    1592          g, g1, g2 = gs[i : ip3]
    1593  
    1594          if ip3 >= n:
    1595              # These are the last three, so we can yield values directly.
    1596              for values[i] in g():
    1597                  for values[ip1] in g1():
    1598                      for values[ip2] in g2():
    1599                          yield values
    1600  
    1601          else:
    1602              # At least 6 loop nests remain; peel off 3 and recurse for the
    1603              # rest.
    1604              for values[i] in g():
    1605                  for values[ip1] in g1():
    1606                      for values[ip2] in g2():
    1607                          for x in _gen3(ip3):
    1608                              yield x
    1609  
    1610      for x in gen(0):
    1611          yield x
    1612  
    1613  # And one more approach:  For backtracking apps like the Knight's Tour
    1614  # solver below, the number of backtracking levels can be enormous (one
    1615  # level per square, for the Knight's Tour, so that e.g. a 100x100 board
    1616  # needs 10,000 levels).  In such cases Python is likely to run out of
    1617  # stack space due to recursion.  So here's a recursion-free version of
    1618  # conjoin too.
    1619  # NOTE WELL:  This allows large problems to be solved with only trivial
    1620  # demands on stack space.  Without explicitly resumable generators, this is
    1621  # much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
    1622  # than the fancy unrolled recursive conjoin.
    1623  
    1624  def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    1625      n = len(gs)
    1626      values = [None] * n
    1627      iters  = [None] * n
    1628      _StopIteration = StopIteration  # make local because caught a *lot*
    1629      i = 0
    1630      while 1:
    1631          # Descend.
    1632          try:
    1633              while i < n:
    1634                  it = iters[i] = gs[i]().__next__
    1635                  values[i] = it()
    1636                  i += 1
    1637          except _StopIteration:
    1638              pass
    1639          else:
    1640              assert i == n
    1641              yield values
    1642  
    1643          # Backtrack until an older iterator can be resumed.
    1644          i -= 1
    1645          while i >= 0:
    1646              try:
    1647                  values[i] = iters[i]()
    1648                  # Success!  Start fresh at next level.
    1649                  i += 1
    1650                  break
    1651              except _StopIteration:
    1652                  # Continue backtracking.
    1653                  i -= 1
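                  # This "else" belongs to the while-loop: it runs only when the loop
                  # exits without a break, i.e. every level is exhausted, so the outer
                  # loop terminates and the generator is done.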
    1654          else:
    1655              assert i < 0
    1656              break
    1657  
    1658  # A conjoin-based N-Queens solver.
    1659  
    1660  class Queens:
    1661      def __init__(self, n):
    1662          self.n = n
    1663          rangen = range(n)
    1664  
    1665          # Assign a unique int to each column and diagonal.
    1666          # columns:  n of those, range(n).
    1667          # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
    1668          # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
    1669          # based.
    1670          # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
    1671          # each, smallest i+j is 0, largest is 2n-2.
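                  # Worked example (illustration only), n=4: square (i, j) = (1, 2)
                  # uses column bit 1<<2, NW-SE bit 1<<(4 + 1-2 + 3) = 1<<6, and
                  # NE-SW bit 1<<(4 + 7 + 1+2) = 1<<14.  Columns, NW-SE ordinals and
                  # NE-SW ordinals occupy disjoint bit ranges, so a single int can
                  # record everything one queen attacks.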
    1672  
    1673          # For each square, compute a bit vector of the columns and
    1674          # diagonals it covers, and for each row compute a function that
    1675          # generates the possibilities for the columns in that row.
    1676          self.rowgenerators = []
    1677          for i in rangen:
    1678              rowuses = [(1 << j) |                  # column ordinal
    1679                         (1 << (n + i-j + n-1)) |    # NW-SE ordinal
    1680                         (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
    1681                              for j in rangen]
    1682  
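                      # The default argument binds this row's rowuses list now, so each
                      # rowgen closure sees its own row's list rather than whatever
                      # rowuses refers to after the loop finishes.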
    1683              def rowgen(rowuses=rowuses):
    1684                  for j in rangen:
    1685                      uses = rowuses[j]
    1686                      if uses & self.used == 0:
    1687                          self.used |= uses
    1688                          yield j
    1689                          self.used &= ~uses
    1690  
    1691              self.rowgenerators.append(rowgen)
    1692  
    1693      # Generate solutions.
    1694      def solve(self):
    1695          self.used = 0
    1696          for row2col in conjoin(self.rowgenerators):
    1697              yield row2col
    1698  
    1699      def printsolution(self, row2col):
    1700          n = self.n
    1701          assert n == len(row2col)
    1702          sep = "+" + "-+" * n
    1703          print(sep)
    1704          for i in range(n):
    1705              squares = [" " for j in range(n)]
    1706              squares[row2col[i]] = "Q"
    1707              print("|" + "|".join(squares) + "|")
    1708              print(sep)
    1709  
    1710  # A conjoin-based Knight's Tour solver.  This is pretty sophisticated
    1711  # (e.g., when used with flat_conjoin above, and passing hard=1 to the
    1712  # constructor, a 200x200 Knight's Tour was found quickly -- note that we're
    1713  # creating 10s of thousands of generators then!), and is lengthy.
    1714  
    1715  class Knights:
    1716      def __init__(self, m, n, hard=0):
    1717          self.m, self.n = m, n
    1718  
    1719          # solve() will set up succs[i] to be a list of square #i's
    1720          # successors.
    1721          succs = self.succs = []
    1722  
    1723          # Remove i0 from each of its successors' successor lists, i.e.
    1724          # successors can't go back to i0 again.  Return 0 if we can
    1725          # detect this makes a solution impossible, else return 1.
    1726  
    1727          def remove_from_successors(i0, len=len):
    1728              # If we remove all exits from a free square, we're dead:
    1729              # even if we move to it next, we can't leave it again.
    1730              # If we create a square with one exit, we must visit it next;
    1731              # else somebody else will have to visit it, and since there's
    1732              # only one adjacent square, there won't be a way to leave it again.
    1733              # Finally, if we create more than one free square with a
    1734              # single exit, we can only move to one of them next, leaving
    1735              # the other one a dead end.
    1736              ne0 = ne1 = 0
    1737              for i in succs[i0]:
    1738                  s = succs[i]
    1739                  s.remove(i0)
    1740                  e = len(s)
    1741                  if e == 0:
    1742                      ne0 += 1
    1743                  elif e == 1:
    1744                      ne1 += 1
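                      # Solvable only if no free square lost its last exit and at most
                      # one free square was left with a single exit.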
    1745              return ne0 == 0 and ne1 < 2
    1746  
    1747          # Put i0 back in each of its successors' successor lists.
    1748  
    1749          def add_to_successors(i0):
    1750              for i in succs[i0]:
    1751                  succs[i].append(i0)
    1752  
    1753          # Generate the first move.
    1754          def first():
    1755              if m < 1 or n < 1:
    1756                  return
    1757  
    1758              # Since we're looking for a cycle, it doesn't matter where we
    1759              # start.  Starting in a corner makes the 2nd move easy.
    1760              corner = self.coords2index(0, 0)
    1761              remove_from_successors(corner)
    1762              self.lastij = corner
    1763              yield corner
    1764              add_to_successors(corner)
    1765  
    1766          # Generate the second moves.
    1767          def second():
    1768              corner = self.coords2index(0, 0)
    1769              assert self.lastij == corner  # i.e., we started in the corner
    1770              if m < 3 or n < 3:
    1771                  return
    1772              assert len(succs[corner]) == 2
    1773              assert self.coords2index(1, 2) in succs[corner]
    1774              assert self.coords2index(2, 1) in succs[corner]
    1775              # Only two choices.  Whichever we pick, the other must be the
    1776              # square picked on move m*n, as it's the only way to get back
    1777              # to (0, 0).  Save its index in self.final so that moves before
    1778              # the last know it must be kept free.
    1779              for i, j in (1, 2), (2, 1):
    1780                  this  = self.coords2index(i, j)
    1781                  final = self.coords2index(3-i, 3-j)
    1782                  self.final = final
    1783  
    1784                  remove_from_successors(this)
    1785                  succs[final].append(corner)
    1786                  self.lastij = this
    1787                  yield this
    1788                  succs[final].remove(corner)
    1789                  add_to_successors(this)
    1790  
    1791          # Generate moves 3 through m*n-1.
    1792          def advance(len=len):
    1793              # If some successor has only one exit, must take it.
    1794              # Else favor successors with fewer exits.
    1795              candidates = []
    1796              for i in succs[self.lastij]:
    1797                  e = len(succs[i])
    1798                  assert e > 0, "else remove_from_successors() pruning flawed"
    1799                  if e == 1:
    1800                      candidates = [(e, i)]
    1801                      break
    1802                  candidates.append((e, i))
    1803              else:
    1804                  candidates.sort()
    1805  
    1806              for e, i in candidates:
    1807                  if i != self.final:
    1808                      if remove_from_successors(i):
    1809                          self.lastij = i
    1810                          yield i
    1811                      add_to_successors(i)
    1812  
    1813          # Generate moves 3 through m*n-1.  Alternative version using a
    1814          # stronger (but more expensive) heuristic to order successors.
    1815          # Since the # of backtracking levels is m*n, a poor move early on
    1816          # can take eons to undo.  Smallest square board for which this
    1817          # matters a lot is 52x52.
    1818          def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
    1819              # If some successor has only one exit, must take it.
    1820              # Else favor successors with fewer exits.
    1821              # Break ties via max distance from board centerpoint (favor
    1822              # corners and edges whenever possible).
    1823              candidates = []
    1824              for i in succs[self.lastij]:
    1825                  e = len(succs[i])
    1826                  assert e > 0, "else remove_from_successors() pruning flawed"
    1827                  if e == 1:
    1828                      candidates = [(e, 0, i)]
    1829                      break
    1830                  i1, j1 = self.index2coords(i)
    1831                  d = (i1 - vmid)**2 + (j1 - hmid)**2
    1832                  candidates.append((e, -d, i))
    1833              else:
    1834                  candidates.sort()
    1835  
    1836              for e, d, i in candidates:
    1837                  if i != self.final:
    1838                      if remove_from_successors(i):
    1839                          self.lastij = i
    1840                          yield i
    1841                      add_to_successors(i)
    1842  
    1843          # Generate the last move.
    1844          def last():
    1845              assert self.final in succs[self.lastij]
    1846              yield self.final
    1847  
    1848          if m*n < 4:
    1849              self.squaregenerators = [first]
    1850          else:
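                      # One generator per move: first, second, m*n-3 advances, then last.
                      # "hard and advance_hard or advance" is the old and/or spelling of
                      # "advance_hard if hard else advance".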
    1851              self.squaregenerators = [first, second] + \
    1852                  [hard and advance_hard or advance] * (m*n - 3) + \
    1853                  [last]
    1854  
    1855      def coords2index(self, i, j):
    1856          assert 0 <= i < self.m
    1857          assert 0 <= j < self.n
    1858          return i * self.n + j
    1859  
    1860      def index2coords(self, index):
    1861          assert 0 <= index < self.m * self.n
    1862          return divmod(index, self.n)
    1863  
    1864      def _init_board(self):
    1865          succs = self.succs
    1866          del succs[:]
    1867          m, n = self.m, self.n
    1868          c2i = self.coords2index
    1869  
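                  # The eight (row, column) offsets a knight can move by.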
    1870          offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
    1871                     (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
    1872          rangen = range(n)
    1873          for i in range(m):
    1874              for j in rangen:
    1875                  s = [c2i(i+io, j+jo) for io, jo in offsets
    1876                                       if 0 <= i+io < m and
    1877                                          0 <= j+jo < n]
    1878                  succs.append(s)
    1879  
    1880      # Generate solutions.
    1881      def solve(self):
    1882          self._init_board()
    1883          for x in conjoin(self.squaregenerators):
    1884              yield x
    1885  
    1886      def printsolution(self, x):
    1887          m, n = self.m, self.n
    1888          assert len(x) == m*n
    1889          w = len(str(m*n))
    1890          format = "%" + str(w) + "d"
    1891  
    1892          squares = [[None] * n for i in range(m)]
    1893          k = 1
    1894          for i in x:
    1895              i1, j1 = self.index2coords(i)
    1896              squares[i1][j1] = format % k
    1897              k += 1
    1898  
    1899          sep = "+" + ("-" * w + "+") * n
    1900          print(sep)
    1901          for i in range(m):
    1902              row = squares[i]
    1903              print("|" + "|".join(row) + "|")
    1904              print(sep)
    1905  
    1906  conjoin_tests = """
    1907  
    1908  Generate the 3-bit binary numbers in order.  This illustrates the dumbest-
    1909  possible use of conjoin, just to generate the full cross-product.
    1910  
    1911  >>> for c in conjoin([lambda: iter((0, 1))] * 3):
    1912  ...     print(c)
    1913  [0, 0, 0]
    1914  [0, 0, 1]
    1915  [0, 1, 0]
    1916  [0, 1, 1]
    1917  [1, 0, 0]
    1918  [1, 0, 1]
    1919  [1, 1, 0]
    1920  [1, 1, 1]
    1921  
    1922  For efficiency in typical backtracking apps, conjoin() yields the same list
    1923  object each time.  So if you want to save away a full account of its
    1924  generated sequence, you need to copy its results.
    1925  
    1926  >>> def gencopy(iterator):
    1927  ...     for x in iterator:
    1928  ...         yield x[:]
    1929  
    1930  >>> for n in range(10):
    1931  ...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
    1932  ...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
    1933  0 1 True True
    1934  1 2 True True
    1935  2 4 True True
    1936  3 8 True True
    1937  4 16 True True
    1938  5 32 True True
    1939  6 64 True True
    1940  7 128 True True
    1941  8 256 True True
    1942  9 512 True True
    1943  
    1944  And run an 8-queens solver.
    1945  
    1946  >>> q = Queens(8)
    1947  >>> LIMIT = 2
    1948  >>> count = 0
    1949  >>> for row2col in q.solve():
    1950  ...     count += 1
    1951  ...     if count <= LIMIT:
    1952  ...         print("Solution", count)
    1953  ...         q.printsolution(row2col)
    1954  Solution 1
    1955  +-+-+-+-+-+-+-+-+
    1956  |Q| | | | | | | |
    1957  +-+-+-+-+-+-+-+-+
    1958  | | | | |Q| | | |
    1959  +-+-+-+-+-+-+-+-+
    1960  | | | | | | | |Q|
    1961  +-+-+-+-+-+-+-+-+
    1962  | | | | | |Q| | |
    1963  +-+-+-+-+-+-+-+-+
    1964  | | |Q| | | | | |
    1965  +-+-+-+-+-+-+-+-+
    1966  | | | | | | |Q| |
    1967  +-+-+-+-+-+-+-+-+
    1968  | |Q| | | | | | |
    1969  +-+-+-+-+-+-+-+-+
    1970  | | | |Q| | | | |
    1971  +-+-+-+-+-+-+-+-+
    1972  Solution 2
    1973  +-+-+-+-+-+-+-+-+
    1974  |Q| | | | | | | |
    1975  +-+-+-+-+-+-+-+-+
    1976  | | | | | |Q| | |
    1977  +-+-+-+-+-+-+-+-+
    1978  | | | | | | | |Q|
    1979  +-+-+-+-+-+-+-+-+
    1980  | | |Q| | | | | |
    1981  +-+-+-+-+-+-+-+-+
    1982  | | | | | | |Q| |
    1983  +-+-+-+-+-+-+-+-+
    1984  | | | |Q| | | | |
    1985  +-+-+-+-+-+-+-+-+
    1986  | |Q| | | | | | |
    1987  +-+-+-+-+-+-+-+-+
    1988  | | | | |Q| | | |
    1989  +-+-+-+-+-+-+-+-+
    1990  
    1991  >>> print(count, "solutions in all.")
    1992  92 solutions in all.
    1993  
    1994  And run a Knight's Tour on a 10x10 board.  Note that there are about
    1995  20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
    1996  
    1997  >>> k = Knights(10, 10)
    1998  >>> LIMIT = 2
    1999  >>> count = 0
    2000  >>> for x in k.solve():
    2001  ...     count += 1
    2002  ...     if count <= LIMIT:
    2003  ...         print("Solution", count)
    2004  ...         k.printsolution(x)
    2005  ...     else:
    2006  ...         break
    2007  Solution 1
    2008  +---+---+---+---+---+---+---+---+---+---+
    2009  |  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
    2010  +---+---+---+---+---+---+---+---+---+---+
    2011  | 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
    2012  +---+---+---+---+---+---+---+---+---+---+
    2013  | 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
    2014  +---+---+---+---+---+---+---+---+---+---+
    2015  | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
    2016  +---+---+---+---+---+---+---+---+---+---+
    2017  | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
    2018  +---+---+---+---+---+---+---+---+---+---+
    2019  | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
    2020  +---+---+---+---+---+---+---+---+---+---+
    2021  | 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
    2022  +---+---+---+---+---+---+---+---+---+---+
    2023  | 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
    2024  +---+---+---+---+---+---+---+---+---+---+
    2025  | 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
    2026  +---+---+---+---+---+---+---+---+---+---+
    2027  | 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
    2028  +---+---+---+---+---+---+---+---+---+---+
    2029  Solution 2
    2030  +---+---+---+---+---+---+---+---+---+---+
    2031  |  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
    2032  +---+---+---+---+---+---+---+---+---+---+
    2033  | 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
    2034  +---+---+---+---+---+---+---+---+---+---+
    2035  | 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
    2036  +---+---+---+---+---+---+---+---+---+---+
    2037  | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
    2038  +---+---+---+---+---+---+---+---+---+---+
    2039  | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
    2040  +---+---+---+---+---+---+---+---+---+---+
    2041  | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
    2042  +---+---+---+---+---+---+---+---+---+---+
    2043  | 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
    2044  +---+---+---+---+---+---+---+---+---+---+
    2045  | 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
    2046  +---+---+---+---+---+---+---+---+---+---+
    2047  | 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
    2048  +---+---+---+---+---+---+---+---+---+---+
    2049  | 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
    2050  +---+---+---+---+---+---+---+---+---+---+
    2051  """
    2052  
    2053  weakref_tests = """\
    2054  Generators are weakly referencable:
    2055  
    2056  >>> import weakref
    2057  >>> def gen():
    2058  ...     yield 'foo!'
    2059  ...
    2060  >>> wr = weakref.ref(gen)
    2061  >>> wr() is gen
    2062  True
    2063  >>> p = weakref.proxy(gen)
    2064  
    2065  Generator-iterators are weakly referencable as well:
    2066  
    2067  >>> gi = gen()
    2068  >>> wr = weakref.ref(gi)
    2069  >>> wr() is gi
    2070  True
    2071  >>> p = weakref.proxy(gi)
    2072  >>> list(p)
    2073  ['foo!']
    2074  
    2075  """
    2076  
    2077  coroutine_tests = """\
    2078  >>> from test.support import gc_collect
    2079  
    2080  Sending a value into a started generator:
    2081  
    2082  >>> def f():
    2083  ...     print((yield 1))
    2084  ...     yield 2
    2085  >>> g = f()
    2086  >>> next(g)
    2087  1
    2088  >>> g.send(42)
    2089  42
    2090  2
    2091  
    2092  Sending a value into a new generator produces a TypeError:
    2093  
    2094  >>> f().send("foo")
    2095  Traceback (most recent call last):
    2096  ...
    2097  TypeError: can't send non-None value to a just-started generator
    2098  
    2099  
    2100  Yield by itself yields None:
    2101  
    2102  >>> def f(): yield
    2103  >>> list(f())
    2104  [None]
    2105  
    2106  
    2107  Yield is allowed only in the outermost iterable of a generator expression:
    2108  
    2109  >>> def f(): list(i for i in [(yield 26)])
    2110  >>> type(f())
    2111  <class 'generator'>
    2112  
    2113  
    2114  A yield expression with augmented assignment.
    2115  
    2116  >>> def coroutine(seq):
    2117  ...     count = 0
    2118  ...     while count < 200:
    2119  ...         count += yield
    2120  ...         seq.append(count)
    2121  >>> seq = []
    2122  >>> c = coroutine(seq)
    2123  >>> next(c)
    2124  >>> print(seq)
    2125  []
    2126  >>> c.send(10)
    2127  >>> print(seq)
    2128  [10]
    2129  >>> c.send(10)
    2130  >>> print(seq)
    2131  [10, 20]
    2132  >>> c.send(10)
    2133  >>> print(seq)
    2134  [10, 20, 30]
    2135  
    2136  
    2137  Check some syntax errors for yield expressions:
    2138  
    2139  >>> f=lambda: (yield 1),(yield 2)
    2140  Traceback (most recent call last):
    2141    ...
    2142  SyntaxError: 'yield' outside function
    2143  
    2144  >>> def f(): x = yield = y
    2145  Traceback (most recent call last):
    2146    ...
    2147  SyntaxError: assignment to yield expression not possible
    2148  
    2149  >>> def f(): (yield bar) = y
    2150  Traceback (most recent call last):
    2151    ...
    2152  SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='?
    2153  
    2154  >>> def f(): (yield bar) += y
    2155  Traceback (most recent call last):
    2156    ...
    2157  SyntaxError: 'yield expression' is an illegal expression for augmented assignment
    2158  
    2159  
    2160  Now check some throw() conditions:
    2161  
    2162  >>> def f():
    2163  ...     while True:
    2164  ...         try:
    2165  ...             print((yield))
    2166  ...         except ValueError as v:
    2167  ...             print("caught ValueError (%s)" % (v))
    2168  >>> import sys
    2169  >>> g = f()
    2170  >>> next(g)
    2171  
    2172  >>> g.throw(ValueError) # type only
    2173  caught ValueError ()
    2174  
    2175  >>> g.throw(ValueError("xyz"))  # value only
    2176  caught ValueError (xyz)
    2177  
    2178  >>> import warnings
    2179  >>> old_filters = warnings.filters.copy()
    2180  >>> warnings.filterwarnings("ignore", category=DeprecationWarning)
    2181  
    2182  # Filter the DeprecationWarning regarding the (type, val, tb) signature of throw().
    2183  # Deprecation warnings are re-enabled below.
    2184  
    2185  >>> g.throw(ValueError, ValueError(1))   # value+matching type
    2186  caught ValueError (1)
    2187  
    2188  >>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
    2189  caught ValueError (1)
    2190  
    2191  >>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
    2192  caught ValueError (1)
    2193  
    2194  >>> g.throw(ValueError(1), "foo")       # bad args
    2195  Traceback (most recent call last):
    2196    ...
    2197  TypeError: instance exception may not have a separate value
    2198  
    2199  >>> g.throw(ValueError, "foo", 23)      # bad args
    2200  Traceback (most recent call last):
    2201    ...
    2202  TypeError: throw() third argument must be a traceback object
    2203  
    2204  >>> g.throw("abc")
    2205  Traceback (most recent call last):
    2206    ...
    2207  TypeError: exceptions must be classes or instances deriving from BaseException, not str
    2208  
    2209  >>> g.throw(0)
    2210  Traceback (most recent call last):
    2211    ...
    2212  TypeError: exceptions must be classes or instances deriving from BaseException, not int
    2213  
    2214  >>> g.throw(list)
    2215  Traceback (most recent call last):
    2216    ...
    2217  TypeError: exceptions must be classes or instances deriving from BaseException, not type
    2218  
    2219  >>> def throw(g,exc):
    2220  ...     try:
    2221  ...         raise exc
    2222  ...     except:
    2223  ...         g.throw(*sys.exc_info())
    2224  >>> throw(g,ValueError) # do it with traceback included
    2225  caught ValueError ()
    2226  
    2227  >>> g.send(1)
    2228  1
    2229  
    2230  >>> throw(g,TypeError)  # terminate the generator
    2231  Traceback (most recent call last):
    2232    ...
    2233  TypeError
    2234  
    2235  >>> print(g.gi_frame)
    2236  None
    2237  
    2238  >>> g.send(2)
    2239  Traceback (most recent call last):
    2240    ...
    2241  StopIteration
    2242  
    2243  >>> g.throw(ValueError,6)       # throw on closed generator
    2244  Traceback (most recent call last):
    2245    ...
    2246  ValueError: 6
    2247  
    2248  >>> f().throw(ValueError,7)     # throw on just-opened generator
    2249  Traceback (most recent call last):
    2250    ...
    2251  ValueError: 7
    2252  
    2253  >>> warnings.filters[:] = old_filters
    2254  
    2255  # Re-enable DeprecationWarning: the (type, val, tb) exception representation is deprecated,
    2256  #                               and may be removed in a future version of Python.
    2257  
    2258  Plain "raise" inside a generator should preserve the traceback (#13188).
    2259  The traceback should have 3 levels:
    2260  - g.throw()
    2261  - f()
    2262  - 1/0
    2263  
    2264  >>> def f():
    2265  ...     try:
    2266  ...         yield
    2267  ...     except:
    2268  ...         raise
    2269  >>> g = f()
    2270  >>> try:
    2271  ...     1/0
    2272  ... except ZeroDivisionError as v:
    2273  ...     try:
    2274  ...         g.throw(v)
    2275  ...     except Exception as w:
    2276  ...         tb = w.__traceback__
    2277  >>> levels = 0
    2278  >>> while tb:
    2279  ...     levels += 1
    2280  ...     tb = tb.tb_next
    2281  >>> levels
    2282  3
    2283  
    2284  Now let's try closing a generator:
    2285  
    2286  >>> def f():
    2287  ...     try: yield
    2288  ...     except GeneratorExit:
    2289  ...         print("exiting")
    2290  
    2291  >>> g = f()
    2292  >>> next(g)
    2293  >>> g.close()
    2294  exiting
    2295  >>> g.close()  # should be no-op now
    2296  
    2297  >>> f().close()  # close on just-opened generator should be fine
    2298  
    2299  >>> def f(): yield      # an even simpler generator
    2300  >>> f().close()         # close before opening
    2301  >>> g = f()
    2302  >>> next(g)
    2303  >>> g.close()           # close normally
    2304  
    2305  And finalization:
    2306  
    2307  >>> def f():
    2308  ...     try: yield
    2309  ...     finally:
    2310  ...         print("exiting")
    2311  
    2312  >>> g = f()
    2313  >>> next(g)
    2314  >>> del g; gc_collect()  # For PyPy or other GCs.
    2315  exiting
    2316  
    2317  
    2318  GeneratorExit is not caught by except Exception:
    2319  
    2320  >>> def f():
    2321  ...     try: yield
    2322  ...     except Exception:
    2323  ...         print('except')
    2324  ...     finally:
    2325  ...         print('finally')
    2326  
    2327  >>> g = f()
    2328  >>> next(g)
    2329  >>> del g; gc_collect()  # For PyPy or other GCs.
    2330  finally
    2331  
    2332  
    2333  Now let's try some ill-behaved generators:
    2334  
    2335  >>> def f():
    2336  ...     try: yield
    2337  ...     except GeneratorExit:
    2338  ...         yield "foo!"
    2339  >>> g = f()
    2340  >>> next(g)
    2341  >>> g.close()
    2342  Traceback (most recent call last):
    2343    ...
    2344  RuntimeError: generator ignored GeneratorExit
    2345  >>> g.close()
    2346  
    2347  
    2348  Our ill-behaved code should be invoked during GC:
    2349  
    2350  >>> with support.catch_unraisable_exception() as cm:
    2351  ...     g = f()
    2352  ...     next(g)
    2353  ...     del g
    2354  ...
    2355  ...     cm.unraisable.exc_type == RuntimeError
    2356  ...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
    2357  ...     cm.unraisable.exc_traceback is not None
    2358  True
    2359  True
    2360  True
    2361  
    2362  And errors thrown during closing should propagate:
    2363  
    2364  >>> def f():
    2365  ...     try: yield
    2366  ...     except GeneratorExit:
    2367  ...         raise TypeError("fie!")
    2368  >>> g = f()
    2369  >>> next(g)
    2370  >>> g.close()
    2371  Traceback (most recent call last):
    2372    ...
    2373  TypeError: fie!
    2374  
    2375  
    2376  Ensure that various yield expression constructs make their
    2377  enclosing function a generator:
    2378  
    2379  >>> def f(): x += yield
    2380  >>> type(f())
    2381  <class 'generator'>
    2382  
    2383  >>> def f(): x = yield
    2384  >>> type(f())
    2385  <class 'generator'>
    2386  
    2387  >>> def f(): lambda x=(yield): 1
    2388  >>> type(f())
    2389  <class 'generator'>
    2390  
    2391  >>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
    2392  >>> data = [1,2]
    2393  >>> g = f(data)
    2394  >>> type(g)
    2395  <class 'generator'>
    2396  >>> g.send(None)
    2397  'a'
    2398  >>> data
    2399  [1, 2]
    2400  >>> g.send(0)
    2401  'b'
    2402  >>> data
    2403  [27, 2]
    2404  >>> try: g.send(1)
    2405  ... except StopIteration: pass
    2406  >>> data
    2407  [27, 27]
    2408  
    2409  """
    2410  
    2411  refleaks_tests = """
    2412  Prior to adding cycle-GC support to itertools.tee, this code would leak
    2413  references. We add it to the standard suite so that the routine refleak
    2414  tests will catch it if it ever starts leaking again.
    2415  
    2416  >>> import itertools
    2417  >>> def leak():
    2418  ...     class gen:
    2419  ...         def __iter__(self):
    2420  ...             return self
    2421  ...         def __next__(self):
    2422  ...             return self.item
    2423  ...     g = gen()
    2424  ...     head, tail = itertools.tee(g)
    2425  ...     g.item = head
    2426  ...     return head
    2427  >>> it = leak()
    2428  
    2429  Make sure to also test the involvement of the tee-internal teedataobject,
    2430  which stores returned items.
    2431  
    2432  >>> item = next(it)
    2433  
    2434  
    2435  
    2436  This test leaked at one point due to generator finalization/destruction.
    2437  It was copied from Lib/test/leakers/test_generator_cycle.py before the file
    2438  was removed.
    2439  
    2440  >>> def leak():
    2441  ...    def gen():
    2442  ...        while True:
    2443  ...            yield g
    2444  ...    g = gen()
    2445  
    2446  >>> leak()
    2447  
    2448  
    2449  
    2450  This test isn't really generator-related, but rather exception-in-cleanup
    2451  related. The coroutine tests (above) just happen to cause an exception in
    2452  the generator's __del__ (tp_del) method. We can also test for this
    2453  explicitly, without generators. We capture the unraisable exception instead
    2454  of letting it be printed, and double-check that we actually tested what we
    2455  wanted to test.
    2456  
    2457  >>> from test import support
    2458  >>> class Leaker:
    2459  ...     def __del__(self):
    2460  ...         def invoke(message):
    2461  ...             raise RuntimeError(message)
    2462  ...         invoke("del failed")
    2463  ...
    2464  >>> with support.catch_unraisable_exception() as cm:
    2465  ...     l = Leaker()
    2466  ...     del l
    2467  ...
    2468  ...     cm.unraisable.object == Leaker.__del__
    2469  ...     cm.unraisable.exc_type == RuntimeError
    2470  ...     str(cm.unraisable.exc_value) == "del failed"
    2471  ...     cm.unraisable.exc_traceback is not None
    2472  True
    2473  True
    2474  True
    2475  True
    2476  
    2477  
    2478  These refleak tests should perhaps be in a test file of their own;
    2479  test_generators just happened to be the test that drew them out.
    2480  
    2481  """
    2482  
    2483  __test__ = {"tut":      tutorial_tests,
    2484              "pep":      pep_tests,
    2485              "email":    email_tests,
    2486              "fun":      fun_tests,
    2487              "syntax":   syntax_tests,
    2488              "conjoin":  conjoin_tests,
    2489              "weakref":  weakref_tests,
    2490              "coroutine":  coroutine_tests,
    2491              "refleaks": refleaks_tests,
    2492              }
    2493  
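          # unittest's load_tests protocol: DocTestSuite() with no arguments picks up
          # this module's doctests, including every entry in the __test__ mapping
          # above, so the doctest strings run whether the file is executed directly
          # or through the regression test runner.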
    2494  def load_tests(loader, tests, pattern):
    2495      tests.addTest(doctest.DocTestSuite())
    2496      return tests
    2497  
    2498  
    2499  if __name__ == "__main__":
    2500      unittest.main()