Python-3.12.0/Tools/peg_generator/pegen/keywordgen.py
"""Generate Lib/keyword.py from the Grammar and Tokens files using pegen."""

import argparse

from .build import build_parser, generate_token_definitions
from .c_generator import CParserGenerator

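# Skeleton of the generated Lib/keyword.py.  The {keywords} and {soft_keywords}
# placeholders are filled in via str.format() in main() below; a raw string is
# used so the backslash line continuations in the usage example survive intact.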
TEMPLATE = r'''
"""Keywords (from "Grammar/python.gram")

This file is automatically generated; please don't muck it up!

To update the symbols in this file, 'cd' to the top directory of
the python source tree and run:

    PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
        Grammar/python.gram \
        Grammar/Tokens \
        Lib/keyword.py

Alternatively, you can run 'make regen-keyword'.
"""

__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]

kwlist = [
{keywords}
]

softkwlist = [
{soft_keywords}
]

iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__
'''.lstrip()

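# Keywords that never appear as plain string literals in the grammar (in this
# version 'async' and 'await' come from dedicated ASYNC/AWAIT tokens), so the
# collector below cannot find them; they are merged into the list by hand.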
EXTRA_KEYWORDS = ["async", "await"]


def main() -> None:
    parser = argparse.ArgumentParser(
        description="Generate the Lib/keyword.py file from the grammar."
    )
    parser.add_argument(
        "grammar", type=str, help="The file with the grammar definition in PEG format"
    )
    parser.add_argument(
        "tokens_file", type=argparse.FileType("r"), help="The file with the token definitions"
    )
    parser.add_argument(
        "keyword_file",
        type=argparse.FileType("w"),
        help="The path to write the keyword definitions",
    )
    args = parser.parse_args()

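    # Parse the PEG grammar file; the other two values returned by
    # build_parser() are not needed here.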
    grammar, _, _ = build_parser(args.grammar)
    with args.tokens_file as tok_file:
        all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
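    # The C parser generator is created only so its rule visitor can gather the
    # keywords; file=None because no C code is emitted.  collect_rules()
    # populates gen.keywords and gen.soft_keywords as a side effect.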
    gen = CParserGenerator(grammar, all_tokens, exact_tok, non_exact_tok, file=None)
    gen.collect_rules()

    with args.keyword_file as thefile:
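        # Merge the keywords collected from the grammar with the hand-maintained
        # extras and sort both lists so the generated file is deterministic.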
        all_keywords = sorted(list(gen.keywords.keys()) + EXTRA_KEYWORDS)
        all_soft_keywords = sorted(gen.soft_keywords)

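        # Render each list as one repr()'d name per line, indented four spaces
        # to line up with the TEMPLATE; an empty list renders as an empty string.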
        keywords = "" if not all_keywords else "    " + ",\n    ".join(map(repr, all_keywords))
        soft_keywords = (
            "" if not all_soft_keywords else "    " + ",\n    ".join(map(repr, all_soft_keywords))
        )
        thefile.write(TEMPLATE.format(keywords=keywords, soft_keywords=soft_keywords))


if __name__ == "__main__":
    main()