Python-3.12.0/Python/clinic/Python-tokenize.c.h
/*[clinic input]
preserve
[clinic start generated code]*/
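/*
 * NOTE: the body between the clinic markers is machine-generated by
 * Argument Clinic (Tools/clinic/clinic.py); the "preserve" directive keeps
 * that output in this separate header. The clinic input it is generated
 * from lives in Python/Python-tokenize.c, which includes this file, so
 * hand edits here would normally be overwritten on regeneration.
 */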

#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
#  include "pycore_gc.h"            // PyGC_Head
#  include "pycore_runtime.h"       // _Py_ID()
#endif


static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
                       int extra_tokens, const char *encoding);

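/*
 * NOTE: tokenizeriter_new() is the generated argument-parsing wrapper; it
 * unpacks the Python-level arguments and then delegates to
 * tokenizeriter_new_impl(), which is expected to be defined in the
 * including file (Python/Python-tokenize.c).
 */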
static PyObject *
tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
    PyObject *return_value = NULL;
    #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)

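    /*
     * NOTE: _kwtuple is a statically allocated tuple holding the interned
     * keyword names (via _Py_ID). The leading PyGC_Head member appears to be
     * padding so the static object mirrors the layout of a GC-tracked heap
     * tuple; as its name says, it is never used directly.
     */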
    #define NUM_KEYWORDS 2
    static struct {
        PyGC_Head _this_is_not_used;
        PyObject_VAR_HEAD
        PyObject *ob_item[NUM_KEYWORDS];
    } _kwtuple = {
        .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
        .ob_item = { &_Py_ID(extra_tokens), &_Py_ID(encoding), },
    };
    #undef NUM_KEYWORDS
    #define KWTUPLE (&_kwtuple.ob_base.ob_base)

    #else  // !Py_BUILD_CORE
    #  define KWTUPLE NULL
    #endif  // !Py_BUILD_CORE

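    /*
     * NOTE: the empty string in _keywords marks the first parameter
     * (readline) as positional-only; "extra_tokens" and "encoding" are the
     * keyword names. _PyArg_Parser ties this table and the interned-keyword
     * tuple above together for the fast argument parser.
     */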
    static const char * const _keywords[] = {"", "extra_tokens", "encoding", NULL};
    static _PyArg_Parser _parser = {
        .keywords = _keywords,
        .fname = "tokenizeriter",
        .kwtuple = KWTUPLE,
    };
    #undef KWTUPLE
    PyObject *argsbuf[3];
    PyObject * const *fastargs;
    Py_ssize_t nargs = PyTuple_GET_SIZE(args);
    Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 2;
    PyObject *readline;
    int extra_tokens;
    const char *encoding = NULL;

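    /*
     * NOTE: the trailing 1, 1, 1 passed to _PyArg_UnpackKeywords() appear to
     * be the parser limits (minimum positional, maximum positional, minimum
     * required keyword-only), i.e. readline is the only positional argument
     * and extra_tokens must be supplied. On failure fastargs is NULL with an
     * exception set.
     */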
    fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 1, argsbuf);
    if (!fastargs) {
        goto exit;
    }
    readline = fastargs[0];
    extra_tokens = PyObject_IsTrue(fastargs[1]);
    if (extra_tokens < 0) {
        goto exit;
    }
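    /* noptargs counts how many of the optional arguments were supplied (the
       two required ones are subtracted above); if none were, encoding keeps
       its NULL default and the str handling below is skipped. */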
    if (!noptargs) {
        goto skip_optional_kwonly;
    }
    if (!PyUnicode_Check(fastargs[2])) {
        _PyArg_BadArgument("tokenizeriter", "argument 'encoding'", "str", fastargs[2]);
        goto exit;
    }
    Py_ssize_t encoding_length;
    encoding = PyUnicode_AsUTF8AndSize(fastargs[2], &encoding_length);
    if (encoding == NULL) {
        goto exit;
    }
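    /* An embedded NUL would make strlen() stop early, so a mismatch with the
       length reported by PyUnicode_AsUTF8AndSize() means the value cannot be
       passed on safely as a C string. */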
    if (strlen(encoding) != (size_t)encoding_length) {
        PyErr_SetString(PyExc_ValueError, "embedded null character");
        goto exit;
    }
skip_optional_kwonly:
    return_value = tokenizeriter_new_impl(type, readline, extra_tokens, encoding);

exit:
    return return_value;
}
/*[clinic end generated code: output=48be65a2808bdfa6 input=a9049054013a1b77]*/