Python-tokenize.c.h

/*[clinic input]
preserve
[clinic start generated code]*/

#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
#  include "pycore_gc.h"       // PyGC_Head
#  include "pycore_runtime.h"  // _Py_ID()
#endif

static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
                       int extra_tokens, const char *encoding);

static PyObject *
tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
    PyObject *return_value = NULL;
    #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
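    /* Statically allocated tuple holding the interned keyword-argument
       names ("extra_tokens", "encoding"); in core builds the argument
       parser reuses it instead of building a keywords tuple at runtime. */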
    #define NUM_KEYWORDS 2
    static struct {
        PyGC_Head _this_is_not_used;
        PyObject_VAR_HEAD
        PyObject *ob_item[NUM_KEYWORDS];
    } _kwtuple = {
        .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
        .ob_item = { &_Py_ID(extra_tokens), &_Py_ID(encoding), },
    };
    #undef NUM_KEYWORDS
    #define KWTUPLE (&_kwtuple.ob_base.ob_base)
    #else  // !Py_BUILD_CORE
    #  define KWTUPLE NULL
    #endif  // !Py_BUILD_CORE

    static const char * const _keywords[] = {"", "extra_tokens", "encoding", NULL};
    static _PyArg_Parser _parser = {
        .keywords = _keywords,
        .fname = "tokenizeriter",
        .kwtuple = KWTUPLE,
    };
    #undef KWTUPLE
    PyObject *argsbuf[3];
    PyObject * const *fastargs;
    Py_ssize_t nargs = PyTuple_GET_SIZE(args);
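    /* Number of optional arguments actually supplied (everything beyond the
       two required parameters: readline and extra_tokens). */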
    Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 2;
    PyObject *readline;
    int extra_tokens;
    const char *encoding = NULL;
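    /* Unpack the single positional argument (readline) and the keyword-only
       arguments; on bad arguments an exception is set and NULL is returned. */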
    fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 1, argsbuf);
    if (!fastargs) {
        goto exit;
    }
    readline = fastargs[0];
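    /* extra_tokens is a required keyword-only flag; PyObject_IsTrue()
       returns -1 if the truth value cannot be determined. */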
    extra_tokens = PyObject_IsTrue(fastargs[1]);
    if (extra_tokens < 0) {
        goto exit;
    }
    if (!noptargs) {
        goto skip_optional_kwonly;
    }
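    /* encoding, when given, must be a str; keep its UTF-8 representation
       (owned by the argument object) and reject embedded null characters. */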
    if (!PyUnicode_Check(fastargs[2])) {
        _PyArg_BadArgument("tokenizeriter", "argument 'encoding'", "str", fastargs[2]);
        goto exit;
    }
    Py_ssize_t encoding_length;
    encoding = PyUnicode_AsUTF8AndSize(fastargs[2], &encoding_length);
    if (encoding == NULL) {
        goto exit;
    }
    if (strlen(encoding) != (size_t)encoding_length) {
        PyErr_SetString(PyExc_ValueError, "embedded null character");
        goto exit;
    }
skip_optional_kwonly:
    return_value = tokenizeriter_new_impl(type, readline, extra_tokens, encoding);

exit:
    return return_value;
}
/*[clinic end generated code: output=48be65a2808bdfa6 input=a9049054013a1b77]*/