From a24676bedcd332dd7e6fa5521d0449206391d190 Mon Sep 17 00:00:00 2001
From: Pablo Galindo Salgado
Date: Tue, 24 Aug 2021 17:50:05 +0100
Subject: Add tests for the C tokenizer and expose it as a private module
 (GH-27924)
---
 Python/clinic/Python-tokenize.c.h | 41 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 Python/clinic/Python-tokenize.c.h

(limited to 'Python/clinic/Python-tokenize.c.h')

diff --git a/Python/clinic/Python-tokenize.c.h b/Python/clinic/Python-tokenize.c.h
new file mode 100644
index 0000000000..050b4d4944
--- /dev/null
+++ b/Python/clinic/Python-tokenize.c.h
@@ -0,0 +1,41 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+static PyObject *
+tokenizeriter_new_impl(PyTypeObject *type, const char *source);
+
+static PyObject *
+tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
+{
+    PyObject *return_value = NULL;
+    static const char * const _keywords[] = {"source", NULL};
+    static _PyArg_Parser _parser = {NULL, _keywords, "tokenizeriter", 0};
+    PyObject *argsbuf[1];
+    PyObject * const *fastargs;
+    Py_ssize_t nargs = PyTuple_GET_SIZE(args);
+    const char *source;
+
+    fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf);
+    if (!fastargs) {
+        goto exit;
+    }
+    if (!PyUnicode_Check(fastargs[0])) {
+        _PyArg_BadArgument("tokenizeriter", "argument 'source'", "str", fastargs[0]);
+        goto exit;
+    }
+    Py_ssize_t source_length;
+    source = PyUnicode_AsUTF8AndSize(fastargs[0], &source_length);
+    if (source == NULL) {
+        goto exit;
+    }
+    if (strlen(source) != (size_t)source_length) {
+        PyErr_SetString(PyExc_ValueError, "embedded null character");
+        goto exit;
+    }
+    return_value = tokenizeriter_new_impl(type, source);
+
+exit:
+    return return_value;
+}
+/*[clinic end generated code: output=dfcd64774e01bfe6 input=a9049054013a1b77]*/
-- 
cgit v1.2.1