/*[clinic input]
preserve
[clinic start generated code]*/
4 
/* Forward declaration of the real constructor logic.  By Argument Clinic
   convention this is hand-written in the file that #includes this header
   (presumably Python-tokenize.c — confirm against the including module);
   it receives the already-validated, NUL-free UTF-8 source string. */
static PyObject *
tokenizeriter_new_impl(PyTypeObject *type, const char *source);
7 
/* tp_new wrapper generated by Argument Clinic for tokenizeriter(source).
 *
 * Parses exactly one required argument, "source", accepted positionally or
 * by keyword; it must be a str.  The str is converted to UTF-8, rejected if
 * it contains an embedded NUL, and passed to tokenizeriter_new_impl().
 *
 * Returns a new reference on success, or NULL with an exception set on
 * parse/validation failure.  NOTE: this code is generated ("preserve"
 * directive above) — do not hand-edit; regenerate with Argument Clinic.
 */
static PyObject *
tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
    PyObject *return_value = NULL;
    static const char * const _keywords[] = {"source", NULL};
    static _PyArg_Parser _parser = {NULL, _keywords, "tokenizeriter", 0};
    PyObject *argsbuf[1];
    PyObject * const *fastargs;
    Py_ssize_t nargs = PyTuple_GET_SIZE(args);
    const char *source;

    /* minpos=1, maxpos=1, minkw=0: exactly one argument, keyword allowed.
       On success, fastargs[0] is a borrowed reference to the argument. */
    fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf);
    if (!fastargs) {
        goto exit;
    }
    if (!PyUnicode_Check(fastargs[0])) {
        _PyArg_BadArgument("tokenizeriter", "argument 'source'", "str", fastargs[0]);
        goto exit;
    }
    Py_ssize_t source_length;
    /* Borrowed UTF-8 buffer cached on the str object; NULL on encode error. */
    source = PyUnicode_AsUTF8AndSize(fastargs[0], &source_length);
    if (source == NULL) {
        goto exit;
    }
    /* The impl takes a C string, so a NUL inside the text would silently
       truncate it; strlen() < source_length detects that case. */
    if (strlen(source) != (size_t)source_length) {
        PyErr_SetString(PyExc_ValueError, "embedded null character");
        goto exit;
    }
    return_value = tokenizeriter_new_impl(type, source);

exit:
    return return_value;
}
/*[clinic end generated code: output=dfcd64774e01bfe6 input=a9049054013a1b77]*/
42