    OP '}' (3, 0) (3, 1)
    FSTRING_MIDDLE '__' (3, 1) (3, 3)
    FSTRING_END "'" (3, 3) (3, 4)
+    """)
+
+        self.check_tokenize("""\
+    '''Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli
+    aktualni pracownicy, obecni pracownicy'''
+""", """\
+    INDENT '    ' (1, 0) (1, 4)
+    STRING "'''Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli\\n    aktualni pracownicy, obecni pracownicy'''" (1, 4) (2, 45)
+    NEWLINE '\\n' (2, 45) (2, 46)
+    DEDENT '' (3, 0) (3, 0)
    """)

    def test_function(self):
--- /dev/null
+Correctly compute end column offsets for multiline tokens in the
+:mod:`tokenize` module. Patch by Pablo Galindo.
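
What this fixes, in user-visible terms: the tokenize module reports token
positions as (row, column) pairs counted in characters, but for tokens that
span several lines the end column was derived from the wrong line. A minimal
sketch of the expected behavior on a fixed interpreter, reusing the two-line
string from the new test above (here without the 4-space indent, so the
end column differs from the test's expected (2, 45)):

    import io
    import tokenize

    source = (
        "'''Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli\n"
        "aktualni pracownicy, obecni pracownicy'''\n"
    )

    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.STRING:
            # The second line is pure ASCII and 41 characters long
            # (including the closing '''), so the token ends at (2, 41).
            print(tok.start, tok.end)  # (1, 0) (2, 41)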
}
Py_ssize_t
-_PyPegen_byte_offset_to_character_offset(PyObject *line, Py_ssize_t col_offset)
+_PyPegen_byte_offset_to_character_offset_raw(const char* str, Py_ssize_t col_offset)
{
-    const char *str = PyUnicode_AsUTF8(line);
-    if (!str) {
-        return -1;
-    }
    Py_ssize_t len = strlen(str);
    if (col_offset > len + 1) {
        col_offset = len + 1;
    return size;
}
+Py_ssize_t
+_PyPegen_byte_offset_to_character_offset(PyObject *line, Py_ssize_t col_offset)
+{
+    const char *str = PyUnicode_AsUTF8(line);
+    if (!str) {
+        return -1;
+    }
+    return _PyPegen_byte_offset_to_character_offset_raw(str, col_offset);
+}
+
// Here, mark is the start of the node, while p->mark is the end.
// If node==NULL, they should be the same.
int
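
The pegen change above splits the conversion in two: the new
_PyPegen_byte_offset_to_character_offset_raw() works on a raw char* so
callers that only hold the tokenizer's line buffer can use it, while the
old PyObject*-taking name becomes a thin wrapper around it. A rough Python
rendering of the conversion (the clamping step is visible in the hunk
above; the decode-and-count step is paraphrased, not a literal
translation of the C body):

    def byte_offset_to_character_offset_raw(line: bytes, col_offset: int) -> int:
        # Clamp the byte offset to just past the end of the line, mirroring
        # the C guard `if (col_offset > len + 1)`.
        col_offset = min(col_offset, len(line) + 1)
        # Count how many characters the first col_offset bytes decode to;
        # "replace" keeps the count well-defined if a character is split.
        return len(line[:col_offset].decode("utf-8", errors="replace"))

    # "ą" is two bytes in UTF-8, so byte offset 3 lands after 2 characters.
    assert byte_offset_to_character_offset_raw("tą".encode("utf-8"), 3) == 2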
expr_ty _PyPegen_number_token(Parser *p);
void *_PyPegen_string_token(Parser *p);
Py_ssize_t _PyPegen_byte_offset_to_character_offset(PyObject *line, Py_ssize_t col_offset);
+Py_ssize_t _PyPegen_byte_offset_to_character_offset_raw(const char*, Py_ssize_t col_offset);
// Error handling functions and APIs
typedef enum {
        col_offset = _PyPegen_byte_offset_to_character_offset(line, token.start - line_start);
    }
    if (token.end != NULL && token.end >= it->tok->line_start) {
-        end_col_offset = _PyPegen_byte_offset_to_character_offset(line, token.end - it->tok->line_start);
+        end_col_offset = _PyPegen_byte_offset_to_character_offset_raw(it->tok->line_start, token.end - it->tok->line_start);
    }
    if (it->tok->tok_extra_tokens) {
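
The last hunk is the actual bug fix: token.end is a byte offset measured
from it->tok->line_start, the buffer position of the line the token *ends*
on, while `line` corresponds to the line the token *starts* on, so
converting the end offset against `line` miscounted columns whenever a
multiline token's start line contained multibyte characters. A hypothetical
illustration of that failure mode (helper name invented for the sketch):

    def to_char_offset(line: str, byte_offset: int) -> int:
        raw = line.encode("utf-8")
        return len(raw[:byte_offset].decode("utf-8", errors="replace"))

    start_line = "'''Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli"
    end_line = "aktualni pracownicy, obecni pracownicy'''"

    end_byte = len(end_line.encode("utf-8"))  # byte offset of the token's end
    print(to_char_offset(end_line, end_byte))    # 41: the correct end column
    print(to_char_offset(start_line, end_byte))  # smaller: counted against the wrong line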