git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
bpo-46237: Fix the line number of tokenizer errors inside f-strings (GH-30463)
author Pablo Galindo Salgado <Pablogsal@gmail.com>
Sat, 8 Jan 2022 00:23:40 +0000 (00:23 +0000)
committer GitHub <noreply@github.com>
Sat, 8 Jan 2022 00:23:40 +0000 (00:23 +0000)
Lib/test/test_exceptions.py
Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst [new file with mode: 0644]
Parser/pegen.c
Parser/string_parser.c

diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index c04b57f5630ab3f075610eca94c8fc50e233d757..e4d685f4154ed84addf4025efc35cf4a008c7aea 100644 (file)
@@ -268,6 +268,18 @@ class ExceptionTests(unittest.TestCase):
         check("(1+)", 1, 4)
         check("[interesting\nfoo()\n", 1, 1)
         check(b"\xef\xbb\xbf#coding: utf8\nprint('\xe6\x88\x91')\n", 0, -1)
+        check("""f'''
+            {
+            (123_a)
+            }'''""", 3, 17)
+        check("""f'''
+            {
+            f\"\"\"
+            {
+            (123_a)
+            }
+            \"\"\"
+            }'''""", 5, 17)
 
         # Errors thrown by symtable.c
         check('x = [(yield i) for i in range(3)]', 1, 7)
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst b/Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst
new file mode 100644 (file)
index 0000000..931a260
--- /dev/null
@@ -0,0 +1,2 @@
+Fix the line number of tokenizer errors inside f-strings. Patch by Pablo
+Galindo.
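The new checks in Lib/test/test_exceptions.py exercise exactly this: a tokenizer error (the invalid decimal literal 123_a) inside a multi-line f-string must now be reported on the line where it occurs. A minimal sketch of the first case, assuming an interpreter with this fix applied (the "<fstring test>" filename is only illustrative):

    pad = " " * 12  # match the indentation used in the test above
    src = (
        "f'''\n"
        + pad + "{\n"
        + pad + "(123_a)\n"
        + pad + "}'''"
    )
    try:
        compile(src, "<fstring test>", "exec")
    except SyntaxError as exc:
        # The test expects lineno 3 and offset 17: the line and column of the
        # invalid literal inside the f-string expression. The offset depends
        # on the 12-space indentation used here.
        print(exc.lineno, exc.offset)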
diff --git a/Parser/pegen.c b/Parser/pegen.c
index cfea1c87199b2143b851d6571f18b1bab537813c..470c2cbd7438b2b0b2d621391625dec6855414da 100644 (file)
@@ -179,10 +179,10 @@ initialize_token(Parser *p, Token *token, const char *start, const char *end, in
     int col_offset = (start != NULL && start >= line_start) ? (int)(start - line_start) : -1;
     int end_col_offset = (end != NULL && end >= p->tok->line_start) ? (int)(end - p->tok->line_start) : -1;
 
-    token->lineno = p->starting_lineno + lineno;
-    token->col_offset = p->tok->lineno == 1 ? p->starting_col_offset + col_offset : col_offset;
-    token->end_lineno = p->starting_lineno + end_lineno;
-    token->end_col_offset = p->tok->lineno == 1 ? p->starting_col_offset + end_col_offset : end_col_offset;
+    token->lineno = lineno;
+    token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + col_offset : col_offset;
+    token->end_lineno = end_lineno;
+    token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + end_col_offset : end_col_offset;
 
     p->fill += 1;
 
diff --git a/Parser/string_parser.c b/Parser/string_parser.c
index c6fe99c885d6914bc27b23a4587f35e2139d347b..57d9b9ed3fdbbd3bb5566466e6f9aa5d9cb2f992 100644 (file)
@@ -392,11 +392,14 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end,
         return NULL;
     }
     Py_INCREF(p->tok->filename);
+
     tok->filename = p->tok->filename;
+    tok->lineno = t->lineno + lines - 1;
 
     Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version,
                                      NULL, p->arena);
-    p2->starting_lineno = t->lineno + lines - 1;
+
+    p2->starting_lineno = t->lineno + lines;
     p2->starting_col_offset = t->col_offset + cols;
 
     expr = _PyPegen_run_parser(p2);
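The string_parser.c hunk seeds the sub-tokenizer's line number (tok->lineno = t->lineno + lines - 1) before the nested parser is created, so tokenizer errors raised inside an f-string expression carry line numbers relative to the enclosing source. A minimal sketch of the nested case from the new test, assuming this fix is applied:

    pad = " " * 12  # same indentation as in the test
    src = (
        "f'''\n"
        + pad + "{\n"
        + pad + 'f"""\n'
        + pad + "{\n"
        + pad + "(123_a)\n"
        + pad + "}\n"
        + pad + '"""\n'
        + pad + "}'''"
    )
    try:
        compile(src, "<nested fstring test>", "exec")
    except SyntaxError as exc:
        # The test expects lineno 5 and offset 17: 123_a sits on line 5 of the
        # overall source, even though it is inside a doubly nested f-string.
        print(exc.lineno, exc.offset)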