git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
bpo-46237: Fix the line number of tokenizer errors inside f-strings (GH-30463)
authorMiss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
Tue, 11 Jan 2022 16:33:08 +0000 (08:33 -0800)
committerGitHub <noreply@github.com>
Tue, 11 Jan 2022 16:33:08 +0000 (08:33 -0800)
(cherry picked from commit 6fa8b2ceee38187b0ae96aee12fe4f0a5c8a2ce7)

Co-authored-by: Pablo Galindo Salgado <Pablogsal@gmail.com>
Lib/test/test_exceptions.py
Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst [new file with mode: 0644]
Parser/pegen.c
Parser/string_parser.c

index cc0640dda09802ee6fbd1993d6b3cb4b2231d39e..86b5dccaaed9854088ec678869fd33d79e0dbb93 100644 (file)
@@ -266,6 +266,18 @@ class ExceptionTests(unittest.TestCase):
         check("(1+)", 1, 4)
         check("[interesting\nfoo()\n", 1, 1)
         check(b"\xef\xbb\xbf#coding: utf8\nprint('\xe6\x88\x91')\n", 0, -1)
+        check("""f'''
+            {
+            (123_a)
+            }'''""", 3, 17)
+        check("""f'''
+            {
+            f\"\"\"
+            {
+            (123_a)
+            }
+            \"\"\"
+            }'''""", 5, 17)
 
         # Errors thrown by symtable.c
         check('x = [(yield i) for i in range(3)]', 1, 7)
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst b/Misc/NEWS.d/next/Core and Builtins/2022-01-07-19-33-05.bpo-46237.9A6Hpq.rst
new file mode 100644 (file)
index 0000000..931a260
--- /dev/null
@@ -0,0 +1,2 @@
+Fix the line number of tokenizer errors inside f-strings. Patch by Pablo
+Galindo.
index 0504906c947d02e4d134ba98835786a63d9a0e0a..e507415f6d14cea163bf9975e46e6784183d2bf7 100644 (file)
@@ -701,10 +701,10 @@ initialize_token(Parser *p, Token *token, const char *start, const char *end, in
     int col_offset = (start != NULL && start >= line_start) ? (int)(start - line_start) : -1;
     int end_col_offset = (end != NULL && end >= p->tok->line_start) ? (int)(end - p->tok->line_start) : -1;
 
-    token->lineno = p->starting_lineno + lineno;
-    token->col_offset = p->tok->lineno == 1 ? p->starting_col_offset + col_offset : col_offset;
-    token->end_lineno = p->starting_lineno + end_lineno;
-    token->end_col_offset = p->tok->lineno == 1 ? p->starting_col_offset + end_col_offset : end_col_offset;
+    token->lineno = lineno;
+    token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + col_offset : col_offset;
+    token->end_lineno = end_lineno;
+    token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + end_col_offset : end_col_offset;
 
     p->fill += 1;
 
index dcd298cb358ee7a85c1094034af06e2edde92cfd..c83e63fc6f8f2bebef6ecb2f39305ff658923e4d 100644 (file)
@@ -392,11 +392,14 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end,
         return NULL;
     }
     Py_INCREF(p->tok->filename);
+
     tok->filename = p->tok->filename;
+    tok->lineno = t->lineno + lines - 1;
 
     Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version,
                                      NULL, p->arena);
-    p2->starting_lineno = t->lineno + lines - 1;
+
+    p2->starting_lineno = t->lineno + lines;
     p2->starting_col_offset = t->col_offset + cols;
 
     expr = _PyPegen_run_parser(p2);