check('x = "a', 1, 5)
check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
- check('[file for str(file) in []\n])', 1, 11)
+ check('[file for str(file) in []\n]', 1, 11)
check('a = « hello » « world »', 1, 5)
check('[\nfile\nfor str(file)\nin\n[]\n]', 3, 5)
check('[file for\n str(file) in []]', 2, 2)
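The check helper itself is not shown in this hunk; to read the cases above, a minimal standalone equivalent (an assumption about what the test helper does, not the actual test implementation) would compile the fragment and assert the reported SyntaxError position:

def check(src, lineno, offset):
    # Compile the fragment and verify where the reported SyntaxError points.
    try:
        compile(src, "<fragment>", "exec")
    except SyntaxError as err:
        assert (err.lineno, err.offset) == (lineno, offset), (err.lineno, err.offset)
    else:
        raise AssertionError(f"expected a SyntaxError for {src!r}")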
for paren in "([{":
self._check_error(paren + "1 + 2", f"\\{paren}' was never closed")
+ for paren in "([{":
+ self._check_error(f"a = {paren} 1, 2, 3\nb=3", f"\\{paren}' was never closed")
+
for paren in ")]}":
self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
--- /dev/null
+Correct the error message for unclosed parentheses when the tokenizer
+doesn't reach the end of the source when the error is reported. Patch by
+Pablo Galindo.
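The user-visible effect described by this NEWS entry: when more source follows the line with the opening bracket, the parser can fail before the tokenizer reaches the end of the input, and the error should still identify the bracket that was never closed. An illustrative check of the intended post-patch behaviour (exact message text depends on the CPython version):

try:
    compile("a = ( 1, 2, 3\nb=3", "<test>", "exec")
except SyntaxError as err:
    # Expected after the fix: "'(' was never closed", pointing at line 1,
    # where the opening parenthesis appears.
    print(err.msg, err.lineno)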
if (PyErr_Occurred()) {
    // Prioritize tokenizer errors to custom syntax errors raised
    // on the second phase only if the errors come from the parser.
-     if (p->tok->done == E_DONE && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
+     int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
+     if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
        _PyPegen_tokenize_full_source_to_check_for_errors(p);
    }
    // Propagate the existing syntax error.