check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
check('[file for str(file) in []\n])', 2, 2)
+ check('a = « hello » « world »', 1, 5)
check('[\nfile\nfor str(file)\nin\n[]\n]', 3, 5)
check('[file for\n str(file) in []]', 2, 2)
check("ages = {'Alice'=22, 'Bob'=23}", 1, 16)
reset_parser_state(p);
_PyPegen_parse(p);
if (PyErr_Occurred()) {
-     if (PyErr_ExceptionMatches(PyExc_SyntaxError)) {
+     // Prioritize tokenizer errors over custom syntax errors raised
+     // in the second phase, but only when the error comes from the parser.
+     if (p->tok->done != E_ERROR && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
        _PyPegen_check_tokenizer_errors(p);
    }
    return NULL;
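Whether the tokenizer's position or the parser's custom error ends up being reported can be probed from Python by compiling a fragment that combines an invalid comprehension target (a custom, second-pass error) with input that ends before the bracket is closed. An illustrative probe, with no claim about the exact message, which varies by interpreter version:

# Illustrative probe only: combine an invalid comprehension target (handled by
# the parser's second pass) with input that ends before the bracket is closed,
# then inspect which position the surfaced SyntaxError reports.
src = '[file for\n str(file) in []'
try:
    compile(src, '<fragment>', 'exec')
except SyntaxError as exc:
    print(exc.msg, exc.lineno, exc.offset)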