(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 510},
- {"in", 520},
- {"as", 522},
- {"is", 529},
- {"or", 531},
+ {"if", 624},
+ {"as", 622},
+ {"in", 631},
+ {"or", 571},
+ {"is", 579},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 503},
- {"try", 511},
- {"def", 516},
- {"for", 519},
- {"not", 528},
- {"and", 532},
+ {"del", 597},
+ {"def", 632},
+ {"for", 630},
+ {"try", 609},
+ {"and", 572},
+ {"not", 578},
{NULL, -1},
},
(KeywordToken[]) {
- {"pass", 502},
- {"from", 514},
- {"elif", 517},
- {"else", 518},
- {"with", 521},
- {"None", 525},
- {"True", 526},
+ {"from", 569},
+ {"pass", 504},
+ {"with", 606},
+ {"elif", 626},
+ {"else", 627},
+ {"None", 595},
+ {"True", 594},
{NULL, -1},
},
(KeywordToken[]) {
- {"raise", 501},
- {"yield", 504},
- {"break", 506},
- {"while", 512},
- {"class", 515},
- {"False", 527},
+ {"raise", 522},
+ {"yield", 570},
+ {"break", 508},
+ {"class", 633},
+ {"while", 629},
+ {"False", 596},
{NULL, -1},
},
(KeywordToken[]) {
- {"return", 500},
- {"assert", 505},
- {"global", 508},
- {"import", 513},
- {"except", 523},
- {"lambda", 530},
+ {"return", 519},
+ {"import", 531},
+ {"assert", 526},
+ {"global", 523},
+ {"except", 620},
+ {"lambda", 583},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 524},
+ {"finally", 617},
{NULL, -1},
},
(KeywordToken[]) {
- {"continue", 507},
- {"nonlocal", 509},
+ {"continue", 509},
+ {"nonlocal", 524},
{NULL, -1},
},
};
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt"));
stmt_ty return_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 519) // token='return'
&&
(return_stmt_var = return_stmt_rule(p)) // return_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt"));
stmt_ty raise_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 522) // token='raise'
&&
(raise_stmt_var = raise_stmt_rule(p)) // raise_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'pass'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 502)) // token='pass'
+ (_keyword = _PyPegen_expect_token(p, 504)) // token='pass'
)
{
D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'pass'"));
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 597) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt"));
stmt_ty yield_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 570) // token='yield'
&&
(yield_stmt_var = yield_stmt_rule(p)) // yield_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt"));
stmt_ty assert_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='assert'
&&
(assert_stmt_var = assert_stmt_rule(p)) // assert_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'break'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 506)) // token='break'
+ (_keyword = _PyPegen_expect_token(p, 508)) // token='break'
)
{
D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'break'"));
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'continue'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 507)) // token='continue'
+ (_keyword = _PyPegen_expect_token(p, 509)) // token='continue'
)
{
D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'continue'"));
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt"));
stmt_ty global_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 523) // token='global'
&&
(global_stmt_var = global_stmt_rule(p)) // global_stmt
)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt"));
stmt_ty nonlocal_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 524) // token='nonlocal'
&&
(nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt
)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 609) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 629) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 500)) // token='return'
+ (_keyword = _PyPegen_expect_token(p, 519)) // token='return'
&&
(a = star_expressions_rule(p), 1) // star_expressions?
)
expr_ty a;
void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 501)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 522)) // token='raise'
&&
(a = expression_rule(p)) // expression
&&
D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 501)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 522)) // token='raise'
)
{
D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'"));
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 508)) // token='global'
+ (_keyword = _PyPegen_expect_token(p, 523)) // token='global'
&&
(a = (asdl_expr_seq*)_gather_18_rule(p)) // ','.NAME+
)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal'
+ (_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal'
&&
(a = (asdl_expr_seq*)_gather_20_rule(p)) // ','.NAME+
)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 503)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 597)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
expr_ty a;
void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 505)) // token='assert'
+ (_keyword = _PyPegen_expect_token(p, 526)) // token='assert'
&&
(a = expression_rule(p)) // expression
&&
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 513)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 531)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 514)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 569)) // token='from'
&&
(a = _loop0_24_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 514)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 569)) // token='from'
&&
(a = _loop1_25_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
void *b;
asdl_stmt_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 515)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
void *params;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 516)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 632)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 516)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 632)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 517)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 626)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 517)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 626)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 518)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 627)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 512)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='in'
&&
(_cut_var = 1)
&&
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='in'
&&
(_cut_var = 1)
&&
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_52_rule(p)) // ','.with_item+
&&
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_56_rule(p)) // ','.with_item+
&&
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 511)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 609)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 511)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 609)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 523)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 523)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 524)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 617)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 525)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 595)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 527)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 596)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 525)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 595)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 527)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 596)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 627)) // token='else'
&&
(c = expression_rule(p)) // expression
)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 504)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 570)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 569)) // token='from'
&&
(a = expression_rule(p)) // expression
)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 504)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 570)) // token='yield'
&&
(a = star_expressions_rule(p), 1) // star_expressions?
)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 528)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 578)) // token='not'
&&
(a = inversion_rule(p)) // inversion
)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 528)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 578)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 631)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 529)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 579)) // token='is'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 528)) // token='not'
+ (_keyword_1 = _PyPegen_expect_token(p, 578)) // token='not'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 529)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 579)) // token='is'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 527)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 596)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 525)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 595)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 530)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 583)) // token='lambda'
&&
(a = lambda_params_rule(p), 1) // lambda_params?
&&
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='in'
&&
(_cut_var = 1)
&&
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='in'
&&
(_cut_var = 1)
&&
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 627)) // token='else'
&&
(c = expression_rule(p)) // expression
)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 503)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 597)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_gather_162_var = _gather_162_rule(p)) // ','.(expression ['as' star_target])+
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 521)) // token='with'
+ (a = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_gather_166_var = _gather_166_rule(p)) // ','.(expression ['as' star_target])+
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 521)) // token='with'
+ (a = _PyPegen_expect_token(p, 606)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 511)) // token='try'
+ (a = _PyPegen_expect_token(p, 609)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 511)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 609)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
expr_ty a;
expr_ty expressions_var;
if (
- (_keyword = _PyPegen_expect_token(p, 523)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='except'
&&
(a = expression_rule(p)) // expression
&&
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 523)) // token='except'
+ (a = _PyPegen_expect_token(p, 620)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 523)) // token='except'
+ (a = _PyPegen_expect_token(p, 620)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 524)) // token='finally'
+ (a = _PyPegen_expect_token(p, 617)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 523)) // token='except'
+ (a = _PyPegen_expect_token(p, 620)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 523)) // token='except'
+ (a = _PyPegen_expect_token(p, 620)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
_PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
&&
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 510)) // token='if'
+ (a = _PyPegen_expect_token(p, 624)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 517)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 626)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 517)) // token='elif'
+ (a = _PyPegen_expect_token(p, 626)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 518)) // token='else'
+ (a = _PyPegen_expect_token(p, 627)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 512)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 512)) // token='while'
+ (a = _PyPegen_expect_token(p, 629)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 519)) // token='for'
+ (a = _PyPegen_expect_token(p, 630)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 520)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 631)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 516)) // token='def'
+ (a = _PyPegen_expect_token(p, 632)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 515)) // token='class'
+ (a = _PyPegen_expect_token(p, 633)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 513)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 531)) // token='import'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 514)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 569)) // token='from'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 516)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 632)) // token='def'
)
{
D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 515)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='class'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 521)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 606)) // token='with'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 519)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='for'
)
{
D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 514)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 569)) // token='from'
&&
(z = expression_rule(p)) // expression
)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 518)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 627)) // token='else'
)
{
D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='True'
)
{
D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 525)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 595)) // token='None'
)
{
D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 527)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 596)) // token='False'
)
{
D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 523)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='except'
)
{
D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 524)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 617)) // token='finally'
)
{
D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 531)) // token='or'
+ (_keyword = _PyPegen_expect_token(p, 571)) // token='or'
&&
(c = conjunction_rule(p)) // conjunction
)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 532)) // token='and'
+ (_keyword = _PyPegen_expect_token(p, 572)) // token='and'
&&
(c = inversion_rule(p)) // inversion
)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 510)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
+import ast
import contextlib
+import re
from abc import abstractmethod
-from typing import IO, AbstractSet, Dict, Iterator, List, Optional, Set, Text, Tuple
+from typing import (
+ IO,
+ AbstractSet,
+ Any,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Text,
+ Tuple,
+ Union,
+)
from pegen import sccutils
from pegen.grammar import (
Alt,
+ Cut,
+ Forced,
Gather,
Grammar,
GrammarError,
GrammarVisitor,
+ Group,
+ Lookahead,
NamedItem,
NameLeaf,
+ Opt,
Plain,
+ Repeat0,
+ Repeat1,
Rhs,
Rule,
+ StringLeaf,
)
+class RuleCollectorVisitor(GrammarVisitor):
+ """Visitor that invokes a provieded callmaker visitor with just the NamedItem nodes"""
+
+ def __init__(self, rules: Dict[str, Rule], callmakervisitor: GrammarVisitor) -> None:
+ self.rules = rules
+ self.callmaker = callmakervisitor
+
+ def visit_Rule(self, rule: Rule) -> None:
+ self.visit(rule.flatten())
+
+ def visit_NamedItem(self, item: NamedItem) -> None:
+ self.callmaker.visit(item)
+
+
+class KeywordCollectorVisitor(GrammarVisitor):
+ """Visitor that collects all the keywods and soft keywords in the Grammar"""
+
+ def __init__(self, gen: "ParserGenerator", keywords: Dict[str, int], soft_keywords: Set[str]):
+ self.generator = gen
+ self.keywords = keywords
+ self.soft_keywords = soft_keywords
+
+ def visit_StringLeaf(self, node: StringLeaf) -> None:
+ val = ast.literal_eval(node.value)
+ if re.match(r"[a-zA-Z_]\w*\Z", val): # This is a keyword
+ if node.value.endswith("'") and node.value not in self.keywords:
+ self.keywords[val] = self.generator.keyword_type()
+ else:
+ return self.soft_keywords.add(node.value.replace('"', ""))
+
+
class RuleCheckingVisitor(GrammarVisitor):
def __init__(self, rules: Dict[str, Rule], tokens: Set[str]):
self.rules = rules
def __init__(self, grammar: Grammar, tokens: Set[str], file: Optional[IO[Text]]):
self.grammar = grammar
self.tokens = tokens
+ self.keywords: Dict[str, int] = {}
+ self.soft_keywords: Set[str] = set()
self.rules = grammar.rules
self.validate_rule_names()
if "trailer" not in grammar.metas and "start" not in self.rules:
checker.visit(rule)
self.file = file
self.level = 0
- compute_nullables(self.rules)
self.first_graph, self.first_sccs = compute_left_recursives(self.rules)
- self.todo = self.rules.copy() # Rules to generate
self.counter = 0 # For name_rule()/name_loop()
self.keyword_counter = 499 # For keyword_type()
- self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules
+ self.all_rules: Dict[str, Rule] = self.rules.copy() # Rules + temporal rules
self._local_variable_stack: List[List[str]] = []
def validate_rule_names(self) -> None:
for line in lines.splitlines():
self.print(line)
- def collect_todo(self) -> None:
+ def collect_rules(self) -> None:
+ keyword_collector = KeywordCollectorVisitor(self, self.keywords, self.soft_keywords)
+ for rule in self.all_rules.values():
+ keyword_collector.visit(rule)
+
+ rule_collector = RuleCollectorVisitor(self.rules, self.callmakervisitor)
done: Set[str] = set()
while True:
- alltodo = list(self.todo)
- self.all_rules.update(self.todo)
- todo = [i for i in alltodo if i not in done]
+ computed_rules = list(self.all_rules)
+ todo = [i for i in computed_rules if i not in done]
if not todo:
break
+ done = set(self.all_rules)
for rulename in todo:
- self.todo[rulename].collect_todo(self)
- done = set(alltodo)
+ rule_collector.visit(self.all_rules[rulename])
def keyword_type(self) -> int:
self.keyword_counter += 1
return self.keyword_counter
- def name_node(self, rhs: Rhs) -> str:
+ def artifical_rule_from_rhs(self, rhs: Rhs) -> str:
self.counter += 1
name = f"_tmp_{self.counter}" # TODO: Pick a nicer name.
- self.todo[name] = Rule(name, None, rhs)
+ self.all_rules[name] = Rule(name, None, rhs)
return name
- def name_loop(self, node: Plain, is_repeat1: bool) -> str:
+ def artificial_rule_from_repeat(self, node: Plain, is_repeat1: bool) -> str:
self.counter += 1
if is_repeat1:
prefix = "_loop1_"
else:
prefix = "_loop0_"
- name = f"{prefix}{self.counter}" # TODO: It's ugly to signal via the name.
- self.todo[name] = Rule(name, None, Rhs([Alt([NamedItem(None, node)])]))
+ name = f"{prefix}{self.counter}"
+ self.all_rules[name] = Rule(name, None, Rhs([Alt([NamedItem(None, node)])]))
return name
- def name_gather(self, node: Gather) -> str:
+ def artifical_rule_from_gather(self, node: Gather) -> str:
self.counter += 1
name = f"_gather_{self.counter}"
self.counter += 1
[NamedItem(None, node.separator), NamedItem("elem", node.node)],
action="elem",
)
- self.todo[extra_function_name] = Rule(
+ self.all_rules[extra_function_name] = Rule(
extra_function_name,
None,
Rhs([extra_function_alt]),
alt = Alt(
[NamedItem("elem", node.node), NamedItem("seq", NameLeaf(extra_function_name))],
)
- self.todo[name] = Rule(
+ self.all_rules[name] = Rule(
name,
None,
Rhs([alt]),
return name
-def compute_nullables(rules: Dict[str, Rule]) -> None:
+class NullableVisitor(GrammarVisitor):
+ def __init__(self, rules: Dict[str, Rule]) -> None:
+ self.rules = rules
+ self.visited: Set[Any] = set()
+ self.nullables: Set[Union[Rule, NamedItem]] = set()
+
+ def visit_Rule(self, rule: Rule) -> bool:
+ if rule in self.visited:
+ return False
+ self.visited.add(rule)
+ if self.visit(rule.rhs):
+ self.nullables.add(rule)
+ return rule in self.nullables
+
+ def visit_Rhs(self, rhs: Rhs) -> bool:
+ for alt in rhs.alts:
+ if self.visit(alt):
+ return True
+ return False
+
+ def visit_Alt(self, alt: Alt) -> bool:
+ for item in alt.items:
+ if not self.visit(item):
+ return False
+ return True
+
+ def visit_Forced(self, force: Forced) -> bool:
+ return True
+
+ def visit_LookAhead(self, lookahead: Lookahead) -> bool:
+ return True
+
+ def visit_Opt(self, opt: Opt) -> bool:
+ return True
+
+ def visit_Repeat0(self, repeat: Repeat0) -> bool:
+ return True
+
+ def visit_Repeat1(self, repeat: Repeat1) -> bool:
+ return False
+
+ def visit_Gather(self, gather: Gather) -> bool:
+ return False
+
+ def visit_Cut(self, cut: Cut) -> bool:
+ return False
+
+ def visit_Group(self, group: Group) -> bool:
+ return self.visit(group.rhs)
+
+ def visit_NamedItem(self, item: NamedItem) -> bool:
+ if self.visit(item.item):
+ self.nullables.add(item)
+ return item in self.nullables
+
+ def visit_NameLeaf(self, node: NameLeaf) -> bool:
+ if node.value in self.rules:
+ return self.visit(self.rules[node.value])
+ # Token or unknown; never empty.
+ return False
+
+ def visit_StringLeaf(self, node: StringLeaf) -> bool:
+ # The string token '' is considered empty.
+ return not node.value
+
+
+def compute_nullables(rules: Dict[str, Rule]) -> Set[Any]:
"""Compute which rules in a grammar are nullable.
Thanks to TatSu (tatsu/leftrec.py) for inspiration.
"""
+ nullable_visitor = NullableVisitor(rules)
for rule in rules.values():
- rule.nullable_visit(rules)
+ nullable_visitor.visit(rule)
+ return nullable_visitor.nullables
+
+
+class InitialNamesVisitor(GrammarVisitor):
+ def __init__(self, rules: Dict[str, Rule]) -> None:
+ self.rules = rules
+ self.nullables = compute_nullables(rules)
+
+ def generic_visit(self, node: Iterable[Any], *args: Any, **kwargs: Any) -> Set[Any]:
+ names: Set[str] = set()
+ for value in node:
+ if isinstance(value, list):
+ for item in value:
+ names |= self.visit(item, *args, **kwargs)
+ else:
+ names |= self.visit(value, *args, **kwargs)
+ return names
+
+ def visit_Alt(self, alt: Alt) -> Set[Any]:
+ names: Set[str] = set()
+ for item in alt.items:
+ names |= self.visit(item)
+ if item not in self.nullables:
+ break
+ return names
+
+ def visit_Forced(self, force: Forced) -> Set[Any]:
+ return set()
+
+ def visit_LookAhead(self, lookahead: Lookahead) -> Set[Any]:
+ return set()
+
+ def visit_Cut(self, cut: Cut) -> Set[Any]:
+ return set()
+
+ def visit_NameLeaf(self, node: NameLeaf) -> Set[Any]:
+ return {node.value}
+
+ def visit_StringLeaf(self, node: StringLeaf) -> Set[Any]:
+ return set()
def compute_left_recursives(
Note that this requires the nullable flags to have been computed.
"""
+ initial_name_visitor = InitialNamesVisitor(rules)
graph = {}
vertices: Set[str] = set()
for rulename, rhs in rules.items():
- graph[rulename] = names = rhs.initial_names()
+ graph[rulename] = names = initial_name_visitor.visit(rhs)
vertices |= names
for vertex in vertices:
graph.setdefault(vertex, set())