val = eval(code)
self.assertEqual(val, 3.0)
+ def test_f_string_in_action(self) -> None:
+ grammar = """
+ start: n=NAME NEWLINE? $ { f"name -> {n.string}" }
+ """
+ parser_class = make_parser(grammar)
+ node = parse_string("a", parser_class)
+ self.assertEqual(node.strip(), "name -> a")
+
def test_nullable(self) -> None:
grammar_source = """
start: sign NUMBER
--- /dev/null
+The Python PEG generator can now use f-strings in the grammar actions. Patch
+by Pablo Galindo.
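For illustration, a minimal sketch of what this enables (assuming the
`make_parser` and `parse_string` helpers from the pegen test suite used in
the test above, and a Python whose tokenizer emits the FSTRING_* tokens,
i.e. 3.12+; the rule and variable names here are illustrative):

    grammar = """
    start: who=NAME NEWLINE? $ { f"hello, {who.string}" }
    """
    parser_class = make_parser(grammar)
    # The action's f-string is evaluated with the bound token `who` in scope.
    result = parse_string("world", parser_class)
    print(result.strip())  # hello, world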
@memoize
def target_atom(self) -> Optional[str]:
- # target_atom: "{" ~ target_atoms? "}" | "[" ~ target_atoms? "]" | NAME "*" | NAME | NUMBER | STRING | "?" | ":" | !"}" !"]" OP
+ # target_atom: "{" ~ target_atoms? "}" | "[" ~ target_atoms? "]" | NAME "*" | NAME | NUMBER | STRING | FSTRING_START | FSTRING_MIDDLE | FSTRING_END | "?" | ":" | !"}" !"]" OP
mark = self._mark()
cut = False
if (
(string := self.string())
):
return string . string
self._reset(mark)
+ if (
+ (fstring_start := self.fstring_start())
+ ):
+ return fstring_start . string
+ self._reset(mark)
+ if (
+ (fstring_middle := self.fstring_middle())
+ ):
+ return fstring_middle . string
+ self._reset(mark)
+ if (
+ (fstring_end := self.fstring_end())
+ ):
+ return fstring_end . string
+ self._reset(mark)
if (
(literal := self.expect("?"))
):
| NAME { name.string }
| NUMBER { number.string }
| STRING { string.string }
+ | FSTRING_START { fstring_start.string }
+ | FSTRING_MIDDLE { fstring_middle.string }
+ | FSTRING_END { fstring_end.string }
| "?" { "?" }
| ":" { ":" }
| !"}" !"]" OP { op.string }
return self._tokenizer.getnext()
return None
+ @memoize
+ def fstring_start(self) -> Optional[tokenize.TokenInfo]:
+ # FSTRING_START only exists in the token module on Python 3.12+; default
+ # to None so getattr doesn't raise AttributeError on older versions.
+ FSTRING_START = getattr(token, "FSTRING_START", None)
+ if not FSTRING_START:
+ return None
+ tok = self._tokenizer.peek()
+ if tok.type == FSTRING_START:
+ return self._tokenizer.getnext()
+ return None
+
+ @memoize
+ def fstring_middle(self) -> Optional[tokenize.TokenInfo]:
+ FSTRING_MIDDLE = getattr(token, "FSTRING_MIDDLE", None)
+ if not FSTRING_MIDDLE:
+ return None
+ tok = self._tokenizer.peek()
+ if tok.type == FSTRING_MIDDLE:
+ return self._tokenizer.getnext()
+ return None
+
+ @memoize
+ def fstring_end(self) -> Optional[tokenize.TokenInfo]:
+ FSTRING_END = getattr(token, "FSTRING_END", None)
+ if not FSTRING_END:
+ return None
+ tok = self._tokenizer.peek()
+ if tok.type == FSTRING_END:
+ return self._tokenizer.getnext()
+ return None
+
@memoize
def op(self) -> Optional[tokenize.TokenInfo]:
tok = self._tokenizer.peek()
name = node.value
if name == "SOFT_KEYWORD":
return "soft_keyword", "self.soft_keyword()"
- if name in ("NAME", "NUMBER", "STRING", "OP", "TYPE_COMMENT"):
+ if name in ("NAME", "NUMBER", "STRING", "OP", "TYPE_COMMENT",
+ "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START"):
name = name.lower()
return name, f"self.{name}()"
if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER"):