@memoize
def fstring_start(self) -> Optional[tokenize.TokenInfo]:
- FSTRING_START = getattr(token, "FSTRING_START")
+ FSTRING_START = getattr(token, "FSTRING_START", None)
if not FSTRING_START:
return None
tok = self._tokenizer.peek()
@memoize
def fstring_middle(self) -> Optional[tokenize.TokenInfo]:
- FSTRING_MIDDLE = getattr(token, "FSTRING_MIDDLE")
+ FSTRING_MIDDLE = getattr(token, "FSTRING_MIDDLE", None)
if not FSTRING_MIDDLE:
return None
tok = self._tokenizer.peek()
@memoize
def fstring_end(self) -> Optional[tokenize.TokenInfo]:
- FSTRING_END = getattr(token, "FSTRING_END")
+ FSTRING_END = getattr(token, "FSTRING_END", None)
if not FSTRING_END:
return None
tok = self._tokenizer.peek()
+import sys
import ast
import contextlib
import re
def __init__(self, rules: Dict[str, Rule], tokens: Set[str]):
self.rules = rules
self.tokens = tokens
+ # If Python < 3.12, add the virtual fstring tokens
+ if sys.version_info < (3, 12):
+ self.tokens.add("FSTRING_START")
+ self.tokens.add("FSTRING_END")
+ self.tokens.add("FSTRING_MIDDLE")
def visit_NameLeaf(self, node: NameLeaf) -> None:
if node.value not in self.rules and node.value not in self.tokens: