def auto_aiter(
- iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ iterable: "t.AsyncIterable[V] | t.Iterable[V]",
) -> "t.AsyncIterator[V]":
if hasattr(iterable, "__aiter__"):
return iterable.__aiter__()
async def auto_to_list(
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
) -> list["V"]:
return [x async for x in auto_aiter(value)]
class _MemcachedClient(te.Protocol):
def get(self, key: str) -> bytes: ...
- def set(
- self, key: str, value: bytes, timeout: t.Optional[int] = None
- ) -> None: ...
+ def set(self, key: str, value: bytes, timeout: int | None = None) -> None: ...
bc_version = 5
def reset(self) -> None:
"""Resets the bucket (unloads the bytecode)."""
- self.code: t.Optional[CodeType] = None
+ self.code: CodeType | None = None
def load_bytecode(self, f: t.BinaryIO) -> None:
"""Loads bytecode from a file or file like object."""
by a particular environment.
"""
- def get_cache_key(
- self, name: str, filename: t.Optional[t.Union[str]] = None
- ) -> str:
+ def get_cache_key(self, name: str, filename: str | None = None) -> str:
"""Returns the unique hash key for this template name."""
hash = sha1(name.encode("utf-8"))
self,
environment: "Environment",
name: str,
- filename: t.Optional[str],
+ filename: str | None,
source: str,
) -> Bucket:
"""Return a cache bucket for the given template. All arguments are
"""
def __init__(
- self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
+ self, directory: str | None = None, pattern: str = "__jinja2_%s.cache"
) -> None:
if directory is None:
directory = self._get_default_cache_dir()
self,
client: "_MemcachedClient",
prefix: str = "jinja2/bytecode/",
- timeout: t.Optional[int] = None,
+ timeout: int | None = None,
ignore_memcache_errors: bool = True,
):
self.client = client
def generate(
node: nodes.Template,
environment: "Environment",
- name: t.Optional[str],
- filename: t.Optional[str],
- stream: t.Optional[t.TextIO] = None,
+ name: str | None,
+ filename: str | None,
+ stream: t.TextIO | None = None,
defer_init: bool = False,
optimized: bool = True,
-) -> t.Optional[str]:
+) -> str | None:
"""Generate the python source for a node tree."""
if not isinstance(node, nodes.Template):
raise TypeError("Can't compile non template nodes")
class MacroRef:
- def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None:
+ def __init__(self, node: nodes.Macro | nodes.CallBlock) -> None:
self.node = node
self.accesses_caller = False
self.accesses_kwargs = False
self,
eval_ctx: EvalContext,
parent: t.Optional["Frame"] = None,
- level: t.Optional[int] = None,
+ level: int | None = None,
) -> None:
self.eval_ctx = eval_ctx
# this for example affects {% filter %} or {% macro %}. If a frame
# is buffered this variable points to the name of the list used as
# buffer.
- self.buffer: t.Optional[str] = None
+ self.buffer: str | None = None
# the name of the block we're in, otherwise None.
- self.block: t.Optional[str] = None
+ self.block: str | None = None
else:
self.symbols = Symbols(parent.symbols, level=level)
def __init__(
self,
environment: "Environment",
- name: t.Optional[str],
- filename: t.Optional[str],
- stream: t.Optional[t.TextIO] = None,
+ name: str | None,
+ filename: str | None,
+ stream: t.TextIO | None = None,
defer_init: bool = False,
optimized: bool = True,
) -> None:
self.stream = stream
self.created_block_context = False
self.defer_init = defer_init
- self.optimizer: t.Optional[Optimizer] = None
+ self.optimizer: Optimizer | None = None
if optimized:
self.optimizer = Optimizer(environment)
# the debug information
self.debug_info: list[tuple[int, int]] = []
- self._write_debug_info: t.Optional[int] = None
+ self._write_debug_info: int | None = None
# the number of new lines before the next write()
self._new_lines = 0
"""Outdent by step."""
self._indentation -= step
- def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None:
+ def start_write(self, frame: Frame, node: nodes.Node | None = None) -> None:
"""Yield or write into the frame buffer."""
if frame.buffer is None:
self.writeline("yield ", node)
self.write(")")
def simple_write(
- self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None
+ self, s: str, frame: Frame, node: nodes.Node | None = None
) -> None:
"""Simple shortcut for start_write + write + end_write."""
self.start_write(frame, node)
self._new_lines = 0
self.stream.write(x)
- def writeline(
- self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0
- ) -> None:
+ def writeline(self, x: str, node: nodes.Node | None = None, extra: int = 0) -> None:
"""Combination of newline and write."""
self.newline(node, extra)
self.write(x)
- def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None:
+ def newline(self, node: nodes.Node | None = None, extra: int = 0) -> None:
"""Add one or more newlines before the next write."""
self._new_lines = max(self._new_lines, 1 + extra)
if node is not None and node.lineno != self._last_line:
def signature(
self,
- node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
+ node: nodes.Call | nodes.Filter | nodes.Test,
frame: Frame,
- extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+ extra_kwargs: t.Mapping[str, t.Any] | None = None,
) -> None:
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
return f"{self.choose_async()}def {name}"
def macro_body(
- self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame
+ self, node: nodes.Macro | nodes.CallBlock, frame: Frame
) -> tuple[Frame, MacroRef]:
"""Dump the function def of a macro or call block."""
frame = frame.inner()
# -- Statement Visitors
- def visit_Template(
- self, node: nodes.Template, frame: t.Optional[Frame] = None
- ) -> None:
+ def visit_Template(self, node: nodes.Template, frame: Frame | None = None) -> None:
assert frame is None, "no root frame allowed"
eval_ctx = EvalContext(self.environment, self.name)
self.outdent()
def _import_common(
- self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame
+ self, node: nodes.Import | nodes.FromImport, frame: Frame
) -> None:
self.write(f"{self.choose_async('await ')}environment.get_template(")
self.visit(node.template, frame)
with_frame = frame.inner()
with_frame.symbols.analyze_node(node)
self.enter_frame(with_frame)
- for target, expr in zip(node.targets, node.values):
+ for target, expr in zip(node.targets, node.values, strict=False):
self.newline()
self.visit(target, with_frame)
self.write(" = ")
self.visit(node.node, frame)
class _FinalizeInfo(t.NamedTuple):
- const: t.Optional[t.Callable[..., str]]
- src: t.Optional[str]
+ const: t.Callable[..., str] | None
+ src: str | None
@staticmethod
def _default_finalize(value: t.Any) -> t.Any:
"""
return str(value)
- _finalize: t.Optional[_FinalizeInfo] = None
+ _finalize: _FinalizeInfo | None = None
def _make_finalize(self) -> _FinalizeInfo:
"""Build the finalize function to be used on constants and at
if self._finalize is not None:
return self._finalize
- finalize: t.Optional[t.Callable[..., t.Any]]
+ finalize: t.Callable[..., t.Any] | None
finalize = default = self._default_finalize
src = None
self.indent()
finalize = self._make_finalize()
- body: list[t.Union[list[t.Any], nodes.Expr]] = []
+ body: list[list[t.Any] | nodes.Expr] = []
# Evaluate constants at compile time if possible. Each item in
# body will be either a list of static data or a node to be
@contextmanager
def _filter_test_common(
- self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool
+ self, node: nodes.Filter | nodes.Test, frame: Frame, is_filter: bool
) -> t.Iterator[None]:
if self.environment.is_async:
self.write("(await auto_await(")
from .runtime import Context
-def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
+def rewrite_traceback_stack(source: str | None = None) -> BaseException:
"""Rewrite the current exception to replace any tracebacks from
within compiled template code with tracebacks that look like they
came from the template source.
def fake_traceback( # type: ignore
- exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
+ exc_value: BaseException, tb: TracebackType | None, filename: str, lineno: int
) -> TracebackType:
"""Produce a new traceback object that looks like it came from the
template source instead of the compiled code. The filename, line
available at that point in the template.
"""
# Start with the current template context.
- ctx: t.Optional[Context] = real_locals.get("context")
+ ctx: Context | None = real_locals.get("context")
if ctx is not None:
data: dict[str, t.Any] = ctx.get_all().copy()
VARIABLE_END_STRING = "}}"
COMMENT_START_STRING = "{#"
COMMENT_END_STRING = "#}"
-LINE_STATEMENT_PREFIX: t.Optional[str] = None
-LINE_COMMENT_PREFIX: t.Optional[str] = None
+LINE_STATEMENT_PREFIX: str | None = None
+LINE_COMMENT_PREFIX: str | None = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
def create_cache(
size: int,
-) -> t.Optional[t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"]]:
+) -> t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"] | None:
"""Return the cache class for the given size."""
if size == 0:
return None
def copy_cache(
- cache: t.Optional[
- t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"]
- ],
-) -> t.Optional[t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"]]:
+ cache: t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"] | None,
+) -> t.MutableMapping[tuple["weakref.ref[BaseLoader]", str], "Template"] | None:
"""Create an empty copy of the given cache."""
if cache is None:
return None
def load_extensions(
environment: "Environment",
- extensions: t.Sequence[t.Union[str, type["Extension"]]],
+ extensions: t.Sequence[str | type["Extension"]],
) -> dict[str, "Extension"]:
"""Load the extensions from the list and bind it to the environment.
Returns a dict of instantiated extensions.
variable_end_string: str = VARIABLE_END_STRING,
comment_start_string: str = COMMENT_START_STRING,
comment_end_string: str = COMMENT_END_STRING,
- line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
- line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+ line_statement_prefix: str | None = LINE_STATEMENT_PREFIX,
+ line_comment_prefix: str | None = LINE_COMMENT_PREFIX,
trim_blocks: bool = TRIM_BLOCKS,
lstrip_blocks: bool = LSTRIP_BLOCKS,
newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
- extensions: t.Sequence[t.Union[str, type["Extension"]]] = (),
+ extensions: t.Sequence[str | type["Extension"]] = (),
optimized: bool = True,
undefined: type[Undefined] = Undefined,
- finalize: t.Optional[t.Callable[..., t.Any]] = None,
- autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+ finalize: t.Callable[..., t.Any] | None = None,
+ autoescape: bool | t.Callable[[str | None], bool] = False,
loader: t.Optional["BaseLoader"] = None,
cache_size: int = 400,
auto_reload: bool = True,
self.is_async = enable_async
_environment_config_check(self)
- def add_extension(self, extension: t.Union[str, type["Extension"]]) -> None:
+ def add_extension(self, extension: str | type["Extension"]) -> None:
"""Adds an extension after the environment was created.
.. versionadded:: 2.5
variable_end_string: str = missing,
comment_start_string: str = missing,
comment_end_string: str = missing,
- line_statement_prefix: t.Optional[str] = missing,
- line_comment_prefix: t.Optional[str] = missing,
+ line_statement_prefix: str | None = missing,
+ line_comment_prefix: str | None = missing,
trim_blocks: bool = missing,
lstrip_blocks: bool = missing,
newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing,
keep_trailing_newline: bool = missing,
- extensions: t.Sequence[t.Union[str, type["Extension"]]] = missing,
+ extensions: t.Sequence[str | type["Extension"]] = missing,
optimized: bool = missing,
undefined: type[Undefined] = missing,
- finalize: t.Optional[t.Callable[..., t.Any]] = missing,
- autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing,
+ finalize: t.Callable[..., t.Any] | None = missing,
+ autoescape: bool | t.Callable[[str | None], bool] = missing,
loader: t.Optional["BaseLoader"] = missing,
cache_size: int = missing,
auto_reload: bool = missing,
"""Iterates over the extensions by priority."""
return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
- def getitem(
- self, obj: t.Any, argument: t.Union[str, t.Any]
- ) -> t.Union[t.Any, Undefined]:
+ def getitem(self, obj: t.Any, argument: str | t.Any) -> t.Any | Undefined:
"""Get an item or attribute of an object but prefer the item."""
try:
return obj[argument]
def _filter_test_common(
self,
- name: t.Union[str, Undefined],
+ name: str | Undefined,
value: t.Any,
- args: t.Optional[t.Sequence[t.Any]],
- kwargs: t.Optional[t.Mapping[str, t.Any]],
- context: t.Optional[Context],
- eval_ctx: t.Optional[EvalContext],
+ args: t.Sequence[t.Any] | None,
+ kwargs: t.Mapping[str, t.Any] | None,
+ context: Context | None,
+ eval_ctx: EvalContext | None,
is_filter: bool,
) -> t.Any:
if is_filter:
self,
name: str,
value: t.Any,
- args: t.Optional[t.Sequence[t.Any]] = None,
- kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
- context: t.Optional[Context] = None,
- eval_ctx: t.Optional[EvalContext] = None,
+ args: t.Sequence[t.Any] | None = None,
+ kwargs: t.Mapping[str, t.Any] | None = None,
+ context: Context | None = None,
+ eval_ctx: EvalContext | None = None,
) -> t.Any:
"""Invoke a filter on a value the same way the compiler does.
self,
name: str,
value: t.Any,
- args: t.Optional[t.Sequence[t.Any]] = None,
- kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
- context: t.Optional[Context] = None,
- eval_ctx: t.Optional[EvalContext] = None,
+ args: t.Sequence[t.Any] | None = None,
+ kwargs: t.Mapping[str, t.Any] | None = None,
+ context: Context | None = None,
+ eval_ctx: EvalContext | None = None,
) -> t.Any:
"""Invoke a test on a value the same way the compiler does.
def parse(
self,
source: str,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
) -> nodes.Template:
"""Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
self.handle_exception(source=source)
def _parse(
- self, source: str, name: t.Optional[str], filename: t.Optional[str]
+ self, source: str, name: str | None, filename: str | None
) -> nodes.Template:
"""Internal parsing function used by `parse` and `compile`."""
return Parser(self, source, name, filename).parse()
def lex(
self,
source: str,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
) -> t.Iterator[tuple[int, str, str]]:
"""Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
def preprocess(
self,
source: str,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
) -> str:
"""Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
def _tokenize(
self,
source: str,
- name: t.Optional[str],
- filename: t.Optional[str] = None,
- state: t.Optional[str] = None,
+ name: str | None,
+ filename: str | None = None,
+ state: str | None = None,
) -> TokenStream:
"""Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
def _generate(
self,
source: nodes.Template,
- name: t.Optional[str],
- filename: t.Optional[str],
+ name: str | None,
+ filename: str | None,
defer_init: bool = False,
) -> str:
"""Internal hook that can be overridden to hook a different generate
@typing.overload
def compile(
self,
- source: t.Union[str, nodes.Template],
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ source: str | nodes.Template,
+ name: str | None = None,
+ filename: str | None = None,
raw: "te.Literal[False]" = False,
defer_init: bool = False,
) -> CodeType: ...
@typing.overload
def compile(
self,
- source: t.Union[str, nodes.Template],
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ source: str | nodes.Template,
+ name: str | None = None,
+ filename: str | None = None,
raw: "te.Literal[True]" = ...,
defer_init: bool = False,
) -> str: ...
@internalcode
def compile(
self,
- source: t.Union[str, nodes.Template],
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ source: str | nodes.Template,
+ name: str | None = None,
+ filename: str | None = None,
raw: bool = False,
defer_init: bool = False,
- ) -> t.Union[str, CodeType]:
+ ) -> str | CodeType:
"""Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
def compile_templates(
self,
target: t.Union[str, "os.PathLike[str]"],
- extensions: t.Optional[t.Collection[str]] = None,
- filter_func: t.Optional[t.Callable[[str], bool]] = None,
- zip: t.Optional[str] = "deflated",
- log_function: t.Optional[t.Callable[[str], None]] = None,
+ extensions: t.Collection[str] | None = None,
+ filter_func: t.Callable[[str], bool] | None = None,
+ zip: str | None = "deflated",
+ log_function: t.Callable[[str], None] | None = None,
ignore_errors: bool = True,
) -> None:
"""Finds all the templates the loader can find, compiles them
def list_templates(
self,
- extensions: t.Optional[t.Collection[str]] = None,
- filter_func: t.Optional[t.Callable[[str], bool]] = None,
+ extensions: t.Collection[str] | None = None,
+ filter_func: t.Callable[[str], bool] | None = None,
) -> list[str]:
"""Returns a list of templates for this environment. This requires
that the loader supports the loader's
return names
- def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn":
+ def handle_exception(self, source: str | None = None) -> "te.NoReturn":
"""Exception handling helper. This is used internally to either raise
rewritten exceptions or return a rendered traceback for the template.
"""
@internalcode
def _load_template(
- self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]]
+ self, name: str, globals: t.MutableMapping[str, t.Any] | None
) -> "Template":
if self.loader is None:
raise TypeError("no loader for this environment specified")
def get_template(
self,
name: t.Union[str, "Template"],
- parent: t.Optional[str] = None,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ parent: str | None = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
"""Load a template by name with :attr:`loader` and return a
:class:`Template`. If the template does not exist a
def select_template(
self,
names: t.Iterable[t.Union[str, "Template"]],
- parent: t.Optional[str] = None,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ parent: str | None = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
"""Like :meth:`get_template`, but tries loading multiple names.
If none of the names can be loaded a :exc:`TemplatesNotFound`
def get_or_select_template(
self,
template_name_or_list: t.Union[str, "Template", list[t.Union[str, "Template"]]],
- parent: t.Optional[str] = None,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ parent: str | None = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
"""Use :meth:`select_template` if an iterable of template names
is given, or :meth:`get_template` if one name is given.
def from_string(
self,
- source: t.Union[str, nodes.Template],
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
- template_class: t.Optional[type["Template"]] = None,
+ source: str | nodes.Template,
+ globals: t.MutableMapping[str, t.Any] | None = None,
+ template_class: type["Template"] | None = None,
) -> "Template":
"""Load a template from a source string without using
:attr:`loader`.
return cls.from_code(self, self.compile(source), gs, None)
def make_globals(
- self, d: t.Optional[t.MutableMapping[str, t.Any]]
+ self, d: t.MutableMapping[str, t.Any] | None
) -> t.MutableMapping[str, t.Any]:
"""Make the globals map for a template. Any given template
globals overlay the environment :attr:`globals`.
environment: Environment
globals: t.MutableMapping[str, t.Any]
- name: t.Optional[str]
- filename: t.Optional[str]
+ name: str | None
+ filename: str | None
blocks: dict[str, t.Callable[[Context], t.Iterator[str]]]
root_render_func: t.Callable[[Context], t.Iterator[str]]
_module: t.Optional["TemplateModule"]
_debug_info: str
- _uptodate: t.Optional[t.Callable[[], bool]]
+ _uptodate: t.Callable[[], bool] | None
def __new__(
cls,
- source: t.Union[str, nodes.Template],
+ source: str | nodes.Template,
block_start_string: str = BLOCK_START_STRING,
block_end_string: str = BLOCK_END_STRING,
variable_start_string: str = VARIABLE_START_STRING,
variable_end_string: str = VARIABLE_END_STRING,
comment_start_string: str = COMMENT_START_STRING,
comment_end_string: str = COMMENT_END_STRING,
- line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
- line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+ line_statement_prefix: str | None = LINE_STATEMENT_PREFIX,
+ line_comment_prefix: str | None = LINE_COMMENT_PREFIX,
trim_blocks: bool = TRIM_BLOCKS,
lstrip_blocks: bool = LSTRIP_BLOCKS,
newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
- extensions: t.Sequence[t.Union[str, type["Extension"]]] = (),
+ extensions: t.Sequence[str | type["Extension"]] = (),
optimized: bool = True,
undefined: type[Undefined] = Undefined,
- finalize: t.Optional[t.Callable[..., t.Any]] = None,
- autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+ finalize: t.Callable[..., t.Any] | None = None,
+ autoescape: bool | t.Callable[[str | None], bool] = False,
enable_async: bool = False,
) -> t.Any: # it returns a `Template`, but this breaks the sphinx build...
env = get_spontaneous_environment(
environment: Environment,
code: CodeType,
globals: t.MutableMapping[str, t.Any],
- uptodate: t.Optional[t.Callable[[], bool]] = None,
+ uptodate: t.Callable[[], bool] | None = None,
) -> "Template":
"""Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object.
def new_context(
self,
- vars: t.Optional[dict[str, t.Any]] = None,
+ vars: dict[str, t.Any] | None = None,
shared: bool = False,
- locals: t.Optional[t.Mapping[str, t.Any]] = None,
+ locals: t.Mapping[str, t.Any] | None = None,
) -> Context:
"""Create a new :class:`Context` for this template. The vars
provided will be passed to the template. Per default the globals
def make_module(
self,
- vars: t.Optional[dict[str, t.Any]] = None,
+ vars: dict[str, t.Any] | None = None,
shared: bool = False,
- locals: t.Optional[t.Mapping[str, t.Any]] = None,
+ locals: t.Mapping[str, t.Any] | None = None,
) -> "TemplateModule":
"""This method works like the :attr:`module` attribute when called
without arguments but it will evaluate the template on every call
async def make_module_async(
self,
- vars: t.Optional[dict[str, t.Any]] = None,
+ vars: dict[str, t.Any] | None = None,
shared: bool = False,
- locals: t.Optional[t.Mapping[str, t.Any]] = None,
+ locals: t.Mapping[str, t.Any] | None = None,
) -> "TemplateModule":
"""As template module creation can invoke template code for
asynchronous executions this method must be used instead of the
)
@internalcode
- def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule":
+ def _get_default_module(self, ctx: Context | None = None) -> "TemplateModule":
"""If a context is passed in, this means that the template was
imported. Imported templates have access to the current
template's globals by default, but they can only be accessed via
return self._module
async def _get_default_module_async(
- self, ctx: t.Optional[Context] = None
+ self, ctx: Context | None = None
) -> "TemplateModule":
if ctx is not None:
keys = ctx.globals_keys - self.globals.keys()
self,
template: Template,
context: Context,
- body_stream: t.Optional[t.Iterable[str]] = None,
+ body_stream: t.Iterable[str] | None = None,
) -> None:
if body_stream is None:
if context.environment.is_async:
self._template = template
self._undefined_to_none = undefined_to_none
- def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]:
+ def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any | None:
context = self._template.new_context(dict(*args, **kwargs))
consume(self._template.root_render_func(context))
rv = context.vars["result"]
def dump(
self,
- fp: t.Union[str, t.IO[bytes]],
- encoding: t.Optional[str] = None,
- errors: t.Optional[str] = "strict",
+ fp: str | t.IO[bytes],
+ encoding: str | None = None,
+ errors: str | None = "strict",
) -> None:
"""Dump the complete stream into a file or file-like object.
Per default strings are written, if you want to encode
class TemplateError(Exception):
"""Baseclass for all template errors."""
- def __init__(self, message: t.Optional[str] = None) -> None:
+ def __init__(self, message: str | None = None) -> None:
super().__init__(message)
@property
- def message(self) -> t.Optional[str]:
+ def message(self) -> str | None:
return self.args[0] if self.args else None
# Silence the Python warning about message being deprecated since
# it's not valid here.
- message: t.Optional[str] = None
+ message: str | None = None
def __init__(
self,
- name: t.Optional[t.Union[str, "Undefined"]],
- message: t.Optional[str] = None,
+ name: t.Union[str, "Undefined"] | None,
+ message: str | None = None,
) -> None:
IOError.__init__(self, name)
def __init__(
self,
names: t.Sequence[t.Union[str, "Undefined"]] = (),
- message: t.Optional[str] = None,
+ message: str | None = None,
) -> None:
if message is None:
from .runtime import Undefined
self,
message: str,
lineno: int,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
) -> None:
super().__init__(message)
self.lineno = lineno
self.name = name
self.filename = filename
- self.source: t.Optional[str] = None
+ self.source: str | None = None
# this is set to True if the debug.translate_syntax_error
# function translated the syntax error into a new traceback
self, context: str, singular: str, plural: str, n: int
) -> str: ...
- _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]
+ _SupportedTranslations = _TranslationsBasic | _TranslationsContext
# I18N functions available in Jinja templates. If the I18N library
return rv
def preprocess(
- self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
+ self, source: str, name: str | None, filename: str | None = None
) -> str:
"""This method is called before the actual lexing and can be used to
preprocess the source. The `filename` is optional. The return value
"""
return stream
- def parse(self, parser: "Parser") -> t.Union[nodes.Node, list[nodes.Node]]:
+ def parse(self, parser: "Parser") -> nodes.Node | list[nodes.Node]:
"""If any of the :attr:`tags` matched this method is called with the
parser as first argument. The token the parser stream is pointing at
is the name token that matched. This method has to return one or a
"""
raise NotImplementedError()
- def attr(
- self, name: str, lineno: t.Optional[int] = None
- ) -> nodes.ExtensionAttribute:
+ def attr(self, name: str, lineno: int | None = None) -> nodes.ExtensionAttribute:
"""Return an attribute node for the current extension. This is useful
to pass constants on extensions to generated template code.
def call_method(
self,
name: str,
- args: t.Optional[list[nodes.Expr]] = None,
- kwargs: t.Optional[list[nodes.Keyword]] = None,
- dyn_args: t.Optional[nodes.Expr] = None,
- dyn_kwargs: t.Optional[nodes.Expr] = None,
- lineno: t.Optional[int] = None,
+ args: list[nodes.Expr] | None = None,
+ kwargs: list[nodes.Keyword] | None = None,
+ dyn_args: nodes.Expr | None = None,
+ dyn_kwargs: nodes.Expr | None = None,
+ lineno: int | None = None,
) -> nodes.Call:
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
@pass_context
def _gettext_alias(
__context: Context, *args: t.Any, **kwargs: t.Any
-) -> t.Union[t.Any, Undefined]:
+) -> t.Any | Undefined:
return __context.call(__context.resolve("gettext"), *args, **kwargs)
)
def _install(
- self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
+ self, translations: "_SupportedTranslations", newstyle: bool | None = None
) -> None:
# ugettext and ungettext are preferred in case the I18N library
# is providing compatibility with older Python versions.
gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
)
- def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
+ def _install_null(self, newstyle: bool | None = None) -> None:
import gettext
translations = gettext.NullTranslations()
self,
gettext: t.Callable[[str], str],
ngettext: t.Callable[[str, str, int], str],
- newstyle: t.Optional[bool] = None,
- pgettext: t.Optional[t.Callable[[str, str], str]] = None,
- npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
+ newstyle: bool | None = None,
+ pgettext: t.Callable[[str, str], str] | None = None,
+ npgettext: t.Callable[[str, str, str, int], str] | None = None,
) -> None:
if newstyle is not None:
self.environment.newstyle_gettext = newstyle # type: ignore
def _extract(
self,
- source: t.Union[str, nodes.Template],
+ source: str | nodes.Template,
gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
- ) -> t.Iterator[
- tuple[int, str, t.Union[t.Optional[str], tuple[t.Optional[str], ...]]]
- ]:
+ ) -> t.Iterator[tuple[int, str, str | None | tuple[str | None, ...]]]:
if isinstance(source, str):
source = self.environment.parse(source)
return extract_from_ast(source, gettext_functions)
- def parse(self, parser: "Parser") -> t.Union[nodes.Node, list[nodes.Node]]:
+ def parse(self, parser: "Parser") -> nodes.Node | list[nodes.Node]:
"""Parse a translatable tag."""
lineno = next(parser.stream).lineno
# find all the variables referenced. Additionally a variable can be
# defined in the body of the trans block too, but this is checked at
# a later state.
- plural_expr: t.Optional[nodes.Expr] = None
- plural_expr_assignment: t.Optional[nodes.Assign] = None
+ plural_expr: nodes.Expr | None = None
+ plural_expr_assignment: nodes.Assign | None = None
num_called_num = False
variables: dict[str, nodes.Expr] = {}
trimmed = None
def _make_node(
self,
singular: str,
- plural: t.Optional[str],
- context: t.Optional[str],
+ plural: str | None,
+ context: str | None,
variables: dict[str, nodes.Expr],
- plural_expr: t.Optional[nodes.Expr],
+ plural_expr: nodes.Expr | None,
vars_referenced: bool,
num_called_num: bool,
) -> nodes.Output:
tags = {"break", "continue"}
- def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
+ def parse(self, parser: "Parser") -> nodes.Break | nodes.Continue:
token = next(parser.stream)
if token.value == "break":
return nodes.Break(lineno=token.lineno)
ast: nodes.Template,
gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
babel_style: bool = True,
-) -> t.Iterator[tuple[int, str, t.Union[t.Optional[str], tuple[t.Optional[str], ...]]]]:
+) -> t.Iterator[tuple[int, str, str | None | tuple[str | None, ...]]]:
"""Extract localizable strings from the given template node. Per
default this function returns matches in babel style that means non string
parameters as well as keyword arguments are returned as `None`. This
to extract any comments. For comment support you have to use the babel
extraction interface or extract comments yourself.
"""
- out: t.Union[t.Optional[str], tuple[t.Optional[str], ...]]
+ out: str | None | tuple[str | None, ...]
for node in ast.find_all(nodes.Call):
if (
):
continue
- strings: list[t.Optional[str]] = []
+ strings: list[str | None] = []
for arg in node.args:
if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
keywords: t.Sequence[str],
comment_tags: t.Sequence[str],
options: dict[str, t.Any],
-) -> t.Iterator[
- tuple[int, str, t.Union[t.Optional[str], tuple[t.Optional[str], ...]], list[str]]
-]:
+) -> t.Iterator[tuple[int, str, str | None | tuple[str | None, ...], list[str]]]:
"""Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
def make_attrgetter(
environment: "Environment",
- attribute: t.Optional[t.Union[str, int]],
- postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
- default: t.Optional[t.Any] = None,
+ attribute: str | int | None,
+ postprocess: t.Callable[[t.Any], t.Any] | None = None,
+ default: t.Any | None = None,
) -> t.Callable[[t.Any], t.Any]:
"""Returns a callable that looks up the given attribute from a
passed object with the rules of the environment. Dots are allowed
def make_multi_attrgetter(
environment: "Environment",
- attribute: t.Optional[t.Union[str, int]],
- postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+ attribute: str | int | None,
+ postprocess: t.Callable[[t.Any], t.Any] | None = None,
) -> t.Callable[[t.Any], list[t.Any]]:
"""Returns a callable that looks up the given comma separated
attributes from a passed object with the rules of the environment.
Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
"""
if isinstance(attribute, str):
- split: t.Sequence[t.Union[str, int, None]] = attribute.split(",")
+ split: t.Sequence[str | int | None] = attribute.split(",")
else:
split = [attribute]
def _prepare_attribute_parts(
- attr: t.Optional[t.Union[str, int]],
-) -> list[t.Union[str, int]]:
+ attr: str | int | None,
+) -> list[str | int]:
if attr is None:
return []
return [attr]
-def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup:
+def do_forceescape(value: "str | HasHTML") -> Markup:
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, "__html__"):
value = t.cast("HasHTML", value).__html__()
def do_urlencode(
- value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[tuple[str, t.Any]]],
+ value: str | t.Mapping[str, t.Any] | t.Iterable[tuple[str, t.Any]],
) -> str:
"""Quote data for use in a URL path or query using UTF-8.
@pass_eval_context
def do_replace(
- eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None
+ eval_ctx: "EvalContext", s: str, old: str, new: str, count: int | None = None
) -> str:
"""Return a copy of the value with all occurrences of a substring
replaced with a new one. The first argument is the substring
return soft_str(s).lower()
-def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[tuple[K, V]]:
+def do_items(value: t.Mapping[K, V] | Undefined) -> t.Iterator[tuple[K, V]]:
"""Return an iterator over the ``(key, value)`` items of a mapping.
``x|items`` is the same as ``x.items()``, except if ``x`` is
value: "t.Iterable[V]",
reverse: bool = False,
case_sensitive: bool = False,
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
) -> "list[V]":
"""Sort an iterable using Python's :func:`sorted`.
environment: "Environment",
value: "t.Iterable[V]",
case_sensitive: bool = False,
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
) -> "t.Iterator[V]":
"""Returns a list of unique items from the given iterable.
@async_variant(sync_do_unique) # type: ignore
async def do_unique(
environment: "Environment",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
case_sensitive: bool = False,
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
) -> "t.Iterator[V]":
return sync_do_unique(
environment, await auto_to_list(value), case_sensitive, attribute
value: "t.Iterable[V]",
func: "t.Callable[..., V]",
case_sensitive: bool,
- attribute: t.Optional[t.Union[str, int]],
-) -> "t.Union[V, Undefined]":
+ attribute: str | int | None,
+) -> "V | Undefined":
it = iter(value)
try:
environment: "Environment",
value: "t.Iterable[V]",
case_sensitive: bool = False,
- attribute: t.Optional[t.Union[str, int]] = None,
-) -> "t.Union[V, Undefined]":
+ attribute: str | int | None = None,
+) -> "V | Undefined":
"""Return the smallest item from the sequence.
.. sourcecode:: jinja
environment: "Environment",
value: "t.Iterable[V]",
case_sensitive: bool = False,
- attribute: t.Optional[t.Union[str, int]] = None,
-) -> "t.Union[V, Undefined]":
+ attribute: str | int | None = None,
+) -> "V | Undefined":
"""Return the largest item from the sequence.
.. sourcecode:: jinja
eval_ctx: "EvalContext",
value: t.Iterable[t.Any],
d: str = "",
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
) -> str:
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
@async_variant(sync_do_join) # type: ignore
async def do_join(
eval_ctx: "EvalContext",
- value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+ value: t.AsyncIterable[t.Any] | t.Iterable[t.Any],
d: str = "",
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
) -> str:
return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute)
@pass_environment
-def sync_do_first(
- environment: "Environment", seq: "t.Iterable[V]"
-) -> "t.Union[V, Undefined]":
+def sync_do_first(environment: "Environment", seq: "t.Iterable[V]") -> "V | Undefined":
"""Return the first item of a sequence."""
try:
return next(iter(seq))
@async_variant(sync_do_first) # type: ignore
async def do_first(
- environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]"
-) -> "t.Union[V, Undefined]":
+ environment: "Environment", seq: "t.AsyncIterable[V] | t.Iterable[V]"
+) -> "V | Undefined":
try:
return await auto_aiter(seq).__anext__()
except StopAsyncIteration:
@pass_environment
-def do_last(
- environment: "Environment", seq: "t.Reversible[V]"
-) -> "t.Union[V, Undefined]":
+def do_last(environment: "Environment", seq: "t.Reversible[V]") -> "V | Undefined":
"""Return the last item of a sequence.
Note: Does not work with generators. You may want to explicitly
@pass_context
-def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]":
+def do_random(context: "Context", seq: "t.Sequence[V]") -> "V | Undefined":
"""Return a random item from the sequence."""
try:
return random.choice(seq)
return context.environment.undefined("No random item, sequence was empty.")
-def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str:
+def do_filesizeformat(value: str | float | int, binary: bool = False) -> str:
"""Format the value like a 'human-readable' file size (i.e. 13 kB,
4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
Giga, etc.), if the second parameter is set to `True` the binary
def do_urlize(
eval_ctx: "EvalContext",
value: str,
- trim_url_limit: t.Optional[int] = None,
+ trim_url_limit: int | None = None,
nofollow: bool = False,
- target: t.Optional[str] = None,
- rel: t.Optional[str] = None,
- extra_schemes: t.Optional[t.Iterable[str]] = None,
+ target: str | None = None,
+ rel: str | None = None,
+ extra_schemes: t.Iterable[str] | None = None,
) -> str:
"""Convert URLs in text into clickable links.
def do_indent(
- s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False
+ s: str, width: int | str = 4, first: bool = False, blank: bool = False
) -> str:
"""Return a copy of the string with each line indented by 4 spaces. The
first line and blank lines are not indented by default.
length: int = 255,
killwords: bool = False,
end: str = "...",
- leeway: t.Optional[int] = None,
+ leeway: int | None = None,
) -> str:
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
s: str,
width: int = 79,
break_long_words: bool = True,
- wrapstring: t.Optional[str] = None,
+ wrapstring: str | None = None,
break_on_hyphens: bool = True,
) -> str:
"""Wrap a string to the given width. Existing newlines are treated
return soft_str(value) % (kwargs or args)
-def do_trim(value: str, chars: t.Optional[str] = None) -> str:
+def do_trim(value: str, chars: str | None = None) -> str:
"""Strip leading and trailing characters, by default whitespace."""
return soft_str(value).strip(chars)
-def do_striptags(value: "t.Union[str, HasHTML]") -> str:
+def do_striptags(value: "str | HasHTML") -> str:
"""Strip SGML/XML tags and replace adjacent whitespace by one space."""
if hasattr(value, "__html__"):
value = t.cast("HasHTML", value).__html__()
def sync_do_slice(
- value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None
+ value: "t.Collection[V]", slices: int, fill_with: "V | None" = None
) -> "t.Iterator[list[V]]":
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
@async_variant(sync_do_slice) # type: ignore
async def do_slice(
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
slices: int,
- fill_with: t.Optional[t.Any] = None,
+ fill_with: t.Any | None = None,
) -> "t.Iterator[list[V]]":
return sync_do_slice(await auto_to_list(value), slices, fill_with)
def do_batch(
- value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None
+ value: "t.Iterable[V]", linecount: int, fill_with: "V | None" = None
) -> "t.Iterator[list[V]]":
"""
A filter that batches items. It works pretty much like `slice`
def sync_do_groupby(
environment: "Environment",
value: "t.Iterable[V]",
- attribute: t.Union[str, int],
- default: t.Optional[t.Any] = None,
+ attribute: str | int,
+ default: t.Any | None = None,
case_sensitive: bool = False,
) -> "list[_GroupTuple]":
"""Group a sequence of objects by an attribute using Python's
@async_variant(sync_do_groupby) # type: ignore
async def do_groupby(
environment: "Environment",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
- attribute: t.Union[str, int],
- default: t.Optional[t.Any] = None,
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
+ attribute: str | int,
+ default: t.Any | None = None,
case_sensitive: bool = False,
) -> "list[_GroupTuple]":
expr = make_attrgetter(
def sync_do_sum(
environment: "Environment",
iterable: "t.Iterable[V]",
- attribute: t.Optional[t.Union[str, int]] = None,
+ attribute: str | int | None = None,
start: V = 0, # type: ignore
) -> V:
"""Returns the sum of a sequence of numbers plus the value of parameter
@async_variant(sync_do_sum) # type: ignore
async def do_sum(
environment: "Environment",
- iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
- attribute: t.Optional[t.Union[str, int]] = None,
+ iterable: "t.AsyncIterable[V] | t.Iterable[V]",
+ attribute: str | int | None = None,
start: V = 0, # type: ignore
) -> V:
rv = start
@async_variant(sync_do_list) # type: ignore
-async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "list[V]":
+async def do_list(value: "t.AsyncIterable[V] | t.Iterable[V]") -> "list[V]":
return await auto_to_list(value)
def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]": ...
-def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]:
+def do_reverse(value: str | t.Iterable[V]) -> str | t.Iterable[V]:
"""Reverse the object or return an iterator that iterates over it the other
way round.
"""
@pass_environment
-def do_attr(
- environment: "Environment", obj: t.Any, name: str
-) -> t.Union[Undefined, t.Any]:
+def do_attr(environment: "Environment", obj: t.Any, name: str) -> Undefined | t.Any:
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo.bar``, but returns undefined instead of falling back to ``foo["bar"]``
if the attribute doesn't exist.
value: t.Iterable[t.Any],
*,
attribute: str = ...,
- default: t.Optional[t.Any] = None,
+ default: t.Any | None = None,
) -> t.Iterable[t.Any]: ...
@typing.overload
def do_map(
context: "Context",
- value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+ value: t.AsyncIterable[t.Any] | t.Iterable[t.Any],
name: str,
*args: t.Any,
**kwargs: t.Any,
@typing.overload
def do_map(
context: "Context",
- value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+ value: t.AsyncIterable[t.Any] | t.Iterable[t.Any],
*,
attribute: str = ...,
- default: t.Optional[t.Any] = None,
+ default: t.Any | None = None,
) -> t.Iterable[t.Any]: ...
@async_variant(sync_do_map) # type: ignore
async def do_map(
context: "Context",
- value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+ value: t.AsyncIterable[t.Any] | t.Iterable[t.Any],
*args: t.Any,
**kwargs: t.Any,
) -> t.AsyncIterable[t.Any]:
@async_variant(sync_do_select) # type: ignore
async def do_select(
context: "Context",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
*args: t.Any,
**kwargs: t.Any,
) -> "t.AsyncIterator[V]":
@async_variant(sync_do_reject) # type: ignore
async def do_reject(
context: "Context",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
*args: t.Any,
**kwargs: t.Any,
) -> "t.AsyncIterator[V]":
@async_variant(sync_do_selectattr) # type: ignore
async def do_selectattr(
context: "Context",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
*args: t.Any,
**kwargs: t.Any,
) -> "t.AsyncIterator[V]":
@async_variant(sync_do_rejectattr) # type: ignore
async def do_rejectattr(
context: "Context",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
*args: t.Any,
**kwargs: t.Any,
) -> "t.AsyncIterator[V]":
@pass_eval_context
def do_tojson(
- eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None
+ eval_ctx: "EvalContext", value: t.Any, indent: int | None = None
) -> Markup:
"""Serialize an object to a string of JSON, and mark it safe to
render in HTML. This filter is only for use in HTML documents.
async def async_select_or_reject(
context: "Context",
- value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+ value: "t.AsyncIterable[V] | t.Iterable[V]",
args: tuple[t.Any, ...],
kwargs: dict[str, t.Any],
modfunc: t.Callable[[t.Any], t.Any],
class Symbols:
def __init__(
- self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
+ self, parent: t.Optional["Symbols"] = None, level: int | None = None
) -> None:
if level is None:
if parent is None:
visitor = RootVisitor(self)
visitor.visit(node, **kwargs)
- def _define_ref(
- self, name: str, load: t.Optional[tuple[str, t.Optional[str]]] = None
- ) -> str:
+ def _define_ref(self, name: str, load: tuple[str, str | None] | None = None) -> str:
ident = f"l_{self.level}_{name}"
self.refs[name] = ident
if load is not None:
self.loads[ident] = load
return ident
- def find_load(self, target: str) -> t.Optional[t.Any]:
+ def find_load(self, target: str) -> t.Any | None:
if target in self.loads:
return self.loads[target]
return None
- def find_ref(self, name: str) -> t.Optional[str]:
+ def find_ref(self, name: str) -> str | None:
if name in self.refs:
return self.refs[name]
def dump_stores(self) -> dict[str, str]:
rv: dict[str, str] = {}
- node: t.Optional[Symbols] = self
+ node: Symbols | None = self
while node is not None:
for name in sorted(node.stores):
def dump_param_targets(self) -> set[str]:
rv = set()
- node: t.Optional[Symbols] = self
+ node: Symbols | None = self
while node is not None:
for target, (instr, _) in self.loads.items():
self.message = message
self.error_class = cls
- def __call__(self, lineno: int, filename: t.Optional[str]) -> "te.NoReturn":
+ def __call__(self, lineno: int, filename: str | None) -> "te.NoReturn":
raise self.error_class(self.message, lineno, filename)
def __init__(
self,
generator: t.Iterable[Token],
- name: t.Optional[str],
- filename: t.Optional[str],
+ name: str | None,
+ filename: str | None,
):
self._iter = iter(generator)
self._pushed: deque[Token] = deque()
for _ in range(n):
next(self)
- def next_if(self, expr: str) -> t.Optional[Token]:
+ def next_if(self, expr: str) -> Token | None:
"""Perform the token test and return the token if it matched.
Otherwise the return value is `None`.
"""
class _Rule(t.NamedTuple):
pattern: t.Pattern[str]
- tokens: t.Union[str, tuple[str, ...], tuple[Failure]]
- command: t.Optional[str]
+ tokens: str | tuple[str, ...] | tuple[Failure]
+ command: str | None
class Lexer:
def tokenize(
self,
source: str,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
- state: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
+ state: str | None = None,
) -> TokenStream:
"""Calls tokeniter + tokenize and wraps it in a token stream."""
stream = self.tokeniter(source, name, filename, state)
def wrap(
self,
stream: t.Iterable[tuple[int, str, str]],
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
) -> t.Iterator[Token]:
"""This is called with the stream as returned by `tokenize` and wraps
every token in a :class:`Token` and converts the value.
def tokeniter(
self,
source: str,
- name: t.Optional[str],
- filename: t.Optional[str] = None,
- state: t.Optional[str] = None,
+ name: str | None,
+ filename: str | None = None,
+ state: str | None = None,
) -> t.Iterator[tuple[int, str, str]]:
"""This method tokenizes the text and returns the tokens in a
generator. Use this method if you just want to tokenize a template.
def get_source(
self, environment: "Environment", template: str
- ) -> tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+ ) -> tuple[str, str | None, t.Callable[[], bool] | None]:
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
self,
environment: "Environment",
name: str,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
def get_source(
self, environment: "Environment", template: str
- ) -> tuple[str, str, t.Optional[t.Callable[[], bool]]]:
+ ) -> tuple[str, str, t.Callable[[], bool] | None]:
# Use posixpath even on Windows to avoid "drive:" or UNC
# segments breaking out of the search directory. Use normpath to
# convert Windows altsep to sep.
p = os.path.normpath(
posixpath.join(self._template_root, *split_template_path(template))
)
- up_to_date: t.Optional[t.Callable[[], bool]]
+ up_to_date: t.Callable[[], bool] | None
if self._archive is None:
# Package is a directory.
self,
load_func: t.Callable[
[str],
- t.Optional[
- t.Union[
- str, tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]
- ]
- ],
+ str | tuple[str, str | None, t.Callable[[], bool] | None] | None,
],
) -> None:
self.load_func = load_func
def get_source(
self, environment: "Environment", template: str
- ) -> tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+ ) -> tuple[str, str | None, t.Callable[[], bool] | None]:
rv = self.load_func(template)
if rv is None:
def get_source(
self, environment: "Environment", template: str
- ) -> tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+ ) -> tuple[str, str | None, t.Callable[[], bool] | None]:
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
self,
environment: "Environment",
name: str,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
loader, local_name = self.get_loader(name)
try:
def get_source(
self, environment: "Environment", template: str
- ) -> tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+ ) -> tuple[str, str | None, t.Callable[[], bool] | None]:
for loader in self.loaders:
try:
return loader.get_source(environment, template)
self,
environment: "Environment",
name: str,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
for loader in self.loaders:
try:
self,
environment: "Environment",
name: str,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
) -> "Template":
key = self.get_template_key(name)
module = f"{self.package_name}.{key}"
_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
-_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include]
+_RefType = nodes.Extends | nodes.FromImport | nodes.Import | nodes.Include
-def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]:
+def find_referenced_templates(ast: nodes.Template) -> t.Iterator[str | None]:
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
from .environment import Template
-def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
+def native_concat(values: t.Iterable[t.Any]) -> t.Any | None:
"""Return a native Python type from the list of compiled nodes. If
the result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed with
"""
def __init__(
- self, environment: "Environment", template_name: t.Optional[str] = None
+ self, environment: "Environment", template_name: str | None = None
) -> None:
self.environment = environment
if callable(environment.autoescape):
self.__dict__.update(old)
-def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext:
+def get_eval_context(node: "Node", ctx: EvalContext | None) -> EvalContext:
if ctx is None:
if node.environment is None:
raise RuntimeError(
f"{type(self).__name__!r} takes 0 or {len(self.fields)}"
f" argument{'s' if len(self.fields) != 1 else ''}"
)
- for name, arg in zip(self.fields, fields):
+ for name, arg in zip(self.fields, fields, strict=False):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
def iter_fields(
self,
- exclude: t.Optional[t.Container[str]] = None,
- only: t.Optional[t.Container[str]] = None,
+ exclude: t.Container[str] | None = None,
+ only: t.Container[str] | None = None,
) -> t.Iterator[tuple[str, t.Any]]:
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
def iter_child_nodes(
self,
- exclude: t.Optional[t.Container[str]] = None,
- only: t.Optional[t.Container[str]] = None,
+ exclude: t.Container[str] | None = None,
+ only: t.Container[str] | None = None,
) -> t.Iterator["Node"]:
"""Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values of they are nodes. If the value
elif isinstance(item, Node):
yield item
- def find(self, node_type: type[_NodeBound]) -> t.Optional[_NodeBound]:
+ def find(self, node_type: type[_NodeBound]) -> _NodeBound | None:
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
return None
def find_all(
- self, node_type: t.Union[type[_NodeBound], tuple[type[_NodeBound], ...]]
+ self, node_type: type[_NodeBound] | tuple[type[_NodeBound], ...]
) -> t.Iterator[_NodeBound]:
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
return f"{type(self).__name__}({args_str})"
def dump(self) -> str:
- def _dump(node: t.Union[Node, t.Any]) -> None:
+ def _dump(node: Node | t.Any) -> None:
if not isinstance(node, Node):
buf.append(repr(node))
return
iter: Node
body: list[Node]
else_: list[Node]
- test: t.Optional[Node]
+ test: Node | None
recursive: bool
fields = ("template", "names", "with_context")
template: "Expr"
- names: list[t.Union[str, tuple[str, str]]]
+ names: list[str | tuple[str, str]]
with_context: bool
abstract = True
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
operator: str
abstract = True
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
operator: str
abstract = True
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
fields = ("value",)
value: t.Any
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
return self.value
@classmethod
def from_untrusted(
cls,
value: t.Any,
- lineno: t.Optional[int] = None,
- environment: "t.Optional[Environment]" = None,
+ lineno: int | None = None,
+ environment: "Environment | None" = None,
) -> "Const":
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise an
`Impossible` exception.
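A brief sketch of both outcomes (values are illustrative):

from jinja2.nodes import Const, Impossible

print(Const.from_untrusted(42))     # Const(value=42)

try:
    Const.from_untrusted(object())  # no safe literal representation
except Impossible:
    print("not representable as a constant")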
fields = ("data",)
data: str
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> str:
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
items: list[Expr]
ctx: str
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> tuple[t.Any, ...]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> tuple[t.Any, ...]:
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
fields = ("items",)
items: list[Expr]
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> list[t.Any]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> list[t.Any]:
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
fields = ("items",)
items: list["Pair"]
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> dict[t.Any, t.Any]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> dict[t.Any, t.Any]:
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
key: Expr
value: Expr
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> tuple[t.Any, t.Any]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> tuple[t.Any, t.Any]:
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
key: str
value: Expr
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> tuple[str, t.Any]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> tuple[str, t.Any]:
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
fields = ("test", "expr1", "expr2")
test: Expr
expr1: Expr
- expr2: t.Optional[Expr]
+ expr2: Expr | None
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
def args_as_const(
- node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext]
+ node: t.Union["_FilterTestCommon", "Call"], eval_ctx: EvalContext | None
) -> tuple[list[t.Any], dict[t.Any, t.Any]]:
args = [x.as_const(eval_ctx) for x in node.args]
kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
name: str
args: list[Expr]
kwargs: list[Pair]
- dyn_args: t.Optional[Expr]
- dyn_kwargs: t.Optional[Expr]
+ dyn_args: Expr | None
+ dyn_kwargs: Expr | None
abstract = True
_is_filter = True
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
and is applied to the content of the block.
"""
- node: t.Optional[Expr] # type: ignore
+ node: Expr | None # type: ignore
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
if self.node is None:
raise Impossible()
node: Expr
args: list[Expr]
kwargs: list[Keyword]
- dyn_args: t.Optional[Expr]
- dyn_kwargs: t.Optional[Expr]
+ dyn_args: Expr | None
+ dyn_kwargs: Expr | None
class Getitem(Expr):
arg: Expr
ctx: str
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
if self.ctx != "load":
raise Impossible()
attr: str
ctx: str
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
if self.ctx != "load":
raise Impossible()
"""
fields = ("start", "stop", "step")
- start: t.Optional[Expr]
- stop: t.Optional[Expr]
- step: t.Optional[Expr]
+ start: Expr | None
+ stop: Expr | None
+ step: Expr | None
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> slice:
eval_ctx = get_eval_context(self, eval_ctx)
- def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]:
+ def const(obj: Expr | None) -> t.Any | None:
if obj is None:
return None
return obj.as_const(eval_ctx)
fields = ("nodes",)
nodes: list[Expr]
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> str:
eval_ctx = get_eval_context(self, eval_ctx)
return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)
expr: Expr
ops: list["Operand"]
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
result = value = self.expr.as_const(eval_ctx)
operator = "and"
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
operator = "or"
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
fields = ("expr",)
expr: Expr
- def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> Markup:
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
fields = ("expr",)
expr: Expr
- def as_const(
- self, eval_ctx: t.Optional[EvalContext] = None
- ) -> t.Union[Markup, t.Any]:
+ def as_const(self, eval_ctx: EvalContext | None = None) -> Markup | t.Any:
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
class Optimizer(NodeTransformer):
- def __init__(self, environment: "t.Optional[Environment]") -> None:
+ def __init__(self, environment: "Environment | None") -> None:
self.environment = environment
def generic_visit(
self,
environment: "Environment",
source: str,
- name: t.Optional[str] = None,
- filename: t.Optional[str] = None,
- state: t.Optional[str] = None,
+ name: str | None = None,
+ filename: str | None = None,
+ state: str | None = None,
) -> None:
self.environment = environment
self.stream = environment._tokenize(source, name, filename, state)
self.filename = filename
self.closed = False
self.extensions: dict[
- str, t.Callable[[Parser], t.Union[nodes.Node, list[nodes.Node]]]
+ str, t.Callable[[Parser], nodes.Node | list[nodes.Node]]
] = {}
for extension in environment.iter_extensions():
for tag in extension.tags:
def fail(
self,
msg: str,
- lineno: t.Optional[int] = None,
+ lineno: int | None = None,
exc: type[TemplateSyntaxError] = TemplateSyntaxError,
) -> "te.NoReturn":
"""Convenience method that raises `exc` with the message, passed
def _fail_ut_eof(
self,
- name: t.Optional[str],
+ name: str | None,
end_token_stack: list[tuple[str, ...]],
- lineno: t.Optional[int],
+ lineno: int | None,
) -> "te.NoReturn":
expected: set[str] = set()
for exprs in end_token_stack:
expected.update(map(describe_token_expr, exprs))
if end_token_stack:
- currently_looking: t.Optional[str] = " or ".join(
+ currently_looking: str | None = " or ".join(
map(repr, map(describe_token_expr, end_token_stack[-1]))
)
else:
self.fail(" ".join(message), lineno)
- def fail_unknown_tag(
- self, name: str, lineno: t.Optional[int] = None
- ) -> "te.NoReturn":
+ def fail_unknown_tag(self, name: str, lineno: int | None = None) -> "te.NoReturn":
"""Called if the parser encounters an unknown tag. Tries to fail
with a human-readable error message that could help identify
the problem.
def fail_eof(
self,
- end_tokens: t.Optional[tuple[str, ...]] = None,
- lineno: t.Optional[int] = None,
+ end_tokens: tuple[str, ...] | None = None,
+ lineno: int | None = None,
) -> "te.NoReturn":
"""Like fail_unknown_tag but for end of template situations."""
stack = list(self._end_token_stack)
stack.append(end_tokens)
self._fail_ut_eof(None, stack, lineno)
- def is_tuple_end(self, extra_end_rules: t.Optional[tuple[str, ...]] = None) -> bool:
+ def is_tuple_end(self, extra_end_rules: tuple[str, ...] | None = None) -> bool:
"""Are we at the end of a tuple?"""
if self.stream.current.type in ("variable_end", "block_end", "rparen"):
return True
return self.stream.current.test_any(extra_end_rules) # type: ignore
return False
- def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
+ def free_identifier(self, lineno: int | None = None) -> nodes.InternalName:
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1
rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
return rv
- def parse_statement(self) -> t.Union[nodes.Node, list[nodes.Node]]:
+ def parse_statement(self) -> nodes.Node | list[nodes.Node]:
"""Parse a single statement."""
token = self.stream.current
if token.type != "name":
next(self.stream)
return result
- def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]:
+ def parse_set(self) -> nodes.Assign | nodes.AssignBlock:
"""Parse an assign statement."""
lineno = next(self.stream).lineno
target = self.parse_assign_target(with_namespace=True)
self,
with_tuple: bool = True,
name_only: bool = False,
- extra_end_rules: t.Optional[tuple[str, ...]] = None,
+ extra_end_rules: tuple[str, ...] | None = None,
with_namespace: bool = False,
- ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: ...
+ ) -> nodes.NSRef | nodes.Name | nodes.Tuple: ...
def parse_assign_target(
self,
with_tuple: bool = True,
name_only: bool = False,
- extra_end_rules: t.Optional[tuple[str, ...]] = None,
+ extra_end_rules: tuple[str, ...] | None = None,
with_namespace: bool = False,
- ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
+ ) -> nodes.NSRef | nodes.Name | nodes.Tuple:
"""Parse an assignment target. As Jinja allows assignments to
tuples, this function can parse all allowed assignment targets. By default
assignments to tuples are parsed; that can be disabled by passing
``with_tuple=False``.
def parse_condexpr(self) -> nodes.Expr:
lineno = self.stream.current.lineno
expr1 = self.parse_or()
- expr3: t.Optional[nodes.Expr]
+ expr3: nodes.Expr | None
while self.stream.skip_if("name:if"):
expr2 = self.parse_or()
self,
simplified: bool = False,
with_condexpr: bool = True,
- extra_end_rules: t.Optional[tuple[str, ...]] = None,
+ extra_end_rules: tuple[str, ...] | None = None,
explicit_parentheses: bool = False,
with_namespace: bool = False,
- ) -> t.Union[nodes.Tuple, nodes.Expr]:
+ ) -> nodes.Tuple | nodes.Expr:
"""Works like `parse_expression` but if multiple expressions are
delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
This method could also return a plain expression instead of a tuple
break
return node
- def parse_subscript(
- self, node: nodes.Expr
- ) -> t.Union[nodes.Getattr, nodes.Getitem]:
+ def parse_subscript(self, node: nodes.Expr) -> nodes.Getattr | nodes.Getitem:
token = next(self.stream)
arg: nodes.Expr
def parse_subscribed(self) -> nodes.Expr:
lineno = self.stream.current.lineno
- args: list[t.Optional[nodes.Expr]]
+ args: list[nodes.Expr | None]
if self.stream.current.type == "colon":
next(self.stream)
) -> tuple[
list[nodes.Expr],
list[nodes.Keyword],
- t.Union[nodes.Expr, None],
- t.Union[nodes.Expr, None],
+ nodes.Expr | None,
+ nodes.Expr | None,
]:
token = self.stream.expect("lparen")
args = []
return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
def parse_filter(
- self, node: t.Optional[nodes.Expr], start_inline: bool = False
- ) -> t.Optional[nodes.Expr]:
+ self, node: nodes.Expr | None, start_inline: bool = False
+ ) -> nodes.Expr | None:
while self.stream.current.type == "pipe" or start_inline:
if not start_inline:
next(self.stream)
node = nodes.Not(node, lineno=token.lineno)
return node
- def subparse(
- self, end_tokens: t.Optional[tuple[str, ...]] = None
- ) -> list[nodes.Node]:
+ def subparse(self, end_tokens: tuple[str, ...] | None = None) -> list[nodes.Node]:
body: list[nodes.Node] = []
data_buffer: list[nodes.Node] = []
add_data = data_buffer.append
def new_context(
environment: "Environment",
- template_name: t.Optional[str],
+ template_name: str | None,
blocks: dict[str, t.Callable[["Context"], t.Iterator[str]]],
- vars: t.Optional[dict[str, t.Any]] = None,
+ vars: dict[str, t.Any] | None = None,
shared: bool = False,
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
- locals: t.Optional[t.Mapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
+ locals: t.Mapping[str, t.Any] | None = None,
) -> "Context":
"""Internal helper for context creation."""
if vars is None:
self,
environment: "Environment",
parent: dict[str, t.Any],
- name: t.Optional[str],
+ name: str | None,
blocks: dict[str, t.Callable[["Context"], t.Iterator[str]]],
- globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+ globals: t.MutableMapping[str, t.Any] | None = None,
):
self.parent = parent
self.vars: dict[str, t.Any] = {}
" StopIteration exception"
)
- def derived(self, locals: t.Optional[dict[str, t.Any]] = None) -> "Context":
+ def derived(self, locals: dict[str, t.Any] | None = None) -> "Context":
"""Internal helper function to create a derived context. This is
used in situations where the system needs a new context in the same
template that is independent.
#: Current iteration of the loop, starting at 0.
index0 = -1
- _length: t.Optional[int] = None
+ _length: int | None = None
_after: t.Any = missing
_current: t.Any = missing
_before: t.Any = missing
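As a usage sketch, this counter backs the ``loop.index0`` variable inside ``{% for %}`` blocks:

from jinja2 import Environment

env = Environment()
tmpl = env.from_string("{% for item in items %}{{ loop.index0 }}={{ item }} {% endfor %}")
print(tmpl.render(items=["a", "b"]))   # 0=a 1=b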
@staticmethod
def _to_iterator( # type: ignore
- iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]],
+ iterable: t.Iterable[V] | t.AsyncIterable[V],
) -> t.AsyncIterator[V]:
return auto_aiter(iterable)
catch_kwargs: bool,
catch_varargs: bool,
caller: bool,
- default_autoescape: t.Optional[bool] = None,
+ default_autoescape: bool | None = None,
):
self._environment = environment
self._func = func
def __init__(
self,
- hint: t.Optional[str] = None,
+ hint: str | None = None,
obj: t.Any = missing,
- name: t.Optional[str] = None,
+ name: str | None = None,
exc: type[TemplateRuntimeError] = UndefinedError,
) -> None:
self._undefined_hint = hint
"""
return self.unop_table[operator](arg)
- def getitem(
- self, obj: t.Any, argument: t.Union[str, t.Any]
- ) -> t.Union[t.Any, Undefined]:
+ def getitem(self, obj: t.Any, argument: str | t.Any) -> t.Any | Undefined:
"""Subscribe an object from sandboxed code."""
try:
return obj[argument]
return self.unsafe_undefined(obj, argument)
return self.undefined(obj=obj, name=argument)
- def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]:
+ def getattr(self, obj: t.Any, attribute: str) -> t.Any | Undefined:
"""Subscribe an object from sandboxed code and prefer the
attribute. The attribute passed *must* be a bytestring.
"""
exc=SecurityError,
)
- def wrap_str_format(self, value: t.Any) -> t.Optional[t.Callable[..., str]]:
+ def wrap_str_format(self, value: t.Any) -> t.Callable[..., str] | None:
"""If the given value is a ``str.format`` or ``str.format_map`` method,
return a new function that handles sandboxing. This is done at access
rather than in :meth:`call`, so that calls made without ``call`` are
raise
-def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO[t.Any]]:
+def open_if_exists(filename: str, mode: str = "rb") -> t.IO[t.Any] | None:
"""Returns a file descriptor for the filename if that file exists,
otherwise ``None``.
"""
def urlize(
text: str,
- trim_url_limit: t.Optional[int] = None,
- rel: t.Optional[str] = None,
- target: t.Optional[str] = None,
- extra_schemes: t.Optional[t.Iterable[str]] = None,
+ trim_url_limit: int | None = None,
+ rel: str | None = None,
+ target: str | None = None,
+ extra_schemes: t.Iterable[str] | None = None,
) -> str:
"""Convert URLs in text into clickable links.
disabled_extensions: t.Collection[str] = (),
default_for_string: bool = True,
default: bool = False,
-) -> t.Callable[[t.Optional[str]], bool]:
+) -> t.Callable[[str | None], bool]:
"""Intelligently sets the initial value of autoescaping based on the
filename of the template. This is the recommended way to configure
autoescaping if you do not want to write a custom function yourself.
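The usual setup looks roughly like this (the loader path is illustrative):

from jinja2 import Environment, FileSystemLoader, select_autoescape

env = Environment(
    loader=FileSystemLoader("templates"),
    autoescape=select_autoescape(enabled_extensions=("html", "htm", "xml")),
)

# The returned callable can also be inspected directly:
chooser = select_autoescape(enabled_extensions=("html",))
print(chooser("index.html"), chooser("mail.txt"), chooser(None))   # True False True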
enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)
- def autoescape(template_name: t.Optional[str]) -> bool:
+ def autoescape(template_name: str | None) -> bool:
if template_name is None:
return default_for_string
template_name = template_name.lower()
def htmlsafe_json_dumps(
- obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
+ obj: t.Any, dumps: t.Callable[..., str] | None = None, **kwargs: t.Any
) -> markupsafe.Markup:
"""Serialize an object to a string of JSON with :func:`json.dumps`,
then replace HTML-unsafe characters with Unicode escapes and mark the
result safe with :class:`~markupsafe.Markup`.
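For example (a minimal sketch):

from jinja2.utils import htmlsafe_json_dumps

print(htmlsafe_json_dumps({"msg": "<script>alert('hi')</script>"}))
# {"msg": "\u003cscript\u003ealert(\u0027hi\u0027)\u003c/script\u003e"}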
(return value `None`) the `generic_visit` visitor is used instead.
"""
- def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]":
+ def get_visitor(self, node: Node) -> "VisitCallable | None":
"""Return the visitor function for this node or `None` if no visitor
exists for this node. In that case the generic visit function is
used instead.
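A sketch of that dispatch (class and variable names are invented for the example):

from jinja2 import Environment
from jinja2.visitor import NodeVisitor


class NameCollector(NodeVisitor):
    """Collect every Name node; all other node types fall back to generic_visit."""

    def __init__(self) -> None:
        self.names: list[str] = []

    def visit_Name(self, node) -> None:
        self.names.append(node.name)


env = Environment()
collector = NameCollector()
collector.visit(env.parse("{{ user.name }} {{ other }}"))
print(collector.names)   # ['user', 'other']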