# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/python/black
- rev: 23.3.0
+ rev: 24.1.1
hooks:
- id: black
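The rev bump above moves the hook to Black's 2024 stable style; the hunks that follow are the mechanical reformats it produces. Three of its rules account for most of this diff: bodies consisting only of ``...`` are collapsed onto the definition line ("dummy implementations"), conditional expressions that span multiple lines are wrapped in parentheses, and over-long assignments are split on the right-hand side rather than around the targets. A minimal sketch of the first rule::

    # Black 23.x
    def ping(self) -> None:
        ...

    # Black 24.1+
    def ping(self) -> None: ...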
::
- class MyType(AdaptOldConvertMethods, TypeEngine):
- ...
+ class MyType(AdaptOldConvertMethods, TypeEngine): ...
* The ``quote`` flag on ``Column`` and ``Table`` as well as
the ``quote_schema`` flag on ``Table`` now control quoting
::
class MyQuery(Query):
- def get(self, ident):
- ...
+ def get(self, ident): ...
session = sessionmaker(query_cls=MyQuery)()
directly::
@event.listens_for(Table, "column_reflect")
- def listen_for_col(inspector, table, column_info):
- ...
+ def listen_for_col(inspector, table, column_info): ...
:ticket:`2418`
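A fleshed-out listener of this shape mutates ``column_info`` in place during reflection. A minimal sketch, where coercing ``String`` to ``Unicode`` is an invented use case rather than anything in this diff::

    from sqlalchemy import event, String, Table, Unicode

    @event.listens_for(Table, "column_reflect")
    def listen_for_col(inspector, table, column_info):
        # changing the "type" entry changes the reflected Column
        if isinstance(column_info["type"], String):
            column_info["type"] = Unicode(column_info["type"].length)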
mutually-dependent module imports, like this::
@util.dependency_for("sqlalchemy.sql.dml")
- def insert(self, dml, *args, **kw):
- ...
+ def insert(self, dml, *args, **kw): ...
Above, the decorated function would be rewritten to no longer have the ``dml`` parameter
on the outside. This would confuse code-linting tools into seeing a missing parameter
addresses = relationship(Address, backref=backref("user", viewonly=True))
- class Address(Base):
- ...
+ class Address(Base): ...
u1 = session.query(User).filter_by(name="x").first()
def my_stmt(parameter, thing=False):
stmt = lambda_stmt(lambda: select(table))
- stmt += (
- lambda s: s.where(table.c.x > parameter)
- if thing
- else s.where(table.c.y == parameter)
+ stmt += lambda s: (
+ s.where(table.c.x > parameter) if thing else s.where(table.c.y == parameter)
)
return stmt
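``my_stmt()`` above returns a lambda statement that executes like any other ``select()``, while letting SQLAlchemy cache the SQL construction. A usage sketch, assuming ``engine`` from the surrounding example::

    with engine.connect() as conn:
        rows = conn.execute(my_stmt(5, thing=True)).all()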
Base = declarative_base(metadata=metadata_obj)
- class MyClass(Base):
- ...
+ class MyClass(Base): ...
session = Session()
Base = declarative_base()
- class MyClass(Base):
- ...
+ class MyClass(Base): ...
session = Session()
# we create a Parent class which knows nothing about Child
- class Parent(Base):
- ...
+ class Parent(Base): ...
# ... later, in Module B, which is imported after module A:
- class Child(Base):
- ...
+ class Child(Base): ...
from module_a import Parent
...
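Module B then completes the link; a minimal sketch with the column layout assumed, where defining ``Child`` against the already-imported ``Parent`` supplies the relationship for both sides::

    # module_b.py
    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import relationship

    from module_a import Base, Parent  # assumes module_a exports Base

    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(ForeignKey("parent.id"))
        parent = relationship(Parent, backref="children")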
@collection.iterator
- def hey_use_this_instead_for_iteration(self):
- ...
+ def hey_use_this_instead_for_iteration(self): ...
There is no requirement to be "list-like" or "set-like" at all. Collection classes
can be any shape, so long as they have the append, remove and iterate
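A minimal sketch of such a free-form collection, with the three roles marked by the ``collection`` decorators; the dict-of-ids storage is invented for illustration::

    from sqlalchemy.orm.collections import collection

    class MyBag:
        # neither list-like nor set-like; just append/remove/iterate
        def __init__(self):
            self._members = {}

        @collection.appender
        def _append(self, member):
            self._members[id(member)] = member

        @collection.remover
        def _remove(self, member):
            self._members.pop(id(member), None)

        @collection.iterator
        def _iterate(self):
            return iter(self._members.values())

A class like this is usable directly as ``relationship(Child, collection_class=MyBag)``.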
)
name: Mapped[Optional[str]] = Mapped._special_method(Column(String))
- def __init__(self, id: Optional[int] = ..., name: Optional[str] = ...) -> None:
- ...
+ def __init__(self, id: Optional[int] = ..., name: Optional[str] = ...) -> None: ...
some_user = User(id=5, name="user")
}
- class Manager(Employee):
- ...
+ class Manager(Employee): ...
- class Engineer(Employee):
- ...
+ class Engineer(Employee): ...
If the foreign key constraint is on a table corresponding to a subclass,
the relationship should target that subclass instead. In the example
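A minimal sketch of that rule, assuming the foreign key column lives on the ``engineer`` table::

    class Engineer(Employee):
        __tablename__ = "engineer"
        id = Column(ForeignKey("employee.id"), primary_key=True)
        company_id = Column(ForeignKey("company.id"))
        company = relationship("Company", backref="engineers")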
}
- class Engineer(Employee):
- ...
+ class Engineer(Employee): ...
Above, the ``Manager`` class will have a ``Manager.company`` attribute;
``Company`` will have a ``Company.managers`` attribute that always
pass
- class User(BaseA):
- ...
+ class User(BaseA): ...
- class Address(BaseA):
- ...
+ class Address(BaseA): ...
- class GameInfo(BaseB):
- ...
+ class GameInfo(BaseB): ...
- class GameStats(BaseB):
- ...
+ class GameStats(BaseB): ...
Session = sessionmaker()
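The two bases can then be routed to separate databases via the ``binds`` argument; a sketch with placeholder URLs::

    engine_a = create_engine("sqlite:///a.db")
    engine_b = create_engine("sqlite:///b.db")

    Session = sessionmaker(binds={BaseA: engine_a, BaseB: engine_b})

ORM operations against ``User`` / ``Address`` then hit ``engine_a``, while ``GameInfo`` / ``GameStats`` hit ``engine_b``.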
for asynchronous ORM use.
"""
+
from __future__ import annotations
import asyncio
of ORM collections under asyncio.
"""
+
from __future__ import annotations
import asyncio
"""
-
import asyncio
from sqlalchemy import Column
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
dogpile.cache constructs.
"""
+
from dogpile.cache.api import NO_VALUE
from sqlalchemy import event
class ORMCache:
-
"""An add-on for an ORM :class:`.Session` optionally loads full results
from a dogpile cache region.
bootstrap fixture data if necessary.
"""
+
from hashlib import md5
import os
with a randomly selected postal code.
"""
+
import random
from .environment import Base
City --(has a)--> Country
"""
+
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
term cache.
"""
+
import os
from sqlalchemy import select
objects, but is also slightly more complex.
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
or "table_per_association" instead of this approach.
"""
+
from sqlalchemy import and_
from sqlalchemy import Column
from sqlalchemy import create_engine
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
is completely automated.
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
"""Concrete-table (table-per-class) inheritance example."""
+
from __future__ import annotations
from typing import Annotated
"""Joined-table (table-per-subclass) inheritance example."""
+
from __future__ import annotations
from typing import Annotated
"""Single-table (table-per-hierarchy) inheritance example."""
+
from __future__ import annotations
from typing import Annotated
descendants and changing the prefix.
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import func
""" # noqa
+
import argparse
import cProfile
import gc
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Identity
provide a huge amount of functionality.
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Identity
"""
+
import random
from sqlalchemy import bindparam
a database connection, inserts the row, commits and closes.
"""
+
from sqlalchemy import bindparam
from sqlalchemy import Column
from sqlalchemy import create_engine
the routine that generates new primary keys.
"""
+
from __future__ import annotations
import asyncio
"""Illustrates sharding using distinct SQLite databases."""
+
from __future__ import annotations
import datetime
In this example we will set a "shard id" at all times.
"""
+
from __future__ import annotations
import datetime
"""Illustrates sharding using a single SQLite database, that will however
have multiple tables using a naming convention."""
+
from __future__ import annotations
import datetime
row is inserted with the new data, keeping the old row intact.
"""
+
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import event
as the ability to see which row is the most "current" version.
"""
+
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import create_engine
"""
- async def close(self) -> None:
- ...
+ async def close(self) -> None: ...
- async def commit(self) -> None:
- ...
+ async def commit(self) -> None: ...
- def cursor(self) -> AsyncIODBAPICursor:
- ...
+ def cursor(self) -> AsyncIODBAPICursor: ...
- async def rollback(self) -> None:
- ...
+ async def rollback(self) -> None: ...
class AsyncIODBAPICursor(Protocol):
"""
- def __aenter__(self) -> Any:
- ...
+ def __aenter__(self) -> Any: ...
@property
def description(
...
@property
- def rowcount(self) -> int:
- ...
+ def rowcount(self) -> int: ...
arraysize: int
lastrowid: int
- async def close(self) -> None:
- ...
+ async def close(self) -> None: ...
async def execute(
self,
operation: Any,
parameters: Optional[_DBAPISingleExecuteParams] = None,
- ) -> Any:
- ...
+ ) -> Any: ...
async def executemany(
self,
operation: Any,
parameters: _DBAPIMultiExecuteParams,
- ) -> Any:
- ...
+ ) -> Any: ...
- async def fetchone(self) -> Optional[Any]:
- ...
+ async def fetchone(self) -> Optional[Any]: ...
- async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]:
- ...
+ async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]: ...
- async def fetchall(self) -> Sequence[Any]:
- ...
+ async def fetchall(self) -> Sequence[Any]: ...
- async def setinputsizes(self, sizes: Sequence[Any]) -> None:
- ...
+ async def setinputsizes(self, sizes: Sequence[Any]) -> None: ...
- def setoutputsize(self, size: Any, column: Any) -> None:
- ...
+ def setoutputsize(self, size: Any, column: Any) -> None: ...
async def callproc(
self, procname: str, parameters: Sequence[Any] = ...
- ) -> Any:
- ...
+ ) -> Any: ...
- async def nextset(self) -> Optional[bool]:
- ...
+ async def nextset(self) -> Optional[bool]: ...
class AsyncAdapt_dbapi_cursor:
cursor.setinputsizes(
[
- (dbtype, None, None)
- if not isinstance(dbtype, tuple)
- else dbtype
+ (
+ (dbtype, None, None)
+ if not isinstance(dbtype, tuple)
+ else dbtype
+ )
for key, dbtype, sqltype in list_of_tuples
]
)
class NTEXT(sqltypes.UnicodeText):
-
"""MSSQL NTEXT type, for variable-length unicode text up to 2^30
characters."""
@overload
def __init__(
self: UNIQUEIDENTIFIER[_python_UUID], as_uuid: Literal[True] = ...
- ):
- ...
+ ): ...
@overload
- def __init__(self: UNIQUEIDENTIFIER[str], as_uuid: Literal[False] = ...):
- ...
+ def __init__(
+ self: UNIQUEIDENTIFIER[str], as_uuid: Literal[False] = ...
+ ): ...
def __init__(self, as_uuid: bool = True):
"""Construct a :class:`_mssql.UNIQUEIDENTIFIER` type.
type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS %s)" % (
self.process(binary.left, **kw),
self.process(binary.right, **kw),
- "FLOAT"
- if isinstance(binary.type, sqltypes.Float)
- else "NUMERIC(%s, %s)"
- % (binary.type.precision, binary.type.scale),
+ (
+ "FLOAT"
+ if isinstance(binary.type, sqltypes.Float)
+ else "NUMERIC(%s, %s)"
+ % (binary.type.precision, binary.type.scale)
+ ),
)
elif binary.type._type_affinity is sqltypes.Boolean:
# the NULL handling is particularly weird with boolean, so
class MSSQLStrictCompiler(MSSQLCompiler):
-
"""A subclass of MSSQLCompiler which disables the usage of bind
parameters where not allowed natively by MS-SQL.
int 1 is returned as "\x01\x00\x00\x00". On Python 3 it returns the
correct value as a string.
"""
+
impl = Unicode
cache_ok = True
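``impl`` and ``cache_ok`` above are the standard ``TypeDecorator`` hooks: ``impl`` names the underlying type, and ``cache_ok = True`` declares the type safe for the SQL compilation cache. A generic sketch, with the title-casing behavior invented for illustration::

    from sqlalchemy import Unicode
    from sqlalchemy.types import TypeDecorator

    class TitleCased(TypeDecorator):
        impl = Unicode
        cache_ok = True

        def process_bind_param(self, value, dialect):
            # normalize values on the way into the database
            return value.title() if value is not None else None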
class _ms_numeric_pyodbc:
-
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
The routines here are needed for older pyodbc versions
columns = [
self.sql_compiler.process(
- elements.Grouping(expr)
- if (
- isinstance(expr, elements.BinaryExpression)
- or (
- isinstance(expr, elements.UnaryExpression)
- and expr.modifier
- not in (operators.desc_op, operators.asc_op)
+ (
+ elements.Grouping(expr)
+ if (
+ isinstance(expr, elements.BinaryExpression)
+ or (
+ isinstance(expr, elements.UnaryExpression)
+ and expr.modifier
+ not in (operators.desc_op, operators.asc_op)
+ )
+ or isinstance(expr, functions.FunctionElement)
)
- or isinstance(expr, functions.FunctionElement)
- )
- else expr,
+ else expr
+ ),
include_table=False,
literal_binds=True,
)
# mapping specifying the prefix length for each column of the
# index
columns = ", ".join(
- "%s(%d)" % (expr, length[col.name])
- if col.name in length
- else (
- "%s(%d)" % (expr, length[expr])
- if expr in length
- else "%s" % expr
+ (
+ "%s(%d)" % (expr, length[col.name])
+ if col.name in length
+ else (
+ "%s(%d)" % (expr, length[expr])
+ if expr in length
+ else "%s" % expr
+ )
)
for col, expr in zip(index.expressions, columns)
)
self.use_ansi = use_ansi
self.optimize_limits = optimize_limits
self.exclude_tablespaces = exclude_tablespaces
- self.enable_offset_fetch = (
- self._supports_offset_fetch
- ) = enable_offset_fetch
+ self.enable_offset_fetch = self._supports_offset_fetch = (
+ enable_offset_fetch
+ )
def initialize(self, connection):
super().initialize(connection)
return (
(
(schema, self.normalize_name(table)),
- {"text": comment}
- if comment is not None
- and not comment.startswith(ignore_mat_view)
- else default(),
+ (
+ {"text": comment}
+ if comment is not None
+ and not comment.startswith(ignore_mat_view)
+ else default()
+ ),
)
for table, comment in result
)
table_uc[constraint_name] = uc = {
"name": constraint_name,
"column_names": [],
- "duplicates_index": constraint_name
- if constraint_name_orig in index_names
- else None,
+ "duplicates_index": (
+ constraint_name
+ if constraint_name_orig in index_names
+ else None
+ ),
}
else:
uc = table_uc[constraint_name]
return (
(
key,
- list(unique_cons[key].values())
- if key in unique_cons
- else default(),
+ (
+ list(unique_cons[key].values())
+ if key in unique_cons
+ else default()
+ ),
)
for key in (
(schema, self.normalize_name(obj_name))
return (
(
key,
- check_constraints[key]
- if key in check_constraints
- else default(),
+ (
+ check_constraints[key]
+ if key in check_constraints
+ else default()
+ ),
)
for key in (
(schema, self.normalize_name(obj_name))
)
for param in self.parameters:
- param[
- quoted_bind_names.get(name, name)
- ] = out_parameters[name]
+ param[quoted_bind_names.get(name, name)] = (
+ out_parameters[name]
+ )
def _generate_cursor_outputtype_handler(self):
output_handlers = {}
class array(expression.ExpressionClauseList[_T]):
-
"""A PostgreSQL ARRAY literal.
This is used to produce ARRAY literals in SQL expressions, e.g.::
main_type = (
type_arg
if type_arg is not None
- else self._type_tuple[0]
- if self._type_tuple
- else sqltypes.NULLTYPE
+ else self._type_tuple[0] if self._type_tuple else sqltypes.NULLTYPE
)
if isinstance(main_type, ARRAY):
self.type = ARRAY(
main_type.item_type,
- dimensions=main_type.dimensions + 1
- if main_type.dimensions is not None
- else 2,
+ dimensions=(
+ main_type.dimensions + 1
+ if main_type.dimensions is not None
+ else 2
+ ),
)
else:
self.type = ARRAY(main_type)
"""
class Comparator(sqltypes.ARRAY.Comparator):
-
"""Define comparison operations for :class:`_types.ARRAY`.
Note that these operations are in addition to those provided
class _AsyncpgConnection(Protocol):
async def executemany(
self, operation: Any, seq_of_parameters: Sequence[Tuple[Any, ...]]
- ) -> Any:
- ...
+ ) -> Any: ...
- async def reload_schema_state(self) -> None:
- ...
+ async def reload_schema_state(self) -> None: ...
async def prepare(
self, operation: Any, *, name: Optional[str] = None
- ) -> Any:
- ...
+ ) -> Any: ...
- def is_closed(self) -> bool:
- ...
+ def is_closed(self) -> bool: ...
def transaction(
self,
isolation: Optional[str] = None,
readonly: bool = False,
deferrable: bool = False,
- ) -> Any:
- ...
+ ) -> Any: ...
- def fetchrow(self, operation: str) -> Any:
- ...
+ def fetchrow(self, operation: str) -> Any: ...
- async def close(self) -> None:
- ...
+ async def close(self) -> None: ...
- def terminate(self) -> None:
- ...
+ def terminate(self) -> None: ...
class _AsyncpgCursor(Protocol):
- def fetch(self, size: int) -> Any:
- ...
+ def fetch(self, size: int) -> Any: ...
class AsyncAdapt_asyncpg_cursor(AsyncAdapt_dbapi_cursor):
translated_error = exception_mapping[super_](
"%s: %s" % (type(error), error)
)
- translated_error.pgcode = (
- translated_error.sqlstate
- ) = getattr(error, "sqlstate", None)
+ translated_error.pgcode = translated_error.sqlstate = (
+ getattr(error, "sqlstate", None)
+ )
raise translated_error from error
else:
super()._handle_exception(error)
text += "\n FETCH FIRST (%s)%s ROWS %s" % (
self.process(select._fetch_clause, **kw),
" PERCENT" if select._fetch_clause_options["percent"] else "",
- "WITH TIES"
- if select._fetch_clause_options["with_ties"]
- else "ONLY",
+ (
+ "WITH TIES"
+ if select._fetch_clause_options["with_ties"]
+ else "ONLY"
+ ),
)
return text
", ".join(
[
self.sql_compiler.process(
- expr.self_group()
- if not isinstance(expr, expression.ColumnClause)
- else expr,
+ (
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr
+ ),
include_table=False,
literal_binds=True,
)
def visit_TIMESTAMP(self, type_, **kw):
return "TIMESTAMP%s %s" % (
- "(%d)" % type_.precision
- if getattr(type_, "precision", None) is not None
- else "",
+ (
+ "(%d)" % type_.precision
+ if getattr(type_, "precision", None) is not None
+ else ""
+ ),
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE",
)
def visit_TIME(self, type_, **kw):
return "TIME%s %s" % (
- "(%d)" % type_.precision
- if getattr(type_, "precision", None) is not None
- else "",
+ (
+ "(%d)" % type_.precision
+ if getattr(type_, "precision", None) is not None
+ else ""
+ ),
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE",
)
def get_deferrable(self, connection):
raise NotImplementedError()
- def _split_multihost_from_url(
- self, url: URL
- ) -> Union[
+ def _split_multihost_from_url(self, url: URL) -> Union[
Tuple[None, None],
Tuple[Tuple[Optional[str], ...], Tuple[Optional[int], ...]],
]:
# dictionary with (name, ) if default search path or (schema, name)
# as keys
enums = dict(
- ((rec["name"],), rec)
- if rec["visible"]
- else ((rec["schema"], rec["name"]), rec)
+ (
+ ((rec["name"],), rec)
+ if rec["visible"]
+ else ((rec["schema"], rec["name"]), rec)
+ )
for rec in self._load_enums(
connection, schema="*", info_cache=kw.get("info_cache")
)
for row_dict in rows:
# ensure that each table has an entry, even if it has no columns
if row_dict["name"] is None:
- columns[
- (schema, row_dict["table_name"])
- ] = ReflectionDefaults.columns()
+ columns[(schema, row_dict["table_name"])] = (
+ ReflectionDefaults.columns()
+ )
continue
table_cols = columns[(schema, row_dict["table_name"])]
return (
(
(schema, table_name),
- {
- "constrained_columns": [] if cols is None else cols,
- "name": pk_name,
- "comment": comment,
- }
- if pk_name is not None
- else default(),
+ (
+ {
+ "constrained_columns": [] if cols is None else cols,
+ "name": pk_name,
+ "comment": comment,
+ }
+ if pk_name is not None
+ else default()
+ ),
)
for table_name, cols, pk_name, comment, _ in result
)
self.inferred_target_elements = index_elements
self.inferred_target_whereclause = index_where
elif constraint is None:
- self.constraint_target = (
- self.inferred_target_elements
- ) = self.inferred_target_whereclause = None
+ self.constraint_target = self.inferred_target_elements = (
+ self.inferred_target_whereclause
+ ) = None
class OnConflictDoNothing(OnConflictClause):
class ENUM(NamedType, type_api.NativeForEmulated, sqltypes.Enum):
-
"""PostgreSQL ENUM type.
This is a subclass of :class:`_types.Enum` which includes
__abstract__ = True
@overload
- def adapt(self, cls: Type[_TE], **kw: Any) -> _TE:
- ...
+ def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
@overload
- def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]:
- ...
+ def adapt(
+ self, cls: Type[TypeEngineMixin], **kw: Any
+ ) -> TypeEngine[Any]: ...
def adapt(
self,
@overload
def __init__(
self: PGUuid[_python_UUID], as_uuid: Literal[True] = ...
- ) -> None:
- ...
+ ) -> None: ...
@overload
- def __init__(self: PGUuid[str], as_uuid: Literal[False] = ...) -> None:
- ...
+ def __init__(
+ self: PGUuid[str], as_uuid: Literal[False] = ...
+ ) -> None: ...
- def __init__(self, as_uuid: bool = True) -> None:
- ...
+ def __init__(self, as_uuid: bool = True) -> None: ...
class BYTEA(sqltypes.LargeBinary):
class OID(sqltypes.TypeEngine[int]):
-
"""Provide the PostgreSQL OID type."""
__visit_name__ = "OID"
class REGCONFIG(sqltypes.TypeEngine[str]):
-
"""Provide the PostgreSQL REGCONFIG type.
.. versionadded:: 2.0.0rc1
class TSQUERY(sqltypes.TypeEngine[str]):
-
"""Provide the PostgreSQL TSQUERY type.
.. versionadded:: 2.0.0rc1
class REGCLASS(sqltypes.TypeEngine[str]):
-
"""Provide the PostgreSQL REGCLASS type.
.. versionadded:: 1.2.7
class TIMESTAMP(sqltypes.TIMESTAMP):
-
"""Provide the PostgreSQL TIMESTAMP type."""
__visit_name__ = "TIMESTAMP"
class TIME(sqltypes.TIME):
-
"""PostgreSQL TIME type."""
__visit_name__ = "TIME"
class INTERVAL(type_api.NativeForEmulated, sqltypes._AbstractInterval):
-
"""PostgreSQL INTERVAL type."""
__visit_name__ = "INTERVAL"
class TSVECTOR(sqltypes.TypeEngine[str]):
-
"""The :class:`_postgresql.TSVECTOR` type implements the PostgreSQL
text search type TSVECTOR.
class CITEXT(sqltypes.TEXT):
-
"""Provide the PostgreSQL CITEXT type.
.. versionadded:: 2.0.7
)
if self.dbapi.sqlite_version_info < (3, 35) or util.pypy:
- self.update_returning = (
- self.delete_returning
- ) = self.insert_returning = False
+ self.update_returning = self.delete_returning = (
+ self.insert_returning
+ ) = False
if self.dbapi.sqlite_version_info < (3, 32, 0):
# https://www.sqlite.org/limits.html
self.inferred_target_elements = index_elements
self.inferred_target_whereclause = index_where
else:
- self.constraint_target = (
- self.inferred_target_elements
- ) = self.inferred_target_whereclause = None
+ self.constraint_target = self.inferred_target_elements = (
+ self.inferred_target_whereclause
+ ) = None
class OnConflictDoNothing(OnConflictClause):
@property
def _schema_translate_map(self) -> Optional[SchemaTranslateMapType]:
- schema_translate_map: Optional[
- SchemaTranslateMapType
- ] = self._execution_options.get("schema_translate_map", None)
+ schema_translate_map: Optional[SchemaTranslateMapType] = (
+ self._execution_options.get("schema_translate_map", None)
+ )
return schema_translate_map
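The option read back here is set at execution time; a usage sketch with an assumed schema name::

    conn = engine.connect().execution_options(
        schema_translate_map={None: "tenant_schema"}
    )

Table objects with no explicit schema are then rendered against ``tenant_schema`` when executed on this connection.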
"""
name = obj.schema
- schema_translate_map: Optional[
- SchemaTranslateMapType
- ] = self._execution_options.get("schema_translate_map", None)
+ schema_translate_map: Optional[SchemaTranslateMapType] = (
+ self._execution_options.get("schema_translate_map", None)
+ )
if (
schema_translate_map
insertmanyvalues_page_size: int = ...,
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
**opt: Any,
- ) -> Connection:
- ...
+ ) -> Connection: ...
@overload
- def execution_options(self, **opt: Any) -> Connection:
- ...
+ def execution_options(self, **opt: Any) -> Connection: ...
def execution_options(self, **opt: Any) -> Connection:
r"""Set non-SQL options for the connection which take effect
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
def scalar(
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> Any:
- ...
+ ) -> Any: ...
def scalar(
self,
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
def scalars(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
def scalars(
self,
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> CursorResult[Unpack[_Ts]]:
- ...
+ ) -> CursorResult[Unpack[_Ts]]: ...
@overload
def execute(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
def execute(
self,
engine_events = self._has_events or self.engine._has_events
if self.dialect._has_events:
- do_execute_dispatch: Iterable[
- Any
- ] = self.dialect.dispatch.do_execute
+ do_execute_dispatch: Iterable[Any] = (
+ self.dialect.dispatch.do_execute
+ )
else:
do_execute_dispatch = ()
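``dispatch.do_execute`` iterates any listeners registered for the dialect-level ``do_execute`` event; a sketch of such a hook, with ``some_engine`` assumed::

    from sqlalchemy import event

    @event.listens_for(some_engine, "do_execute")
    def receive_do_execute(cursor, statement, parameters, context):
        # return None to fall through to the default cursor.execute()
        return None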
None,
cast(Exception, e),
dialect.loaded_dbapi.Error,
- hide_parameters=engine.hide_parameters
- if engine is not None
- else False,
+ hide_parameters=(
+ engine.hide_parameters if engine is not None else False
+ ),
connection_invalidated=is_disconnect,
dialect=dialect,
)
break
if sqlalchemy_exception and is_disconnect != ctx.is_disconnect:
- sqlalchemy_exception.connection_invalidated = (
- is_disconnect
- ) = ctx.is_disconnect
+ sqlalchemy_exception.connection_invalidated = is_disconnect = (
+ ctx.is_disconnect
+ )
if newraise:
raise newraise.with_traceback(exc_info[2]) from e
insertmanyvalues_page_size: int = ...,
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
**opt: Any,
- ) -> OptionEngine:
- ...
+ ) -> OptionEngine: ...
@overload
- def execution_options(self, **opt: Any) -> OptionEngine:
- ...
+ def execution_options(self, **opt: Any) -> OptionEngine: ...
def execution_options(self, **opt: Any) -> OptionEngine:
"""Return a new :class:`_engine.Engine` that will provide
query_cache_size: int = ...,
use_insertmanyvalues: bool = ...,
**kwargs: Any,
-) -> Engine:
- ...
+) -> Engine: ...
@overload
-def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine:
- ...
+def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ...
@util.deprecated_params(
timeout: float = ...,
use_lifo: bool = ...,
**kwargs: Any,
-) -> Pool:
- ...
+) -> Pool: ...
@overload
-def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool:
- ...
+def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ...
def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool:
"_translated_indexes",
"_safe_for_cache",
"_unpickled",
- "_key_to_index"
+ "_key_to_index",
# don't need _unique_filters support here for now. Can be added
# if a need arises.
)
{
key: (
# int index should be None for ambiguous key
- value[0] + offset
- if value[0] is not None and key not in keymap
- else None,
+ (
+ value[0] + offset
+ if value[0] is not None and key not in keymap
+ else None
+ ),
value[1] + offset,
*value[2:],
)
) = context.result_column_struct
num_ctx_cols = len(result_columns)
else:
- result_columns = ( # type: ignore
- cols_are_ordered
- ) = (
+ result_columns = cols_are_ordered = ( # type: ignore
num_ctx_cols
- ) = (
- ad_hoc_textual
- ) = loose_column_name_matching = textual_ordered = False
+ ) = ad_hoc_textual = loose_column_name_matching = (
+ textual_ordered
+ ) = False
# merge cursor.description with the column info
# present in the compiled structure, if any
ordered_rows = [
rows_by_sentinel[
tuple(
- _resolver(parameters[_spk]) # type: ignore # noqa: E501
- if _resolver
- else parameters[_spk] # type: ignore # noqa: E501
+ (
+ _resolver(parameters[_spk]) # type: ignore # noqa: E501
+ if _resolver
+ else parameters[_spk] # type: ignore # noqa: E501
+ )
for _resolver, _spk in zip(
sentinel_value_resolvers,
imv.sentinel_param_keys,
assert positiontup is not None
for compiled_params in self.compiled_parameters:
l_param: List[Any] = [
- flattened_processors[key](compiled_params[key])
- if key in flattened_processors
- else compiled_params[key]
+ (
+ flattened_processors[key](compiled_params[key])
+ if key in flattened_processors
+ else compiled_params[key]
+ )
for key in positiontup
]
core_positional_parameters.append(
for compiled_params in self.compiled_parameters:
if escaped_names:
d_param = {
- escaped_names.get(key, key): flattened_processors[key](
- compiled_params[key]
+ escaped_names.get(key, key): (
+ flattened_processors[key](compiled_params[key])
+ if key in flattened_processors
+ else compiled_params[key]
)
- if key in flattened_processors
- else compiled_params[key]
for key in compiled_params
}
else:
d_param = {
- key: flattened_processors[key](compiled_params[key])
- if key in flattened_processors
- else compiled_params[key]
+ key: (
+ flattened_processors[key](compiled_params[key])
+ if key in flattened_processors
+ else compiled_params[key]
+ )
for key in compiled_params
}
if compiled.positional:
parameters = self.dialect.execute_sequence_format(
[
- processors[key](compiled_params[key]) # type: ignore
- if key in processors
- else compiled_params[key]
+ (
+ processors[key](compiled_params[key]) # type: ignore
+ if key in processors
+ else compiled_params[key]
+ )
for key in compiled.positiontup or ()
]
)
else:
parameters = {
- key: processors[key](compiled_params[key]) # type: ignore
- if key in processors
- else compiled_params[key]
+ key: (
+ processors[key](compiled_params[key]) # type: ignore
+ if key in processors
+ else compiled_params[key]
+ )
for key in compiled_params
}
return self._execute_scalar(
""" # noqa: E501
- def close(self) -> None:
- ...
+ def close(self) -> None: ...
- def commit(self) -> None:
- ...
+ def commit(self) -> None: ...
- def cursor(self) -> DBAPICursor:
- ...
+ def cursor(self) -> DBAPICursor: ...
- def rollback(self) -> None:
- ...
+ def rollback(self) -> None: ...
autocommit: bool
...
@property
- def rowcount(self) -> int:
- ...
+ def rowcount(self) -> int: ...
arraysize: int
lastrowid: int
- def close(self) -> None:
- ...
+ def close(self) -> None: ...
def execute(
self,
operation: Any,
parameters: Optional[_DBAPISingleExecuteParams] = None,
- ) -> Any:
- ...
+ ) -> Any: ...
def executemany(
self,
operation: Any,
parameters: _DBAPIMultiExecuteParams,
- ) -> Any:
- ...
+ ) -> Any: ...
- def fetchone(self) -> Optional[Any]:
- ...
+ def fetchone(self) -> Optional[Any]: ...
- def fetchmany(self, size: int = ...) -> Sequence[Any]:
- ...
+ def fetchmany(self, size: int = ...) -> Sequence[Any]: ...
- def fetchall(self) -> Sequence[Any]:
- ...
+ def fetchall(self) -> Sequence[Any]: ...
- def setinputsizes(self, sizes: Sequence[Any]) -> None:
- ...
+ def setinputsizes(self, sizes: Sequence[Any]) -> None: ...
- def setoutputsize(self, size: Any, column: Any) -> None:
- ...
+ def setoutputsize(self, size: Any, column: Any) -> None: ...
- def callproc(self, procname: str, parameters: Sequence[Any] = ...) -> Any:
- ...
+ def callproc(
+ self, procname: str, parameters: Sequence[Any] = ...
+ ) -> Any: ...
- def nextset(self) -> Optional[bool]:
- ...
+ def nextset(self) -> Optional[bool]: ...
- def __getattr__(self, key: str) -> Any:
- ...
+ def __getattr__(self, key: str) -> Any: ...
_CoreSingleExecuteParams = Mapping[str, Any]
if TYPE_CHECKING:
- def _overrides_default(self, method_name: str) -> bool:
- ...
+ def _overrides_default(self, method_name: str) -> bool: ...
def get_columns(
self,
@overload
def _key_fallback(
self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ...
- ) -> NoReturn:
- ...
+ ) -> NoReturn: ...
@overload
def _key_fallback(
key: Any,
err: Optional[Exception],
raiseerr: Literal[False] = ...,
- ) -> None:
- ...
+ ) -> None: ...
@overload
def _key_fallback(
self, key: Any, err: Optional[Exception], raiseerr: bool = ...
- ) -> Optional[NoReturn]:
- ...
+ ) -> Optional[NoReturn]: ...
def _key_fallback(
self, key: Any, err: Optional[Exception], raiseerr: bool = True
raise_for_second_row: bool,
raise_for_none: Literal[True],
scalar: bool,
- ) -> _R:
- ...
+ ) -> _R: ...
@overload
def _only_one_row(
raise_for_second_row: bool,
raise_for_none: bool,
scalar: bool,
- ) -> Optional[_R]:
- ...
+ ) -> Optional[_R]: ...
def _only_one_row(
self,
return self._column_slices(col_expressions)
@overload
- def scalars(self: Result[_T, Unpack[TupleAny]]) -> ScalarResult[_T]:
- ...
+ def scalars(self: Result[_T, Unpack[TupleAny]]) -> ScalarResult[_T]: ...
@overload
def scalars(
self: Result[_T, Unpack[TupleAny]], index: Literal[0]
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
- def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]:
- ...
+ def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: ...
def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]:
"""Return a :class:`_engine.ScalarResult` filtering object which
)
@overload
- def scalar_one(self: Result[_T]) -> _T:
- ...
+ def scalar_one(self: Result[_T]) -> _T: ...
@overload
- def scalar_one(self) -> Any:
- ...
+ def scalar_one(self) -> Any: ...
def scalar_one(self) -> Any:
"""Return exactly one scalar result or raise an exception.
)
@overload
- def scalar_one_or_none(self: Result[_T]) -> Optional[_T]:
- ...
+ def scalar_one_or_none(self: Result[_T]) -> Optional[_T]: ...
@overload
- def scalar_one_or_none(self) -> Optional[Any]:
- ...
+ def scalar_one_or_none(self) -> Optional[Any]: ...
def scalar_one_or_none(self) -> Optional[Any]:
"""Return exactly one scalar result or ``None``.
)
@overload
- def scalar(self: Result[_T]) -> Optional[_T]:
- ...
+ def scalar(self: Result[_T]) -> Optional[_T]: ...
@overload
- def scalar(self) -> Any:
- ...
+ def scalar(self) -> Any: ...
def scalar(self) -> Any:
"""Fetch the first column of the first row, and close the result set.
"""
...
- def __iter__(self) -> Iterator[_R]:
- ...
+ def __iter__(self) -> Iterator[_R]: ...
- def __next__(self) -> _R:
- ...
+ def __next__(self) -> _R: ...
def first(self) -> Optional[_R]:
"""Fetch the first object or ``None`` if no object is present.
...
@overload
- def scalar_one(self: TupleResult[Tuple[_T]]) -> _T:
- ...
+ def scalar_one(self: TupleResult[Tuple[_T]]) -> _T: ...
@overload
- def scalar_one(self) -> Any:
- ...
+ def scalar_one(self) -> Any: ...
def scalar_one(self) -> Any:
"""Return exactly one scalar result or raise an exception.
...
@overload
- def scalar_one_or_none(self: TupleResult[Tuple[_T]]) -> Optional[_T]:
- ...
+ def scalar_one_or_none(
+ self: TupleResult[Tuple[_T]],
+ ) -> Optional[_T]: ...
@overload
- def scalar_one_or_none(self) -> Optional[Any]:
- ...
+ def scalar_one_or_none(self) -> Optional[Any]: ...
def scalar_one_or_none(self) -> Optional[Any]:
"""Return exactly one or no scalar result.
...
@overload
- def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]:
- ...
+ def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]: ...
@overload
- def scalar(self) -> Any:
- ...
+ def scalar(self) -> Any: ...
def scalar(self) -> Any:
"""Fetch the first column of the first row, and close the result
if TYPE_CHECKING:
- def __getitem__(self, key: _KeyType) -> Any:
- ...
+ def __getitem__(self, key: _KeyType) -> Any: ...
else:
__getitem__ = BaseRow._get_by_key_impl_mapping
@overload
def _assert_value(
val: str,
- ) -> str:
- ...
+ ) -> str: ...
@overload
def _assert_value(
val: Sequence[str],
- ) -> Union[str, Tuple[str, ...]]:
- ...
+ ) -> Union[str, Tuple[str, ...]]: ...
def _assert_value(
val: Union[str, Sequence[str]],
class _MutexProtocol(Protocol):
- def __enter__(self) -> bool:
- ...
+ def __enter__(self) -> bool: ...
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
- ) -> Optional[bool]:
- ...
+ ) -> Optional[bool]: ...
class _CompoundListener(_InstanceLevelDispatch[_ET]):
from .. import util
from ..util.typing import Literal
-_registrars: MutableMapping[
- str, List[Type[_HasEventsDispatch[Any]]]
-] = util.defaultdict(list)
+_registrars: MutableMapping[str, List[Type[_HasEventsDispatch[Any]]]] = (
+ util.defaultdict(list)
+)
def _is_event_name(name: str) -> bool:
if typing.TYPE_CHECKING:
- def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]:
- ...
+ def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: ...
def __init_subclass__(cls) -> None:
"""Intercept new Event subclasses and create associated _Dispatch
@overload
def __get__(
self, obj: Literal[None], cls: Type[Any]
- ) -> Type[_Dispatch[_ET]]:
- ...
+ ) -> Type[_Dispatch[_ET]]: ...
@overload
- def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]:
- ...
+ def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]: ...
def __get__(self, obj: Any, cls: Type[Any]) -> Any:
if obj is None:
)
text %= {
- "current_since": " (arguments as of %s)" % current_since
- if current_since
- else "",
+ "current_since": (
+ " (arguments as of %s)" % current_since if current_since else ""
+ ),
"event_name": fn.__name__,
"has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
"named_event_arguments": ", ".join(dispatch_collection.arg_names),
% {
"since": since,
"event_name": fn.__name__,
- "has_kw_arguments": " **kw"
- if dispatch_collection.has_kw
- else "",
+ "has_kw_arguments": (
+ " **kw" if dispatch_collection.has_kw else ""
+ ),
"named_event_arguments": ", ".join(args),
"sample_target": sample_target,
}
"weakref.ref[_ListenerFnType]",
]
-_key_to_collection: Dict[
- _EventKeyTupleType, _RefCollectionToListenerType
-] = collections.defaultdict(dict)
+_key_to_collection: Dict[_EventKeyTupleType, _RefCollectionToListenerType] = (
+ collections.defaultdict(dict)
+)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained
connection_invalidated: bool = False,
dialect: Optional[Dialect] = None,
ismulti: Optional[bool] = None,
- ) -> StatementError:
- ...
+ ) -> StatementError: ...
@overload
@classmethod
connection_invalidated: bool = False,
dialect: Optional[Dialect] = None,
ismulti: Optional[bool] = None,
- ) -> DontWrapMixin:
- ...
+ ) -> DontWrapMixin: ...
@overload
@classmethod
connection_invalidated: bool = False,
dialect: Optional[Dialect] = None,
ismulti: Optional[bool] = None,
- ) -> BaseException:
- ...
+ ) -> BaseException: ...
@classmethod
def instance(
class _GetterProtocol(Protocol[_T_co]):
- def __call__(self, instance: Any) -> _T_co:
- ...
+ def __call__(self, instance: Any) -> _T_co: ...
# as of mypy 0.990 we are no longer allowed to make this Protocol[_T_con]
-class _SetterProtocol(Protocol):
- ...
+class _SetterProtocol(Protocol): ...
class _PlainSetterProtocol(_SetterProtocol, Protocol[_T_con]):
- def __call__(self, instance: Any, value: _T_con) -> None:
- ...
+ def __call__(self, instance: Any, value: _T_con) -> None: ...
class _DictSetterProtocol(_SetterProtocol, Protocol[_T_con]):
- def __call__(self, instance: Any, key: Any, value: _T_con) -> None:
- ...
+ def __call__(self, instance: Any, key: Any, value: _T_con) -> None: ...
# as of mypy 0.990 we are no longer allowed to make this Protocol[_T_con]
-class _CreatorProtocol(Protocol):
- ...
+class _CreatorProtocol(Protocol): ...
class _PlainCreatorProtocol(_CreatorProtocol, Protocol[_T_con]):
- def __call__(self, value: _T_con) -> Any:
- ...
+ def __call__(self, value: _T_con) -> Any: ...
class _KeyCreatorProtocol(_CreatorProtocol, Protocol[_T_con]):
- def __call__(self, key: Any, value: Optional[_T_con]) -> Any:
- ...
+ def __call__(self, key: Any, value: Optional[_T_con]) -> Any: ...
class _LazyCollectionProtocol(Protocol[_T]):
def __call__(
self,
- ) -> Union[MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T]]:
- ...
+ ) -> Union[
+ MutableSet[_T], MutableMapping[Any, _T], MutableSequence[_T]
+ ]: ...
class _GetSetFactoryProtocol(Protocol):
self,
collection_class: Optional[Type[Any]],
assoc_instance: AssociationProxyInstance[Any],
- ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]:
- ...
+ ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ...
class _ProxyFactoryProtocol(Protocol):
creator: _CreatorProtocol,
value_attr: str,
parent: AssociationProxyInstance[Any],
- ) -> Any:
- ...
+ ) -> Any: ...
class _ProxyBulkSetProtocol(Protocol):
def __call__(
self, proxy: _AssociationCollection[Any], collection: Iterable[Any]
- ) -> None:
- ...
+ ) -> None: ...
class _AssociationProxyProtocol(Protocol[_T]):
proxy_bulk_set: Optional[_ProxyBulkSetProtocol]
@util.ro_memoized_property
- def info(self) -> _InfoType:
- ...
+ def info(self) -> _InfoType: ...
def for_class(
self, class_: Type[Any], obj: Optional[object] = None
- ) -> AssociationProxyInstance[_T]:
- ...
+ ) -> AssociationProxyInstance[_T]: ...
def _default_getset(
self, collection_class: Any
- ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]:
- ...
+ ) -> Tuple[_GetterProtocol[Any], _SetterProtocol]: ...
class AssociationProxy(
self._attribute_options = _DEFAULT_ATTRIBUTE_OPTIONS
@overload
- def __get__(self, instance: Literal[None], owner: Literal[None]) -> Self:
- ...
+ def __get__(
+ self, instance: Literal[None], owner: Literal[None]
+ ) -> Self: ...
@overload
def __get__(
self, instance: Literal[None], owner: Any
- ) -> AssociationProxyInstance[_T]:
- ...
+ ) -> AssociationProxyInstance[_T]: ...
@overload
- def __get__(self, instance: object, owner: Any) -> _T:
- ...
+ def __get__(self, instance: object, owner: Any) -> _T: ...
def __get__(
self, instance: object, owner: Any
return self.parent.info
@overload
- def get(self: _Self, obj: Literal[None]) -> _Self:
- ...
+ def get(self: _Self, obj: Literal[None]) -> _Self: ...
@overload
- def get(self, obj: Any) -> _T:
- ...
+ def get(self, obj: Any) -> _T: ...
def get(
self, obj: Any
self.setter(object_, value)
@overload
- def __getitem__(self, index: int) -> _T:
- ...
+ def __getitem__(self, index: int) -> _T: ...
@overload
- def __getitem__(self, index: slice) -> MutableSequence[_T]:
- ...
+ def __getitem__(self, index: slice) -> MutableSequence[_T]: ...
def __getitem__(
self, index: Union[int, slice]
return [self._get(member) for member in self.col[index]]
@overload
- def __setitem__(self, index: int, value: _T) -> None:
- ...
+ def __setitem__(self, index: int, value: _T) -> None: ...
@overload
- def __setitem__(self, index: slice, value: Iterable[_T]) -> None:
- ...
+ def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ...
def __setitem__(
self, index: Union[int, slice], value: Union[_T, Iterable[_T]]
self._set(self.col[i], item)
@overload
- def __delitem__(self, index: int) -> None:
- ...
+ def __delitem__(self, index: int) -> None: ...
@overload
- def __delitem__(self, index: slice) -> None:
- ...
+ def __delitem__(self, index: slice) -> None: ...
def __delitem__(self, index: Union[slice, int]) -> None:
del self.col[index]
if typing.TYPE_CHECKING:
# TODO: no idea how to do this without separate "stub"
- def index(self, value: Any, start: int = ..., stop: int = ...) -> int:
- ...
+ def index(
+ self, value: Any, start: int = ..., stop: int = ...
+ ) -> int: ...
else:
return repr(dict(self))
@overload
- def get(self, __key: _KT, /) -> Optional[_VT]:
- ...
+ def get(self, __key: _KT, /) -> Optional[_VT]: ...
@overload
- def get(self, __key: _KT, /, default: Union[_VT, _T]) -> Union[_VT, _T]:
- ...
+ def get(
+ self, __key: _KT, /, default: Union[_VT, _T]
+ ) -> Union[_VT, _T]: ...
def get(
self, __key: _KT, /, default: Optional[Union[_VT, _T]] = None
return ValuesView(self)
@overload
- def pop(self, __key: _KT, /) -> _VT:
- ...
+ def pop(self, __key: _KT, /) -> _VT: ...
@overload
def pop(
self, __key: _KT, /, default: Union[_VT, _T] = ...
- ) -> Union[_VT, _T]:
- ...
+ ) -> Union[_VT, _T]: ...
def pop(self, __key: _KT, /, *arg: Any, **kw: Any) -> Union[_VT, _T]:
member = self.col.pop(__key, *arg, **kw)
@overload
def update(
self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT
- ) -> None:
- ...
+ ) -> None: ...
@overload
- def update(self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None:
- ...
+ def update(
+ self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT
+ ) -> None: ...
@overload
- def update(self, **kwargs: _VT) -> None:
- ...
+ def update(self, **kwargs: _VT) -> None: ...
def update(self, *a: Any, **kw: Any) -> None:
up: Dict[_KT, _VT] = {}
__slots__ = ("__weakref__",)
@overload
- def _assign_proxied(self, target: _PT) -> _PT:
- ...
+ def _assign_proxied(self, target: _PT) -> _PT: ...
@overload
- def _assign_proxied(self, target: None) -> None:
- ...
+ def _assign_proxied(self, target: None) -> None: ...
def _assign_proxied(self, target: Optional[_PT]) -> Optional[_PT]:
if target is not None:
cls,
target: _PT,
regenerate: Literal[True] = ...,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
@classmethod
def _retrieve_proxy_for_target(
cls, target: _PT, regenerate: bool = True
- ) -> Optional[Self]:
- ...
+ ) -> Optional[Self]: ...
@classmethod
def _retrieve_proxy_for_target(
insertmanyvalues_page_size: int = ...,
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
**opt: Any,
- ) -> AsyncConnection:
- ...
+ ) -> AsyncConnection: ...
@overload
- async def execution_options(self, **opt: Any) -> AsyncConnection:
- ...
+ async def execution_options(self, **opt: Any) -> AsyncConnection: ...
async def execution_options(self, **opt: Any) -> AsyncConnection:
r"""Set non-SQL options for the connection which take effect
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> GeneratorStartableContext[AsyncResult[Unpack[_Ts]]]:
- ...
+ ) -> GeneratorStartableContext[AsyncResult[Unpack[_Ts]]]: ...
@overload
def stream(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> GeneratorStartableContext[AsyncResult[Unpack[TupleAny]]]:
- ...
+ ) -> GeneratorStartableContext[AsyncResult[Unpack[TupleAny]]]: ...
@asyncstartablecontext
async def stream(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> CursorResult[Unpack[_Ts]]:
- ...
+ ) -> CursorResult[Unpack[_Ts]]: ...
@overload
async def execute(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
async def execute(
self,
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
async def scalar(
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> Any:
- ...
+ ) -> Any: ...
async def scalar(
self,
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
async def scalars(
parameters: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
async def scalars(
self,
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> GeneratorStartableContext[AsyncScalarResult[_T]]:
- ...
+ ) -> GeneratorStartableContext[AsyncScalarResult[_T]]: ...
@overload
def stream_scalars(
parameters: Optional[_CoreSingleExecuteParams] = None,
*,
execution_options: Optional[CoreExecuteOptionsParameter] = None,
- ) -> GeneratorStartableContext[AsyncScalarResult[Any]]:
- ...
+ ) -> GeneratorStartableContext[AsyncScalarResult[Any]]: ...
@asyncstartablecontext
async def stream_scalars(
insertmanyvalues_page_size: int = ...,
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
**opt: Any,
- ) -> AsyncEngine:
- ...
+ ) -> AsyncEngine: ...
@overload
- def execution_options(self, **opt: Any) -> AsyncEngine:
- ...
+ def execution_options(self, **opt: Any) -> AsyncEngine: ...
def execution_options(self, **opt: Any) -> AsyncEngine:
"""Return a new :class:`_asyncio.AsyncEngine` that will provide
@overload
-def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine:
- ...
+def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine: ...
@overload
def _get_sync_engine_or_connection(
async_engine: AsyncConnection,
-) -> Connection:
- ...
+) -> Connection: ...
def _get_sync_engine_or_connection(
return await greenlet_spawn(self._only_one_row, True, False, False)
@overload
- async def scalar_one(self: AsyncResult[_T]) -> _T:
- ...
+ async def scalar_one(self: AsyncResult[_T]) -> _T: ...
@overload
- async def scalar_one(self) -> Any:
- ...
+ async def scalar_one(self) -> Any: ...
async def scalar_one(self) -> Any:
"""Return exactly one scalar result or raise an exception.
@overload
async def scalar_one_or_none(
self: AsyncResult[_T],
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
- async def scalar_one_or_none(self) -> Optional[Any]:
- ...
+ async def scalar_one_or_none(self) -> Optional[Any]: ...
async def scalar_one_or_none(self) -> Optional[Any]:
"""Return exactly one scalar result or ``None``.
return await greenlet_spawn(self._only_one_row, True, True, False)
@overload
- async def scalar(self: AsyncResult[_T]) -> Optional[_T]:
- ...
+ async def scalar(self: AsyncResult[_T]) -> Optional[_T]: ...
@overload
- async def scalar(self) -> Any:
- ...
+ async def scalar(self) -> Any: ...
async def scalar(self) -> Any:
"""Fetch the first column of the first row, and close the result set.
@overload
def scalars(
self: AsyncResult[_T, Unpack[TupleAny]], index: Literal[0]
- ) -> AsyncScalarResult[_T]:
- ...
+ ) -> AsyncScalarResult[_T]: ...
@overload
def scalars(
self: AsyncResult[_T, Unpack[TupleAny]],
- ) -> AsyncScalarResult[_T]:
- ...
+ ) -> AsyncScalarResult[_T]: ...
@overload
- def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]:
- ...
+ def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]: ...
def scalars(self, index: _KeyIndexType = 0) -> AsyncScalarResult[Any]:
"""Return an :class:`_asyncio.AsyncScalarResult` filtering object which
"""
...
- async def __aiter__(self) -> AsyncIterator[_R]:
- ...
+ async def __aiter__(self) -> AsyncIterator[_R]: ...
- async def __anext__(self) -> _R:
- ...
+ async def __anext__(self) -> _R: ...
async def first(self) -> Optional[_R]:
"""Fetch the first object or ``None`` if no object is present.
...
@overload
- async def scalar_one(self: AsyncTupleResult[Tuple[_T]]) -> _T:
- ...
+ async def scalar_one(self: AsyncTupleResult[Tuple[_T]]) -> _T: ...
@overload
- async def scalar_one(self) -> Any:
- ...
+ async def scalar_one(self) -> Any: ...
async def scalar_one(self) -> Any:
"""Return exactly one scalar result or raise an exception.
@overload
async def scalar_one_or_none(
self: AsyncTupleResult[Tuple[_T]],
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
- async def scalar_one_or_none(self) -> Optional[Any]:
- ...
+ async def scalar_one_or_none(self) -> Optional[Any]: ...
async def scalar_one_or_none(self) -> Optional[Any]:
"""Return exactly one or no scalar result.
...
@overload
- async def scalar(self: AsyncTupleResult[Tuple[_T]]) -> Optional[_T]:
- ...
+ async def scalar(
+ self: AsyncTupleResult[Tuple[_T]],
+ ) -> Optional[_T]: ...
@overload
- async def scalar(self) -> Any:
- ...
+ async def scalar(self) -> Any: ...
async def scalar(self) -> Any:
"""Fetch the first column of the first row, and close the result
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[_Ts]]:
- ...
+ ) -> Result[Unpack[_Ts]]: ...
@overload
async def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
@overload
async def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[TupleAny]]:
- ...
+ ) -> Result[Unpack[TupleAny]]: ...
async def execute(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
async def scalar(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Any:
- ...
+ ) -> Any: ...
async def scalar(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
async def scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
async def scalars(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncResult[Unpack[_Ts]]:
- ...
+ ) -> AsyncResult[Unpack[_Ts]]: ...
@overload
async def stream(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncResult[Unpack[TupleAny]]:
- ...
+ ) -> AsyncResult[Unpack[TupleAny]]: ...
async def stream(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncScalarResult[_T]:
- ...
+ ) -> AsyncScalarResult[_T]: ...
@overload
async def stream_scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncScalarResult[Any]:
- ...
+ ) -> AsyncScalarResult[Any]: ...
async def stream_scalars(
self,
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[_Ts]]:
- ...
+ ) -> Result[Unpack[_Ts]]: ...
@overload
async def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
@overload
async def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[TupleAny]]:
- ...
+ ) -> Result[Unpack[TupleAny]]: ...
async def execute(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
async def scalar(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Any:
- ...
+ ) -> Any: ...
async def scalar(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
async def scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
async def scalars(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncResult[Unpack[_Ts]]:
- ...
+ ) -> AsyncResult[Unpack[_Ts]]: ...
@overload
async def stream(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncResult[Unpack[TupleAny]]:
- ...
+ ) -> AsyncResult[Unpack[TupleAny]]: ...
async def stream(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncScalarResult[_T]:
- ...
+ ) -> AsyncScalarResult[_T]: ...
@overload
async def stream_scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> AsyncScalarResult[Any]:
- ...
+ ) -> AsyncScalarResult[Any]: ...
async def stream_scalars(
self,
expire_on_commit: bool = ...,
info: Optional[_InfoType] = ...,
**kw: Any,
- ):
- ...
+ ): ...
@overload
def __init__(
expire_on_commit: bool = ...,
info: Optional[_InfoType] = ...,
**kw: Any,
- ):
- ...
+ ): ...
def __init__(
self,
class PythonNameForTableType(Protocol):
- def __call__(self, base: Type[Any], tablename: str, table: Table) -> str:
- ...
+ def __call__(
+ self, base: Type[Any], tablename: str, table: Table
+ ) -> str: ...
def classname_for_table(
local_cls: Type[Any],
referred_cls: Type[Any],
constraint: ForeignKeyConstraint,
- ) -> str:
- ...
+ ) -> str: ...
def name_for_scalar_relationship(
local_cls: Type[Any],
referred_cls: Type[Any],
constraint: ForeignKeyConstraint,
- ) -> str:
- ...
+ ) -> str: ...
def name_for_collection_relationship(
local_cls: Type[Any],
referred_cls: Type[Any],
**kw: Any,
- ) -> Relationship[Any]:
- ...
+ ) -> Relationship[Any]: ...
@overload
def __call__(
local_cls: Type[Any],
referred_cls: Type[Any],
**kw: Any,
- ) -> ORMBackrefArgument:
- ...
+ ) -> ORMBackrefArgument: ...
def __call__(
self,
local_cls: Type[Any],
referred_cls: Type[Any],
**kw: Any,
- ) -> Union[ORMBackrefArgument, Relationship[Any]]:
- ...
+ ) -> Union[ORMBackrefArgument, Relationship[Any]]: ...
@overload
local_cls: Type[Any],
referred_cls: Type[Any],
**kw: Any,
-) -> Relationship[Any]:
- ...
+) -> Relationship[Any]: ...
@overload
local_cls: Type[Any],
referred_cls: Type[Any],
**kw: Any,
-) -> ORMBackrefArgument:
- ...
+) -> ORMBackrefArgument: ...
def generate_relationship(
mapper: Optional[Mapper[_T]],
instance: Any,
clause: Optional[ClauseElement],
- ) -> Any:
- ...
+ ) -> Any: ...
class IdentityChooser(Protocol):
execution_options: OrmExecuteOptionsParameter,
bind_arguments: _BindArguments,
**kw: Any,
- ) -> Any:
- ...
+ ) -> Any: ...
class ShardedQuery(Query[_T]):
class _HybridGetterType(Protocol[_T_co]):
- def __call__(s, self: Any) -> _T_co:
- ...
+ def __call__(s, self: Any) -> _T_co: ...
class _HybridSetterType(Protocol[_T_con]):
- def __call__(s, self: Any, value: _T_con) -> None:
- ...
+ def __call__(s, self: Any, value: _T_con) -> None: ...
class _HybridUpdaterType(Protocol[_T_con]):
s,
cls: Any,
value: Union[_T_con, _ColumnExpressionArgument[_T_con]],
- ) -> List[Tuple[_DMLColumnArgument, Any]]:
- ...
+ ) -> List[Tuple[_DMLColumnArgument, Any]]: ...
class _HybridDeleterType(Protocol[_T_co]):
- def __call__(s, self: Any) -> None:
- ...
+ def __call__(s, self: Any) -> None: ...
class _HybridExprCallableType(Protocol[_T_co]):
def __call__(
s, cls: Any
- ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]:
- ...
+ ) -> Union[_HasClauseElement[_T_co], SQLColumnExpression[_T_co]]: ...
class _HybridComparatorCallableType(Protocol[_T]):
- def __call__(self, cls: Any) -> Comparator[_T]:
- ...
+ def __call__(self, cls: Any) -> Comparator[_T]: ...
class _HybridClassLevelAccessor(QueryableAttribute[_T]):
if TYPE_CHECKING:
- def getter(self, fget: _HybridGetterType[_T]) -> hybrid_property[_T]:
- ...
+ def getter(
+ self, fget: _HybridGetterType[_T]
+ ) -> hybrid_property[_T]: ...
- def setter(self, fset: _HybridSetterType[_T]) -> hybrid_property[_T]:
- ...
+ def setter(
+ self, fset: _HybridSetterType[_T]
+ ) -> hybrid_property[_T]: ...
- def deleter(self, fdel: _HybridDeleterType[_T]) -> hybrid_property[_T]:
- ...
+ def deleter(
+ self, fdel: _HybridDeleterType[_T]
+ ) -> hybrid_property[_T]: ...
@property
- def overrides(self) -> hybrid_property[_T]:
- ...
+ def overrides(self) -> hybrid_property[_T]: ...
def update_expression(
self, meth: _HybridUpdaterType[_T]
- ) -> hybrid_property[_T]:
- ...
+ ) -> hybrid_property[_T]: ...
class hybrid_method(interfaces.InspectionAttrInfo, Generic[_P, _R]):
@overload
def __get__(
self, instance: Literal[None], owner: Type[object]
- ) -> Callable[_P, SQLCoreOperations[_R]]:
- ...
+ ) -> Callable[_P, SQLCoreOperations[_R]]: ...
@overload
def __get__(
self, instance: object, owner: Type[object]
- ) -> Callable[_P, _R]:
- ...
+ ) -> Callable[_P, _R]: ...
def __get__(
self, instance: Optional[object], owner: Type[object]
util.update_wrapper(self, fget)
@overload
- def __get__(self, instance: Any, owner: Literal[None]) -> Self:
- ...
+ def __get__(self, instance: Any, owner: Literal[None]) -> Self: ...
@overload
def __get__(
self, instance: Literal[None], owner: Type[object]
- ) -> _HybridClassLevelAccessor[_T]:
- ...
+ ) -> _HybridClassLevelAccessor[_T]: ...
@overload
- def __get__(self, instance: object, owner: Type[object]) -> _T:
- ...
+ def __get__(self, instance: object, owner: Type[object]) -> _T: ...
def __get__(
self, instance: Optional[object], owner: Optional[Type[object]]
)(instance)
-orm_instrumentation._instrumentation_factory = (
- _instrumentation_factory
-) = ExtendedInstrumentationRegistry()
+orm_instrumentation._instrumentation_factory = _instrumentation_factory = (
+ ExtendedInstrumentationRegistry()
+)
orm_instrumentation.instrumentation_finders = instrumentation_finders
instance_dict = lookups["instance_dict"]
manager_of_class = lookups["manager_of_class"]
opt_manager_of_class = lookups["opt_manager_of_class"]
- orm_base.instance_state = (
- attributes.instance_state
- ) = orm_instrumentation.instance_state = instance_state
- orm_base.instance_dict = (
- attributes.instance_dict
- ) = orm_instrumentation.instance_dict = instance_dict
- orm_base.manager_of_class = (
- attributes.manager_of_class
- ) = orm_instrumentation.manager_of_class = manager_of_class
- orm_base.opt_manager_of_class = (
- orm_util.opt_manager_of_class
- ) = (
+ orm_base.instance_state = attributes.instance_state = (
+ orm_instrumentation.instance_state
+ ) = instance_state
+ orm_base.instance_dict = attributes.instance_dict = (
+ orm_instrumentation.instance_dict
+ ) = instance_dict
+ orm_base.manager_of_class = attributes.manager_of_class = (
+ orm_instrumentation.manager_of_class
+ ) = manager_of_class
+ orm_base.opt_manager_of_class = orm_util.opt_manager_of_class = (
attributes.opt_manager_of_class
) = orm_instrumentation.opt_manager_of_class = opt_manager_of_class
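This hunk shows Black 24's revised handling of chained assignments: Black 23 broke the line by parenthesizing one of the intermediate targets, while Black 24 keeps every target on the first line and parenthesizes the right-hand value. A sketch with invented, deliberately long names::

    some_module_level_configuration_registry: dict = {}

    # Black 23:
    #
    #     application_default_configuration = (
    #         fallback_configuration_mapping
    #     ) = some_module_level_configuration_registry
    #
    # Black 24:
    application_default_configuration = fallback_configuration_mapping = (
        some_module_level_configuration_registry
    )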
@overload
def setdefault(
self: MutableDict[_KT, Optional[_T]], key: _KT, value: None = None
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
- def setdefault(self, key: _KT, value: _VT) -> _VT:
- ...
+ def setdefault(self, key: _KT, value: _VT) -> _VT: ...
- def setdefault(self, key: _KT, value: object = None) -> object:
- ...
+ def setdefault(self, key: _KT, value: object = None) -> object: ...
else:
if TYPE_CHECKING:
@overload
- def pop(self, __key: _KT, /) -> _VT:
- ...
+ def pop(self, __key: _KT, /) -> _VT: ...
@overload
- def pop(self, __key: _KT, default: _VT | _T, /) -> _VT | _T:
- ...
+ def pop(self, __key: _KT, default: _VT | _T, /) -> _VT | _T: ...
def pop(
self, __key: _KT, __default: _VT | _T | None = None, /
- ) -> _VT | _T:
- ...
+ ) -> _VT | _T: ...
else:
# update the SQLAlchemyAttribute with the better
# information
- mapped_attr_lookup[
- stmt.lvalues[0].name
- ].type = python_type_for_type
+ mapped_attr_lookup[stmt.lvalues[0].name].type = (
+ python_type_for_type
+ )
update_cls_metadata = True
lvalue.is_inferred_def = False
left_node.type = api.named_type(
NAMED_TYPE_SQLA_MAPPED,
- [AnyType(TypeOfAny.special_form)]
- if python_type_for_type is None
- else [python_type_for_type],
+ (
+ [AnyType(TypeOfAny.special_form)]
+ if python_type_for_type is None
+ else [python_type_for_type]
+ ),
)
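Here, and in many hunks below, Black 24 wraps any conditional expression that has to span multiple lines inside brackets in its own parentheses, instead of leaving the bare ``x if cond else y`` to break at the keywords. A self-contained sketch (all names invented)::

    some_illustrative_multiplier = 10
    fallback_length_for_non_matching_values = 0
    values = ["a", "bb", "ccc"]

    # Black 23 left the multi-line conditional bare inside the list
    # comprehension:
    #
    #     lengths = [
    #         len(value) * some_illustrative_multiplier
    #         if value.startswith("b")
    #         else fallback_length_for_non_matching_values
    #         for value in values
    #     ]
    #
    # Black 24 gives the conditional its own parentheses:
    lengths = [
        (
            len(value) * some_illustrative_multiplier
            if value.startswith("b")
            else fallback_length_for_non_matching_values
        )
        for value in values
    ]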
# so to have it skip the right side totally, we can do this:
elif cls.fullname.startswith("builtins"):
return None
- mapped_attributes: Optional[
- List[util.SQLAlchemyAttribute]
- ] = util.get_mapped_attributes(info, api)
+ mapped_attributes: Optional[List[util.SQLAlchemyAttribute]] = (
+ util.get_mapped_attributes(info, api)
+ )
# used by assign.add_additional_orm_attributes among others
util.establish_as_sqlalchemy(info)
@overload
def get_callexpr_kwarg(
callexpr: CallExpr, name: str, *, expr_types: None = ...
-) -> Optional[Union[CallExpr, NameExpr]]:
- ...
+) -> Optional[Union[CallExpr, NameExpr]]: ...
@overload
name: str,
*,
expr_types: Tuple[TypingType[_TArgType], ...],
-) -> Optional[_TArgType]:
- ...
+) -> Optional[_TArgType]: ...
def get_callexpr_kwarg(
return Instance(
bound_type,
[
- unbound_to_instance(api, arg)
- if isinstance(arg, UnboundType)
- else arg
+ (
+ unbound_to_instance(api, arg)
+ if isinstance(arg, UnboundType)
+ else arg
+ )
for arg in typ.args
],
)
"""
- def _sa_inspect_type(self) -> _TCov:
- ...
+ def _sa_inspect_type(self) -> _TCov: ...
class _InspectableProtocol(Protocol[_TCov]):
"""
- def _sa_inspect_instance(self) -> _TCov:
- ...
+ def _sa_inspect_instance(self) -> _TCov: ...
@overload
def inspect(
subject: Type[_InspectableTypeProtocol[_IN]], raiseerr: bool = True
-) -> _IN:
- ...
+) -> _IN: ...
@overload
-def inspect(subject: _InspectableProtocol[_IN], raiseerr: bool = True) -> _IN:
- ...
+def inspect(
+ subject: _InspectableProtocol[_IN], raiseerr: bool = True
+) -> _IN: ...
@overload
-def inspect(subject: Inspectable[_IN], raiseerr: bool = True) -> _IN:
- ...
+def inspect(subject: Inspectable[_IN], raiseerr: bool = True) -> _IN: ...
@overload
-def inspect(subject: Any, raiseerr: Literal[False] = ...) -> Optional[Any]:
- ...
+def inspect(subject: Any, raiseerr: Literal[False] = ...) -> Optional[Any]: ...
@overload
-def inspect(subject: Any, raiseerr: bool = True) -> Any:
- ...
+def inspect(subject: Any, raiseerr: bool = True) -> Any: ...
def inspect(subject: Any, raiseerr: bool = True) -> Any:
@overload
def __get__(
self, instance: Literal[None], owner: Type[Identified]
- ) -> echo_property:
- ...
+ ) -> echo_property: ...
@overload
def __get__(
self, instance: Identified, owner: Type[Identified]
- ) -> _EchoFlagType:
- ...
+ ) -> _EchoFlagType: ...
def __get__(
self, instance: Optional[Identified], owner: Type[Identified]
return mapped_column(
name=name,
- default=default
- if default is not None
- else _InsertSentinelColumnDefault(),
+ default=(
+ default if default is not None else _InsertSentinelColumnDefault()
+ ),
_omit_from_statements=omit_from_statements,
insert_sentinel=True,
use_existing_column=True,
info: Optional[_InfoType] = None,
doc: Optional[str] = None,
**__kw: Any,
-) -> Composite[Any]:
- ...
+) -> Composite[Any]: ...
@overload
info: Optional[_InfoType] = None,
doc: Optional[str] = None,
**__kw: Any,
-) -> Composite[_CC]:
- ...
+) -> Composite[_CC]: ...
@overload
info: Optional[_InfoType] = None,
doc: Optional[str] = None,
**__kw: Any,
-) -> Composite[_CC]:
- ...
+) -> Composite[_CC]: ...
def composite(
name: Optional[str] = None,
flat: bool = False,
adapt_on_names: bool = False,
-) -> AliasedType[_O]:
- ...
+) -> AliasedType[_O]: ...
@overload
name: Optional[str] = None,
flat: bool = False,
adapt_on_names: bool = False,
-) -> AliasedClass[_O]:
- ...
+) -> AliasedClass[_O]: ...
@overload
name: Optional[str] = None,
flat: bool = False,
adapt_on_names: bool = False,
-) -> FromClause:
- ...
+) -> FromClause: ...
def aliased(
"""
- def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE:
- ...
+ def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: ...
class _LoaderCallable(Protocol):
- def __call__(self, state: InstanceState[Any], passive: PassiveFlag) -> Any:
- ...
+ def __call__(
+ self, state: InstanceState[Any], passive: PassiveFlag
+ ) -> Any: ...
def is_orm_option(
if TYPE_CHECKING:
- def insp_is_mapper_property(obj: Any) -> TypeGuard[MapperProperty[Any]]:
- ...
+ def insp_is_mapper_property(
+ obj: Any,
+ ) -> TypeGuard[MapperProperty[Any]]: ...
- def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]:
- ...
+ def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: ...
- def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]:
- ...
+ def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: ...
def insp_is_attribute(
obj: InspectionAttr,
- ) -> TypeGuard[QueryableAttribute[Any]]:
- ...
+ ) -> TypeGuard[QueryableAttribute[Any]]: ...
def attr_is_internal_proxy(
obj: InspectionAttr,
- ) -> TypeGuard[QueryableAttribute[Any]]:
- ...
+ ) -> TypeGuard[QueryableAttribute[Any]]: ...
def prop_is_relationship(
prop: MapperProperty[Any],
- ) -> TypeGuard[RelationshipProperty[Any]]:
- ...
+ ) -> TypeGuard[RelationshipProperty[Any]]: ...
def is_collection_impl(
impl: AttributeImpl,
- ) -> TypeGuard[CollectionAttributeImpl]:
- ...
+ ) -> TypeGuard[CollectionAttributeImpl]: ...
def is_has_collection_adapter(
impl: AttributeImpl,
- ) -> TypeGuard[HasCollectionAdapter]:
- ...
+ ) -> TypeGuard[HasCollectionAdapter]: ...
else:
insp_is_mapper_property = operator.attrgetter("is_property")
self.impl.delete(instance_state(instance), instance_dict(instance))
@overload
- def __get__(self, instance: None, owner: Any) -> InstrumentedAttribute[_T]:
- ...
+ def __get__(
+ self, instance: None, owner: Any
+ ) -> InstrumentedAttribute[_T]: ...
@overload
- def __get__(self, instance: object, owner: Any) -> _T:
- ...
+ def __get__(self, instance: object, owner: Any) -> _T: ...
def __get__(
self, instance: Optional[object], owner: Any
dict_: _InstanceDict,
user_data: Literal[None] = ...,
passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
dict_: _InstanceDict,
user_data: _AdaptedCollectionProtocol = ...,
passive: PassiveFlag = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
passive: PassiveFlag = ...,
) -> Union[
Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
- ]:
- ...
+ ]: ...
def get_collection(
self,
def _is_collection_attribute_impl(
impl: AttributeImpl,
- ) -> TypeGuard[CollectionAttributeImpl]:
- ...
+ ) -> TypeGuard[CollectionAttributeImpl]: ...
else:
_is_collection_attribute_impl = operator.attrgetter("collection")
dict_: _InstanceDict,
user_data: Literal[None] = ...,
passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
dict_: _InstanceDict,
user_data: _AdaptedCollectionProtocol = ...,
passive: PassiveFlag = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
passive: PassiveFlag = PASSIVE_OFF,
) -> Union[
Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
- ]:
- ...
+ ]: ...
def get_collection(
self,
if TYPE_CHECKING:
- def manager_of_class(cls: Type[_O]) -> ClassManager[_O]:
- ...
+ def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: ...
@overload
- def opt_manager_of_class(cls: AliasedClass[Any]) -> None:
- ...
+ def opt_manager_of_class(cls: AliasedClass[Any]) -> None: ...
@overload
def opt_manager_of_class(
cls: _ExternalEntityType[_O],
- ) -> Optional[ClassManager[_O]]:
- ...
+ ) -> Optional[ClassManager[_O]]: ...
def opt_manager_of_class(
cls: _ExternalEntityType[_O],
- ) -> Optional[ClassManager[_O]]:
- ...
+ ) -> Optional[ClassManager[_O]]: ...
- def instance_state(instance: _O) -> InstanceState[_O]:
- ...
+ def instance_state(instance: _O) -> InstanceState[_O]: ...
- def instance_dict(instance: object) -> Dict[str, Any]:
- ...
+ def instance_dict(instance: object) -> Dict[str, Any]: ...
else:
# these can be replaced by sqlalchemy.ext.instrumentation
if TYPE_CHECKING:
- def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]:
- ...
+ def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: ...
else:
_state_mapper = util.dottedgetter("manager.mapper")
if typing.TYPE_CHECKING:
- def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]:
- ...
+ def of_type(
+ self, class_: _EntityType[Any]
+ ) -> PropComparator[_T_co]: ...
def and_(
self, *criteria: _ColumnExpressionArgument[bool]
- ) -> PropComparator[bool]:
- ...
+ ) -> PropComparator[bool]: ...
def any( # noqa: A001
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
def has(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
class ORMDescriptor(Generic[_T_co], TypingOnly):
@overload
def __get__(
self, instance: Any, owner: Literal[None]
- ) -> ORMDescriptor[_T_co]:
- ...
+ ) -> ORMDescriptor[_T_co]: ...
@overload
def __get__(
self, instance: Literal[None], owner: Any
- ) -> SQLCoreOperations[_T_co]:
- ...
+ ) -> SQLCoreOperations[_T_co]: ...
@overload
- def __get__(self, instance: object, owner: Any) -> _T_co:
- ...
+ def __get__(self, instance: object, owner: Any) -> _T_co: ...
def __get__(
self, instance: object, owner: Any
- ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]:
- ...
+ ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: ...
class _MappedAnnotationBase(Generic[_T_co], TypingOnly):
@overload
def __get__(
self, instance: None, owner: Any
- ) -> InstrumentedAttribute[_T_co]:
- ...
+ ) -> InstrumentedAttribute[_T_co]: ...
@overload
- def __get__(self, instance: object, owner: Any) -> _T_co:
- ...
+ def __get__(self, instance: object, owner: Any) -> _T_co: ...
def __get__(
self, instance: Optional[object], owner: Any
- ) -> Union[InstrumentedAttribute[_T_co], _T_co]:
- ...
+ ) -> Union[InstrumentedAttribute[_T_co], _T_co]: ...
@classmethod
- def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]:
- ...
+ def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: ...
def __set__(
self, instance: Any, value: Union[SQLCoreOperations[_T_co], _T_co]
- ) -> None:
- ...
+ ) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
class _MappedAttribute(Generic[_T_co], TypingOnly):
@overload
def __get__(
self, instance: None, owner: Any
- ) -> InstrumentedAttribute[_T_co]:
- ...
+ ) -> InstrumentedAttribute[_T_co]: ...
@overload
def __get__(
self, instance: object, owner: Any
- ) -> AppenderQuery[_T_co]:
- ...
+ ) -> AppenderQuery[_T_co]: ...
def __get__(
self, instance: Optional[object], owner: Any
- ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]:
- ...
+ ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: ...
def __set__(
self, instance: Any, value: typing.Collection[_T_co]
- ) -> None:
- ...
+ ) -> None: ...
class WriteOnlyMapped(_MappedAnnotationBase[_T_co]):
@overload
def __get__(
self, instance: None, owner: Any
- ) -> InstrumentedAttribute[_T_co]:
- ...
+ ) -> InstrumentedAttribute[_T_co]: ...
@overload
def __get__(
self, instance: object, owner: Any
- ) -> WriteOnlyCollection[_T_co]:
- ...
+ ) -> WriteOnlyCollection[_T_co]: ...
def __get__(
self, instance: Optional[object], owner: Any
- ) -> Union[InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co]]:
- ...
+ ) -> Union[
+ InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co]
+ ]: ...
def __set__(
self, instance: Any, value: typing.Collection[_T_co]
- ) -> None:
- ...
+ ) -> None: ...
render_nulls: bool,
use_orm_insert_stmt: Literal[None] = ...,
execution_options: Optional[OrmExecuteOptionsParameter] = ...,
-) -> None:
- ...
+) -> None: ...
@overload
render_nulls: bool,
use_orm_insert_stmt: Optional[dml.Insert] = ...,
execution_options: Optional[OrmExecuteOptionsParameter] = ...,
-) -> cursor.CursorResult[Any]:
- ...
+) -> cursor.CursorResult[Any]: ...
def _bulk_insert(
update_changed_only: bool,
use_orm_update_stmt: Literal[None] = ...,
enable_check_rowcount: bool = True,
-) -> None:
- ...
+) -> None: ...
@overload
update_changed_only: bool,
use_orm_update_stmt: Optional[dml.Update] = ...,
enable_check_rowcount: bool = True,
-) -> _result.Result[Unpack[TupleAny]]:
- ...
+) -> _result.Result[Unpack[TupleAny]]: ...
def _bulk_update(
if desc is NO_VALUE:
yield (
coercions.expect(roles.DMLColumnRole, k),
- coercions.expect(
- roles.ExpressionElementRole,
- v,
- type_=sqltypes.NullType(),
- is_crud=True,
- )
- if needs_to_be_cacheable
- else v,
+ (
+ coercions.expect(
+ roles.ExpressionElementRole,
+ v,
+ type_=sqltypes.NullType(),
+ is_crud=True,
+ )
+ if needs_to_be_cacheable
+ else v
+ ),
)
else:
yield from core_get_crud_kv_pairs(
else:
yield (
k,
- v
- if not needs_to_be_cacheable
- else coercions.expect(
- roles.ExpressionElementRole,
- v,
- type_=sqltypes.NullType(),
- is_crud=True,
+ (
+ v
+ if not needs_to_be_cacheable
+ else coercions.expect(
+ roles.ExpressionElementRole,
+ v,
+ type_=sqltypes.NullType(),
+ is_crud=True,
+ )
),
)
fs = fs.execution_options(**orm_level_statement._execution_options)
fs = fs.options(*orm_level_statement._with_options)
self.select_statement = fs
- self.from_statement_ctx = (
- fsc
- ) = ORMFromStatementCompileState.create_for_statement(fs, compiler)
+ self.from_statement_ctx = fsc = (
+ ORMFromStatementCompileState.create_for_statement(fs, compiler)
+ )
fsc.setup_dml_returning_compile_state(dml_mapper)
dml_level_statement = dml_level_statement._generate()
_ModuleMarker, decl_class_registry["_sa_module_registry"]
)
except KeyError:
- decl_class_registry[
- "_sa_module_registry"
- ] = root_module = _ModuleMarker("_sa_module_registry", None)
+ decl_class_registry["_sa_module_registry"] = root_module = (
+ _ModuleMarker("_sa_module_registry", None)
+ )
tokens = cls.__module__.split(".")
_fallback_dict: Mapping[str, Any] = None # type: ignore
-def _resolver(
- cls: Type[Any], prop: RelationshipProperty[Any]
-) -> Tuple[
+def _resolver(cls: Type[Any], prop: RelationshipProperty[Any]) -> Tuple[
Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]],
Callable[[str, bool], _class_resolver],
]:
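The ``_resolver`` hunk above uses another Black 24 allowance: a long return annotation may be split inside its own brackets while the parameters stay on the ``def`` line, rather than forcing every parameter onto its own line first. A sketch with invented names::

    from typing import Callable, Dict, Tuple


    # Black 23 split the parameter list to make room:
    #
    #     def resolve_pair(
    #         name: str, strict: bool = False
    #     ) -> Tuple[
    #         Dict[str, Callable[[str], str]],
    #         Dict[str, Callable[[str, bool], bool]],
    #     ]: ...
    #
    # Black 24 may keep the parameters inline and split only the
    # return annotation:
    def resolve_pair(name: str, strict: bool = False) -> Tuple[
        Dict[str, Callable[[str], str]],
        Dict[str, Callable[[str, bool], bool]],
    ]:
        return {}, {}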
class _CollectionConverterProtocol(Protocol):
- def __call__(self, collection: _COL) -> _COL:
- ...
+ def __call__(self, collection: _COL) -> _COL: ...
class _AdaptedCollectionProtocol(Protocol):
self.empty
), "This collection adapter is not in the 'empty' state"
self.empty = False
- self.owner_state.dict[
- self._key
- ] = self.owner_state._empty_collections.pop(self._key)
+ self.owner_state.dict[self._key] = (
+ self.owner_state._empty_collections.pop(self._key)
+ )
def _refuse_empty(self) -> NoReturn:
raise sa_exc.InvalidRequestError(
"""An instrumented version of the built-in dict."""
-__canned_instrumentation: util.immutabledict[
- Any, _CollectionFactoryType
-] = util.immutabledict(
- {
- list: InstrumentedList,
- set: InstrumentedSet,
- dict: InstrumentedDict,
- }
+__canned_instrumentation: util.immutabledict[Any, _CollectionFactoryType] = (
+ util.immutabledict(
+ {
+ list: InstrumentedList,
+ set: InstrumentedSet,
+ dict: InstrumentedDict,
+ }
+ )
)
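The ``__canned_instrumentation`` hunk is the annotated-assignment variant of the same movement: Black 23 broke the line by splitting the subscripted annotation itself, while Black 24 keeps the annotation intact and parenthesizes the assigned value. A small sketch, with an invented name::

    import weakref
    from typing import Any

    # Black 23:
    #
    #     _hypothetical_holds: weakref.WeakKeyDictionary[
    #         Any, Any
    #     ] = weakref.WeakKeyDictionary()
    #
    # Black 24:
    _hypothetical_holds: weakref.WeakKeyDictionary[Any, Any] = (
        weakref.WeakKeyDictionary()
    )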
__interfaces: util.immutabledict[
statement: Union[Select, FromStatement],
compiler: Optional[SQLCompiler],
**kw: Any,
- ) -> ORMCompileState:
- ...
+ ) -> ORMCompileState: ...
def _append_dedupe_col_collection(self, obj, col_collection):
dedupe = self.dedupe_columns
and len(statement._compile_options._current_path) > 10
and execution_options.get("compiled_cache", True) is not None
):
- execution_options: util.immutabledict[
- str, Any
- ] = execution_options.union(
- {
- "compiled_cache": None,
- "_cache_disable_reason": "excess depth for "
- "ORM loader options",
- }
+ execution_options: util.immutabledict[str, Any] = (
+ execution_options.union(
+ {
+ "compiled_cache": None,
+ "_cache_disable_reason": "excess depth for "
+ "ORM loader options",
+ }
+ )
)
bind_arguments["clause"] = statement
self.statement = statement
self._label_convention = self._column_naming_convention(
- statement._label_style
- if not statement._is_textual and not statement.is_dml
- else LABEL_STYLE_NONE,
+ (
+ statement._label_style
+ if not statement._is_textual and not statement.is_dml
+ else LABEL_STYLE_NONE
+ ),
self.use_legacy_query_style,
)
for entity in self._entities:
entity.setup_compile_state(self)
- compiler._ordered_columns = (
- compiler._textual_ordered_columns
- ) = False
+ compiler._ordered_columns = compiler._textual_ordered_columns = (
+ False
+ )
# enable looser result column matching. this is shown to be
# needed by test_query.py::TextTest
def get_columns_clause_froms(cls, statement):
return cls._normalize_froms(
itertools.chain.from_iterable(
- element._from_objects
- if "parententity" not in element._annotations
- else [
- element._annotations["parententity"].__clause_element__()
- ]
+ (
+ element._from_objects
+ if "parententity" not in element._annotations
+ else [
+ element._annotations[
+ "parententity"
+ ].__clause_element__()
+ ]
+ )
for element in statement._raw_columns
)
)
# the original expressions outside of the label references
# in order to have them render.
unwrapped_order_by = [
- elem.element
- if isinstance(elem, sql.elements._label_reference)
- else elem
+ (
+ elem.element
+ if isinstance(elem, sql.elements._label_reference)
+ else elem
+ )
for elem in self.order_by
]
"type": ent.type,
"aliased": getattr(insp_ent, "is_aliased_class", False),
"expr": ent.expr,
- "entity": getattr(insp_ent, "entity", None)
- if ent.entity_zero is not None and not insp_ent.is_clause_element
- else None,
+ "entity": (
+ getattr(insp_ent, "entity", None)
+ if ent.entity_zero is not None
+ and not insp_ent.is_clause_element
+ else None
+ ),
}
for ent, insp_ent in [
(_ent, _ent.entity_zero) for _ent in ctx._entities
self,
fn: Callable[..., _T],
cascading: bool = False,
- ):
- ...
+ ): ...
- def __get__(self, instance: Optional[object], owner: Any) -> _T:
- ...
+ def __get__(self, instance: Optional[object], owner: Any) -> _T: ...
- def __set__(self, instance: Any, value: Any) -> None:
- ...
+ def __set__(self, instance: Any, value: Any) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
def __call__(self, fn: Callable[..., _TT]) -> _declared_directive[_TT]:
# extensive fooling of mypy underway...
self,
fn: _DeclaredAttrDecorated[_T],
cascading: bool = False,
- ):
- ...
+ ): ...
- def __set__(self, instance: Any, value: Any) -> None:
- ...
+ def __set__(self, instance: Any, value: Any) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
# this is the Mapped[] API where at class descriptor get time we want
# the type checker to see InstrumentedAttribute[_T]. However the
@overload
def __get__(
self, instance: None, owner: Any
- ) -> InstrumentedAttribute[_T]:
- ...
+ ) -> InstrumentedAttribute[_T]: ...
@overload
- def __get__(self, instance: object, owner: Any) -> _T:
- ...
+ def __get__(self, instance: object, owner: Any) -> _T: ...
def __get__(
self, instance: Optional[object], owner: Any
- ) -> Union[InstrumentedAttribute[_T], _T]:
- ...
+ ) -> Union[InstrumentedAttribute[_T], _T]: ...
@hybridmethod
def _stateful(cls, **kw: Any) -> _stateful_declared_attr[_T]:
for k, v in apply_dc_transforms.items()
}
else:
- cls._sa_apply_dc_transforms = (
- current_transforms
- ) = apply_dc_transforms
+ cls._sa_apply_dc_transforms = current_transforms = (
+ apply_dc_transforms
+ )
super().__init_subclass__(**kw)
if typing.TYPE_CHECKING:
- def _sa_inspect_type(self) -> Mapper[Self]:
- ...
+ def _sa_inspect_type(self) -> Mapper[Self]: ...
- def _sa_inspect_instance(self) -> InstanceState[Self]:
- ...
+ def _sa_inspect_instance(self) -> InstanceState[Self]: ...
_sa_registry: ClassVar[_RegistryType]
"""
- def __init__(self, **kw: Any):
- ...
+ def __init__(self, **kw: Any): ...
def __init_subclass__(cls, **kw: Any) -> None:
if DeclarativeBase in cls.__bases__:
if typing.TYPE_CHECKING:
- def _sa_inspect_type(self) -> Mapper[Self]:
- ...
+ def _sa_inspect_type(self) -> Mapper[Self]: ...
- def _sa_inspect_instance(self) -> InstanceState[Self]:
- ...
+ def _sa_inspect_instance(self) -> InstanceState[Self]: ...
__tablename__: Any
"""String name to assign to the generated
"""
- def __init__(self, **kw: Any):
- ...
+ def __init__(self, **kw: Any): ...
def __init_subclass__(cls, **kw: Any) -> None:
if DeclarativeBaseNoMeta in cls.__bases__:
),
)
@overload
- def mapped_as_dataclass(self, __cls: Type[_O], /) -> Type[_O]:
- ...
+ def mapped_as_dataclass(self, __cls: Type[_O], /) -> Type[_O]: ...
@overload
def mapped_as_dataclass(
match_args: Union[_NoArg, bool] = ...,
kw_only: Union[_NoArg, bool] = ...,
dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]] = ...,
- ) -> Callable[[Type[_O]], Type[_O]]:
- ...
+ ) -> Callable[[Type[_O]], Type[_O]]: ...
def mapped_as_dataclass(
self,
__mapper__: Mapper[_O]
__table__: FromClause
- def __call__(self, **kw: Any) -> _O:
- ...
+ def __call__(self, **kw: Any) -> _O: ...
class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol):
_sa_apply_dc_transforms: Optional[_DataclassArguments]
- def __declare_first__(self) -> None:
- ...
+ def __declare_first__(self) -> None: ...
- def __declare_last__(self) -> None:
- ...
+ def __declare_last__(self) -> None: ...
class _DataclassArguments(TypedDict):
"@declared_attr.cascading; "
"skipping" % (name, cls)
)
- collected_attributes[name] = column_copies[
- obj
- ] = ret = obj.__get__(obj, cls)
+ collected_attributes[name] = column_copies[obj] = (
+ ret
+ ) = obj.__get__(obj, cls)
setattr(cls, name, ret)
else:
if is_dataclass_field:
):
ret = ret.descriptor
- collected_attributes[name] = column_copies[
- obj
- ] = ret
+ collected_attributes[name] = column_copies[obj] = (
+ ret
+ )
if (
isinstance(ret, (Column, MapperProperty))
sum_ = state.manager[self.key].impl.get_all_pending(
state,
state.dict,
- self._passive_delete_flag
- if isdelete
- else attributes.PASSIVE_NO_INITIALIZE,
+ (
+ self._passive_delete_flag
+ if isdelete
+ else attributes.PASSIVE_NO_INITIALIZE
+ ),
)
if not sum_:
and self.composite_class not in _composite_getters
):
if self._generated_composite_accessor is not None:
- _composite_getters[
- self.composite_class
- ] = self._generated_composite_accessor
+ _composite_getters[self.composite_class] = (
+ self._generated_composite_accessor
+ )
elif hasattr(self.composite_class, "__composite_values__"):
- _composite_getters[
- self.composite_class
- ] = lambda obj: obj.__composite_values__()
+ _composite_getters[self.composite_class] = (
+ lambda obj: obj.__composite_values__()
+ )
@util.preload_module("sqlalchemy.orm.properties")
@util.preload_module("sqlalchemy.orm.decl_base")
if TYPE_CHECKING:
- def __iter__(self) -> Iterator[_T]:
- ...
+ def __iter__(self) -> Iterator[_T]: ...
def __getitem__(self, index: Any) -> Union[_T, List[_T]]:
sess = self.session
class _InstanceEventsHold(_EventsHold[_ET]):
- all_holds: weakref.WeakKeyDictionary[
- Any, Any
- ] = weakref.WeakKeyDictionary()
+ all_holds: weakref.WeakKeyDictionary[Any, Any] = (
+ weakref.WeakKeyDictionary()
+ )
def resolve(self, class_: Type[_O]) -> Optional[ClassManager[_O]]:
return instrumentation.opt_manager_of_class(class_)
state: state.InstanceState[Any],
toload: Set[str],
passive: base.PassiveFlag,
- ) -> None:
- ...
+ ) -> None: ...
class _ManagerFactory(Protocol):
- def __call__(self, class_: Type[_O]) -> ClassManager[_O]:
- ...
+ def __call__(self, class_: Type[_O]) -> ClassManager[_O]: ...
class ClassManager(
:attr:`.TypeEngine.comparator_factory`
"""
+
__slots__ = "prop", "_parententity", "_adapt_to_entity"
__visit_name__ = "orm_prop_comparator"
def operate(
self, op: OperatorType, *other: Any, **kwargs: Any
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
def reverse_operate(
self, op: OperatorType, other: Any, **kwargs: Any
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]:
r"""Redefine this object in terms of a polymorphic subclass,
return go
unique_filters = [
- _no_unique
- if context.yield_per
- else _not_hashable(
- ent.column.type, # type: ignore
- legacy=context.load_options._legacy_uniquing,
- uncertain=ent._null_column_type,
- )
- if (
- not ent.use_id_for_hash
- and (ent._non_hashable_value or ent._null_column_type)
+ (
+ _no_unique
+ if context.yield_per
+ else (
+ _not_hashable(
+ ent.column.type, # type: ignore
+ legacy=context.load_options._legacy_uniquing,
+ uncertain=ent._null_column_type,
+ )
+ if (
+ not ent.use_id_for_hash
+ and (ent._non_hashable_value or ent._null_column_type)
+ )
+ else id if ent.use_id_for_hash else None
+ )
)
- else id
- if ent.use_id_for_hash
- else None
for ent in context.compile_state._entities
]
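The ``unique_filters`` hunk above is the most involved case in this diff: Black 23 rendered the chained ``a if c1 else b if c2 else d`` flat, one branch per line, which obscured that conditionals associate to the right; Black 24 parenthesizes each nested level, keeping only an innermost conditional flat when it fits. A reduced sketch::

    def choose_bucket(score: int, use_default: bool) -> str:
        # Black 23, flat:
        #
        #     return (
        #         "high"
        #         if score > 10
        #         else "low"
        #         if score > 0
        #         else "default"
        #         if use_default
        #         else "none"
        #     )
        #
        # Black 24, with the nesting made explicit:
        return (
            "high"
            if score > 10
            else (
                "low"
                if score > 0
                else "default" if use_default else "none"
            )
        )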
return self.__class__, (self.colkeys,)
@classmethod
- def _reduce_from_cols(
- cls, cols: Sequence[ColumnElement[_KT]]
- ) -> Tuple[
+ def _reduce_from_cols(cls, cols: Sequence[ColumnElement[_KT]]) -> Tuple[
Type[_SerializableColumnGetterV2[_KT]],
Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
]:
]
-_mapper_registries: weakref.WeakKeyDictionary[
- _RegistryType, bool
-] = weakref.WeakKeyDictionary()
+_mapper_registries: weakref.WeakKeyDictionary[_RegistryType, bool] = (
+ weakref.WeakKeyDictionary()
+)
def _all_registries() -> Set[registry]:
if self._primary_key_argument:
coerced_pk_arg = [
- self._str_arg_to_mapped_col("primary_key", c)
- if isinstance(c, str)
- else c
+ (
+ self._str_arg_to_mapped_col("primary_key", c)
+ if isinstance(c, str)
+ else c
+ )
for c in (
coercions.expect(
roles.DDLConstraintColumnRole,
return "Mapper[%s%s(%s)]" % (
self.class_.__name__,
self.non_primary and " (non-primary)" or "",
- self.local_table.description
- if self.local_table is not None
- else self.persist_selectable.description,
+ (
+ self.local_table.description
+ if self.local_table is not None
+ else self.persist_selectable.description
+ ),
)
def _is_orphan(self, state: InstanceState[_O]) -> bool:
from ..util.typing import _LiteralStar
from ..util.typing import TypeGuard
- def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]:
- ...
+ def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ...
- def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]:
- ...
+ def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ...
else:
is_root = operator.attrgetter("is_root")
return id(self)
@overload
- def __getitem__(self, entity: _StrPathToken) -> TokenRegistry:
- ...
+ def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ...
@overload
- def __getitem__(self, entity: int) -> _PathElementType:
- ...
+ def __getitem__(self, entity: int) -> _PathElementType: ...
@overload
- def __getitem__(self, entity: slice) -> _PathRepresentation:
- ...
+ def __getitem__(self, entity: slice) -> _PathRepresentation: ...
@overload
def __getitem__(
self, entity: _InternalEntityType[Any]
- ) -> AbstractEntityRegistry:
- ...
+ ) -> AbstractEntityRegistry: ...
@overload
- def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry:
- ...
+ def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry: ...
def __getitem__(
self,
@overload
@classmethod
- def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry:
- ...
+ def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ...
@overload
@classmethod
- def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry:
- ...
+ def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ...
@classmethod
def per_mapper(
def path_is_entity(
path: PathRegistry,
- ) -> TypeGuard[AbstractEntityRegistry]:
- ...
+ ) -> TypeGuard[AbstractEntityRegistry]: ...
- def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]:
- ...
+ def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ...
else:
path_is_entity = operator.attrgetter("is_entity")
state_dict,
sub_mapper,
connection,
- mapper._get_committed_state_attr_by_column(
- state, state_dict, mapper.version_id_col
- )
- if mapper.version_id_col is not None
- else None,
+ (
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict, mapper.version_id_col
+ )
+ if mapper.version_id_col is not None
+ else None
+ ),
)
for state, state_dict, sub_mapper, connection in states_to_update
if table in sub_mapper._pks_by_table
params = {}
for col in mapper._pks_by_table[table]:
- params[
- col.key
- ] = value = mapper._get_committed_state_attr_by_column(
- state, state_dict, col
+ params[col.key] = value = (
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict, col
+ )
)
if value is None:
raise orm_exc.FlushError(
c.context.compiled_parameters[0],
value_params,
True,
- c.returned_defaults
- if not c.context.executemany
- else None,
+ (
+ c.returned_defaults
+ if not c.context.executemany
+ else None
+ ),
)
if check_rowcount:
last_inserted_params,
value_params,
False,
- result.returned_defaults
- if not result.context.executemany
- else None,
+ (
+ result.returned_defaults
+ if not result.context.executemany
+ else None
+ ),
)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
result.context.compiled_parameters[0],
value_params,
False,
- result.returned_defaults
- if not result.context.executemany
- else None,
+ (
+ result.returned_defaults
+ if not result.context.executemany
+ else None
+ ),
)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
if TYPE_CHECKING:
- def __clause_element__(self) -> NamedColumn[_PT]:
- ...
+ def __clause_element__(self) -> NamedColumn[_PT]: ...
def _memoized_method___clause_element__(
self,
return [
(
self.column,
- self._sort_order
- if self._sort_order is not _NoArg.NO_ARG
- else 0,
+ (
+ self._sort_order
+ if self._sort_order is not _NoArg.NO_ARG
+ else 0
+ ),
)
]
Executable,
Generic[_T],
):
-
"""ORM-level SQL construction object.
.. legacy:: The ORM :class:`.Query` object is a legacy construct
_memoized_select_entities = ()
- _compile_options: Union[
- Type[CacheableOptions], CacheableOptions
- ] = ORMCompileState.default_compile_options
+ _compile_options: Union[Type[CacheableOptions], CacheableOptions] = (
+ ORMCompileState.default_compile_options
+ )
_with_options: Tuple[ExecutableOption, ...]
load_options = QueryContext.default_load_options + {
@overload
def as_scalar(
self: Query[Tuple[_MAYBE_ENTITY]],
- ) -> ScalarSelect[_MAYBE_ENTITY]:
- ...
+ ) -> ScalarSelect[_MAYBE_ENTITY]: ...
@overload
def as_scalar(
self: Query[Tuple[_NOT_ENTITY]],
- ) -> ScalarSelect[_NOT_ENTITY]:
- ...
+ ) -> ScalarSelect[_NOT_ENTITY]: ...
@overload
- def as_scalar(self) -> ScalarSelect[Any]:
- ...
+ def as_scalar(self) -> ScalarSelect[Any]: ...
@util.deprecated(
"1.4",
@overload
def scalar_subquery(
self: Query[Tuple[_MAYBE_ENTITY]],
- ) -> ScalarSelect[Any]:
- ...
+ ) -> ScalarSelect[Any]: ...
@overload
def scalar_subquery(
self: Query[Tuple[_NOT_ENTITY]],
- ) -> ScalarSelect[_NOT_ENTITY]:
- ...
+ ) -> ScalarSelect[_NOT_ENTITY]: ...
@overload
- def scalar_subquery(self) -> ScalarSelect[Any]:
- ...
+ def scalar_subquery(self) -> ScalarSelect[Any]: ...
def scalar_subquery(self) -> ScalarSelect[Any]:
"""Return the full SELECT statement represented by this
@overload
def only_return_tuples(
self: Query[_O], value: Literal[True]
- ) -> RowReturningQuery[_O]:
- ...
+ ) -> RowReturningQuery[_O]: ...
@overload
def only_return_tuples(
self: Query[_O], value: Literal[False]
- ) -> Query[_O]:
- ...
+ ) -> Query[_O]: ...
@_generative
def only_return_tuples(self, value: bool) -> Query[Any]:
return None
@overload
- def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]:
- ...
+ def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]: ...
@overload
def with_entities(
self,
_colexpr: roles.TypedColumnsClauseRole[_T],
- ) -> RowReturningQuery[Tuple[_T]]:
- ...
+ ) -> RowReturningQuery[Tuple[_T]]: ...
# START OVERLOADED FUNCTIONS self.with_entities RowReturningQuery 2-8
@overload
def with_entities(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /
- ) -> RowReturningQuery[_T0, _T1]:
- ...
+ ) -> RowReturningQuery[_T0, _T1]: ...
@overload
def with_entities(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
- ) -> RowReturningQuery[_T0, _T1, _T2]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2]: ...
@overload
def with_entities(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3]: ...
@overload
def with_entities(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def with_entities(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def with_entities(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def with_entities(
*entities: _ColumnsClauseArgument[Any],
) -> RowReturningQuery[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.with_entities
@overload
def with_entities(
self, *entities: _ColumnsClauseArgument[Any]
- ) -> Query[Any]:
- ...
+ ) -> Query[Any]: ...
@_generative
def with_entities(
populate_existing: bool = False,
autoflush: bool = False,
**opt: Any,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
- def execution_options(self, **opt: Any) -> Self:
- ...
+ def execution_options(self, **opt: Any) -> Self: ...
@_generative
def execution_options(self, **kwargs: Any) -> Self:
state,
dict_,
column,
- passive=PassiveFlag.PASSIVE_OFF
- if state.persistent
- else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK,
+ passive=(
+ PassiveFlag.PASSIVE_OFF
+ if state.persistent
+ else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK
+ ),
)
if current_value is LoaderCallableStatus.NEVER_SET:
"the single_parent=True flag."
% {
"rel": self,
- "direction": "many-to-one"
- if self.direction is MANYTOONE
- else "many-to-many",
+ "direction": (
+ "many-to-one"
+ if self.direction is MANYTOONE
+ else "many-to-many"
+ ),
"clsname": self.parent.class_.__name__,
"relatedcls": self.mapper.class_.__name__,
},
def _setup_pairs(self) -> None:
sync_pairs: _MutableColumnPairs = []
- lrp: util.OrderedSet[
- Tuple[ColumnElement[Any], ColumnElement[Any]]
- ] = util.OrderedSet([])
+ lrp: util.OrderedSet[Tuple[ColumnElement[Any], ColumnElement[Any]]] = (
+ util.OrderedSet([])
+ )
secondary_sync_pairs: _MutableColumnPairs = []
def go(
# level configuration that benefits from this warning.
if to_ not in self._track_overlapping_sync_targets:
- self._track_overlapping_sync_targets[
- to_
- ] = weakref.WeakKeyDictionary({self.prop: from_})
+ self._track_overlapping_sync_targets[to_] = (
+ weakref.WeakKeyDictionary({self.prop: from_})
+ )
else:
other_props = []
prop_to_from = self._track_overlapping_sync_targets[to_]
dest_selectable,
)
- def create_lazy_clause(
- self, reverse_direction: bool = False
- ) -> Tuple[
+ def create_lazy_clause(self, reverse_direction: bool = False) -> Tuple[
ColumnElement[bool],
Dict[str, ColumnElement[Any]],
Dict[ColumnElement[Any], ColumnElement[Any]],
"""
- def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]:
- ...
+ def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]: ...
_O = TypeVar("_O", bound=object)
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[_Ts]]:
- ...
+ ) -> Result[Unpack[_Ts]]: ...
@overload
def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
@overload
def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[TupleAny]]:
- ...
+ ) -> Result[Unpack[TupleAny]]: ...
def execute(
self,
return self._proxied.merge(instance, load=load, options=options)
@overload
- def query(self, _entity: _EntityType[_O]) -> Query[_O]:
- ...
+ def query(self, _entity: _EntityType[_O]) -> Query[_O]: ...
@overload
def query(
self, _colexpr: TypedColumnsClauseRole[_T]
- ) -> RowReturningQuery[_T]:
- ...
+ ) -> RowReturningQuery[_T]: ...
# START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8
@overload
def query(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /
- ) -> RowReturningQuery[_T0, _T1]:
- ...
+ ) -> RowReturningQuery[_T0, _T1]: ...
@overload
def query(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
- ) -> RowReturningQuery[_T0, _T1, _T2]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2]: ...
@overload
def query(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3]: ...
@overload
def query(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def query(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def query(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def query(
*entities: _ColumnsClauseArgument[Any],
) -> RowReturningQuery[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.query
@overload
def query(
self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
- ) -> Query[Any]:
- ...
+ ) -> Query[Any]: ...
def query(
self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
def scalar(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Any:
- ...
+ ) -> Any: ...
def scalar(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
def scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
def scalars(
self,
"object_session",
]
-_sessions: weakref.WeakValueDictionary[
- int, Session
-] = weakref.WeakValueDictionary()
+_sessions: weakref.WeakValueDictionary[int, Session] = (
+ weakref.WeakValueDictionary()
+)
"""Weak-referencing dictionary of :class:`.Session` objects.
"""
mapper: Optional[Mapper[Any]] = None,
instance: Optional[object] = None,
**kw: Any,
- ) -> Connection:
- ...
+ ) -> Connection: ...
def _state_session(state: InstanceState[Any]) -> Optional[Session]:
def _begin(self, nested: bool = False) -> SessionTransaction:
return SessionTransaction(
self.session,
- SessionTransactionOrigin.BEGIN_NESTED
- if nested
- else SessionTransactionOrigin.SUBTRANSACTION,
+ (
+ SessionTransactionOrigin.BEGIN_NESTED
+ if nested
+ else SessionTransactionOrigin.SUBTRANSACTION
+ ),
self,
)
)
trans = SessionTransaction(
self,
- SessionTransactionOrigin.BEGIN
- if begin
- else SessionTransactionOrigin.AUTOBEGIN,
+ (
+ SessionTransactionOrigin.BEGIN
+ if begin
+ else SessionTransactionOrigin.AUTOBEGIN
+ ),
)
assert self._transaction is trans
return trans
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
_scalar_result: Literal[True] = ...,
- ) -> Any:
- ...
+ ) -> Any: ...
@overload
def _execute_internal(
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
_scalar_result: bool = ...,
- ) -> Result[Unpack[TupleAny]]:
- ...
+ ) -> Result[Unpack[TupleAny]]: ...
def _execute_internal(
self,
)
if compile_state_cls:
- result: Result[
- Unpack[TupleAny]
- ] = compile_state_cls.orm_execute_statement(
- self,
- statement,
- params or {},
- execution_options,
- bind_arguments,
- conn,
+ result: Result[Unpack[TupleAny]] = (
+ compile_state_cls.orm_execute_statement(
+ self,
+ statement,
+ params or {},
+ execution_options,
+ bind_arguments,
+ conn,
+ )
)
else:
result = conn.execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[_Ts]]:
- ...
+ ) -> Result[Unpack[_Ts]]: ...
@overload
def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> CursorResult[Unpack[TupleAny]]:
- ...
+ ) -> CursorResult[Unpack[TupleAny]]: ...
@overload
def execute(
bind_arguments: Optional[_BindArguments] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
- ) -> Result[Unpack[TupleAny]]:
- ...
+ ) -> Result[Unpack[TupleAny]]: ...
def execute(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Optional[_T]:
- ...
+ ) -> Optional[_T]: ...
@overload
def scalar(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> Any:
- ...
+ ) -> Any: ...
def scalar(
self,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[_T]:
- ...
+ ) -> ScalarResult[_T]: ...
@overload
def scalars(
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[_BindArguments] = None,
**kw: Any,
- ) -> ScalarResult[Any]:
- ...
+ ) -> ScalarResult[Any]: ...
def scalars(
self,
)
@overload
- def query(self, _entity: _EntityType[_O]) -> Query[_O]:
- ...
+ def query(self, _entity: _EntityType[_O]) -> Query[_O]: ...
@overload
def query(
self, _colexpr: TypedColumnsClauseRole[_T]
- ) -> RowReturningQuery[_T]:
- ...
+ ) -> RowReturningQuery[_T]: ...
# START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8
@overload
def query(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /
- ) -> RowReturningQuery[_T0, _T1]:
- ...
+ ) -> RowReturningQuery[_T0, _T1]: ...
@overload
def query(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
- ) -> RowReturningQuery[_T0, _T1, _T2]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2]: ...
@overload
def query(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3]: ...
@overload
def query(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def query(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def query(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
- ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> RowReturningQuery[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def query(
*entities: _ColumnsClauseArgument[Any],
) -> RowReturningQuery[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.query
@overload
def query(
self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
- ) -> Query[Any]:
- ...
+ ) -> Query[Any]: ...
def query(
self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
if correct_keys:
primary_key_identity = dict(primary_key_identity)
for k in correct_keys:
- primary_key_identity[
- pk_synonyms[k]
- ] = primary_key_identity[k]
+ primary_key_identity[pk_synonyms[k]] = (
+ primary_key_identity[k]
+ )
try:
primary_key_identity = list(
expire_on_commit: bool = ...,
info: Optional[_InfoType] = ...,
**kw: Any,
- ):
- ...
+ ): ...
@overload
def __init__(
expire_on_commit: bool = ...,
info: Optional[_InfoType] = ...,
**kw: Any,
- ):
- ...
+ ): ...
def __init__(
self,
class _InstanceDictProto(Protocol):
- def __call__(self) -> Optional[IdentityMap]:
- ...
+ def __call__(self) -> Optional[IdentityMap]: ...
class _InstallLoaderCallableProto(Protocol[_O]):
state: InstanceState[_O],
dict_: _InstanceDict,
row: Row[Unpack[TupleAny]],
- ) -> None:
- ...
+ ) -> None: ...
@inspection._self_inspects
key,
self,
loadopt,
- loadopt._generate_extra_criteria(context)
- if loadopt._extra_criteria
- else None,
+ (
+ loadopt._generate_extra_criteria(context)
+ if loadopt._extra_criteria
+ else None
+ ),
),
key,
)
elif ltj > 2:
middle = [
(
- orm_util.AliasedClass(item[0])
- if not inspect(item[0]).is_aliased_class
- else item[0].entity,
+ (
+ orm_util.AliasedClass(item[0])
+ if not inspect(item[0]).is_aliased_class
+ else item[0].entity
+ ),
item[1],
)
for item in to_join[1:-1]
to_adapt = orm_util.AliasedClass(
self.mapper,
- alias=alt_selectable._anonymous_fromclause(flat=True)
- if alt_selectable is not None
- else None,
+ alias=(
+ alt_selectable._anonymous_fromclause(flat=True)
+ if alt_selectable is not None
+ else None
+ ),
flat=True,
use_mapper_path=True,
)
loader = self._set_relationship_strategy(
attr,
{"lazy": "joined"},
- opts={"innerjoin": innerjoin}
- if innerjoin is not None
- else util.EMPTY_DICT,
+ opts=(
+ {"innerjoin": innerjoin}
+ if innerjoin is not None
+ else util.EMPTY_DICT
+ ),
)
return loader
return self
@overload
- def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey:
- ...
+ def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey: ...
@overload
- def _coerce_strat(self, strategy: Literal[None]) -> None:
- ...
+ def _coerce_strat(self, strategy: Literal[None]) -> None: ...
def _coerce_strat(
self, strategy: Optional[_StrategySpec]
d["_extra_criteria"] = ()
if self._path_with_polymorphic_path:
- d[
- "_path_with_polymorphic_path"
- ] = self._path_with_polymorphic_path.serialize()
+ d["_path_with_polymorphic_path"] = (
+ self._path_with_polymorphic_path.serialize()
+ )
if self._of_type:
if self._of_type.is_aliased_class:
*,
str_cleanup_fn: Optional[Callable[[str, str], str]] = None,
include_generic: bool = False,
- ) -> Type[Any]:
- ...
+ ) -> Type[Any]: ...
de_stringify_annotation = cast(
originating_module: str,
*,
str_cleanup_fn: Optional[Callable[[str, str], str]] = None,
- ) -> Type[Any]:
- ...
+ ) -> Type[Any]: ...
de_stringify_union_elements = cast(
class _EvalNameOnly(Protocol):
- def __call__(self, name: str, module_name: str) -> Any:
- ...
+ def __call__(self, name: str, module_name: str) -> Any: ...
eval_name_only = cast(_EvalNameOnly, _de_stringify_partial(_eval_name_only))
insp,
alias,
name,
- with_polymorphic_mappers
- if with_polymorphic_mappers
- else mapper.with_polymorphic_mappers,
- with_polymorphic_discriminator
- if with_polymorphic_discriminator is not None
- else mapper.polymorphic_on,
+ (
+ with_polymorphic_mappers
+ if with_polymorphic_mappers
+ else mapper.with_polymorphic_mappers
+ ),
+ (
+ with_polymorphic_discriminator
+ if with_polymorphic_discriminator is not None
+ else mapper.polymorphic_on
+ ),
base_alias,
use_mapper_path,
adapt_on_names,
self._weak_entity = weakref.ref(entity)
self.mapper = mapper
- self.selectable = (
- self.persist_selectable
- ) = self.local_table = selectable
+ self.selectable = self.persist_selectable = self.local_table = (
+ selectable
+ )
self.name = name
self.polymorphic_on = polymorphic_on
self._base_alias = weakref.ref(_base_alias or self)
self,
obj: _CE,
key: Optional[str] = None,
- ) -> _CE:
- ...
+ ) -> _CE: ...
else:
_orm_adapt_element = _adapt_element
dict_: _InstanceDict,
user_data: Literal[None] = ...,
passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
dict_: _InstanceDict,
user_data: _AdaptedCollectionProtocol = ...,
passive: PassiveFlag = ...,
- ) -> CollectionAdapter:
- ...
+ ) -> CollectionAdapter: ...
@overload
def get_collection(
passive: PassiveFlag = ...,
) -> Union[
Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
- ]:
- ...
+ ]: ...
def get_collection(
self,
class _CreatorFnType(Protocol):
- def __call__(self) -> DBAPIConnection:
- ...
+ def __call__(self) -> DBAPIConnection: ...
class _CreatorWRecFnType(Protocol):
- def __call__(self, rec: ConnectionPoolEntry) -> DBAPIConnection:
- ...
+ def __call__(self, rec: ConnectionPoolEntry) -> DBAPIConnection: ...
class Pool(log.Identified, event.EventTarget):
-
"""Abstract base class for connection pools."""
dispatch: dispatcher[Pool]
class _ConnectionRecord(ConnectionPoolEntry):
-
"""Maintains a position in a connection pool which references a pooled
connection.
rec.fairy_ref = ref = weakref.ref(
fairy,
- lambda ref: _finalize_fairy(
- None, rec, pool, ref, echo, transaction_was_reset=False
- )
- if _finalize_fairy is not None
- else None,
+ lambda ref: (
+ _finalize_fairy(
+ None, rec, pool, ref, echo, transaction_was_reset=False
+ )
+ if _finalize_fairy is not None
+ else None
+ ),
)
_strong_ref_connection_records[ref] = rec
if echo:
if typing.TYPE_CHECKING:
- def commit(self) -> None:
- ...
+ def commit(self) -> None: ...
- def cursor(self) -> DBAPICursor:
- ...
+ def cursor(self) -> DBAPICursor: ...
- def rollback(self) -> None:
- ...
+ def rollback(self) -> None: ...
@property
def is_valid(self) -> bool:
class _ConnectionFairy(PoolProxiedConnection):
-
"""Proxies a DBAPI connection and provides return-on-dereference
support.
class QueuePool(Pool):
-
"""A :class:`_pool.Pool`
that imposes a limit on the number of open connections.
_is_asyncio = False # type: ignore[assignment]
- _queue_class: Type[
- sqla_queue.QueueCommon[ConnectionPoolEntry]
- ] = sqla_queue.Queue
+ _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = (
+ sqla_queue.Queue
+ )
_pool: sqla_queue.QueueCommon[ConnectionPoolEntry]
class AsyncAdaptedQueuePool(QueuePool):
_is_asyncio = True # type: ignore[assignment]
- _queue_class: Type[
- sqla_queue.QueueCommon[ConnectionPoolEntry]
- ] = sqla_queue.AsyncAdaptedQueue
+ _queue_class: Type[sqla_queue.QueueCommon[ConnectionPoolEntry]] = (
+ sqla_queue.AsyncAdaptedQueue
+ )
_dialect = _AsyncConnDialect()
class NullPool(Pool):
-
"""A Pool which does not pool connections.
Instead it literally opens and closes the underlying DB-API connection
class SingletonThreadPool(Pool):
-
"""A Pool that maintains one connection per thread.
Maintains one connection per each thread, never moving a connection to a
class StaticPool(Pool):
-
"""A Pool of exactly one connection, used for all requests.
Reconnect-related functions such as ``recycle`` and connection
class AssertionPool(Pool):
-
"""A :class:`_pool.Pool` that allows at most one checked out connection at
any given time.
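The pool-class hunks in this region are not about line wrapping at all: Black 24 deletes the blank line that Black 23 tolerated between a ``class`` statement and its docstring, and (as in the comparator hunk further up) inserts a blank line between a docstring and the code that follows it. A sketch::

    class _HypotheticalPool:
        """A placeholder docstring for illustration."""

        # Black 23 accepted a blank line between the ``class`` line and
        # this docstring; Black 24 removes it, and enforces the blank
        # line above between the docstring and this first member.
        marker = True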
@overload
-def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]:
- ...
+def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ...
@overload
-def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]:
- ...
+def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: ...
def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]:
@overload
-def select(__ent0: _TCCA[_T0], /) -> Select[_T0]:
- ...
+def select(__ent0: _TCCA[_T0], /) -> Select[_T0]: ...
@overload
-def select(__ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /) -> Select[_T0, _T1]:
- ...
+def select(__ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /) -> Select[_T0, _T1]: ...
@overload
def select(
__ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
-) -> Select[_T0, _T1, _T2]:
- ...
+) -> Select[_T0, _T1, _T2]: ...
@overload
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
-) -> Select[_T0, _T1, _T2, _T3]:
- ...
+) -> Select[_T0, _T1, _T2, _T3]: ...
@overload
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
-) -> Select[_T0, _T1, _T2, _T3, _T4]:
- ...
+) -> Select[_T0, _T1, _T2, _T3, _T4]: ...
@overload
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
-) -> Select[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+) -> Select[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
-) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
__ent6: _TCCA[_T6],
__ent7: _TCCA[_T7],
/,
-) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]:
- ...
+) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]: ...
@overload
__ent7: _TCCA[_T7],
__ent8: _TCCA[_T8],
/,
-) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]:
- ...
+) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]: ...
@overload
*entities: _ColumnsClauseArgument[Any],
) -> Select[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9, Unpack[TupleAny]
-]:
- ...
+]: ...
# END OVERLOADED FUNCTIONS select
@overload
def select(
*entities: _ColumnsClauseArgument[Any], **__kw: Any
-) -> Select[Unpack[TupleAny]]:
- ...
+) -> Select[Unpack[TupleAny]]: ...
def select(
class _HasClauseElement(Protocol, Generic[_T_co]):
"""indicates a class that has a __clause_element__() method"""
- def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]:
- ...
+ def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ...
class _CoreAdapterProto(Protocol):
"""protocol for the ClauseAdapter/ColumnAdapter.traverse() method."""
- def __call__(self, obj: _CE) -> _CE:
- ...
+ def __call__(self, obj: _CE) -> _CE: ...
# match column types that are not ORM entities
if TYPE_CHECKING:
- def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]:
- ...
+ def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ...
- def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]:
- ...
+ def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: ...
- def is_named_from_clause(t: FromClauseRole) -> TypeGuard[NamedFromClause]:
- ...
+ def is_named_from_clause(
+ t: FromClauseRole,
+ ) -> TypeGuard[NamedFromClause]: ...
- def is_column_element(c: ClauseElement) -> TypeGuard[ColumnElement[Any]]:
- ...
+ def is_column_element(
+ c: ClauseElement,
+ ) -> TypeGuard[ColumnElement[Any]]: ...
def is_keyed_column_element(
c: ClauseElement,
- ) -> TypeGuard[KeyedColumnElement[Any]]:
- ...
+ ) -> TypeGuard[KeyedColumnElement[Any]]: ...
- def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]:
- ...
+ def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: ...
- def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]:
- ...
+ def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: ...
- def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]:
- ...
+ def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: ...
- def is_table_value_type(t: TypeEngine[Any]) -> TypeGuard[TableValueType]:
- ...
+ def is_table_value_type(
+ t: TypeEngine[Any],
+ ) -> TypeGuard[TableValueType]: ...
- def is_selectable(t: Any) -> TypeGuard[Selectable]:
- ...
+ def is_selectable(t: Any) -> TypeGuard[Selectable]: ...
def is_select_base(
t: Union[Executable, ReturnsRows]
- ) -> TypeGuard[SelectBase]:
- ...
+ ) -> TypeGuard[SelectBase]: ...
def is_select_statement(
t: Union[Executable, ReturnsRows]
- ) -> TypeGuard[Select[Unpack[TupleAny]]]:
- ...
+ ) -> TypeGuard[Select[Unpack[TupleAny]]]: ...
- def is_table(t: FromClause) -> TypeGuard[TableClause]:
- ...
+ def is_table(t: FromClause) -> TypeGuard[TableClause]: ...
- def is_subquery(t: FromClause) -> TypeGuard[Subquery]:
- ...
+ def is_subquery(t: FromClause) -> TypeGuard[Subquery]: ...
- def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]:
- ...
+ def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: ...
else:
is_sql_compiler = operator.attrgetter("is_sql")
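These ``TypeGuard`` stubs exist only for the type checker; as the ``else``
branch shows, at runtime the same names are bound to plain attribute getters.
A minimal sketch of the pattern, using the real ``is_sql_compiler`` name from
the hunk above; the import lines are my reconstruction, not part of this
diff::

    import operator
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import TypeGuard

        from sqlalchemy.sql.compiler import Compiled, SQLCompiler

        # typing-only stub: lets the checker narrow c to SQLCompiler
        def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ...

    else:
        # runtime binding: just reads the boolean flag attribute,
        # with no wrapper-function call overhead
        is_sql_compiler = operator.attrgetter("is_sql")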
@overload
def Nullable(
val: "SQLCoreOperations[_T]",
-) -> "SQLCoreOperations[Optional[_T]]":
- ...
+) -> "SQLCoreOperations[Optional[_T]]": ...
@overload
def Nullable(
val: roles.ExpressionElementRole[_T],
-) -> roles.ExpressionElementRole[Optional[_T]]:
- ...
+) -> roles.ExpressionElementRole[Optional[_T]]: ...
@overload
-def Nullable(val: Type[_T]) -> Type[Optional[_T]]:
- ...
+def Nullable(val: Type[_T]) -> Type[Optional[_T]]: ...
def Nullable(
@overload
def NotNullable(
val: "SQLCoreOperations[Optional[_T]]",
-) -> "SQLCoreOperations[_T]":
- ...
+) -> "SQLCoreOperations[_T]": ...
@overload
def NotNullable(
val: roles.ExpressionElementRole[Optional[_T]],
-) -> roles.ExpressionElementRole[_T]:
- ...
+) -> roles.ExpressionElementRole[_T]: ...
@overload
-def NotNullable(val: Type[Optional[_T]]) -> Type[_T]:
- ...
+def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: ...
@overload
-def NotNullable(val: Optional[Type[_T]]) -> Type[_T]:
- ...
+def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: ...
def NotNullable(
self,
values: Literal[None] = ...,
clone: bool = ...,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
def _deannotate(
self,
values: Sequence[str] = ...,
clone: bool = ...,
- ) -> SupportsAnnotations:
- ...
+ ) -> SupportsAnnotations: ...
def _deannotate(
self,
tuple(
(
key,
- value._gen_cache_key(anon_map, [])
- if isinstance(value, HasCacheKey)
- else value,
+ (
+ value._gen_cache_key(anon_map, [])
+ if isinstance(value, HasCacheKey)
+ else value
+ ),
)
for key, value in [
(key, self._annotations[key])
if TYPE_CHECKING:
@util.ro_non_memoized_property
- def entity_namespace(self) -> _EntityNamespace:
- ...
+ def entity_namespace(self) -> _EntityNamespace: ...
def _annotate(self, values: _AnnotationDict) -> Self:
"""return a copy of this ClauseElement with annotations
self,
values: Literal[None] = ...,
clone: bool = ...,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
def _deannotate(
self,
values: Sequence[str] = ...,
clone: bool = ...,
- ) -> SupportsAnnotations:
- ...
+ ) -> SupportsAnnotations: ...
def _deannotate(
self,
self,
values: Literal[None] = ...,
clone: bool = ...,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
def _deannotate(
self,
values: Sequence[str] = ...,
clone: bool = ...,
- ) -> SupportsAnnotations:
- ...
+ ) -> SupportsAnnotations: ...
def _deannotate(
self,
self,
values: Literal[None] = ...,
clone: bool = ...,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
def _deannotate(
self,
values: Sequence[str] = ...,
clone: bool = ...,
- ) -> Annotated:
- ...
+ ) -> Annotated: ...
def _deannotate(
self,
# so that the resulting objects are pickleable; additionally, other
# decisions can be made up front about the type of object being annotated
# just once per class rather than per-instance.
-annotated_classes: Dict[
- Type[SupportsWrappingAnnotations], Type[Annotated]
-] = {}
+annotated_classes: Dict[Type[SupportsWrappingAnnotations], Type[Annotated]] = (
+ {}
+)
_SA = TypeVar("_SA", bound="SupportsAnnotations")
@overload
def _deep_deannotate(
element: Literal[None], values: Optional[Sequence[str]] = None
-) -> Literal[None]:
- ...
+) -> Literal[None]: ...
@overload
def _deep_deannotate(
element: _SA, values: Optional[Sequence[str]] = None
-) -> _SA:
- ...
+) -> _SA: ...
def _deep_deannotate(
class _EntityNamespace(Protocol):
- def __getattr__(self, key: str) -> SQLCoreOperations[Any]:
- ...
+ def __getattr__(self, key: str) -> SQLCoreOperations[Any]: ...
class _HasEntityNamespace(Protocol):
@util.ro_non_memoized_property
- def entity_namespace(self) -> _EntityNamespace:
- ...
+ def entity_namespace(self) -> _EntityNamespace: ...
def _is_has_entity_namespace(element: Any) -> TypeGuard[_HasEntityNamespace]:
class _GenerativeType(Protocol):
- def _generate(self) -> Self:
- ...
+ def _generate(self) -> Self: ...
def _generative(fn: _Fn) -> _Fn:
if TYPE_CHECKING:
- def __getattr__(self, key: str) -> Any:
- ...
+ def __getattr__(self, key: str) -> Any: ...
- def __setattr__(self, key: str, value: Any) -> None:
- ...
+ def __setattr__(self, key: str, value: Any) -> None: ...
- def __delattr__(self, key: str) -> None:
- ...
+ def __delattr__(self, key: str) -> None: ...
class Options(metaclass=_MetaOptions):
if TYPE_CHECKING:
- def __getattr__(self, key: str) -> Any:
- ...
+ def __getattr__(self, key: str) -> Any: ...
- def __setattr__(self, key: str, value: Any) -> None:
- ...
+ def __setattr__(self, key: str, value: Any) -> None: ...
- def __delattr__(self, key: str) -> None:
- ...
+ def __delattr__(self, key: str) -> None: ...
class CacheableOptions(Options, HasCacheKey):
**kw: Any,
) -> Tuple[
Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats
- ]:
- ...
+ ]: ...
def _execute_on_connection(
self,
connection: Connection,
distilled_params: _CoreMultiExecuteParams,
execution_options: CoreExecuteOptionsParameter,
- ) -> CursorResult[Any]:
- ...
+ ) -> CursorResult[Any]: ...
def _execute_on_scalar(
self,
connection: Connection,
distilled_params: _CoreMultiExecuteParams,
execution_options: CoreExecuteOptionsParameter,
- ) -> Any:
- ...
+ ) -> Any: ...
@util.ro_non_memoized_property
def _all_selected_columns(self):
is_delete_using: bool = ...,
is_update_from: bool = ...,
**opt: Any,
- ) -> Self:
- ...
+ ) -> Self: ...
@overload
- def execution_options(self, **opt: Any) -> Self:
- ...
+ def execution_options(self, **opt: Any) -> Self: ...
@_generative
def execution_options(self, **kw: Any) -> Self:
return iter([col for _, col, _ in self._collection])
@overload
- def __getitem__(self, key: Union[str, int]) -> _COL_co:
- ...
+ def __getitem__(self, key: Union[str, int]) -> _COL_co: ...
@overload
def __getitem__(
self, key: Tuple[Union[str, int], ...]
- ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]:
- ...
+ ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ...
@overload
def __getitem__(
self, key: slice
- ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]:
- ...
+ ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ...
def __getitem__(
self, key: Union[str, int, slice, Tuple[Union[str, int], ...]]
class _CacheKeyTraversalDispatchType(Protocol):
def __call__(
s, self: HasCacheKey, visitor: _CacheKeyTraversal
- ) -> _CacheKeyTraversalDispatchTypeReturn:
- ...
+ ) -> _CacheKeyTraversalDispatchTypeReturn: ...
class CacheConst(enum.Enum):
result += (
attrname,
obj["compile_state_plugin"],
- obj["plugin_subject"]._gen_cache_key(
- anon_map, bindparams
- )
- if obj["plugin_subject"]
- else None,
+ (
+ obj["plugin_subject"]._gen_cache_key(
+ anon_map, bindparams
+ )
+ if obj["plugin_subject"]
+ else None
+ ),
)
elif meth is InternalTraversal.dp_annotations_key:
# obj is here is the _annotations dict. Table uses
InternalTraversal.dp_memoized_select_entities
)
- visit_string = (
- visit_boolean
- ) = visit_operator = visit_plain_obj = CACHE_IN_PLACE
+ visit_string = visit_boolean = visit_operator = visit_plain_obj = (
+ CACHE_IN_PLACE
+ )
visit_statement_hint_list = CACHE_IN_PLACE
visit_type = STATIC_CACHE_KEY
visit_anon_name = ANON_NAME
) -> Tuple[Any, ...]:
return (
attrname,
- obj._gen_cache_key(anon_map, bindparams)
- if isinstance(obj, HasCacheKey)
- else obj,
+ (
+ obj._gen_cache_key(anon_map, bindparams)
+ if isinstance(obj, HasCacheKey)
+ else obj
+ ),
)
def visit_multi_list(
return (
attrname,
tuple(
- elem._gen_cache_key(anon_map, bindparams)
- if isinstance(elem, HasCacheKey)
- else elem
+ (
+ elem._gen_cache_key(anon_map, bindparams)
+ if isinstance(elem, HasCacheKey)
+ else elem
+ )
for elem in obj
),
)
return tuple(
(
target._gen_cache_key(anon_map, bindparams),
- onclause._gen_cache_key(anon_map, bindparams)
- if onclause is not None
- else None,
- from_._gen_cache_key(anon_map, bindparams)
- if from_ is not None
- else None,
+ (
+ onclause._gen_cache_key(anon_map, bindparams)
+ if onclause is not None
+ else None
+ ),
+ (
+ from_._gen_cache_key(anon_map, bindparams)
+ if from_ is not None
+ else None
+ ),
tuple([(key, flags[key]) for key in sorted(flags)]),
)
for (target, onclause, from_, flags) in obj
tuple(
(
key,
- value._gen_cache_key(anon_map, bindparams)
- if isinstance(value, HasCacheKey)
- else value,
+ (
+ value._gen_cache_key(anon_map, bindparams)
+ if isinstance(value, HasCacheKey)
+ else value
+ ),
)
for key, value in [(key, obj[key]) for key in sorted(obj)]
),
attrname,
tuple(
(
- key._gen_cache_key(anon_map, bindparams)
- if hasattr(key, "__clause_element__")
- else key,
+ (
+ key._gen_cache_key(anon_map, bindparams)
+ if hasattr(key, "__clause_element__")
+ else key
+ ),
value._gen_cache_key(anon_map, bindparams),
)
for key, value in obj
attrname,
tuple(
(
- k._gen_cache_key(anon_map, bindparams)
- if hasattr(k, "__clause_element__")
- else k,
+ (
+ k._gen_cache_key(anon_map, bindparams)
+ if hasattr(k, "__clause_element__")
+ else k
+ ),
obj[k]._gen_cache_key(anon_map, bindparams),
)
for k in obj
role: Type[roles.TruncatedLabelRole],
element: Any,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
@overload
*,
as_key: Literal[True] = ...,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
@overload
role: Type[roles.LiteralValueRole],
element: Any,
**kw: Any,
-) -> BindParameter[Any]:
- ...
+) -> BindParameter[Any]: ...
@overload
role: Type[roles.DDLReferredColumnRole],
element: Any,
**kw: Any,
-) -> Column[Any]:
- ...
+) -> Column[Any]: ...
@overload
role: Type[roles.DDLConstraintColumnRole],
element: Any,
**kw: Any,
-) -> Union[Column[Any], str]:
- ...
+) -> Union[Column[Any], str]: ...
@overload
role: Type[roles.StatementOptionRole],
element: Any,
**kw: Any,
-) -> DQLDMLClauseElement:
- ...
+) -> DQLDMLClauseElement: ...
@overload
role: Type[roles.LabeledColumnExprRole[Any]],
element: _ColumnExpressionArgument[_T],
**kw: Any,
-) -> NamedColumn[_T]:
- ...
+) -> NamedColumn[_T]: ...
@overload
],
element: _ColumnExpressionArgument[_T],
**kw: Any,
-) -> ColumnElement[_T]:
- ...
+) -> ColumnElement[_T]: ...
@overload
],
element: Any,
**kw: Any,
-) -> ColumnElement[Any]:
- ...
+) -> ColumnElement[Any]: ...
@overload
role: Type[roles.DMLTableRole],
element: _DMLTableArgument,
**kw: Any,
-) -> _DMLTableElement:
- ...
+) -> _DMLTableElement: ...
@overload
role: Type[roles.HasCTERole],
element: HasCTE,
**kw: Any,
-) -> HasCTE:
- ...
+) -> HasCTE: ...
@overload
role: Type[roles.SelectStatementRole],
element: SelectBase,
**kw: Any,
-) -> SelectBase:
- ...
+) -> SelectBase: ...
@overload
role: Type[roles.FromClauseRole],
element: _FromClauseArgument,
**kw: Any,
-) -> FromClause:
- ...
+) -> FromClause: ...
@overload
*,
explicit_subquery: Literal[True] = ...,
**kw: Any,
-) -> Subquery:
- ...
+) -> Subquery: ...
@overload
role: Type[roles.ColumnsClauseRole],
element: _ColumnsClauseArgument[Any],
**kw: Any,
-) -> _ColumnsClauseElement:
- ...
+) -> _ColumnsClauseElement: ...
@overload
role: Type[roles.JoinTargetRole],
element: _JoinTargetProtocol,
**kw: Any,
-) -> _JoinTargetProtocol:
- ...
+) -> _JoinTargetProtocol: ...
# catchall for not-yet-implemented overloads
role: Type[_SR],
element: Any,
**kw: Any,
-) -> Any:
- ...
+) -> Any: ...
def expect(
if non_literal_expressions:
return elements.ClauseList(
*[
- non_literal_expressions[o]
- if o in non_literal_expressions
- else expr._bind_param(operator, o)
+ (
+ non_literal_expressions[o]
+ if o in non_literal_expressions
+ else expr._bind_param(operator, o)
+ )
for o in element
]
)
% {
"column": util.ellipses_string(element),
"argname": "for argument %s" % (argname,) if argname else "",
- "literal_column": "literal_column"
- if guess_is_literal
- else "column",
+ "literal_column": (
+ "literal_column" if guess_is_literal else "column"
+ ),
}
)
name: str,
objects: Sequence[Any],
type_: TypeEngine[Any],
- ) -> None:
- ...
+ ) -> None: ...
# integer indexes into ResultColumnsEntry used by cursor.py.
class Compiled:
-
"""Represent a compiled SQL or DDL expression.
The ``__str__`` method of the ``Compiled`` object should produce
class _CompileLabel(
roles.BinaryElementRole[Any], elements.CompilerColumnElement
):
-
"""lightweight label object which acts as an expression.Label."""
__visit_name__ = "label"
extract_map = EXTRACT_MAP
- bindname_escape_characters: ClassVar[
- Mapping[str, str]
- ] = util.immutabledict(
- {
- "%": "P",
- "(": "A",
- ")": "Z",
- ":": "C",
- ".": "_",
- "[": "_",
- "]": "_",
- " ": "_",
- }
+ bindname_escape_characters: ClassVar[Mapping[str, str]] = (
+ util.immutabledict(
+ {
+ "%": "P",
+ "(": "A",
+ ")": "Z",
+ ":": "C",
+ ".": "_",
+ "[": "_",
+ "]": "_",
+ " ": "_",
+ }
+ )
)
"""A mapping (e.g. dict or similar) containing a lookup of
characters keyed to replacement characters which will be applied to all
for key, value in (
(
self.bind_names[bindparam],
- bindparam.type._cached_bind_processor(self.dialect)
- if not bindparam.type._is_tuple_type
- else tuple(
- elem_type._cached_bind_processor(self.dialect)
- for elem_type in cast(TupleType, bindparam.type).types
+ (
+ bindparam.type._cached_bind_processor(self.dialect)
+ if not bindparam.type._is_tuple_type
+ else tuple(
+ elem_type._cached_bind_processor(self.dialect)
+ for elem_type in cast(
+ TupleType, bindparam.type
+ ).types
+ )
),
)
for bindparam in self.bind_names
if parameter in self.literal_execute_params:
if escaped_name not in replacement_expressions:
- replacement_expressions[
- escaped_name
- ] = self.render_literal_bindparam(
- parameter,
- render_literal_value=parameters.pop(escaped_name),
+ replacement_expressions[escaped_name] = (
+ self.render_literal_bindparam(
+ parameter,
+ render_literal_value=parameters.pop(escaped_name),
+ )
)
continue
else:
return row_fn(
(
- autoinc_getter(lastrowid, parameters)
- if autoinc_getter is not None
- else lastrowid
+ (
+ autoinc_getter(lastrowid, parameters)
+ if autoinc_getter is not None
+ else lastrowid
+ )
+ if col is autoinc_col
+ else getter(parameters)
)
- if col is autoinc_col
- else getter(parameters)
for getter, col in getters
)
getters = cast(
"List[Tuple[Callable[[Any], Any], bool]]",
[
- (operator.itemgetter(ret[col]), True)
- if col in ret
- else (
- operator.methodcaller("get", param_key_getter(col), None),
- False,
+ (
+ (operator.itemgetter(ret[col]), True)
+ if col in ret
+ else (
+ operator.methodcaller(
+ "get", param_key_getter(col), None
+ ),
+ False,
+ )
)
for col in table.primary_key
],
resolve_dict[order_by_elem.name]
)
):
- kwargs[
- "render_label_as_label"
- ] = element.element._order_by_label_element
+ kwargs["render_label_as_label"] = (
+ element.element._order_by_label_element
+ )
return self.process(
element.element,
within_columns_clause=within_columns_clause,
)
if populate_result_map:
- self._ordered_columns = (
- self._textual_ordered_columns
- ) = taf.positional
+ self._ordered_columns = self._textual_ordered_columns = (
+ taf.positional
+ )
# enable looser result column matching when the SQL text links to
# Column objects by name only
def _format_frame_clause(self, range_, **kw):
return "%s AND %s" % (
- "UNBOUNDED PRECEDING"
- if range_[0] is elements.RANGE_UNBOUNDED
- else "CURRENT ROW"
- if range_[0] is elements.RANGE_CURRENT
- else "%s PRECEDING"
- % (self.process(elements.literal(abs(range_[0])), **kw),)
- if range_[0] < 0
- else "%s FOLLOWING"
- % (self.process(elements.literal(range_[0]), **kw),),
- "UNBOUNDED FOLLOWING"
- if range_[1] is elements.RANGE_UNBOUNDED
- else "CURRENT ROW"
- if range_[1] is elements.RANGE_CURRENT
- else "%s PRECEDING"
- % (self.process(elements.literal(abs(range_[1])), **kw),)
- if range_[1] < 0
- else "%s FOLLOWING"
- % (self.process(elements.literal(range_[1]), **kw),),
+ (
+ "UNBOUNDED PRECEDING"
+ if range_[0] is elements.RANGE_UNBOUNDED
+ else (
+ "CURRENT ROW"
+ if range_[0] is elements.RANGE_CURRENT
+ else (
+ "%s PRECEDING"
+ % (
+ self.process(
+ elements.literal(abs(range_[0])), **kw
+ ),
+ )
+ if range_[0] < 0
+ else "%s FOLLOWING"
+ % (self.process(elements.literal(range_[0]), **kw),)
+ )
+ )
+ ),
+ (
+ "UNBOUNDED FOLLOWING"
+ if range_[1] is elements.RANGE_UNBOUNDED
+ else (
+ "CURRENT ROW"
+ if range_[1] is elements.RANGE_CURRENT
+ else (
+ "%s PRECEDING"
+ % (
+ self.process(
+ elements.literal(abs(range_[1])), **kw
+ ),
+ )
+ if range_[1] < 0
+ else "%s FOLLOWING"
+ % (self.process(elements.literal(range_[1]), **kw),)
+ )
+ )
+ ),
)
def visit_over(self, over, **kwargs):
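``_format_frame_clause`` is the heaviest conditional rewrite in the diff: what
was a flat ``x if a else y if b else ...`` chain gains one parenthesized level
per alternative, so the nesting depth now mirrors the evaluation order. A
reduced sketch of one bound, with names shortened for readability but the
logic as in the hunk above::

    # Black 23.x: flat chain, precedence left implicit
    "UNBOUNDED PRECEDING"
    if lower is RANGE_UNBOUNDED
    else "CURRENT ROW"
    if lower is RANGE_CURRENT
    else "%s PRECEDING" % value
    if lower < 0
    else "%s FOLLOWING" % value

    # Black 24.x: each else-branch parenthesized
    (
        "UNBOUNDED PRECEDING"
        if lower is RANGE_UNBOUNDED
        else (
            "CURRENT ROW"
            if lower is RANGE_CURRENT
            else (
                "%s PRECEDING" % value
                if lower < 0
                else "%s FOLLOWING" % value
            )
        )
    )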
+ self.process(
elements.Cast(
binary.right,
- binary.right.type
- if binary.right.type._type_affinity is sqltypes.Numeric
- else sqltypes.Numeric(),
+ (
+ binary.right.type
+ if binary.right.type._type_affinity
+ is sqltypes.Numeric
+ else sqltypes.Numeric()
+ ),
),
**kw,
)
"%s%s"
% (
self.preparer.quote(col.name),
- " %s"
- % self.dialect.type_compiler_instance.process(
- col.type, **kwargs
- )
- if alias._render_derived_w_types
- else "",
+ (
+ " %s"
+ % self.dialect.type_compiler_instance.process(
+ col.type, **kwargs
+ )
+ if alias._render_derived_w_types
+ else ""
+ ),
)
for col in alias.c
)
compile_state = select_stmt._compile_state_factory(
select_stmt, self, **kwargs
)
- kwargs[
- "ambiguous_table_name_map"
- ] = compile_state._ambiguous_table_name_map
+ kwargs["ambiguous_table_name_map"] = (
+ compile_state._ambiguous_table_name_map
+ )
select_stmt = compile_state.statement
insert_stmt._post_values_clause is not None
),
sentinel_columns=add_sentinel_cols,
- num_sentinel_columns=len(add_sentinel_cols)
- if add_sentinel_cols
- else 0,
+ num_sentinel_columns=(
+ len(add_sentinel_cols) if add_sentinel_cols else 0
+ ),
implicit_sentinel=implicit_sentinel,
)
elif compile_state._has_multi_parameters:
insert_stmt._post_values_clause is not None
),
sentinel_columns=add_sentinel_cols,
- num_sentinel_columns=len(add_sentinel_cols)
- if add_sentinel_cols
- else 0,
+ num_sentinel_columns=(
+ len(add_sentinel_cols) if add_sentinel_cols else 0
+ ),
sentinel_param_keys=named_sentinel_params,
implicit_sentinel=implicit_sentinel,
embed_values_counter=embed_sentinel_value,
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
render_schema_translate: bool = ...,
compile_kwargs: Mapping[str, Any] = ...,
- ):
- ...
+ ): ...
@util.memoized_property
def sql_compiler(self):
class _SchemaForObjectCallable(Protocol):
- def __call__(self, obj: Any) -> str:
- ...
+ def __call__(self, obj: Any) -> str: ...
class _BindNameForColProtocol(Protocol):
- def __call__(self, col: ColumnClause[Any]) -> str:
- ...
+ def __call__(self, col: ColumnClause[Any]) -> str: ...
class IdentifierPreparer:
-
"""Handle quoting and case-folding of identifiers based on options."""
reserved_words = RESERVED_WORDS
required: bool = False,
name: Optional[str] = None,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
@overload
col: ColumnElement[Any],
value: Any,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
def _create_bind_param(
c,
value,
required=value is REQUIRED,
- name=_col_bind_name(c)
- if not _compile_state_isinsert(compile_state)
- or not compile_state._has_multi_parameters
- else "%s_m0" % _col_bind_name(c),
+ name=(
+ _col_bind_name(c)
+ if not _compile_state_isinsert(compile_state)
+ or not compile_state._has_multi_parameters
+ else "%s_m0" % _col_bind_name(c)
+ ),
accumulate_bind_names=accumulated_bind_names,
**kw,
)
compiler,
c,
value,
- name=_col_bind_name(c)
- if not _compile_state_isinsert(compile_state)
- or not compile_state._has_multi_parameters
- else "%s_m0" % _col_bind_name(c),
+ name=(
+ _col_bind_name(c)
+ if not _compile_state_isinsert(compile_state)
+ or not compile_state._has_multi_parameters
+ else "%s_m0" % _col_bind_name(c)
+ ),
accumulate_bind_names=accumulated_bind_names,
**kw,
)
c: ColumnElement[Any],
process: Literal[True] = ...,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
@overload
c: ColumnElement[Any],
process: Literal[False],
**kw: Any,
-) -> elements.BindParameter[Any]:
- ...
+) -> elements.BindParameter[Any]: ...
def _create_insert_prefetch_bind_param(
c: ColumnElement[Any],
process: Literal[True] = ...,
**kw: Any,
-) -> str:
- ...
+) -> str: ...
@overload
c: ColumnElement[Any],
process: Literal[False],
**kw: Any,
-) -> elements.BindParameter[Any]:
- ...
+) -> elements.BindParameter[Any]: ...
def _create_update_prefetch_bind_param(
dialect: Dialect,
compiler: Optional[DDLCompiler] = ...,
checkfirst: bool,
- ) -> bool:
- ...
+ ) -> bool: ...
class DDLIf(typing.NamedTuple):
reversed(
sort_tables_and_constraints(
unsorted_tables,
- filter_fn=lambda constraint: False
- if not self.dialect.supports_alter
- or constraint.name is None
- else None,
+ filter_fn=lambda constraint: (
+ False
+ if not self.dialect.supports_alter
+ or constraint.name is None
+ else None
+ ),
)
)
)
operator=operators.match_op,
),
result_type=type_api.MATCHTYPE,
- negate_op=operators.not_match_op
- if op is operators.match_op
- else operators.match_op,
+ negate_op=(
+ operators.not_match_op
+ if op is operators.match_op
+ else operators.match_op
+ ),
**kw,
)
group=False,
),
op,
- negate=operators.not_between_op
- if op is operators.between_op
- else operators.between_op,
+ negate=(
+ operators.not_between_op
+ if op is operators.between_op
+ else operators.between_op
+ ),
modifiers=kw,
)
from .selectable import Select
from .selectable import Selectable
- def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]:
- ...
+ def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: ...
- def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]:
- ...
+ def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: ...
- def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]:
- ...
+ def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: ...
else:
isupdate = operator.attrgetter("isupdate")
@classmethod
def get_entity_description(cls, statement: UpdateBase) -> Dict[str, Any]:
return {
- "name": statement.table.name
- if is_named_from_clause(statement.table)
- else None,
+ "name": (
+ statement.table.name
+ if is_named_from_clause(statement.table)
+ else None
+ ),
"table": statement.table,
}
if TYPE_CHECKING:
@classmethod
- def get_plugin_class(cls, statement: Executable) -> Type[DMLState]:
- ...
+ def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: ...
@classmethod
def _get_multi_crud_kv_pairs(
return [
(
coercions.expect(roles.DMLColumnRole, k),
- v
- if not needs_to_be_cacheable
- else coercions.expect(
- roles.ExpressionElementRole,
- v,
- type_=NullType(),
- is_crud=True,
+ (
+ v
+ if not needs_to_be_cacheable
+ else coercions.expect(
+ roles.ExpressionElementRole,
+ v,
+ type_=NullType(),
+ is_crud=True,
+ )
),
)
for k, v in kv_iterator
def _process_multi_values(self, statement: ValuesBase) -> None:
for parameters in statement._multi_values:
multi_parameters: List[MutableMapping[_DMLColumnElement, Any]] = [
- {
- c.key: value
- for c, value in zip(statement.table.c, parameter_set)
- }
- if isinstance(parameter_set, collections_abc.Sequence)
- else parameter_set
+ (
+ {
+ c.key: value
+ for c, value in zip(statement.table.c, parameter_set)
+ }
+ if isinstance(parameter_set, collections_abc.Sequence)
+ else parameter_set
+ )
for parameter_set in parameters
]
__visit_name__ = "update_base"
- _hints: util.immutabledict[
- Tuple[_DMLTableElement, str], str
- ] = util.EMPTY_DICT
+ _hints: util.immutabledict[Tuple[_DMLTableElement, str], str] = (
+ util.EMPTY_DICT
+ )
named_with_column = False
_label_style: SelectLabelStyle = (
table: _DMLTableElement
_return_defaults = False
- _return_defaults_columns: Optional[
- Tuple[_ColumnsClauseElement, ...]
- ] = None
+ _return_defaults_columns: Optional[Tuple[_ColumnsClauseElement, ...]] = (
+ None
+ )
_supplemental_returning: Optional[Tuple[_ColumnsClauseElement, ...]] = None
_returning: Tuple[_ColumnsClauseElement, ...] = ()
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0]:
- ...
+ ) -> ReturningInsert[_T0]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1]:
- ...
+ ) -> ReturningInsert[_T0, _T1]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1, _T2]:
- ...
+ ) -> ReturningInsert[_T0, _T1, _T2]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1, _T2, _T3]:
- ...
+ ) -> ReturningInsert[_T0, _T1, _T2, _T3]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def returning(
/,
*,
sort_by_parameter_order: bool = False,
- ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> ReturningInsert[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def returning(
sort_by_parameter_order: bool = False,
) -> ReturningInsert[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.returning
*cols: _ColumnsClauseArgument[Any],
sort_by_parameter_order: bool = False,
**__kw: Any,
- ) -> ReturningInsert[Any]:
- ...
+ ) -> ReturningInsert[Any]: ...
def returning(
self,
*cols: _ColumnsClauseArgument[Any],
sort_by_parameter_order: bool = False,
**__kw: Any,
- ) -> ReturningInsert[Any]:
- ...
+ ) -> ReturningInsert[Any]: ...
class ReturningInsert(Insert, TypedReturnsRows[Unpack[_Ts]]):
# statically generated** by tools/generate_tuple_map_overloads.py
@overload
- def returning(self, __ent0: _TCCA[_T0], /) -> ReturningUpdate[_T0]:
- ...
+ def returning(self, __ent0: _TCCA[_T0], /) -> ReturningUpdate[_T0]: ...
@overload
def returning(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /
- ) -> ReturningUpdate[_T0, _T1]:
- ...
+ ) -> ReturningUpdate[_T0, _T1]: ...
@overload
def returning(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
- ) -> ReturningUpdate[_T0, _T1, _T2]:
- ...
+ ) -> ReturningUpdate[_T0, _T1, _T2]: ...
@overload
def returning(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
- ) -> ReturningUpdate[_T0, _T1, _T2, _T3]:
- ...
+ ) -> ReturningUpdate[_T0, _T1, _T2, _T3]: ...
@overload
def returning(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
- ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def returning(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
- ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def returning(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
- ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> ReturningUpdate[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def returning(
*entities: _ColumnsClauseArgument[Any],
) -> ReturningUpdate[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.returning
@overload
def returning(
self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
- ) -> ReturningUpdate[Any]:
- ...
+ ) -> ReturningUpdate[Any]: ...
def returning(
self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
- ) -> ReturningUpdate[Any]:
- ...
+ ) -> ReturningUpdate[Any]: ...
class ReturningUpdate(Update, TypedReturnsRows[Unpack[_Ts]]):
# statically generated** by tools/generate_tuple_map_overloads.py
@overload
- def returning(self, __ent0: _TCCA[_T0], /) -> ReturningDelete[_T0]:
- ...
+ def returning(self, __ent0: _TCCA[_T0], /) -> ReturningDelete[_T0]: ...
@overload
def returning(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], /
- ) -> ReturningDelete[_T0, _T1]:
- ...
+ ) -> ReturningDelete[_T0, _T1]: ...
@overload
def returning(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2], /
- ) -> ReturningDelete[_T0, _T1, _T2]:
- ...
+ ) -> ReturningDelete[_T0, _T1, _T2]: ...
@overload
def returning(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
/,
- ) -> ReturningDelete[_T0, _T1, _T2, _T3]:
- ...
+ ) -> ReturningDelete[_T0, _T1, _T2, _T3]: ...
@overload
def returning(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
/,
- ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def returning(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
/,
- ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def returning(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
/,
- ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> ReturningDelete[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def returning(
*entities: _ColumnsClauseArgument[Any],
) -> ReturningDelete[
_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, Unpack[TupleAny]
- ]:
- ...
+ ]: ...
# END OVERLOADED FUNCTIONS self.returning
@overload
def returning(
self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
- ) -> ReturningDelete[Unpack[TupleAny]]:
- ...
+ ) -> ReturningDelete[Unpack[TupleAny]]: ...
def returning(
self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
- ) -> ReturningDelete[Unpack[TupleAny]]:
- ...
+ ) -> ReturningDelete[Unpack[TupleAny]]: ...
class ReturningDelete(Delete, TypedReturnsRows[Unpack[_Ts]]):
value: Any,
type_: _TypeEngineArgument[_T],
literal_execute: bool = False,
-) -> BindParameter[_T]:
- ...
+) -> BindParameter[_T]: ...
@overload
value: _T,
type_: None = None,
literal_execute: bool = False,
-) -> BindParameter[_T]:
- ...
+) -> BindParameter[_T]: ...
@overload
value: Any,
type_: Optional[_TypeEngineArgument[Any]] = None,
literal_execute: bool = False,
-) -> BindParameter[Any]:
- ...
+) -> BindParameter[Any]: ...
def literal(
def get_children(
self, *, omit_attrs: typing_Tuple[str, ...] = ..., **kw: Any
- ) -> Iterable[ClauseElement]:
- ...
+ ) -> Iterable[ClauseElement]: ...
@util.ro_non_memoized_property
def _from_objects(self) -> List[FromClause]:
bind: Optional[Union[Engine, Connection]] = None,
dialect: Optional[Dialect] = None,
**kw: Any,
- ) -> SQLCompiler:
- ...
+ ) -> SQLCompiler: ...
class CompilerColumnElement(
if typing.TYPE_CHECKING:
@util.non_memoized_property
- def _propagate_attrs(self) -> _PropagateAttrsType:
- ...
+ def _propagate_attrs(self) -> _PropagateAttrsType: ...
def operate(
self, op: OperatorType, *other: Any, **kwargs: Any
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
def reverse_operate(
self, op: OperatorType, other: Any, **kwargs: Any
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
@overload
def op(
*,
return_type: _TypeEngineArgument[_OPT],
python_impl: Optional[Callable[..., Any]] = None,
- ) -> Callable[[Any], BinaryExpression[_OPT]]:
- ...
+ ) -> Callable[[Any], BinaryExpression[_OPT]]: ...
@overload
def op(
is_comparison: bool = ...,
return_type: Optional[_TypeEngineArgument[Any]] = ...,
python_impl: Optional[Callable[..., Any]] = ...,
- ) -> Callable[[Any], BinaryExpression[Any]]:
- ...
+ ) -> Callable[[Any], BinaryExpression[Any]]: ...
def op(
self,
is_comparison: bool = False,
return_type: Optional[_TypeEngineArgument[Any]] = None,
python_impl: Optional[Callable[..., Any]] = None,
- ) -> Callable[[Any], BinaryExpression[Any]]:
- ...
+ ) -> Callable[[Any], BinaryExpression[Any]]: ...
def bool_op(
self,
opstring: str,
precedence: int = 0,
python_impl: Optional[Callable[..., Any]] = None,
- ) -> Callable[[Any], BinaryExpression[bool]]:
- ...
+ ) -> Callable[[Any], BinaryExpression[bool]]: ...
- def __and__(self, other: Any) -> BooleanClauseList:
- ...
+ def __and__(self, other: Any) -> BooleanClauseList: ...
- def __or__(self, other: Any) -> BooleanClauseList:
- ...
+ def __or__(self, other: Any) -> BooleanClauseList: ...
- def __invert__(self) -> ColumnElement[_T_co]:
- ...
+ def __invert__(self) -> ColumnElement[_T_co]: ...
- def __lt__(self, other: Any) -> ColumnElement[bool]:
- ...
+ def __lt__(self, other: Any) -> ColumnElement[bool]: ...
- def __le__(self, other: Any) -> ColumnElement[bool]:
- ...
+ def __le__(self, other: Any) -> ColumnElement[bool]: ...
# declare also that this class has a hash method otherwise
# it may be assumed to be None by type checkers since the
# object defines __eq__ and python sets it to None in that case:
# https://docs.python.org/3/reference/datamodel.html#object.__hash__
- def __hash__(self) -> int:
- ...
+ def __hash__(self) -> int: ...
def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
...
def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
...
- def is_distinct_from(self, other: Any) -> ColumnElement[bool]:
- ...
+ def is_distinct_from(self, other: Any) -> ColumnElement[bool]: ...
- def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]:
- ...
+ def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: ...
- def __gt__(self, other: Any) -> ColumnElement[bool]:
- ...
+ def __gt__(self, other: Any) -> ColumnElement[bool]: ...
- def __ge__(self, other: Any) -> ColumnElement[bool]:
- ...
+ def __ge__(self, other: Any) -> ColumnElement[bool]: ...
- def __neg__(self) -> UnaryExpression[_T_co]:
- ...
+ def __neg__(self) -> UnaryExpression[_T_co]: ...
- def __contains__(self, other: Any) -> ColumnElement[bool]:
- ...
+ def __contains__(self, other: Any) -> ColumnElement[bool]: ...
- def __getitem__(self, index: Any) -> ColumnElement[Any]:
- ...
+ def __getitem__(self, index: Any) -> ColumnElement[Any]: ...
@overload
- def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]:
- ...
+ def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...
@overload
- def __lshift__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __lshift__(self, other: Any) -> ColumnElement[Any]: ...
- def __lshift__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __lshift__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]:
- ...
+ def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...
@overload
- def __rshift__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rshift__(self, other: Any) -> ColumnElement[Any]: ...
- def __rshift__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rshift__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def concat(self: _SQO[str], other: Any) -> ColumnElement[str]:
- ...
+ def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: ...
@overload
- def concat(self, other: Any) -> ColumnElement[Any]:
- ...
+ def concat(self, other: Any) -> ColumnElement[Any]: ...
- def concat(self, other: Any) -> ColumnElement[Any]:
- ...
+ def concat(self, other: Any) -> ColumnElement[Any]: ...
def like(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def ilike(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
- def bitwise_xor(self, other: Any) -> BinaryExpression[Any]:
- ...
+ def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: ...
- def bitwise_or(self, other: Any) -> BinaryExpression[Any]:
- ...
+ def bitwise_or(self, other: Any) -> BinaryExpression[Any]: ...
- def bitwise_and(self, other: Any) -> BinaryExpression[Any]:
- ...
+ def bitwise_and(self, other: Any) -> BinaryExpression[Any]: ...
- def bitwise_not(self) -> UnaryExpression[_T_co]:
- ...
+ def bitwise_not(self) -> UnaryExpression[_T_co]: ...
- def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]:
- ...
+ def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: ...
- def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]:
- ...
+ def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: ...
def in_(
self,
other: Union[
Iterable[Any], BindParameter[Any], roles.InElementRole
],
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def not_in(
self,
other: Union[
Iterable[Any], BindParameter[Any], roles.InElementRole
],
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def notin_(
self,
other: Union[
Iterable[Any], BindParameter[Any], roles.InElementRole
],
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def not_like(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def notlike(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def not_ilike(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
def notilike(
self, other: Any, escape: Optional[str] = None
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
- def is_(self, other: Any) -> BinaryExpression[bool]:
- ...
+ def is_(self, other: Any) -> BinaryExpression[bool]: ...
- def is_not(self, other: Any) -> BinaryExpression[bool]:
- ...
+ def is_not(self, other: Any) -> BinaryExpression[bool]: ...
- def isnot(self, other: Any) -> BinaryExpression[bool]:
- ...
+ def isnot(self, other: Any) -> BinaryExpression[bool]: ...
def startswith(
self,
other: Any,
escape: Optional[str] = None,
autoescape: bool = False,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
def istartswith(
self,
other: Any,
escape: Optional[str] = None,
autoescape: bool = False,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
def endswith(
self,
other: Any,
escape: Optional[str] = None,
autoescape: bool = False,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
def iendswith(
self,
other: Any,
escape: Optional[str] = None,
autoescape: bool = False,
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
- def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]:
- ...
+ def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...
- def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]:
- ...
+ def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...
- def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]:
- ...
+ def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: ...
def regexp_match(
self, pattern: Any, flags: Optional[str] = None
- ) -> ColumnElement[bool]:
- ...
+ ) -> ColumnElement[bool]: ...
def regexp_replace(
self, pattern: Any, replacement: Any, flags: Optional[str] = None
- ) -> ColumnElement[str]:
- ...
+ ) -> ColumnElement[str]: ...
- def desc(self) -> UnaryExpression[_T_co]:
- ...
+ def desc(self) -> UnaryExpression[_T_co]: ...
- def asc(self) -> UnaryExpression[_T_co]:
- ...
+ def asc(self) -> UnaryExpression[_T_co]: ...
- def nulls_first(self) -> UnaryExpression[_T_co]:
- ...
+ def nulls_first(self) -> UnaryExpression[_T_co]: ...
- def nullsfirst(self) -> UnaryExpression[_T_co]:
- ...
+ def nullsfirst(self) -> UnaryExpression[_T_co]: ...
- def nulls_last(self) -> UnaryExpression[_T_co]:
- ...
+ def nulls_last(self) -> UnaryExpression[_T_co]: ...
- def nullslast(self) -> UnaryExpression[_T_co]:
- ...
+ def nullslast(self) -> UnaryExpression[_T_co]: ...
- def collate(self, collation: str) -> CollationClause:
- ...
+ def collate(self, collation: str) -> CollationClause: ...
def between(
self, cleft: Any, cright: Any, symmetric: bool = False
- ) -> BinaryExpression[bool]:
- ...
+ ) -> BinaryExpression[bool]: ...
- def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]:
- ...
+ def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: ...
- def any_(self) -> CollectionAggregate[Any]:
- ...
+ def any_(self) -> CollectionAggregate[Any]: ...
- def all_(self) -> CollectionAggregate[Any]:
- ...
+ def all_(self) -> CollectionAggregate[Any]: ...
# numeric overloads. These need more tweaking
# in particular they all need to have a variant for Optional[_T]
def __add__(
self: _SQO[_NMT],
other: Any,
- ) -> ColumnElement[_NMT]:
- ...
+ ) -> ColumnElement[_NMT]: ...
@overload
def __add__(
self: _SQO[str],
other: Any,
- ) -> ColumnElement[str]:
- ...
+ ) -> ColumnElement[str]: ...
- def __add__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __add__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]:
- ...
+ def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
@overload
- def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]:
- ...
+ def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: ...
- def __radd__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __radd__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __sub__(
self: _SQO[_NMT],
other: Any,
- ) -> ColumnElement[_NMT]:
- ...
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __sub__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __sub__(self, other: Any) -> ColumnElement[Any]: ...
- def __sub__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __sub__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __rsub__(
self: _SQO[_NMT],
other: Any,
- ) -> ColumnElement[_NMT]:
- ...
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __rsub__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rsub__(self, other: Any) -> ColumnElement[Any]: ...
- def __rsub__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rsub__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __mul__(
self: _SQO[_NMT],
other: Any,
- ) -> ColumnElement[_NMT]:
- ...
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __mul__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __mul__(self, other: Any) -> ColumnElement[Any]: ...
- def __mul__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __mul__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __rmul__(
self: _SQO[_NMT],
other: Any,
- ) -> ColumnElement[_NMT]:
- ...
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __rmul__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rmul__(self, other: Any) -> ColumnElement[Any]: ...
- def __rmul__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rmul__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]:
- ...
+ def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
@overload
- def __mod__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __mod__(self, other: Any) -> ColumnElement[Any]: ...
- def __mod__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __mod__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]:
- ...
+ def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
@overload
- def __rmod__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rmod__(self, other: Any) -> ColumnElement[Any]: ...
- def __rmod__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rmod__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __truediv__(
self: _SQO[int], other: Any
- ) -> ColumnElement[_NUMERIC]:
- ...
+ ) -> ColumnElement[_NUMERIC]: ...
@overload
- def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]:
- ...
+ def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: ...
@overload
- def __truediv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __truediv__(self, other: Any) -> ColumnElement[Any]: ...
- def __truediv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __truediv__(self, other: Any) -> ColumnElement[Any]: ...
@overload
def __rtruediv__(
self: _SQO[_NMT], other: Any
- ) -> ColumnElement[_NUMERIC]:
- ...
+ ) -> ColumnElement[_NUMERIC]: ...
@overload
- def __rtruediv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ...
- def __rtruediv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __floordiv__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]:
- ...
+ def __floordiv__(
+ self: _SQO[_NMT], other: Any
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __floordiv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
- def __floordiv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
@overload
- def __rfloordiv__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]:
- ...
+ def __rfloordiv__(
+ self: _SQO[_NMT], other: Any
+ ) -> ColumnElement[_NMT]: ...
@overload
- def __rfloordiv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
- def __rfloordiv__(self, other: Any) -> ColumnElement[Any]:
- ...
+ def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
class SQLColumnExpression(
@overload
def self_group(
self: ColumnElement[_T], against: Optional[OperatorType] = None
- ) -> ColumnElement[_T]:
- ...
+ ) -> ColumnElement[_T]: ...
@overload
def self_group(
self: ColumnElement[Any], against: Optional[OperatorType] = None
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
def self_group(
self, against: Optional[OperatorType] = None
return self
@overload
- def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]:
- ...
+ def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]: ...
@overload
- def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]:
- ...
+ def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]: ...
def _negate(self) -> ColumnElement[Any]:
if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
assert key is not None
co: ColumnClause[_T] = ColumnClause(
- coercions.expect(roles.TruncatedLabelRole, name)
- if name_is_truncatable
- else name,
+ (
+ coercions.expect(roles.TruncatedLabelRole, name)
+ if name_is_truncatable
+ else name
+ ),
type_=getattr(self, "type", None),
_selectable=selectable,
)
if unique:
self.key = _anonymous_label.safe_construct(
id(self),
- key
- if key is not None and not isinstance(key, _anonymous_label)
- else "param",
+ (
+ key
+ if key is not None
+ and not isinstance(key, _anonymous_label)
+ else "param"
+ ),
sanitize_key=True,
)
self._key_is_anon = True
check_value = value[0]
else:
check_value = value
- cast(
- "BindParameter[TupleAny]", self
- ).type = type_._resolve_values_to_types(check_value)
+ cast("BindParameter[TupleAny]", self).type = (
+ type_._resolve_values_to_types(check_value)
+ )
else:
cast("BindParameter[TupleAny]", self).type = type_
else:
]
positional_input_cols = [
- ColumnClause(col.key, types.pop(col.key))
- if col.key in types
- else col
+ (
+ ColumnClause(col.key, types.pop(col.key))
+ if col.key in types
+ else col
+ )
for col in input_cols
]
keyed_input_cols: List[NamedColumn[Any]] = [
# which will link elements against the operator.
flattened_clauses = itertools.chain.from_iterable(
- (c for c in to_flat._flattened_operator_clauses)
- if getattr(to_flat, "operator", None) is operator
- else (to_flat,)
+ (
+ (c for c in to_flat._flattened_operator_clauses)
+ if getattr(to_flat, "operator", None) is operator
+ else (to_flat,)
+ )
for to_flat in convert_clauses
)
def __invert__(
self: BinaryExpression[_T],
- ) -> BinaryExpression[_T]:
- ...
+ ) -> BinaryExpression[_T]: ...
@util.ro_non_memoized_property
def _from_objects(self) -> List[FromClause]:
**kw: Any,
) -> typing_Tuple[str, ColumnClause[_T]]:
c = ColumnClause(
- coercions.expect(roles.TruncatedLabelRole, name or self.name)
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(roles.TruncatedLabelRole, name or self.name)
+ if name_is_truncatable
+ else (name or self.name)
+ ),
type_=self.type,
_selectable=selectable,
is_literal=False,
)
)
c = self._constructor(
- coercions.expect(roles.TruncatedLabelRole, name or self.name)
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(roles.TruncatedLabelRole, name or self.name)
+ if name_is_truncatable
+ else (name or self.name)
+ ),
type_=self.type,
_selectable=selectable,
is_literal=is_literal,
@overload
@classmethod
- def construct(cls, value: str, quote: Optional[bool]) -> quoted_name:
- ...
+ def construct(cls, value: str, quote: Optional[bool]) -> quoted_name: ...
@overload
@classmethod
- def construct(cls, value: None, quote: Optional[bool]) -> None:
- ...
+ def construct(cls, value: None, quote: Optional[bool]) -> None: ...
@classmethod
def construct(
_T = TypeVar("_T", bound=Any)
_S = TypeVar("_S", bound=Any)
-_registry: util.defaultdict[
- str, Dict[str, Type[Function[Any]]]
-] = util.defaultdict(dict)
+_registry: util.defaultdict[str, Dict[str, Type[Function[Any]]]] = (
+ util.defaultdict(dict)
+)
def register_function(
return WithinGroup(self, *order_by)
@overload
- def filter(self) -> Self:
- ...
+ def filter(self) -> Self: ...
@overload
def filter(
self,
__criterion0: _ColumnExpressionArgument[bool],
*criterion: _ColumnExpressionArgument[bool],
- ) -> FunctionFilter[_T]:
- ...
+ ) -> FunctionFilter[_T]: ...
def filter(
self, *criterion: _ColumnExpressionArgument[bool]
@overload
def __call__(
self, *c: Any, type_: _TypeEngineArgument[_T], **kwargs: Any
- ) -> Function[_T]:
- ...
+ ) -> Function[_T]: ...
@overload
- def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]:
- ...
+ def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: ...
def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]:
o = self.opts.copy()
# statically generated** by tools/generate_sql_functions.py
@property
- def aggregate_strings(self) -> Type[aggregate_strings]:
- ...
+ def aggregate_strings(self) -> Type[aggregate_strings]: ...
@property
- def ansifunction(self) -> Type[AnsiFunction[Any]]:
- ...
+ def ansifunction(self) -> Type[AnsiFunction[Any]]: ...
@property
- def array_agg(self) -> Type[array_agg[Any]]:
- ...
+ def array_agg(self) -> Type[array_agg[Any]]: ...
@property
- def cast(self) -> Type[Cast[Any]]:
- ...
+ def cast(self) -> Type[Cast[Any]]: ...
@property
- def char_length(self) -> Type[char_length]:
- ...
+ def char_length(self) -> Type[char_length]: ...
# set ColumnElement[_T] as a separate overload, to appease mypy
# which seems to not want to accept _T from _ColumnExpressionArgument.
col: ColumnElement[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> coalesce[_T]:
- ...
+ ) -> coalesce[_T]: ...
@overload
def coalesce(
col: _ColumnExpressionArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> coalesce[_T]:
- ...
+ ) -> coalesce[_T]: ...
@overload
def coalesce(
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> coalesce[_T]:
- ...
+ ) -> coalesce[_T]: ...
def coalesce(
self,
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> coalesce[_T]:
- ...
+ ) -> coalesce[_T]: ...
@property
- def concat(self) -> Type[concat]:
- ...
+ def concat(self) -> Type[concat]: ...
@property
- def count(self) -> Type[count]:
- ...
+ def count(self) -> Type[count]: ...
@property
- def cube(self) -> Type[cube[Any]]:
- ...
+ def cube(self) -> Type[cube[Any]]: ...
@property
- def cume_dist(self) -> Type[cume_dist]:
- ...
+ def cume_dist(self) -> Type[cume_dist]: ...
@property
- def current_date(self) -> Type[current_date]:
- ...
+ def current_date(self) -> Type[current_date]: ...
@property
- def current_time(self) -> Type[current_time]:
- ...
+ def current_time(self) -> Type[current_time]: ...
@property
- def current_timestamp(self) -> Type[current_timestamp]:
- ...
+ def current_timestamp(self) -> Type[current_timestamp]: ...
@property
- def current_user(self) -> Type[current_user]:
- ...
+ def current_user(self) -> Type[current_user]: ...
@property
- def dense_rank(self) -> Type[dense_rank]:
- ...
+ def dense_rank(self) -> Type[dense_rank]: ...
@property
- def extract(self) -> Type[Extract]:
- ...
+ def extract(self) -> Type[Extract]: ...
@property
- def grouping_sets(self) -> Type[grouping_sets[Any]]:
- ...
+ def grouping_sets(self) -> Type[grouping_sets[Any]]: ...
@property
- def localtime(self) -> Type[localtime]:
- ...
+ def localtime(self) -> Type[localtime]: ...
@property
- def localtimestamp(self) -> Type[localtimestamp]:
- ...
+ def localtimestamp(self) -> Type[localtimestamp]: ...
# set ColumnElement[_T] as a separate overload, to appease mypy
# which seems to not want to accept _T from _ColumnExpressionArgument.
col: ColumnElement[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> max[_T]:
- ...
+ ) -> max[_T]: ...
@overload
def max( # noqa: A001
col: _ColumnExpressionArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> max[_T]:
- ...
+ ) -> max[_T]: ...
@overload
def max( # noqa: A001
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> max[_T]:
- ...
+ ) -> max[_T]: ...
def max( # noqa: A001
self,
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> max[_T]:
- ...
+ ) -> max[_T]: ...
# set ColumnElement[_T] as a separate overload, to appease mypy
# which seems to not want to accept _T from _ColumnExpressionArgument.
col: ColumnElement[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> min[_T]:
- ...
+ ) -> min[_T]: ...
@overload
def min( # noqa: A001
col: _ColumnExpressionArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> min[_T]:
- ...
+ ) -> min[_T]: ...
@overload
def min( # noqa: A001
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> min[_T]:
- ...
+ ) -> min[_T]: ...
def min( # noqa: A001
self,
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> min[_T]:
- ...
+ ) -> min[_T]: ...
@property
- def mode(self) -> Type[mode[Any]]:
- ...
+ def mode(self) -> Type[mode[Any]]: ...
@property
- def next_value(self) -> Type[next_value]:
- ...
+ def next_value(self) -> Type[next_value]: ...
@property
- def now(self) -> Type[now]:
- ...
+ def now(self) -> Type[now]: ...
@property
- def orderedsetagg(self) -> Type[OrderedSetAgg[Any]]:
- ...
+ def orderedsetagg(self) -> Type[OrderedSetAgg[Any]]: ...
@property
- def percent_rank(self) -> Type[percent_rank]:
- ...
+ def percent_rank(self) -> Type[percent_rank]: ...
@property
- def percentile_cont(self) -> Type[percentile_cont[Any]]:
- ...
+ def percentile_cont(self) -> Type[percentile_cont[Any]]: ...
@property
- def percentile_disc(self) -> Type[percentile_disc[Any]]:
- ...
+ def percentile_disc(self) -> Type[percentile_disc[Any]]: ...
@property
- def random(self) -> Type[random]:
- ...
+ def random(self) -> Type[random]: ...
@property
- def rank(self) -> Type[rank]:
- ...
+ def rank(self) -> Type[rank]: ...
@property
- def rollup(self) -> Type[rollup[Any]]:
- ...
+ def rollup(self) -> Type[rollup[Any]]: ...
@property
- def session_user(self) -> Type[session_user]:
- ...
+ def session_user(self) -> Type[session_user]: ...
# set ColumnElement[_T] as a separate overload, to appease mypy
# which seems to not want to accept _T from _ColumnExpressionArgument.
col: ColumnElement[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> sum[_T]:
- ...
+ ) -> sum[_T]: ...
@overload
def sum( # noqa: A001
col: _ColumnExpressionArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> sum[_T]:
- ...
+ ) -> sum[_T]: ...
@overload
def sum( # noqa: A001
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> sum[_T]:
- ...
+ ) -> sum[_T]: ...
def sum( # noqa: A001
self,
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ) -> sum[_T]:
- ...
+ ) -> sum[_T]: ...
@property
- def sysdate(self) -> Type[sysdate]:
- ...
+ def sysdate(self) -> Type[sysdate]: ...
@property
- def user(self) -> Type[user]:
- ...
+ def user(self) -> Type[user]: ...
# END GENERATED FUNCTION ACCESSORS
*clauses: _ColumnExpressionOrLiteralArgument[_T],
type_: None = ...,
packagenames: Optional[Tuple[str, ...]] = ...,
- ):
- ...
+ ): ...
@overload
def __init__(
*clauses: _ColumnExpressionOrLiteralArgument[Any],
type_: _TypeEngineArgument[_T] = ...,
packagenames: Optional[Tuple[str, ...]] = ...,
- ):
- ...
+ ): ...
def __init__(
self,
col: ColumnElement[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ):
- ...
+ ): ...
@overload
def __init__(
col: _ColumnExpressionArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ):
- ...
+ ): ...
@overload
def __init__(
col: _ColumnExpressionOrLiteralArgument[_T],
*args: _ColumnExpressionOrLiteralArgument[Any],
**kwargs: Any,
- ):
- ...
+ ): ...
def __init__(
self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any
"""
+
type = sqltypes.Integer()
inherit_cache = True
.. versionadded:: 1.2
"""
+
_has_args = True
inherit_cache = True
.. versionadded:: 1.2
"""
+
_has_args = True
inherit_cache = True
.. versionadded:: 1.2
"""
+
_has_args = True
inherit_cache = True
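The isolated ``+`` lines above add nothing but a blank line: Black 24 also normalizes docstring spacing, inserting an empty line between a class or module docstring and the code that follows it. Sketched on a hypothetical class::

    class Example:
        """A placeholder class, present only to illustrate the rule."""

        attr = 1  # Black 24 inserts the blank line above this statement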
while parent is not None:
assert parent.closure_cache_key is not CacheConst.NO_CACHE
- parent_closure_cache_key: Tuple[
- Any, ...
- ] = parent.closure_cache_key
+ parent_closure_cache_key: Tuple[Any, ...] = (
+ parent.closure_cache_key
+ )
cache_key = (
(parent.fn.__code__,) + parent_closure_cache_key + cache_key
role: Type[SQLRole],
opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions,
apply_propagate_attrs: Optional[ClauseElement] = None,
- ):
- ...
+ ): ...
def __add__(
self, other: _StmtLambdaElementType[Any]
"closure_trackers",
"build_py_wrappers",
)
- _fns: weakref.WeakKeyDictionary[
- CodeType, AnalyzedCode
- ] = weakref.WeakKeyDictionary()
+ _fns: weakref.WeakKeyDictionary[CodeType, AnalyzedCode] = (
+ weakref.WeakKeyDictionary()
+ )
_generation_mutex = threading.RLock()
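A second Black 24 rule shows up in the two hunks above: an annotated assignment that overflows the line limit now keeps the subscripted annotation intact and parenthesizes the value, instead of splitting the annotation itself. Roughly, with ``_fns_old``/``_fns_new`` as placeholder names::

    import weakref
    from types import CodeType

    class AnalyzedCode: ...  # stand-in for the real class

    # old wrapping: the annotation was split across lines
    _fns_old: weakref.WeakKeyDictionary[
        CodeType, AnalyzedCode
    ] = weakref.WeakKeyDictionary()

    # new wrapping: the right-hand side is parenthesized instead
    _fns_new: weakref.WeakKeyDictionary[CodeType, AnalyzedCode] = (
        weakref.WeakKeyDictionary()
    )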
# rewrite the original fn. things that look like they will
# become bound parameters are wrapped in a PyWrapper.
- self.tracker_instrumented_fn = (
- tracker_instrumented_fn
- ) = self._rewrite_code_obj(
- fn,
- [new_closure[name] for name in fn.__code__.co_freevars],
- new_globals,
+ self.tracker_instrumented_fn = tracker_instrumented_fn = (
+ self._rewrite_code_obj(
+ fn,
+ [new_closure[name] for name in fn.__code__.co_freevars],
+ new_globals,
+ )
)
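Chained assignments follow the same principle: when the statement is too long, Black 24 places the parenthesized split as far to the right as still fits, typically the right-hand side, where older output could parenthesize a target in the middle of the chain. A rough sketch with placeholder names, shown as it would wrap once over the limit::

    def build_fn(args):  # hypothetical helper
        return args

    class Holder:  # hypothetical container
        def __init__(self, args):
            # old: self.cached_fn = (
            #          cached_fn
            #      ) = build_fn(args)
            self.cached_fn = cached_fn = (
                build_fn(args)
            )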
# now invoke the function. This will give us a new SQL
right: Optional[Any] = None,
*other: Any,
**kwargs: Any,
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
@overload
def __call__(
right: Optional[Any] = None,
*other: Any,
**kwargs: Any,
- ) -> Operators:
- ...
+ ) -> Operators: ...
def __call__(
self,
right: Optional[Any] = None,
*other: Any,
**kwargs: Any,
- ) -> Operators:
- ...
+ ) -> Operators: ...
add = cast(OperatorType, _uncast_add)
right: Optional[Any] = None,
*other: Any,
**kwargs: Any,
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
@overload
def __call__(
right: Optional[Any] = None,
*other: Any,
**kwargs: Any,
- ) -> Operators:
- ...
+ ) -> Operators: ...
def __call__(
self,
def operate(
self, op: OperatorType, *other: Any, **kwargs: Any
- ) -> ColumnOperators:
- ...
+ ) -> ColumnOperators: ...
def reverse_operate(
self, op: OperatorType, other: Any, **kwargs: Any
- ) -> ColumnOperators:
- ...
+ ) -> ColumnOperators: ...
def __lt__(self, other: Any) -> ColumnOperators:
"""Implement the ``<`` operator.
# https://docs.python.org/3/reference/datamodel.html#object.__hash__
if TYPE_CHECKING:
- def __hash__(self) -> int:
- ...
+ def __hash__(self) -> int: ...
else:
__hash__ = Operators.__hash__
# deprecated 1.4; see #5435
if TYPE_CHECKING:
- def isnot_distinct_from(self, other: Any) -> ColumnOperators:
- ...
+ def isnot_distinct_from(self, other: Any) -> ColumnOperators: ...
else:
isnot_distinct_from = is_not_distinct_from
# deprecated 1.4; see #5429
if TYPE_CHECKING:
- def notin_(self, other: Any) -> ColumnOperators:
- ...
+ def notin_(self, other: Any) -> ColumnOperators: ...
else:
notin_ = not_in
def notlike(
self, other: Any, escape: Optional[str] = None
- ) -> ColumnOperators:
- ...
+ ) -> ColumnOperators: ...
else:
notlike = not_like
def notilike(
self, other: Any, escape: Optional[str] = None
- ) -> ColumnOperators:
- ...
+ ) -> ColumnOperators: ...
else:
notilike = not_ilike
# deprecated 1.4; see #5429
if TYPE_CHECKING:
- def isnot(self, other: Any) -> ColumnOperators:
- ...
+ def isnot(self, other: Any) -> ColumnOperators: ...
else:
isnot = is_not
# deprecated 1.4; see #5435
if TYPE_CHECKING:
- def nullsfirst(self) -> ColumnOperators:
- ...
+ def nullsfirst(self) -> ColumnOperators: ...
else:
nullsfirst = nulls_first
# deprecated 1.4; see #5429
if TYPE_CHECKING:
- def nullslast(self) -> ColumnOperators:
- ...
+ def nullslast(self) -> ColumnOperators: ...
else:
nullslast = nulls_last
if TYPE_CHECKING:
@_operator_fn
- def istrue(a: Any) -> Any:
- ...
+ def istrue(a: Any) -> Any: ...
else:
istrue = is_true
if TYPE_CHECKING:
@_operator_fn
- def isfalse(a: Any) -> Any:
- ...
+ def isfalse(a: Any) -> Any: ...
else:
isfalse = is_false
if TYPE_CHECKING:
@_operator_fn
- def isnot_distinct_from(a: Any, b: Any) -> Any:
- ...
+ def isnot_distinct_from(a: Any, b: Any) -> Any: ...
else:
isnot_distinct_from = is_not_distinct_from
if TYPE_CHECKING:
@_operator_fn
- def isnot(a: Any, b: Any) -> Any:
- ...
+ def isnot(a: Any, b: Any) -> Any: ...
else:
isnot = is_not
if TYPE_CHECKING:
@_operator_fn
- def notlike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
- ...
+ def notlike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ...
else:
notlike_op = not_like_op
if TYPE_CHECKING:
@_operator_fn
- def notilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
- ...
+ def notilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ...
else:
notilike_op = not_ilike_op
if TYPE_CHECKING:
@_operator_fn
- def notbetween_op(a: Any, b: Any, c: Any, symmetric: bool = False) -> Any:
- ...
+ def notbetween_op(
+ a: Any, b: Any, c: Any, symmetric: bool = False
+ ) -> Any: ...
else:
notbetween_op = not_between_op
if TYPE_CHECKING:
@_operator_fn
- def notin_op(a: Any, b: Any) -> Any:
- ...
+ def notin_op(a: Any, b: Any) -> Any: ...
else:
notin_op = not_in_op
@_operator_fn
def notstartswith_op(
a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
- ) -> Any:
- ...
+ ) -> Any: ...
else:
notstartswith_op = not_startswith_op
@_operator_fn
def notendswith_op(
a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
- ) -> Any:
- ...
+ ) -> Any: ...
else:
notendswith_op = not_endswith_op
@_operator_fn
def notcontains_op(
a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
- ) -> Any:
- ...
+ ) -> Any: ...
else:
notcontains_op = not_contains_op
if TYPE_CHECKING:
@_operator_fn
- def notmatch_op(a: Any, b: Any, **kw: Any) -> Any:
- ...
+ def notmatch_op(a: Any, b: Any, **kw: Any) -> Any: ...
else:
notmatch_op = not_match_op
if TYPE_CHECKING:
@_operator_fn
- def nullsfirst_op(a: Any) -> Any:
- ...
+ def nullsfirst_op(a: Any) -> Any: ...
else:
nullsfirst_op = nulls_first_op
if TYPE_CHECKING:
@_operator_fn
- def nullslast_op(a: Any) -> Any:
- ...
+ def nullslast_op(a: Any) -> Any: ...
else:
nullslast_op = nulls_last_op
def _anonymous_fromclause(
self, *, name: Optional[str] = None, flat: bool = False
- ) -> FromClause:
- ...
+ ) -> FromClause: ...
class ReturnsRowsRole(SQLRole):
if TYPE_CHECKING:
@util.memoized_property
- def _propagate_attrs(self) -> _PropagateAttrsType:
- ...
+ def _propagate_attrs(self) -> _PropagateAttrsType: ...
else:
_propagate_attrs = util.EMPTY_DICT
"""
-RETAIN_SCHEMA: Final[
- Literal[SchemaConst.RETAIN_SCHEMA]
-] = SchemaConst.RETAIN_SCHEMA
-BLANK_SCHEMA: Final[
- Literal[SchemaConst.BLANK_SCHEMA]
-] = SchemaConst.BLANK_SCHEMA
-NULL_UNSPECIFIED: Final[
- Literal[SchemaConst.NULL_UNSPECIFIED]
-] = SchemaConst.NULL_UNSPECIFIED
+RETAIN_SCHEMA: Final[Literal[SchemaConst.RETAIN_SCHEMA]] = (
+ SchemaConst.RETAIN_SCHEMA
+)
+BLANK_SCHEMA: Final[Literal[SchemaConst.BLANK_SCHEMA]] = (
+ SchemaConst.BLANK_SCHEMA
+)
+NULL_UNSPECIFIED: Final[Literal[SchemaConst.NULL_UNSPECIFIED]] = (
+ SchemaConst.NULL_UNSPECIFIED
+)
def _get_table_key(name: str, schema: Optional[str]) -> str:
if TYPE_CHECKING:
@util.ro_non_memoized_property
- def primary_key(self) -> PrimaryKeyConstraint:
- ...
+ def primary_key(self) -> PrimaryKeyConstraint: ...
@util.ro_non_memoized_property
- def foreign_keys(self) -> Set[ForeignKey]:
- ...
+ def foreign_keys(self) -> Set[ForeignKey]: ...
_columns: DedupeColumnCollection[Column[Any]]
if TYPE_CHECKING:
@util.ro_non_memoized_property
- def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]:
- ...
+ def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
@util.ro_non_memoized_property
def exported_columns(
self,
- ) -> ReadOnlyColumnCollection[str, Column[Any]]:
- ...
+ ) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
@util.ro_non_memoized_property
- def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]:
- ...
+ def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
def _gen_cache_key(
self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
dialect_option_key,
dialect_option_value,
) in dialect_options.items():
- column_kwargs[
- dialect_name + "_" + dialect_option_key
- ] = dialect_option_value
+ column_kwargs[dialect_name + "_" + dialect_option_key] = (
+ dialect_option_value
+ )
server_default = self.server_default
server_onupdate = self.server_onupdate
)
try:
c = self._constructor(
- coercions.expect(
- roles.TruncatedLabelRole, name if name else self.name
- )
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(
+ roles.TruncatedLabelRole, name if name else self.name
+ )
+ if name_is_truncatable
+ else (name or self.name)
+ ),
self.type,
# this may actually be ._proxy_key when the key is incoming
key=key if key else name if name else self.key,
primary_key=self.primary_key,
nullable=self.nullable,
- _proxies=list(compound_select_cols)
- if compound_select_cols
- else [self],
+ _proxies=(
+ list(compound_select_cols)
+ if compound_select_cols
+ else [self]
+ ),
*fk,
)
except TypeError as err:
return Column(
name=name,
type_=type_api.INTEGERTYPE if type_ is None else type_,
- default=default
- if default is not None
- else _InsertSentinelColumnDefault(),
+ default=(
+ default if default is not None else _InsertSentinelColumnDefault()
+ ),
_omit_from_statements=omit_from_statements,
insert_sentinel=True,
)
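The single largest class of hunks in this diff comes from conditional expressions: Black 24 wraps any multi-line ternary in its own parentheses wherever it appears as an argument or keyword value, so both branches indent as one unit. A runnable sketch with placeholder names, wrapped as it would be once the branches exceed the line limit::

    def expensive_path(x):  # placeholder
        return x * 2

    def cheap_path(x):  # placeholder
        return x

    def build(value, flag=False):  # placeholder
        return value, flag

    condition, x = True, 10
    result = build(
        (
            expensive_path(x)
            if condition
            else cheap_path(x)
        ),
        flag=True,
    )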
def _resolve_colspec_argument(
self,
- ) -> Tuple[Union[str, Column[Any]], Optional[Column[Any]],]:
+ ) -> Tuple[
+ Union[str, Column[Any]],
+ Optional[Column[Any]],
+ ]:
argument = self._colspec
return self._parse_colspec_argument(argument)
def _parse_colspec_argument(
self,
argument: _DDLColumnArgument,
- ) -> Tuple[Union[str, Column[Any]], Optional[Column[Any]],]:
+ ) -> Tuple[
+ Union[str, Column[Any]],
+ Optional[Column[Any]],
+ ]:
_colspec = coercions.expect(roles.DDLReferredColumnRole, argument)
if isinstance(_colspec, str):
return self._resolve_column()
@overload
- def _resolve_column(self, *, raiseerr: Literal[True] = ...) -> Column[Any]:
- ...
+ def _resolve_column(
+ self, *, raiseerr: Literal[True] = ...
+ ) -> Column[Any]: ...
@overload
def _resolve_column(
self, *, raiseerr: bool = ...
- ) -> Optional[Column[Any]]:
- ...
+ ) -> Optional[Column[Any]]: ...
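Collapsing ``...`` onto the ``def`` line can itself push a signature past the line limit; in that case Black re-wraps the parameters, which is why some stubs in this diff gain exploded argument lists even as their bodies disappear. A self-contained sketch (``Column`` here is a bare stand-in, not the real class)::

    from typing import Literal, Optional

    class Column: ...  # stand-in for illustration

    class ForeignKeyStub:  # hypothetical host class
        # with ": ..." appended, one line would exceed 79 characters,
        # so the parameters move onto their own line:
        def _resolve_column(
            self, *, raiseerr: Literal[True] = ...
        ) -> Optional[Column]: ...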
def _resolve_column(
self, *, raiseerr: bool = True
def default_is_sequence(
obj: Optional[DefaultGenerator],
- ) -> TypeGuard[Sequence]:
- ...
+ ) -> TypeGuard[Sequence]: ...
def default_is_clause_element(
obj: Optional[DefaultGenerator],
- ) -> TypeGuard[ColumnElementColumnDefault]:
- ...
+ ) -> TypeGuard[ColumnElementColumnDefault]: ...
def default_is_scalar(
obj: Optional[DefaultGenerator],
- ) -> TypeGuard[ScalarElementColumnDefault]:
- ...
+ ) -> TypeGuard[ScalarElementColumnDefault]: ...
else:
default_is_sequence = operator.attrgetter("is_sequence")
@overload
def __new__(
cls, arg: Callable[..., Any], for_update: bool = ...
- ) -> CallableColumnDefault:
- ...
+ ) -> CallableColumnDefault: ...
@overload
def __new__(
cls, arg: ColumnElement[Any], for_update: bool = ...
- ) -> ColumnElementColumnDefault:
- ...
+ ) -> ColumnElementColumnDefault: ...
# if I return ScalarElementColumnDefault here, which is what's actually
# returned, mypy complains that
# overloads overlap w/ incompatible return types.
@overload
- def __new__(cls, arg: object, for_update: bool = ...) -> ColumnDefault:
- ...
+ def __new__(cls, arg: object, for_update: bool = ...) -> ColumnDefault: ...
def __new__(
cls, arg: Any = None, for_update: bool = False
class _CallableColumnDefaultProtocol(Protocol):
- def __call__(self, context: ExecutionContext) -> Any:
- ...
+ def __call__(self, context: ExecutionContext) -> Any: ...
class CallableColumnDefault(ColumnDefault):
def _set_parent_with_dispatch(
self, parent: SchemaEventTarget, **kw: Any
- ) -> None:
- ...
+ ) -> None: ...
def __init__(
self,
dialect_option_key,
dialect_option_value,
) in dialect_options.items():
- constraint_kwargs[
- dialect_name + "_" + dialect_option_key
- ] = dialect_option_value
+ constraint_kwargs[dialect_name + "_" + dialect_option_key] = (
+ dialect_option_value
+ )
assert isinstance(self.parent, Table)
c = self.__class__(
[
x._get_colspec(
schema=schema,
- table_name=target_table.name
- if target_table is not None
- and x._table_key_within_construction()
- == x.parent.table.key
- else None,
+ table_name=(
+ target_table.name
+ if target_table is not None
+ and x._table_key_within_construction()
+ == x.parent.table.key
+ else None
+ ),
_is_copy=True,
)
for x in self.elements
self.info = info
self._schemas: Set[str] = set()
self._sequences: Dict[str, Sequence] = {}
- self._fk_memos: Dict[
- Tuple[str, Optional[str]], List[ForeignKey]
- ] = collections.defaultdict(list)
+ self._fk_memos: Dict[Tuple[str, Optional[str]], List[ForeignKey]] = (
+ collections.defaultdict(list)
+ )
tables: util.FacadeDict[str, Table]
"""A dictionary of :class:`_schema.Table`
class _JoinTargetProtocol(Protocol):
@util.ro_non_memoized_property
- def _from_objects(self) -> List[FromClause]:
- ...
+ def _from_objects(self) -> List[FromClause]: ...
@util.ro_non_memoized_property
- def entity_namespace(self) -> _EntityNamespace:
- ...
+ def entity_namespace(self) -> _EntityNamespace: ...
_JoinTargetElement = Union["FromClause", _JoinTargetProtocol]
class HasHints:
- _hints: util.immutabledict[
- Tuple[FromClause, str], str
- ] = util.immutabledict()
+ _hints: util.immutabledict[Tuple[FromClause, str], str] = (
+ util.immutabledict()
+ )
_statement_hints: Tuple[Tuple[str, str], ...] = ()
_has_hints_traverse_internals: _TraverseInternalsType = [
def self_group(
self, against: Optional[OperatorType] = None
- ) -> Union[FromGrouping, Self]:
- ...
+ ) -> Union[FromGrouping, Self]: ...
class NamedFromClause(FromClause):
repeated = False
if not c._render_label_in_columns_clause:
- effective_name = (
- required_label_name
- ) = fallback_label_name = None
+ effective_name = required_label_name = fallback_label_name = (
+ None
+ )
elif label_style_none:
if TYPE_CHECKING:
assert is_column_element(c)
assert is_column_element(c)
if table_qualified:
- required_label_name = (
- effective_name
- ) = fallback_label_name = c._tq_label
+ required_label_name = effective_name = (
+ fallback_label_name
+ ) = c._tq_label
else:
effective_name = fallback_label_name = c._non_anon_label
required_label_name = None
else:
fallback_label_name = c._anon_name_label
else:
- required_label_name = (
- effective_name
- ) = fallback_label_name = expr_label
+ required_label_name = effective_name = (
+ fallback_label_name
+ ) = expr_label
if effective_name is not None:
if TYPE_CHECKING:
# different column under the same name. apply
# disambiguating label
if table_qualified:
- required_label_name = (
- fallback_label_name
- ) = c._anon_tq_label
+ required_label_name = fallback_label_name = (
+ c._anon_tq_label
+ )
else:
- required_label_name = (
- fallback_label_name
- ) = c._anon_name_label
+ required_label_name = fallback_label_name = (
+ c._anon_name_label
+ )
if anon_for_dupe_key and required_label_name in names:
# here, c._anon_tq_label is definitely unique to
# subsequent occurrences of the column so that the
# original stays non-ambiguous
if table_qualified:
- required_label_name = (
- fallback_label_name
- ) = c._dedupe_anon_tq_label_idx(dedupe_hash)
+ required_label_name = fallback_label_name = (
+ c._dedupe_anon_tq_label_idx(dedupe_hash)
+ )
dedupe_hash += 1
else:
- required_label_name = (
- fallback_label_name
- ) = c._dedupe_anon_label_idx(dedupe_hash)
+ required_label_name = fallback_label_name = (
+ c._dedupe_anon_label_idx(dedupe_hash)
+ )
dedupe_hash += 1
repeated = True
else:
# same column under the same name. apply the "dedupe"
# label so that the original stays non-ambiguous
if table_qualified:
- required_label_name = (
- fallback_label_name
- ) = c._dedupe_anon_tq_label_idx(dedupe_hash)
+ required_label_name = fallback_label_name = (
+ c._dedupe_anon_tq_label_idx(dedupe_hash)
+ )
dedupe_hash += 1
else:
- required_label_name = (
- fallback_label_name
- ) = c._dedupe_anon_label_idx(dedupe_hash)
+ required_label_name = fallback_label_name = (
+ c._dedupe_anon_label_idx(dedupe_hash)
+ )
dedupe_hash += 1
repeated = True
else:
if TYPE_CHECKING:
@util.ro_non_memoized_property
- def columns(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]:
- ...
+ def columns(
+ self,
+ ) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ...
@util.ro_non_memoized_property
- def c(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]:
- ...
+ def c(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ...
def __str__(self) -> str:
if self.schema is not None:
if TYPE_CHECKING:
- def _ungroup(self) -> _SB:
- ...
+ def _ungroup(self) -> _SB: ...
# def _generate_columns_plus_names(
# self, anon_for_dupe_key: bool
@overload
def _offset_or_limit_clause_asint(
self, clause: ColumnElement[Any], attrname: str
- ) -> NoReturn:
- ...
+ ) -> NoReturn: ...
@overload
def _offset_or_limit_clause_asint(
self, clause: Optional[_OffsetLimitParam], attrname: str
- ) -> Optional[int]:
- ...
+ ) -> Optional[int]: ...
def _offset_or_limit_clause_asint(
self, clause: Optional[ColumnElement[Any]], attrname: str
if TYPE_CHECKING:
@classmethod
- def get_plugin_class(cls, statement: Executable) -> Type[SelectState]:
- ...
+ def get_plugin_class(
+ cls, statement: Executable
+ ) -> Type[SelectState]: ...
def __init__(
self,
@overload
def scalar_subquery(
self: Select[_MAYBE_ENTITY],
- ) -> ScalarSelect[Any]:
- ...
+ ) -> ScalarSelect[Any]: ...
@overload
def scalar_subquery(
self: Select[_NOT_ENTITY],
- ) -> ScalarSelect[_NOT_ENTITY]:
- ...
+ ) -> ScalarSelect[_NOT_ENTITY]: ...
@overload
- def scalar_subquery(self) -> ScalarSelect[Any]:
- ...
+ def scalar_subquery(self) -> ScalarSelect[Any]: ...
- def scalar_subquery(self) -> ScalarSelect[Any]:
- ...
+ def scalar_subquery(self) -> ScalarSelect[Any]: ...
def filter_by(self, **kwargs: Any) -> Self:
r"""apply the given filtering criterion as a WHERE clause
# statically generated** by tools/generate_sel_v1_overloads.py
@overload
- def with_only_columns(self, __ent0: _TCCA[_T0]) -> Select[_T0]:
- ...
+ def with_only_columns(self, __ent0: _TCCA[_T0]) -> Select[_T0]: ...
@overload
def with_only_columns(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
- ) -> Select[_T0, _T1]:
- ...
+ ) -> Select[_T0, _T1]: ...
@overload
def with_only_columns(
self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
- ) -> Select[_T0, _T1, _T2]:
- ...
+ ) -> Select[_T0, _T1, _T2]: ...
@overload
def with_only_columns(
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
- ) -> Select[_T0, _T1, _T2, _T3]:
- ...
+ ) -> Select[_T0, _T1, _T2, _T3]: ...
@overload
def with_only_columns(
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
- ) -> Select[_T0, _T1, _T2, _T3, _T4]:
- ...
+ ) -> Select[_T0, _T1, _T2, _T3, _T4]: ...
@overload
def with_only_columns(
__ent3: _TCCA[_T3],
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
- ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5]:
- ...
+ ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5]: ...
@overload
def with_only_columns(
__ent4: _TCCA[_T4],
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
- ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6]:
- ...
+ ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6]: ...
@overload
def with_only_columns(
__ent5: _TCCA[_T5],
__ent6: _TCCA[_T6],
__ent7: _TCCA[_T7],
- ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]:
- ...
+ ) -> Select[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]: ...
# END OVERLOADED FUNCTIONS self.with_only_columns
*entities: _ColumnsClauseArgument[Any],
maintain_column_froms: bool = False,
**__kw: Any,
- ) -> Select[Unpack[TupleAny]]:
- ...
+ ) -> Select[Unpack[TupleAny]]: ...
@_generative
def with_only_columns(
@overload
def self_group(
self: ScalarSelect[Any], against: Optional[OperatorType] = None
- ) -> ScalarSelect[Any]:
- ...
+ ) -> ScalarSelect[Any]: ...
@overload
def self_group(
self: ColumnElement[Any], against: Optional[OperatorType] = None
- ) -> ColumnElement[Any]:
- ...
+ ) -> ColumnElement[Any]: ...
def self_group(
self, against: Optional[OperatorType] = None
if TYPE_CHECKING:
- def _ungroup(self) -> Select[Unpack[TupleAny]]:
- ...
+ def _ungroup(self) -> Select[Unpack[TupleAny]]: ...
@_generative
def correlate(
class HasExpressionLookup(TypeEngineMixin):
-
"""Mixin expression adaptations based on lookup tables.
These rules are currently used by the numeric, integer and date types
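From here the complementary docstring rule dominates: the lone ``-`` lines in the following hunks remove the blank line these classes carried between the ``class`` statement and the docstring. Combined with the rule above that inserts a blank line after the docstring, a class such as ``Integer`` ends up reading::

    class Integer(HasExpressionLookup, TypeEngine[int]):
        """A type for ``int`` integers."""

        __visit_name__ = "integer"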
class Concatenable(TypeEngineMixin):
-
"""A mixin that marks a type as supporting 'concatenation',
typically strings."""
class String(Concatenable, TypeEngine[str]):
-
"""The base for all string and character types.
In SQL, corresponds to VARCHAR.
class Text(String):
-
"""A variably sized string type.
In SQL, usually corresponds to CLOB or TEXT. In general, TEXT objects
class Unicode(String):
-
"""A variable length Unicode string type.
The :class:`.Unicode` type is a :class:`.String` subclass that assumes
class UnicodeText(Text):
-
"""An unbounded-length Unicode string type.
See :class:`.Unicode` for details on the unicode
class Integer(HasExpressionLookup, TypeEngine[int]):
-
"""A type for ``int`` integers."""
__visit_name__ = "integer"
if TYPE_CHECKING:
@util.ro_memoized_property
- def _type_affinity(self) -> Type[Integer]:
- ...
+ def _type_affinity(self) -> Type[Integer]: ...
def get_dbapi_type(self, dbapi):
return dbapi.NUMBER
class SmallInteger(Integer):
-
"""A type for smaller ``int`` integers.
Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
class BigInteger(Integer):
-
"""A type for bigger ``int`` integers.
Typically generates a ``BIGINT`` in DDL, and otherwise acts like
class Numeric(HasExpressionLookup, TypeEngine[_N]):
-
"""Base for non-integer numeric types, such as
``NUMERIC``, ``FLOAT``, ``DECIMAL``, and other variants.
if TYPE_CHECKING:
@util.ro_memoized_property
- def _type_affinity(self) -> Type[Numeric[_N]]:
- ...
+ def _type_affinity(self) -> Type[Numeric[_N]]: ...
_default_decimal_return_scale = 10
scale: Optional[int] = ...,
decimal_return_scale: Optional[int] = ...,
asdecimal: Literal[True] = ...,
- ):
- ...
+ ): ...
@overload
def __init__(
scale: Optional[int] = ...,
decimal_return_scale: Optional[int] = ...,
asdecimal: Literal[False] = ...,
- ):
- ...
+ ): ...
def __init__(
self,
# we're a "numeric", DBAPI returns floats, convert.
return processors.to_decimal_processor_factory(
decimal.Decimal,
- self.scale
- if self.scale is not None
- else self._default_decimal_return_scale,
+ (
+ self.scale
+ if self.scale is not None
+ else self._default_decimal_return_scale
+ ),
)
else:
if dialect.supports_native_decimal:
precision: Optional[int] = ...,
asdecimal: Literal[False] = ...,
decimal_return_scale: Optional[int] = ...,
- ):
- ...
+ ): ...
@overload
def __init__(
precision: Optional[int] = ...,
asdecimal: Literal[True] = ...,
decimal_return_scale: Optional[int] = ...,
- ):
- ...
+ ): ...
def __init__(
self: Float[_N],
class DateTime(
_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.datetime]
):
-
"""A type for ``datetime.datetime()`` objects.
Date and time types return objects from the Python ``datetime``
class Date(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.date]):
-
"""A type for ``datetime.date()`` objects."""
__visit_name__ = "date"
class Time(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.time]):
-
"""A type for ``datetime.time()`` objects."""
__visit_name__ = "time"
class _Binary(TypeEngine[bytes]):
-
"""Define base behavior for binary types."""
def __init__(self, length: Optional[int] = None):
class LargeBinary(_Binary):
-
"""A type for large binary byte data.
The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
class SchemaType(SchemaEventTarget, TypeEngineMixin):
-
"""Add capabilities to a type which allow for schema-level DDL to be
associated with a type.
)
@overload
- def adapt(self, cls: Type[_TE], **kw: Any) -> _TE:
- ...
+ def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
@overload
- def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]:
- ...
+ def adapt(
+ self, cls: Type[TypeEngineMixin], **kw: Any
+ ) -> TypeEngine[Any]: ...
def adapt(
self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
class Boolean(SchemaType, Emulated, TypeEngine[bool]):
-
"""A bool datatype.
:class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side,
class Interval(Emulated, _AbstractInterval, TypeDecorator[dt.timedelta]):
-
"""A type for ``datetime.timedelta()`` objects.
The Interval type deals with ``datetime.timedelta`` objects. In
index,
expr=self.expr,
operator=operators.json_getitem_op,
- bindparam_type=JSON.JSONIntIndexType
- if isinstance(index, int)
- else JSON.JSONStrIndexType,
+ bindparam_type=(
+ JSON.JSONIntIndexType
+ if isinstance(index, int)
+ else JSON.JSONStrIndexType
+ ),
)
operator = operators.json_getitem_op
Indexable.Comparator[Sequence[Any]],
Concatenable.Comparator[Sequence[Any]],
):
-
"""Define comparison operations for :class:`_types.ARRAY`.
More operators are available on the dialect-specific form
return collection_callable(arr)
else:
return collection_callable(
- self._apply_item_processor(
- x,
- itemproc,
- dim - 1 if dim is not None else None,
- collection_callable,
+ (
+ self._apply_item_processor(
+ x,
+ itemproc,
+ dim - 1 if dim is not None else None,
+ collection_callable,
+ )
+ if x is not None
+ else None
)
- if x is not None
- else None
for x in arr
)
class REAL(Float[_N]):
-
"""The SQL REAL type.
.. seealso::
class FLOAT(Float[_N]):
-
"""The SQL FLOAT type.
.. seealso::
class NUMERIC(Numeric[_N]):
-
"""The SQL NUMERIC type.
.. seealso::
class DECIMAL(Numeric[_N]):
-
"""The SQL DECIMAL type.
.. seealso::
class INTEGER(Integer):
-
"""The SQL INT or INTEGER type.
.. seealso::
class SMALLINT(SmallInteger):
-
"""The SQL SMALLINT type.
.. seealso::
class BIGINT(BigInteger):
-
"""The SQL BIGINT type.
.. seealso::
class TIMESTAMP(DateTime):
-
"""The SQL TIMESTAMP type.
:class:`_types.TIMESTAMP` datatypes have support for timezone
class DATETIME(DateTime):
-
"""The SQL DATETIME type."""
__visit_name__ = "DATETIME"
class DATE(Date):
-
"""The SQL DATE type."""
__visit_name__ = "DATE"
class TIME(Time):
-
"""The SQL TIME type."""
__visit_name__ = "TIME"
class TEXT(Text):
-
"""The SQL TEXT type."""
__visit_name__ = "TEXT"
class CLOB(Text):
-
"""The CLOB type.
This type is found in Oracle and Informix.
class VARCHAR(String):
-
"""The SQL VARCHAR type."""
__visit_name__ = "VARCHAR"
class NVARCHAR(Unicode):
-
"""The SQL NVARCHAR type."""
__visit_name__ = "NVARCHAR"
class CHAR(String):
-
"""The SQL CHAR type."""
__visit_name__ = "CHAR"
class NCHAR(Unicode):
-
"""The SQL NCHAR type."""
__visit_name__ = "NCHAR"
class BLOB(LargeBinary):
-
"""The SQL BLOB type."""
__visit_name__ = "BLOB"
class BINARY(_Binary):
-
"""The SQL BINARY type."""
__visit_name__ = "BINARY"
class VARBINARY(_Binary):
-
"""The SQL VARBINARY type."""
__visit_name__ = "VARBINARY"
class BOOLEAN(Boolean):
-
"""The SQL BOOLEAN type."""
__visit_name__ = "BOOLEAN"
class NullType(TypeEngine[None]):
-
"""An unknown type.
:class:`.NullType` is used as a default type for those cases where
class Uuid(Emulated, TypeEngine[_UUID_RETURN]):
-
"""Represent a database agnostic UUID datatype.
For backends that have no "native" UUID datatype, the value will
self: Uuid[_python_UUID],
as_uuid: Literal[True] = ...,
native_uuid: bool = ...,
- ):
- ...
+ ): ...
@overload
def __init__(
self: Uuid[str],
as_uuid: Literal[False] = ...,
native_uuid: bool = ...,
- ):
- ...
+ ): ...
def __init__(self, as_uuid: bool = True, native_uuid: bool = True):
"""Construct a :class:`_sqltypes.Uuid` type.
class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated):
-
"""Represent the SQL UUID type.
This is the SQL-native form of the :class:`_types.Uuid` database agnostic
__visit_name__ = "UUID"
@overload
- def __init__(self: UUID[_python_UUID], as_uuid: Literal[True] = ...):
- ...
+ def __init__(self: UUID[_python_UUID], as_uuid: Literal[True] = ...): ...
@overload
- def __init__(self: UUID[str], as_uuid: Literal[False] = ...):
- ...
+ def __init__(self: UUID[str], as_uuid: Literal[False] = ...): ...
def __init__(self, as_uuid: bool = True):
"""Construct a :class:`_sqltypes.UUID` type.
if typing.TYPE_CHECKING:
- def _generated_shallow_copy_traversal(self, other: Self) -> None:
- ...
+ def _generated_shallow_copy_traversal(self, other: Self) -> None: ...
def _generated_shallow_from_dict_traversal(
self, d: Dict[str, Any]
- ) -> None:
- ...
+ ) -> None: ...
- def _generated_shallow_to_dict_traversal(self) -> Dict[str, Any]:
- ...
+ def _generated_shallow_to_dict_traversal(self) -> Dict[str, Any]: ...
@classmethod
def _generate_shallow_copy(
# sequence of 2-tuples
return [
(
- clone(key, **kw)
- if hasattr(key, "__clause_element__")
- else key,
+ (
+ clone(key, **kw)
+ if hasattr(key, "__clause_element__")
+ else key
+ ),
clone(value, **kw),
)
for key, value in element
def copy(elem):
if isinstance(elem, (list, tuple)):
return [
- clone(value, **kw)
- if hasattr(value, "__clause_element__")
- else value
+ (
+ clone(value, **kw)
+ if hasattr(value, "__clause_element__")
+ else value
+ )
for value in elem
]
elif isinstance(elem, dict):
class _LiteralProcessorType(Protocol[_T_co]):
- def __call__(self, value: Any) -> str:
- ...
+ def __call__(self, value: Any) -> str: ...
class _BindProcessorType(Protocol[_T_con]):
- def __call__(self, value: Optional[_T_con]) -> Any:
- ...
+ def __call__(self, value: Optional[_T_con]) -> Any: ...
class _ResultProcessorType(Protocol[_T_co]):
- def __call__(self, value: Any) -> Optional[_T_co]:
- ...
+ def __call__(self, value: Any) -> Optional[_T_co]: ...
class _SentinelProcessorType(Protocol[_T_co]):
- def __call__(self, value: Any) -> Optional[_T_co]:
- ...
+ def __call__(self, value: Any) -> Optional[_T_co]: ...
class _BaseTypeMemoDict(TypedDict):
class _ComparatorFactory(Protocol[_T]):
- def __call__(self, expr: ColumnElement[_T]) -> TypeEngine.Comparator[_T]:
- ...
+ def __call__(
+ self, expr: ColumnElement[_T]
+ ) -> TypeEngine.Comparator[_T]: ...
class TypeEngine(Visitable, Generic[_T]):
"""
- _variant_mapping: util.immutabledict[
- str, TypeEngine[Any]
- ] = util.EMPTY_DICT
+ _variant_mapping: util.immutabledict[str, TypeEngine[Any]] = (
+ util.EMPTY_DICT
+ )
def evaluates_none(self) -> Self:
"""Return a copy of this type which has the
return (self.__class__,) + tuple(
(
k,
- self.__dict__[k]._static_cache_key
- if isinstance(self.__dict__[k], TypeEngine)
- else self.__dict__[k],
+ (
+ self.__dict__[k]._static_cache_key
+ if isinstance(self.__dict__[k], TypeEngine)
+ else self.__dict__[k]
+ ),
)
for k in names
if k in self.__dict__
)
@overload
- def adapt(self, cls: Type[_TE], **kw: Any) -> _TE:
- ...
+ def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
@overload
- def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]:
- ...
+ def adapt(
+ self, cls: Type[TypeEngineMixin], **kw: Any
+ ) -> TypeEngine[Any]: ...
def adapt(
self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
@util.memoized_property
def _static_cache_key(
self,
- ) -> Union[CacheConst, Tuple[Any, ...]]:
- ...
+ ) -> Union[CacheConst, Tuple[Any, ...]]: ...
@overload
- def adapt(self, cls: Type[_TE], **kw: Any) -> _TE:
- ...
+ def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
@overload
def adapt(
self, cls: Type[TypeEngineMixin], **kw: Any
- ) -> TypeEngine[Any]:
- ...
+ ) -> TypeEngine[Any]: ...
def adapt(
self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
- ) -> TypeEngine[Any]:
- ...
+ ) -> TypeEngine[Any]: ...
- def dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
- ...
+ def dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: ...
class ExternalType(TypeEngineMixin):
return super().adapt(impltype, **kw)
@overload
- def adapt(self, cls: Type[_TE], **kw: Any) -> _TE:
- ...
+ def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
@overload
- def adapt(self, cls: Type[TypeEngineMixin], **kw: Any) -> TypeEngine[Any]:
- ...
+ def adapt(
+ self, cls: Type[TypeEngineMixin], **kw: Any
+ ) -> TypeEngine[Any]: ...
def adapt(
self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
@overload
-def to_instance(typeobj: Union[Type[_TE], _TE], *arg: Any, **kw: Any) -> _TE:
- ...
+def to_instance(
+ typeobj: Union[Type[_TE], _TE], *arg: Any, **kw: Any
+) -> _TE: ...
@overload
-def to_instance(typeobj: None, *arg: Any, **kw: Any) -> TypeEngine[None]:
- ...
+def to_instance(typeobj: None, *arg: Any, **kw: Any) -> TypeEngine[None]: ...
def to_instance(
] = _visitors["lateral"] = tables.append
if include_crud:
- _visitors["insert"] = _visitors["update"] = _visitors[
- "delete"
- ] = lambda ent: tables.append(ent.table)
+ _visitors["insert"] = _visitors["update"] = _visitors["delete"] = (
+ lambda ent: tables.append(ent.table)
+ )
if check_columns:
columns: Iterable[ColumnElement[Any]],
*clauses: Optional[ClauseElement],
**kw: bool,
-) -> Sequence[ColumnElement[Any]]:
- ...
+) -> Sequence[ColumnElement[Any]]: ...
@overload
columns: _SelectIterable,
*clauses: Optional[ClauseElement],
**kw: bool,
-) -> Sequence[Union[ColumnElement[Any], TextClause]]:
- ...
+) -> Sequence[Union[ColumnElement[Any], TextClause]]: ...
def reduce_columns(
if TYPE_CHECKING:
@overload
- def traverse(self, obj: Literal[None]) -> None:
- ...
+ def traverse(self, obj: Literal[None]) -> None: ...
# note this specializes the ReplacingExternalTraversal.traverse()
# method to state
# FromClause but Mypy is not accepting those as compatible with
# the base ReplacingExternalTraversal
@overload
- def traverse(self, obj: _ET) -> _ET:
- ...
+ def traverse(self, obj: _ET) -> _ET: ...
def traverse(
self, obj: Optional[ExternallyTraversible]
- ) -> Optional[ExternallyTraversible]:
- ...
+ ) -> Optional[ExternallyTraversible]: ...
def _corresponding_column(
self, col, require_embedded, _seen=util.EMPTY_SET
class _ColumnLookup(Protocol):
@overload
- def __getitem__(self, key: None) -> None:
- ...
+ def __getitem__(self, key: None) -> None: ...
@overload
- def __getitem__(self, key: ColumnClause[Any]) -> ColumnClause[Any]:
- ...
+ def __getitem__(self, key: ColumnClause[Any]) -> ColumnClause[Any]: ...
@overload
- def __getitem__(self, key: ColumnElement[Any]) -> ColumnElement[Any]:
- ...
+ def __getitem__(self, key: ColumnElement[Any]) -> ColumnElement[Any]: ...
@overload
- def __getitem__(self, key: _ET) -> _ET:
- ...
+ def __getitem__(self, key: _ET) -> _ET: ...
- def __getitem__(self, key: Any) -> Any:
- ...
+ def __getitem__(self, key: Any) -> Any: ...
class ColumnAdapter(ClauseAdapter):
return ac
@overload
- def traverse(self, obj: Literal[None]) -> None:
- ...
+ def traverse(self, obj: Literal[None]) -> None: ...
@overload
- def traverse(self, obj: _ET) -> _ET:
- ...
+ def traverse(self, obj: _ET) -> _ET: ...
def traverse(
self, obj: Optional[ExternallyTraversible]
if TYPE_CHECKING:
@property
- def visitor_iterator(self) -> Iterator[ColumnAdapter]:
- ...
+ def visitor_iterator(self) -> Iterator[ColumnAdapter]: ...
adapt_clause = traverse
adapt_list = ClauseAdapter.copy_and_process
class _CompilerDispatchType(Protocol):
- def __call__(_self, self: Visitable, visitor: Any, **kw: Any) -> Any:
- ...
+ def __call__(_self, self: Visitable, visitor: Any, **kw: Any) -> Any: ...
class Visitable:
if typing.TYPE_CHECKING:
- def _compiler_dispatch(self, visitor: Any, **kw: Any) -> str:
- ...
+ def _compiler_dispatch(self, visitor: Any, **kw: Any) -> str: ...
def __init_subclass__(cls) -> None:
if "__visit_name__" in cls.__dict__:
class _InternalTraversalDispatchType(Protocol):
- def __call__(s, self: object, visitor: HasTraversalDispatch) -> Any:
- ...
+ def __call__(s, self: object, visitor: HasTraversalDispatch) -> Any: ...
class HasTraversalDispatch:
if typing.TYPE_CHECKING:
- def _annotate(self, values: _AnnotationDict) -> Self:
- ...
+ def _annotate(self, values: _AnnotationDict) -> Self: ...
def get_children(
self, *, omit_attrs: Tuple[str, ...] = (), **kw: Any
- ) -> Iterable[ExternallyTraversible]:
- ...
+ ) -> Iterable[ExternallyTraversible]: ...
def _clone(self, **kw: Any) -> Self:
"""clone this element"""
class _CloneCallableType(Protocol):
- def __call__(self, element: _ET, **kw: Any) -> _ET:
- ...
+ def __call__(self, element: _ET, **kw: Any) -> _ET: ...
class _TraverseTransformCallableType(Protocol[_ET]):
- def __call__(self, element: _ET, **kw: Any) -> Optional[_ET]:
- ...
+ def __call__(self, element: _ET, **kw: Any) -> Optional[_ET]: ...
_ExtT = TypeVar("_ExtT", bound="ExternalTraversal")
return iterate(obj, self.__traverse_options__)
@overload
- def traverse(self, obj: Literal[None]) -> None:
- ...
+ def traverse(self, obj: Literal[None]) -> None: ...
@overload
- def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible:
- ...
+ def traverse(
+ self, obj: ExternallyTraversible
+ ) -> ExternallyTraversible: ...
def traverse(
self, obj: Optional[ExternallyTraversible]
return [self.traverse(x) for x in list_]
@overload
- def traverse(self, obj: Literal[None]) -> None:
- ...
+ def traverse(self, obj: Literal[None]) -> None: ...
@overload
- def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible:
- ...
+ def traverse(
+ self, obj: ExternallyTraversible
+ ) -> ExternallyTraversible: ...
def traverse(
self, obj: Optional[ExternallyTraversible]
return None
@overload
- def traverse(self, obj: Literal[None]) -> None:
- ...
+ def traverse(self, obj: Literal[None]) -> None: ...
@overload
- def traverse(self, obj: ExternallyTraversible) -> ExternallyTraversible:
- ...
+ def traverse(
+ self, obj: ExternallyTraversible
+ ) -> ExternallyTraversible: ...
def traverse(
self, obj: Optional[ExternallyTraversible]
iterator: Iterable[ExternallyTraversible],
obj: Literal[None],
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> None:
- ...
+) -> None: ...
@overload
iterator: Iterable[ExternallyTraversible],
obj: ExternallyTraversible,
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> ExternallyTraversible:
- ...
+) -> ExternallyTraversible: ...
def traverse_using(
obj: Literal[None],
opts: Mapping[str, Any],
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> None:
- ...
+) -> None: ...
@overload
obj: ExternallyTraversible,
opts: Mapping[str, Any],
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> ExternallyTraversible:
- ...
+) -> ExternallyTraversible: ...
def traverse(
obj: Literal[None],
opts: Mapping[str, Any],
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> None:
- ...
+) -> None: ...
# a bit of controversy here, as the clone of the lead element
obj: _ET,
opts: Mapping[str, Any],
visitors: Mapping[str, _TraverseCallableType[Any]],
-) -> _ET:
- ...
+) -> _ET: ...
def cloned_traverse(
obj: Literal[None],
opts: Mapping[str, Any],
replace: _TraverseTransformCallableType[Any],
-) -> None:
- ...
+) -> None: ...
@overload
obj: _CE,
opts: Mapping[str, Any],
replace: _TraverseTransformCallableType[Any],
-) -> _CE:
- ...
+) -> _CE: ...
@overload
obj: ExternallyTraversible,
opts: Mapping[str, Any],
replace: _TraverseTransformCallableType[Any],
-) -> ExternallyTraversible:
- ...
+) -> ExternallyTraversible: ...
def replacement_traverse(
dialect.supports_default_metavalue = True
if self.enable_returning:
- dialect.insert_returning = (
- dialect.update_returning
- ) = dialect.delete_returning = True
+ dialect.insert_returning = dialect.update_returning = (
+ dialect.delete_returning
+ ) = True
dialect.use_insertmanyvalues = True
dialect.supports_multivalues_insert = True
dialect.update_returning_multifrom = True
if typing.TYPE_CHECKING:
- def __getattr__(self, key: str) -> bool:
- ...
+ def __getattr__(self, key: str) -> bool: ...
@property
def name(self):
else:
argname = argname_or_fn
cases_plus_limitations = [
- entry
- if (isinstance(entry, tuple) and len(entry) == 2)
- else (entry, None)
+ (
+ entry
+ if (isinstance(entry, tuple) and len(entry) == 2)
+ else (entry, None)
+ )
for entry in cases
]
)
return combinations(
*[
- (variation._name, variation, limitation)
- if limitation is not None
- else (variation._name, variation)
+ (
+ (variation._name, variation, limitation)
+ if limitation is not None
+ else (variation._name, variation)
+ )
for variation, (case, limitation) in zip(
variations, cases_plus_limitations
)
options: Optional[Dict[str, Any]] = None,
asyncio: Literal[False] = False,
transfer_staticpool: bool = False,
-) -> Engine:
- ...
+) -> Engine: ...
@typing.overload
options: Optional[Dict[str, Any]] = None,
asyncio: Literal[True] = True,
transfer_staticpool: bool = False,
-) -> AsyncEngine:
- ...
+) -> AsyncEngine: ...
def testing_engine(
if negate:
bool_ = not negate
return self.description % {
- "driver": config.db.url.get_driver_name()
- if config
- else "<no driver>",
- "database": config.db.url.get_backend_name()
- if config
- else "<no database>",
+ "driver": (
+ config.db.url.get_driver_name() if config else "<no driver>"
+ ),
+ "database": (
+ config.db.url.get_backend_name() if config else "<no database>"
+ ),
"doesnt_support": "doesn't support" if bool_ else "does support",
"does_support": "does support" if bool_ else "doesn't support",
}
"--config-file",
os.path.join(
use_cachedir,
- "sqla_mypy_config.cfg"
- if use_plugin
- else "plain_mypy_config.cfg",
+ (
+ "sqla_mypy_config.cfg"
+ if use_plugin
+ else "plain_mypy_config.cfg"
+ ),
),
]
# skip first character which could be capitalized
# "List item x not found" type of message
expected_msg = expected_msg[0] + re.sub(
- r"\b(List|Tuple|Dict|Set)\b"
- if is_type
- else r"\b(List|Tuple|Dict|Set|Type)\b",
+ (
+ r"\b(List|Tuple|Dict|Set)\b"
+ if is_type
+ else r"\b(List|Tuple|Dict|Set|Type)\b"
+ ),
lambda m: m.group(1).lower(),
expected_msg[1:],
)
"i": lambda obj: obj,
"r": repr,
"s": str,
- "n": lambda obj: obj.__name__
- if hasattr(obj, "__name__")
- else type(obj).__name__,
+ "n": lambda obj: (
+ obj.__name__ if hasattr(obj, "__name__") else type(obj).__name__
+ ),
}
def combinations(self, *arg_sets, **kw):
t.c.value,
sort_by_parameter_order=bool(sort_by_parameter_order),
),
- [{"value": value} for i in range(10)]
- if multiple_rows
- else {"value": value},
+ (
+ [{"value": value} for i in range(10)]
+ if multiple_rows
+ else {"value": value}
+ ),
)
if multiple_rows:
t.c.value,
sort_by_parameter_order=bool(sort_by_parameter_order),
),
- [{"value": value} for i in range(10)]
- if multiple_rows
- else {"value": value},
+ (
+ [{"value": value} for i in range(10)]
+ if multiple_rows
+ else {"value": value}
+ ),
)
if multiple_rows:
"referred_columns": ref_col,
"name": name,
"options": mock.ANY,
- "referred_schema": ref_schema
- if ref_schema is not None
- else tt(),
+ "referred_schema": (
+ ref_schema if ref_schema is not None else tt()
+ ),
"referred_table": ref_table,
"comment": comment,
}
eq_(
connection.execute(t.select().order_by(t.c.id)).fetchall(),
- [(1, "d1"), (2, "d2_new"), (3, "d3")]
- if criteria.rows
- else [(1, "d1"), (2, "d2"), (3, "d3")],
+ (
+ [(1, "d1"), (2, "d2_new"), (3, "d3")]
+ if criteria.rows
+ else [(1, "d1"), (2, "d2"), (3, "d3")]
+ ),
)
@testing.variation("criteria", ["rows", "norows", "emptyin"])
eq_(
connection.execute(t.select().order_by(t.c.id)).fetchall(),
- [(1, "d1"), (3, "d3")]
- if criteria.rows
- else [(1, "d1"), (2, "d2"), (3, "d3")],
+ (
+ [(1, "d1"), (3, "d3")]
+ if criteria.rows
+ else [(1, "d1"), (2, "d2"), (3, "d3")]
+ ),
)
self._data.update(value)
@overload
- def get(self, key: str) -> Optional[_T]:
- ...
+ def get(self, key: str) -> Optional[_T]: ...
@overload
- def get(self, key: str, default: Union[_DT, _T]) -> Union[_DT, _T]:
- ...
+ def get(self, key: str, default: Union[_DT, _T]) -> Union[_DT, _T]: ...
def get(
self, key: str, default: Optional[Union[_DT, _T]] = None
return self._counter
@overload
- def get(self, key: _KT) -> Optional[_VT]:
- ...
+ def get(self, key: _KT) -> Optional[_VT]: ...
@overload
- def get(self, key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]:
- ...
+ def get(self, key: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ...
def get(
self, key: _KT, default: Optional[Union[_VT, _T]] = None
class _CreateFuncType(Protocol[_T_co]):
- def __call__(self) -> _T_co:
- ...
+ def __call__(self) -> _T_co: ...
class _ScopeFuncType(Protocol):
- def __call__(self) -> Any:
- ...
+ def __call__(self) -> Any: ...
class ScopedRegistry(Generic[_T]):
class ImmutableDictBase(ReadOnlyContainer, Dict[_KT, _VT]):
if TYPE_CHECKING:
- def __new__(cls, *args: Any) -> Self:
- ...
+ def __new__(cls, *args: Any) -> Self: ...
- def __init__(cls, *args: Any):
- ...
+ def __init__(cls, *args: Any): ...
def _readonly(self, *arg: Any, **kw: Any) -> NoReturn:
self._immutable()
def iscoroutine(
awaitable: Awaitable[_T_co],
- ) -> TypeGuard[Coroutine[Any, Any, _T_co]]:
- ...
+ ) -> TypeGuard[Coroutine[Any, Any, _T_co]]: ...
else:
iscoroutine = asyncio.iscoroutine
*,
_set: Optional[Set[str]] = None,
raiseerr: Literal[True] = ...,
-) -> Set[str]:
- ...
+) -> Set[str]: ...
@overload
def get_cls_kwargs(
cls: type, *, _set: Optional[Set[str]] = None, raiseerr: bool = False
-) -> Optional[Set[str]]:
- ...
+) -> Optional[Set[str]]: ...
def get_cls_kwargs(
self.__name__ = fget.__name__
@overload
- def __get__(self: _GFD, obj: None, cls: Any) -> _GFD:
- ...
+ def __get__(self: _GFD, obj: None, cls: Any) -> _GFD: ...
@overload
- def __get__(self, obj: object, cls: Any) -> _T_co:
- ...
+ def __get__(self, obj: object, cls: Any) -> _T_co: ...
def __get__(self: _GFD, obj: Any, cls: Any) -> Union[_GFD, _T_co]:
raise NotImplementedError()
if TYPE_CHECKING:
- def __set__(self, instance: Any, value: Any) -> None:
- ...
+ def __set__(self, instance: Any, value: Any) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
def _reset(self, obj: Any) -> None:
raise NotImplementedError()
self.__name__ = fget.__name__
@overload
- def __get__(self: _MA, obj: None, cls: Any) -> _MA:
- ...
+ def __get__(self: _MA, obj: None, cls: Any) -> _MA: ...
@overload
- def __get__(self, obj: Any, cls: Any) -> _T:
- ...
+ def __get__(self, obj: Any, cls: Any) -> _T: ...
def __get__(self, obj, cls):
if obj is None:
maxsize: int
use_lifo: bool
- def __init__(self, maxsize: int = 0, use_lifo: bool = False):
- ...
+ def __init__(self, maxsize: int = 0, use_lifo: bool = False): ...
def empty(self) -> bool:
raise NotImplementedError()
# copied from TypeShed, required in order to implement
# MutableMapping.update()
class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]):
- def keys(self) -> Iterable[_KT]:
- ...
+ def keys(self) -> Iterable[_KT]: ...
- def __getitem__(self, __k: _KT) -> _VT_co:
- ...
+ def __getitem__(self, __k: _KT) -> _VT_co: ...
# work around https://github.com/microsoft/pyright/issues/3025
@overload
-def de_optionalize_union_types(type_: str) -> str:
- ...
+def de_optionalize_union_types(type_: str) -> str: ...
@overload
-def de_optionalize_union_types(type_: Type[Any]) -> Type[Any]:
- ...
+def de_optionalize_union_types(type_: Type[Any]) -> Type[Any]: ...
@overload
def de_optionalize_union_types(
type_: _AnnotationScanType,
-) -> _AnnotationScanType:
- ...
+) -> _AnnotationScanType: ...
def de_optionalize_union_types(
class DescriptorProto(Protocol):
- def __get__(self, instance: object, owner: Any) -> Any:
- ...
+ def __get__(self, instance: object, owner: Any) -> Any: ...
- def __set__(self, instance: Any, value: Any) -> None:
- ...
+ def __set__(self, instance: Any, value: Any) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
_DESC = TypeVar("_DESC", bound=DescriptorProto)
if TYPE_CHECKING:
- def __get__(self, instance: object, owner: Any) -> _DESC:
- ...
+ def __get__(self, instance: object, owner: Any) -> _DESC: ...
- def __set__(self, instance: Any, value: _DESC) -> None:
- ...
+ def __set__(self, instance: Any, value: _DESC) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
_DESC_co = TypeVar("_DESC_co", bound=DescriptorProto, covariant=True)
if TYPE_CHECKING:
- def __get__(self, instance: object, owner: Any) -> _DESC_co:
- ...
+ def __get__(self, instance: object, owner: Any) -> _DESC_co: ...
- def __set__(self, instance: Any, value: Any) -> NoReturn:
- ...
+ def __set__(self, instance: Any, value: Any) -> NoReturn: ...
- def __delete__(self, instance: Any) -> NoReturn:
- ...
+ def __delete__(self, instance: Any) -> NoReturn: ...
_FN = TypeVar("_FN", bound=Optional[Callable[..., Any]])
if TYPE_CHECKING:
- def __get__(self, instance: object, owner: Any) -> _FN:
- ...
+ def __get__(self, instance: object, owner: Any) -> _FN: ...
- def __set__(self, instance: Any, value: _FN) -> None:
- ...
+ def __set__(self, instance: Any, value: _FN) -> None: ...
- def __delete__(self, instance: Any) -> None:
- ...
+ def __delete__(self, instance: Any) -> None: ...
# $def ro_descriptor_reference(fn: Callable[])
ignore =
A003,
D,
- E203,E305,E711,E712,E721,E722,E741,
+ E203,E305,E701,E704,E711,E712,E721,E722,E741,
N801,N802,N806,
RST304,RST303,RST299,RST399,
W503,W504,W601
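The new one-line stubs are exactly what pycodestyle flags, so the flake8 ``ignore`` list grows two codes: E701 (multiple statements on one line, colon) and E704 (statement on same line as ``def``). Without them, every collapsed stub in this diff would fail lint; ``_Missing`` below is a made-up name for illustration::

    class _Missing: ...  # E701 without the new ignore

    def notin_(self, other): ...  # E704 without the new ignore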
class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
-
"""test overhead associated with many-to-one fetches.
Prior to the refactor of LoadLazyAttribute and
select(tbl),
"SELECT %(name)s.test.id FROM %(name)s.test"
% {"name": rendered_schema},
- schema_translate_map={None: schemaname}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: schemaname} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
"test",
metadata,
Column("id", Integer, primary_key=True),
- schema=quoted_name("Foo.dbo", True)
- if not use_schema_translate
- else None,
+ schema=(
+ quoted_name("Foo.dbo", True)
+ if not use_schema_translate
+ else None
+ ),
)
self.assert_compile(
select(tbl),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
- schema_translate_map={None: quoted_name("Foo.dbo", True)}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: quoted_name("Foo.dbo", True)}
+ if use_schema_translate
+ else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
self.assert_compile(
select(tbl),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
- schema_translate_map={None: "[Foo.dbo]"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "[Foo.dbo]"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
self.assert_compile(
select(tbl),
"SELECT foo.dbo.test.id FROM foo.dbo.test",
- schema_translate_map={None: "foo.dbo"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "foo.dbo"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
self.assert_compile(
select(tbl),
"SELECT [Foo].dbo.test.id FROM [Foo].dbo.test",
- schema_translate_map={None: "Foo.dbo"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "Foo.dbo"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
for i in range(col_num)
],
)
- cls.view_str = (
- view_str
- ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
- ",".join("long_named_column_number_%d" % i for i in range(col_num))
+ cls.view_str = view_str = (
+ "CREATE VIEW huge_named_view AS SELECT %s FROM base_table"
+ % (
+ ",".join(
+ "long_named_column_number_%d" % i for i in range(col_num)
+ )
+ )
)
assert len(view_str) > 4000
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""Tests MySQL-dialect specific compilation."""
__dialect__ = mysql.dialect()
See #4246
"""
+
import contextlib
from sqlalchemy import Column
class DistinctOnTest(fixtures.MappedTest, AssertsCompiledSQL):
-
"""Test 'DISTINCT' with SQL expression language and orm.Query with
an emphasis on PG's 'DISTINCT ON' syntax.
class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""Tests for full text searching"""
__dialect__ = postgresql.dialect()
def test_autocommit_pre_ping(self, testing_engine, autocommit):
engine = testing_engine(
options={
- "isolation_level": "AUTOCOMMIT"
- if autocommit
- else "SERIALIZABLE",
+ "isolation_level": (
+ "AUTOCOMMIT" if autocommit else "SERIALIZABLE"
+ ),
"pool_pre_ping": True,
}
)
engine = testing_engine(
options={
- "isolation_level": "AUTOCOMMIT"
- if autocommit
- else "SERIALIZABLE",
+ "isolation_level": (
+ "AUTOCOMMIT" if autocommit else "SERIALIZABLE"
+ ),
"pool_pre_ping": True,
}
)
class ExtractTest(fixtures.TablesTest):
-
"""The rationale behind this test is that for many years we've had a system
of embedding type casts into the expressions rendered by visit_extract()
on the postgresql platform. The reason for this cast is not clear.
"one",
"two",
"three",
- native_enum=True # make sure this is True because
+ native_enum=True, # make sure this is True because
# it should *not* take effect due to
# the variant
).with_variant(
class SpecialTypesTest(fixtures.TablesTest, ComparesTables):
-
"""test DDL and reflection of PG-specific types"""
__only_on__ = ("postgresql >= 8.3.0",)
class UUIDTest(fixtures.TestBase):
-
"""Test postgresql-specific UUID cases.
See also generic UUID tests in testing/suite/test_types
self._test_clause(
fn(self.col, self._data_str()),
f"data_table.range {op} %(range_1)s",
- self.col.type
- if op in self._not_compare_op
- else sqltypes.BOOLEANTYPE,
+ (
+ self.col.type
+ if op in self._not_compare_op
+ else sqltypes.BOOLEANTYPE
+ ),
)
@testing.combinations(*_all_fns, id_="as")
self._test_clause(
fn(self.col, self._data_obj()),
f"data_table.range {op} %(range_1)s::{self._col_str}",
- self.col.type
- if op in self._not_compare_op
- else sqltypes.BOOLEANTYPE,
+ (
+ self.col.type
+ if op in self._not_compare_op
+ else sqltypes.BOOLEANTYPE
+ ),
)
@testing.combinations(*_comparisons, id_="as")
self._test_clause(
fn(self.col, any_(array([self._data_str()]))),
f"data_table.range {op} ANY (ARRAY[%(param_1)s])",
- self.col.type
- if op in self._not_compare_op
- else sqltypes.BOOLEANTYPE,
+ (
+ self.col.type
+ if op in self._not_compare_op
+ else sqltypes.BOOLEANTYPE
+ ),
)
def test_where_is_null(self):
t.c.value,
sort_by_parameter_order=bool(sort_by_parameter_order),
),
- [{"value": value} for i in range(10)]
- if multiple_rows
- else {"value": value},
+ (
+ [{"value": value} for i in range(10)]
+ if multiple_rows
+ else {"value": value}
+ ),
)
if multiple_rows:
"""SQLite-specific tests."""
+
import datetime
import json
import os
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""Tests SQLite-dialect specific compilation."""
__dialect__ = sqlite.dialect()
class InsertTest(fixtures.TestBase, AssertsExecutionResults):
-
"""Tests inserts and autoincrement."""
__only_on__ = "sqlite"
class SavepointTest(fixtures.TablesTest):
-
"""test that savepoints work when we use the correct event setup"""
__only_on__ = "sqlite"
arg[-1].get_result_proxy = Mock(return_value=Mock(context=arg[-1]))
return retval
- m1.real_do_execute.side_effect = (
- m1.do_execute.side_effect
- ) = mock_the_cursor
- m1.real_do_executemany.side_effect = (
- m1.do_executemany.side_effect
- ) = mock_the_cursor
+ m1.real_do_execute.side_effect = m1.do_execute.side_effect = (
+ mock_the_cursor
+ )
+ m1.real_do_executemany.side_effect = m1.do_executemany.side_effect = (
+ mock_the_cursor
+ )
m1.real_do_execute_no_params.side_effect = (
m1.do_execute_no_params.side_effect
) = mock_the_cursor
connection.rollback()
time.sleep(retry_interval)
- context.cursor = (
- cursor
- ) = connection.connection.cursor()
+ context.cursor = cursor = (
+ connection.connection.cursor()
+ )
else:
raise
else:
self.assert_compile(
session.query(Document),
- "SELECT pjoin.id AS pjoin_id, pjoin.doctype AS pjoin_doctype, "
- "pjoin.type AS pjoin_type, pjoin.send_method AS pjoin_send_method "
- "FROM "
- "(SELECT actual_documents.id AS id, "
- "actual_documents.send_method AS send_method, "
- "actual_documents.doctype AS doctype, "
- "'actual' AS type FROM actual_documents) AS pjoin"
- if use_strict_attrs
- else "SELECT pjoin.id AS pjoin_id, pjoin.send_method AS "
- "pjoin_send_method, pjoin.doctype AS pjoin_doctype, "
- "pjoin.type AS pjoin_type "
- "FROM "
- "(SELECT actual_documents.id AS id, "
- "actual_documents.send_method AS send_method, "
- "actual_documents.doctype AS doctype, "
- "'actual' AS type FROM actual_documents) AS pjoin",
+ (
+ "SELECT pjoin.id AS pjoin_id, pjoin.doctype AS pjoin_doctype, "
+ "pjoin.type AS pjoin_type, "
+ "pjoin.send_method AS pjoin_send_method "
+ "FROM "
+ "(SELECT actual_documents.id AS id, "
+ "actual_documents.send_method AS send_method, "
+ "actual_documents.doctype AS doctype, "
+ "'actual' AS type FROM actual_documents) AS pjoin"
+ if use_strict_attrs
+ else "SELECT pjoin.id AS pjoin_id, pjoin.send_method AS "
+ "pjoin_send_method, pjoin.doctype AS pjoin_doctype, "
+ "pjoin.type AS pjoin_type "
+ "FROM "
+ "(SELECT actual_documents.id AS id, "
+ "actual_documents.send_method AS send_method, "
+ "actual_documents.doctype AS doctype, "
+ "'actual' AS type FROM actual_documents) AS pjoin"
+ ),
)
@testing.combinations(True, False)
"""
+
from typing import Optional
from sqlalchemy import Column
type checked.
"""
+
from typing import List
from sqlalchemy import Column
id: Mapped[int] = mapped_column(primary_key=True)
- user_keyword_associations: Mapped[
- List[UserKeywordAssociation]
- ] = relationship(
- back_populates="user",
- cascade="all, delete-orphan",
+ user_keyword_associations: Mapped[List[UserKeywordAssociation]] = (
+ relationship(
+ back_populates="user",
+ cascade="all, delete-orphan",
+ )
)
keywords: AssociationProxy[list[str]] = association_proxy(
primary_key=True, repr=True, init=False
)
- user_keyword_associations: Mapped[
- List[UserKeywordAssociation]
- ] = relationship(
- back_populates="user",
- cascade="all, delete-orphan",
- init=False,
+ user_keyword_associations: Mapped[List[UserKeywordAssociation]] = (
+ relationship(
+ back_populates="user",
+ cascade="all, delete-orphan",
+ init=False,
+ )
)
if embed_in_field:
m,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
- Column(
- "t_%d_id" % (i - 1), ForeignKey("table_%d.id" % (i - 1))
- )
- if i > 4
- else None,
+ (
+ Column(
+ "t_%d_id" % (i - 1),
+ ForeignKey("table_%d.id" % (i - 1)),
+ )
+ if i > 4
+ else None
+ ),
)
m.drop_all(e)
m.create_all(e)
self.assert_compile(
stmt,
- "SELECT my_function(t1.q) AS my_function_1 FROM t1"
- if named
- else "SELECT my_function(t1.q) AS anon_1 FROM t1",
+ (
+ "SELECT my_function(t1.q) AS my_function_1 FROM t1"
+ if named
+ else "SELECT my_function(t1.q) AS anon_1 FROM t1"
+ ),
dialect="sqlite",
)
class ExtendedEventsTest(_ExtBase, fixtures.ORMTest):
-
"""Allow custom Events implementations."""
@modifies_instrumentation_finders
if construct.Mapped:
bars: orm.Mapped[typing.List[Bar]] = orm.relationship()
elif construct.WriteOnlyMapped:
- bars: orm.WriteOnlyMapped[
- typing.List[Bar]
- ] = orm.relationship()
+ bars: orm.WriteOnlyMapped[typing.List[Bar]] = (
+ orm.relationship()
+ )
elif construct.DynamicMapped:
bars: orm.DynamicMapped[typing.List[Bar]] = orm.relationship()
else:
JSON, init=True, default_factory=lambda: {}
)
- new_instance: GenericSetting[ # noqa: F841
- Dict[str, Any]
- ] = GenericSetting(key="x", value={"foo": "bar"})
+ new_instance: GenericSetting[Dict[str, Any]] = ( # noqa: F841
+ GenericSetting(key="x", value={"foo": "bar"})
+ )
def test_no_anno_doesnt_go_into_dc(
self, dc_decl_base: Type[MappedAsDataclass]
target_id = Column(Integer, primary_key=True)
class Engineer(Person):
-
"""single table inheritance"""
if decl_type.legacy:
)
class Manager(Person):
-
"""single table inheritance"""
if decl_type.legacy:
class OverlapColPrecedenceTest(DeclarativeTestBase):
-
"""test #1892 cases when declarative does column precedence."""
def _run_test(self, Engineer, e_id, p_id):
return relationship("Other")
class Engineer(Mixin, Person):
-
"""single table inheritance"""
class Manager(Mixin, Person):
-
"""single table inheritance"""
class Other(Base):
data: Mapped[Union[float, Decimal]] = mapped_column()
reverse_data: Mapped[Union[Decimal, float]] = mapped_column()
- optional_data: Mapped[
- Optional[Union[float, Decimal]]
- ] = mapped_column()
+ optional_data: Mapped[Optional[Union[float, Decimal]]] = (
+ mapped_column()
+ )
# use Optional directly
- reverse_optional_data: Mapped[
- Optional[Union[Decimal, float]]
- ] = mapped_column()
+ reverse_optional_data: Mapped[Optional[Union[Decimal, float]]] = (
+ mapped_column()
+ )
# use Union with None, same as Optional but presents differently
# (Optional object with __origin__ Union vs. Union)
- reverse_u_optional_data: Mapped[
- Union[Decimal, float, None]
- ] = mapped_column()
+ reverse_u_optional_data: Mapped[Union[Decimal, float, None]] = (
+ mapped_column()
+ )
float_data: Mapped[float] = mapped_column()
decimal_data: Mapped[Decimal] = mapped_column()
if compat.py310:
pep604_data: Mapped[float | Decimal] = mapped_column()
pep604_reverse: Mapped[Decimal | float] = mapped_column()
- pep604_optional: Mapped[
- Decimal | float | None
- ] = mapped_column()
+ pep604_optional: Mapped[Decimal | float | None] = (
+ mapped_column()
+ )
pep604_data_fwd: Mapped["float | Decimal"] = mapped_column()
pep604_reverse_fwd: Mapped["Decimal | float"] = mapped_column()
- pep604_optional_fwd: Mapped[
- "Decimal | float | None"
- ] = mapped_column()
+ pep604_optional_fwd: Mapped["Decimal | float | None"] = (
+ mapped_column()
+ )
is_(User.__table__.c.data.type, our_type)
is_false(User.__table__.c.data.nullable)
collection_class=list
)
elif datatype.collections_mutable_sequence:
- bs: Mapped[
- collections.abc.MutableSequence[B]
- ] = relationship(collection_class=list)
+ bs: Mapped[collections.abc.MutableSequence[B]] = (
+ relationship(collection_class=list)
+ )
else:
datatype.fail()
if datatype.typing_sequence:
bs: Mapped[typing.Sequence[B]] = relationship()
elif datatype.collections_sequence:
- bs: Mapped[
- collections.abc.Sequence[B]
- ] = relationship()
+ bs: Mapped[collections.abc.Sequence[B]] = (
+ relationship()
+ )
elif datatype.typing_mutable_sequence:
bs: Mapped[typing.MutableSequence[B]] = relationship()
elif datatype.collections_mutable_sequence:
- bs: Mapped[
- collections.abc.MutableSequence[B]
- ] = relationship()
+ bs: Mapped[collections.abc.MutableSequence[B]] = (
+ relationship()
+ )
else:
datatype.fail()
data: Mapped[Union[float, Decimal]] = mapped_column()
reverse_data: Mapped[Union[Decimal, float]] = mapped_column()
- optional_data: Mapped[
- Optional[Union[float, Decimal]]
- ] = mapped_column()
+ optional_data: Mapped[Optional[Union[float, Decimal]]] = (
+ mapped_column()
+ )
# use Optional directly
- reverse_optional_data: Mapped[
- Optional[Union[Decimal, float]]
- ] = mapped_column()
+ reverse_optional_data: Mapped[Optional[Union[Decimal, float]]] = (
+ mapped_column()
+ )
# use Union with None, same as Optional but presents differently
# (Optional object with __origin__ Union vs. Union)
- reverse_u_optional_data: Mapped[
- Union[Decimal, float, None]
- ] = mapped_column()
+ reverse_u_optional_data: Mapped[Union[Decimal, float, None]] = (
+ mapped_column()
+ )
float_data: Mapped[float] = mapped_column()
decimal_data: Mapped[Decimal] = mapped_column()
if compat.py310:
pep604_data: Mapped[float | Decimal] = mapped_column()
pep604_reverse: Mapped[Decimal | float] = mapped_column()
- pep604_optional: Mapped[
- Decimal | float | None
- ] = mapped_column()
+ pep604_optional: Mapped[Decimal | float | None] = (
+ mapped_column()
+ )
pep604_data_fwd: Mapped["float | Decimal"] = mapped_column()
pep604_reverse_fwd: Mapped["Decimal | float"] = mapped_column()
- pep604_optional_fwd: Mapped[
- "Decimal | float | None"
- ] = mapped_column()
+ pep604_optional_fwd: Mapped["Decimal | float | None"] = (
+ mapped_column()
+ )
is_(User.__table__.c.data.type, our_type)
is_false(User.__table__.c.data.nullable)
collection_class=list
)
elif datatype.collections_mutable_sequence:
- bs: Mapped[
- collections.abc.MutableSequence[B]
- ] = relationship(collection_class=list)
+ bs: Mapped[collections.abc.MutableSequence[B]] = (
+ relationship(collection_class=list)
+ )
else:
datatype.fail()
if datatype.typing_sequence:
bs: Mapped[typing.Sequence[B]] = relationship()
elif datatype.collections_sequence:
- bs: Mapped[
- collections.abc.Sequence[B]
- ] = relationship()
+ bs: Mapped[collections.abc.Sequence[B]] = (
+ relationship()
+ )
elif datatype.typing_mutable_sequence:
bs: Mapped[typing.MutableSequence[B]] = relationship()
elif datatype.collections_mutable_sequence:
- bs: Mapped[
- collections.abc.MutableSequence[B]
- ] = relationship()
+ bs: Mapped[collections.abc.MutableSequence[B]] = (
+ relationship()
+ )
else:
datatype.fail()
__mapper_args__ = {
"polymorphic_identity": "retailer",
- "polymorphic_load": "inline"
- if use_poly_on_retailer
- else None,
+ "polymorphic_load": (
+ "inline" if use_poly_on_retailer else None
+ ),
}
return Customer, Store, Retailer
# a.id is not included in the SELECT list
"SELECT b.data FROM a JOIN b ON a.id = b.id "
"WHERE a.id = :pk_1",
- [{"pk_1": pk}]
+ [{"pk_1": pk}],
# if we used load_scalar_attributes(), it would look like
# this
# "SELECT b.data AS b_data FROM b WHERE :param_1 = b.id",
m1 = aliased(Manager, flat=True)
q = sess.query(Engineer, m1).join(Engineer.manager.of_type(m1))
- with _aliased_join_warning(
- r"Manager\(managers\)"
- ) if autoalias else nullcontext():
+ with (
+ _aliased_join_warning(r"Manager\(managers\)")
+ if autoalias
+ else nullcontext()
+ ):
self.assert_compile(
q,
"SELECT engineers.id AS "
e1 = aliased(Engineer, flat=True)
q = s.query(Boss).join(e1, e1.manager_id == Boss.id)
- with _aliased_join_warning(
- r"Mapper\[Engineer\(engineer\)\]"
- ) if autoalias else nullcontext():
+ with (
+ _aliased_join_warning(r"Mapper\[Engineer\(engineer\)\]")
+ if autoalias
+ else nullcontext()
+ ):
self.assert_compile(
q,
"SELECT manager.id AS manager_id, employee.id AS employee_id, "
b1 = aliased(Boss, flat=True)
q = s.query(Engineer).join(b1, Engineer.manager_id == b1.id)
- with _aliased_join_warning(
- r"Mapper\[Boss\(manager\)\]"
- ) if autoalias else nullcontext():
+ with (
+ _aliased_join_warning(r"Mapper\[Boss\(manager\)\]")
+ if autoalias
+ else nullcontext()
+ ):
self.assert_compile(
q,
"SELECT engineer.id AS engineer_id, "
be cleaned up and modernized.
"""
+
import datetime
import sqlalchemy as sa
assert_data = [
{
"start": d["start"] if "start" in d else None,
- "end": d["end"]
- if "end" in d
- else Point(d["x2"], d["y2"])
- if "x2" in d
- else None,
+ "end": (
+ d["end"]
+ if "end" in d
+ else Point(d["x2"], d["y2"]) if "x2" in d else None
+ ),
"graph_id": d["graph_id"],
}
for d in data
mock.call(
e1,
Point(5, 6),
- LoaderCallableStatus.NO_VALUE
- if not active_history
- else None,
+ (
+ LoaderCallableStatus.NO_VALUE
+ if not active_history
+ else None
+ ),
Edge.start.impl,
)
],
mock.call(
e1,
Point(7, 8),
- LoaderCallableStatus.NO_VALUE
- if not active_history
- else Point(5, 6),
+ (
+ LoaderCallableStatus.NO_VALUE
+ if not active_history
+ else Point(5, 6)
+ ),
Edge.start.impl,
)
],
[
mock.call(
e1,
- LoaderCallableStatus.NO_VALUE
- if not active_history
- else Point(5, 6),
+ (
+ LoaderCallableStatus.NO_VALUE
+ if not active_history
+ else Point(5, 6)
+ ),
Edge.start.impl,
)
],
T1/T2.
"""
+
from itertools import count
from sqlalchemy import bindparam
eq_ignore_whitespace(
str(q),
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = ?"
- if expect_bound
- else "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = :id_1",
+ (
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = ?"
+ if expect_bound
+ else "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :id_1"
+ ),
)
def test_query_bound_session(self):
class RequirementsTest(fixtures.MappedTest):
-
"""Tests the contract for user classes."""
@classmethod
addresses_args={
"order_by": addresses.c.id,
"backref": "user",
- "cascade": "save-update"
- if not delete_cascade_configured
- else "all, delete",
+ "cascade": (
+ "save-update"
+ if not delete_cascade_configured
+ else "all, delete"
+ ),
}
)
data: Mapped[str]
bs: WriteOnlyMapped["B"] = relationship( # noqa: F821
passive_deletes=passive_deletes,
- cascade="all, delete-orphan"
- if cascade_deletes
- else "save-update, merge",
+ cascade=(
+ "all, delete-orphan"
+ if cascade_deletes
+ else "save-update, merge"
+ ),
order_by="B.id",
)
attributes.get_history(
obj,
attrname,
- PassiveFlag.PASSIVE_NO_FETCH
- if self.lazy == "write_only"
- else PassiveFlag.PASSIVE_OFF,
+ (
+ PassiveFlag.PASSIVE_NO_FETCH
+ if self.lazy == "write_only"
+ else PassiveFlag.PASSIVE_OFF
+ ),
),
compare,
)
class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
-
"""test #2188"""
__dialect__ = "default"
class LoadOnExistingTest(_fixtures.FixtureTest):
-
"""test that loaders from a base Query fully populate."""
run_inserts = "once"
class CorrelatedSubqueryTest(fixtures.MappedTest):
-
"""tests for #946, #947, #948.
The "users" table is joined to "stuff", and the relationship
class SecondaryOptionsTest(fixtures.MappedTest):
-
"""test that the contains_eager() option doesn't bleed
into a secondary load."""
is_orm_statement=ctx.is_orm_statement,
is_relationship_load=ctx.is_relationship_load,
is_column_load=ctx.is_column_load,
- lazy_loaded_from=ctx.lazy_loaded_from
- if ctx.is_select
- else None,
+ lazy_loaded_from=(
+ ctx.lazy_loaded_from if ctx.is_select else None
+ ),
)
return canary
(
lambda session: session,
"loaded_as_persistent",
- lambda session, instance: instance.unloaded
- if instance.__class__.__name__ == "A"
- else None,
+ lambda session, instance: (
+ instance.unloaded
+ if instance.__class__.__name__ == "A"
+ else None
+ ),
),
argnames="target, event_name, fn",
)(fn)
class DeferredMapperEventsTest(RemoveORMEventsGlobally, _fixtures.FixtureTest):
-
""" "test event listeners against unmapped classes.
This incurs special logic. Note if we ever do the "remove" case,
"""test the current state of the hasparent() flag."""
+
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import testing
class GetterStateTest(_fixtures.FixtureTest):
-
"""test lazyloader on non-existent attribute returns
expected attribute symbols, maintain expected state"""
properties={
"user": relationship(
User,
- primaryjoin=and_(
- users.c.id == addresses.c.user_id, users.c.id != 27
- )
- if dont_use_get
- else None,
+ primaryjoin=(
+ and_(
+ users.c.id == addresses.c.user_id, users.c.id != 27
+ )
+ if dont_use_get
+ else None
+ ),
back_populates="addresses",
)
},
class RequirementsTest(fixtures.MappedTest):
-
"""Tests the contract for user classes."""
@classmethod
CountStatements(
0
if load.noload
- else 1
- if merge_persistent.merge_persistent
- else 2
+ else 1 if merge_persistent.merge_persistent else 2
)
)
Keyword = self.classes.Keyword
self._assert_eager_with_entity_exception(
[Item],
- lambda: (joinedload(Keyword),)
- if first_element
- else (Load(Item).joinedload(Keyword),),
+ lambda: (
+ (joinedload(Keyword),)
+ if first_element
+ else (Load(Item).joinedload(Keyword),)
+ ),
"expected ORM mapped attribute for loader " "strategy argument",
)
Item = self.classes.Item
self._assert_eager_with_entity_exception(
[Item],
- lambda: (joinedload(rando),)
- if first_element
- else (Load(Item).joinedload(rando)),
+ lambda: (
+ (joinedload(rando),)
+ if first_element
+ else (Load(Item).joinedload(rando))
+ ),
"expected ORM mapped attribute for loader strategy argument",
)
self._assert_eager_with_entity_exception(
[OrderWProp],
- lambda: (joinedload(OrderWProp.some_attr),)
- if first_element
- else (Load(OrderWProp).joinedload(OrderWProp.some_attr),),
+ lambda: (
+ (joinedload(OrderWProp.some_attr),)
+ if first_element
+ else (Load(OrderWProp).joinedload(OrderWProp.some_attr),)
+ ),
"expected ORM mapped attribute for loader strategy argument",
)
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
eq_(
result,
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
@testing.combinations((True,), (False,), argnames="use_compiled_cache")
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
class DependencyTwoParentTest(fixtures.MappedTest):
-
"""Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = "once"
class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
-
"""Tests the ultimate join condition, a single column
that points to itself, e.g. within a SQL function or similar.
The test is against a materialized path setup.
class CompositeSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
-
"""Tests a composite FK where, in
the relationship(), one col points
to itself in the same table.
class SynonymsAsFKsTest(fixtures.MappedTest):
-
"""Syncrules on foreign keys that are also primary"""
@classmethod
class FKsAsPksTest(fixtures.MappedTest):
-
"""Syncrules on foreign keys that are also primary"""
@classmethod
class UniqueColReferenceSwitchTest(fixtures.MappedTest):
-
"""test a relationship based on a primary
join against a unique non-pk column"""
class RelationshipToSelectableTest(fixtures.MappedTest):
-
"""Test a map to a select that relates to a map to the table."""
@classmethod
class FKEquatedToConstantTest(fixtures.MappedTest):
-
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
-
"""test ambiguous joins due to FKs on both sides treated as
self-referential.
class ManualBackrefTest(_fixtures.FixtureTest):
-
"""Test explicit relationships that are backrefs to each other."""
run_inserts = None
class NoLoadBackPopulates(_fixtures.FixtureTest):
-
"""test the noload stratgegy which unlike others doesn't use
lazyloader to set up instrumentation"""
class TypeMatchTest(fixtures.MappedTest):
-
"""test errors raised when trying to add items
whose type is not handled by a relationship"""
class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
-
"""test op() in conjunction with join conditions"""
run_create_tables = run_deletes = None
class ViewOnlyOverlappingNames(fixtures.MappedTest):
-
"""'viewonly' mappings with overlapping PK column names."""
@classmethod
class ViewOnlyUniqueNames(fixtures.MappedTest):
-
"""'viewonly' mappings with unique PK column names."""
@classmethod
class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
-
"""test that local-remote is correctly determined for m2m"""
def test_local_remote(self, registry):
class ViewOnlyNonEquijoin(fixtures.MappedTest):
-
"""'viewonly' mappings based on non-equijoins."""
@classmethod
class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
-
"""'viewonly' mappings that contain the same 'remote' column twice"""
@classmethod
class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
-
"""'viewonly' mappings that contain the same 'local' column twice"""
@classmethod
class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
-
"""'viewonly' mappings with a complex join condition."""
@classmethod
class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
-
"""test a complex annotation using between().
Using declarative here as an integration test for the local()
class SecondaryNestedJoinTest(
fixtures.MappedTest, AssertsCompiledSQL, testing.AssertsExecutionResults
):
-
"""test support for a relationship where the 'secondary' table is a
compound join().
class RelationDeprecationTest(fixtures.MappedTest):
-
"""test usage of the old 'relation' function."""
run_inserts = "once"
"""Generic mapping to Select statements"""
+
import sqlalchemy as sa
from sqlalchemy import column
from sqlalchemy import Integer
class CleanSavepointTest(FixtureTest):
-
"""test the behavior for [ticket:2452] - rollback on begin_nested()
only expires objects tracked as being modified in that transaction.
self.session = Session(
self.connection,
- join_transaction_mode="create_savepoint"
- if (
- self.join_mode.create_savepoint
- or self.join_mode.create_savepoint_w_savepoint
- )
- else "conditional_savepoint",
+ join_transaction_mode=(
+ "create_savepoint"
+ if (
+ self.join_mode.create_savepoint
+ or self.join_mode.create_savepoint_w_savepoint
+ )
+ else "conditional_savepoint"
+ ),
)
def teardown_session(self):
mp = self.mapper_registry.map_imperatively(
Hoho,
default_t,
- eager_defaults="auto"
- if eager_defaults.auto
- else bool(eager_defaults),
+ eager_defaults=(
+ "auto" if eager_defaults.auto else bool(eager_defaults)
+ ),
)
h1 = Hoho(hoho=althohoval)
class LoadersUsingCommittedTest(UOWTest):
-
"""Test that events which occur within a flush()
get the same attribute loading behavior as on the outside
of the flush, and that the unit of work itself uses the
Address, User = self.classes.Address, self.classes.User
class AvoidReferencialError(Exception):
-
"""the test here would require ON UPDATE CASCADE on FKs
for the flush to fully succeed; this exception is used
to cancel the flush before we get that far.
f"table_{table_num}_col_{i + 1}",
*args,
primary_key=i == 0,
- comment=f"primary key of table_{table_num}"
- if i == 0
- else None,
+ comment=(
+ f"primary key of table_{table_num}" if i == 0 else None
+ ),
index=random.random() > 0.97 and i > 0,
unique=random.random() > 0.97 and i > 0,
)
class ResultMapTest(fixtures.TestBase):
-
"""test the behavior of the 'entry stack' and the determination
when the result_map needs to be populated.
with mock.patch.object(
dialect.statement_compiler,
"translate_select_structure",
- lambda self, to_translate, **kw: wrapped_again
- if to_translate is stmt
- else to_translate,
+ lambda self, to_translate, **kw: (
+ wrapped_again if to_translate is stmt else to_translate
+ ),
):
compiled = stmt.compile(dialect=dialect)
with mock.patch.object(
dialect.statement_compiler,
"translate_select_structure",
- lambda self, to_translate, **kw: wrapped_again
- if to_translate is stmt
- else to_translate,
+ lambda self, to_translate, **kw: (
+ wrapped_again if to_translate is stmt else to_translate
+ ),
):
compiled = stmt.compile(dialect=dialect)
stmt,
"WITH anon_1 AS (SELECT test.a AS b FROM test %s b) "
"SELECT (SELECT anon_1.b FROM anon_1) AS c"
- % ("ORDER BY" if order_by == "order_by" else "GROUP BY")
+ % ("ORDER BY" if order_by == "order_by" else "GROUP BY"),
# prior to the fix, the use_object version came out as:
# "WITH anon_1 AS (SELECT test.a AS b FROM test "
# "ORDER BY test.a) "
class SpecialTypePKTest(fixtures.TestBase):
-
"""test process_result_value in conjunction with primary key columns.
Also tests that "autoincrement" checks are against
class TraversalTest(
fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL
):
-
"""test ClauseVisitor's traversal, particularly its
ability to copy and modify a ClauseElement in place."""
class BinaryEndpointTraversalTest(fixtures.TestBase):
-
"""test the special binary product visit"""
def _assert_traversal(self, expr, expected):
class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""test copy-in-place behavior of various ClauseElements."""
__dialect__ = "default"
class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""tests the generative capability of Select"""
__dialect__ = "default"
class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""Tests the generative capability of Insert, Update"""
__dialect__ = "default"
class TableInsertTest(fixtures.TablesTest):
-
"""test for consistent insert behavior across dialects
regarding the inline() method, values() method, lower-case 't' tables.
Column(
"id",
Uuid(),
- server_default=func.gen_random_uuid()
- if default_type.server_side
- else None,
+ server_default=(
+ func.gen_random_uuid()
+ if default_type.server_side
+ else None
+ ),
default=uuid.uuid4 if default_type.client_side else None,
primary_key=True,
insert_sentinel=bool(add_insert_sentinel),
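# (aside, not part of the diff: the combination above toggles between a
# client-side default, uuid.uuid4 invoked in Python at INSERT time, and a
# server-side default, gen_random_uuid() rendered into the DDL;
# insert_sentinel additionally marks the column as usable for matching
# RETURNING rows back to parameters during "insertmanyvalues" bulk INSERTs)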
stmt = lambda_stmt(lambda: select(tab))
stmt = stmt.add_criteria(
- lambda s: s.where(tab.c.col > parameter)
- if add_criteria
- else s.where(tab.c.col == parameter),
+ lambda s: (
+ s.where(tab.c.col > parameter)
+ if add_criteria
+ else s.where(tab.c.col == parameter)
+ ),
)
stmt += lambda s: s.order_by(tab.c.id)
stmt = lambda_stmt(lambda: select(tab))
stmt = stmt.add_criteria(
- lambda s: s.where(tab.c.col > parameter)
- if add_criteria
- else s.where(tab.c.col == parameter),
+ lambda s: (
+ s.where(tab.c.col > parameter)
+ if add_criteria
+ else s.where(tab.c.col == parameter)
+ ),
track_on=[add_criteria],
)
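# (aside, not part of the diff: track_on is the documented way to fold a
# closure variable into the lambda's cache key; here the add_criteria
# boolean becomes part of the key, so the two WHERE variants produced by
# this lambda are cached as distinct statements instead of colliding)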
# lambda produces either "t1 IN vv" or "t2 IN qq" based on the
# argument. will not produce a consistent cache key
elem = lambdas.DeferredLambdaElement(
- lambda tab: tab.c.q.in_(vv)
- if tab.name == "t1"
- else tab.c.q.in_(qq),
+ lambda tab: (
+ tab.c.q.in_(vv) if tab.name == "t1" else tab.c.q.in_(qq)
+ ),
roles.WhereHavingRole,
lambda_args=(t1,),
opts=lambdas.LambdaOptions(track_closure_variables=False),
class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
-
"""Test Column() construction."""
__dialect__ = "default"
class ColumnDefaultsTest(fixtures.TestBase):
-
"""test assignment of default fixures to columns"""
def _fixture(self, *arg, **kw):
"b",
metadata,
Column("id", Integer, primary_key=True),
- Column("aid", ForeignKey("a.id"))
- if not col_has_type
- else Column("aid", Integer, ForeignKey("a.id")),
+ (
+ Column("aid", ForeignKey("a.id"))
+ if not col_has_type
+ else Column("aid", Integer, ForeignKey("a.id"))
+ ),
)
fks = list(
c for c in b.constraints if isinstance(c, ForeignKeyConstraint)
if negate:
self.assert_compile(
select(~expr),
- f"SELECT NOT (t.q{opstring}t.p{opstring}{exprs}) "
- "AS anon_1 FROM t"
- if not reverse
- else f"SELECT NOT ({exprs}{opstring}t.q{opstring}t.p) "
- "AS anon_1 FROM t",
+ (
+ f"SELECT NOT (t.q{opstring}t.p{opstring}{exprs}) "
+ "AS anon_1 FROM t"
+ if not reverse
+ else f"SELECT NOT ({exprs}{opstring}t.q{opstring}t.p) "
+ "AS anon_1 FROM t"
+ ),
)
else:
self.assert_compile(
select(expr),
- f"SELECT t.q{opstring}t.p{opstring}{exprs} AS anon_1 FROM t"
- if not reverse
- else f"SELECT {exprs}{opstring}t.q{opstring}t.p "
- f"AS anon_1 FROM t",
+ (
+ f"SELECT t.q{opstring}t.p{opstring}{exprs} "
+ "AS anon_1 FROM t"
+ if not reverse
+ else f"SELECT {exprs}{opstring}t.q{opstring}t.p "
+ "AS anon_1 FROM t"
+ ),
)
@testing.combinations(
self.assert_compile(
select(~expr),
- f"SELECT {str_expr} AS anon_1 FROM t"
- if not reverse
- else f"SELECT {str_expr} AS anon_1 FROM t",
+ (
+ f"SELECT {str_expr} AS anon_1 FROM t"
+ if not reverse
+ else f"SELECT {str_expr} AS anon_1 FROM t"
+ ),
)
else:
if reverse:
self.assert_compile(
select(expr),
- f"SELECT {str_expr} AS anon_1 FROM t"
- if not reverse
- else f"SELECT {str_expr} AS anon_1 FROM t",
+ (
+ f"SELECT {str_expr} AS anon_1 FROM t"
+ if not reverse
+ else f"SELECT {str_expr} AS anon_1 FROM t"
+ ),
)
col = column("somecol", modulus())
self.assert_compile(
col.modulus(),
- "somecol %%"
- if paramstyle in ("format", "pyformat")
- else "somecol %",
+ (
+ "somecol %%"
+ if paramstyle in ("format", "pyformat")
+ else "somecol %"
+ ),
dialect=default.DefaultDialect(paramstyle=paramstyle),
)
col = column("somecol", modulus())
self.assert_compile(
col.modulus_prefix(),
- "%% somecol"
- if paramstyle in ("format", "pyformat")
- else "% somecol",
+ (
+ "%% somecol"
+ if paramstyle in ("format", "pyformat")
+ else "% somecol"
+ ),
dialect=default.DefaultDialect(paramstyle=paramstyle),
)
class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
-
"""test standalone booleans being wrapped in an AsBoolean, as well
as true/false compilation."""
class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
-
"""test interaction of and_()/or_() with boolean , null constants"""
__dialect__ = default.DefaultDialect(supports_native_boolean=True)
class CompoundTest(fixtures.TablesTest):
-
"""test compound statements like UNION, INTERSECT, particularly their
ability to nest on different databases."""
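# (aside, not part of the diff: a hedged sketch of the nesting pattern;
# t1/t2/t3 are hypothetical tables. A compound can be nested portably by
# wrapping it as a subquery first:
#     u = union(select(t1.c.id), select(t2.c.id))
#     nested = select(u.subquery()).union(select(t3.c.id))
# since direct nesting of one UNION inside another varies by backend)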
class JoinTest(fixtures.TablesTest):
-
"""Tests join execution.
The compiled SQL emitted by the dialect might be ANSI joins or
class PreparerTest(fixtures.TestBase):
-
"""Test the db-agnostic quoting services of IdentifierPreparer."""
def test_unformat(self):
stmt = select(
*[
- text("*")
- if colname == "*"
- else users.c.user_name.label("name_label")
- if colname == "name_label"
- else users.c[colname]
+ (
+ text("*")
+ if colname == "*"
+ else (
+ users.c.user_name.label("name_label")
+ if colname == "name_label"
+ else users.c[colname]
+ )
+ )
for colname in cols
]
)
class KeyReturningTest(fixtures.TablesTest, AssertsExecutionResults):
-
"""test returning() works with columns that define 'key'."""
__requires__ = ("insert_returning",)
config,
t1,
(t1.c.id, t1.c.insdef, t1.c.data),
- set_lambda=(lambda excluded: {"data": excluded.data + " excluded"})
- if update_cols
- else None,
+ set_lambda=(
+ (lambda excluded: {"data": excluded.data + " excluded"})
+ if update_cols
+ else None
+ ),
)
upserted_rows = connection.execute(
"""Test various algorithmic properties of selectables."""
+
from itertools import zip_longest
from sqlalchemy import and_
class AnonLabelTest(fixtures.TestBase):
-
"""Test behaviors fixed by [ticket:2168]."""
def test_anon_labels_named_column(self):
class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL):
-
"""test the usage of text() implicit within the select() construct
when strings are passed."""
# on the way in here
eq_(
conn.execute(new_stmt).fetchall(),
- [("x", "BIND_INxBIND_OUT")]
- if coerce_fn is type_coerce
- else [("x", "xBIND_OUT")],
+ (
+ [("x", "BIND_INxBIND_OUT")]
+ if coerce_fn is type_coerce
+ else [("x", "xBIND_OUT")]
+ ),
)
def test_cast_bind(self, connection):
eq_(
conn.execute(stmt).fetchall(),
- [("x", "BIND_INxBIND_OUT")]
- if coerce_fn is type_coerce
- else [("x", "xBIND_OUT")],
+ (
+ [("x", "BIND_INxBIND_OUT")]
+ if coerce_fn is type_coerce
+ else [("x", "xBIND_OUT")]
+ ),
)
def test_cast_existing_typed(self, connection):
class NumericRawSQLTest(fixtures.TestBase):
-
"""Test what DBAPIs and dialects return without any typing
information supplied at the SQLA level.
class BooleanTest(
fixtures.TablesTest, AssertsExecutionResults, AssertsCompiledSQL
):
-
"""test edge cases for booleans. Note that the main boolean test suite
is now in testing/suite/test_types.py
for asynchronous ORM use.
"""
+
from __future__ import annotations
import asyncio
from sqlalchemy.orm import with_polymorphic
-class Base(DeclarativeBase):
- ...
+class Base(DeclarativeBase): ...
class Message(Base):
# Read-only for simplicity, mutable protocol members are complicated,
# see https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected
@property
- def parent(self) -> Mapped[ParentProtocol]:
-     ...
+ def parent(self) -> Mapped[ParentProtocol]: ...


def get_parent_name(child: ChildProtocol) -> str:
"""this suite experiments with other kinds of relationship syntaxes.
"""
+
from __future__ import annotations
import typing
"""
+
import typing
from typing import cast
from typing import Dict
if no uselists are present.
"""
+
import typing
from typing import List
from typing import Set
"""
-
from __future__ import annotations
from sqlalchemy import asc
# in case it requires a version pin
pydocstyle
pygments
- black==23.3.0
+ black==24.1.1
slotscheck>=0.17.0
# required by generate_tuple_map_overloads