.. autoclass:: registry
:members:
+.. autofunction:: add_mapped_attribute
+
.. autofunction:: declarative_base
.. autofunction:: declarative_mixin
.. autofunction:: as_declarative
+.. autofunction:: mapped_column
+
.. autoclass:: declared_attr
:members:
+.. autoclass:: DeclarativeBaseNoMeta
+ :members:
+
+.. autoclass:: DeclarativeBase
+ :members:
+
.. autofunction:: has_inherited_table
.. autofunction:: synonym_for
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Define generic result set constructs."""
-
import collections.abc as collections_abc
import functools
import itertools
from ..sql.base import _generative
from ..sql.base import HasMemoized
from ..sql.base import InPlaceGenerative
+from ..util._has_cy import HAS_CYEXTENSION
-try:
- from sqlalchemy.cyextension.resultproxy import tuplegetter
-except ImportError:
+
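+# Prefer the compiled cython implementation when available; fall back to
+# the plain Python version while type checking or when the extensions
+# have not been built.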
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
from ._py_row import tuplegetter
+else:
+ from sqlalchemy.cyextension.resultproxy import tuplegetter
class ResultMetaData:
"""Define row constructs including :class:`.Row`."""
-
import collections.abc as collections_abc
import operator
+import typing
from ..sql import util as sql_util
+from ..util._has_cy import HAS_CYEXTENSION
-
-try:
- from sqlalchemy.cyextension.resultproxy import BaseRow
- from sqlalchemy.cyextension.resultproxy import KEY_INTEGER_ONLY
- from sqlalchemy.cyextension.resultproxy import KEY_OBJECTS_ONLY
-except ImportError:
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
from ._py_row import BaseRow
from ._py_row import KEY_INTEGER_ONLY
from ._py_row import KEY_OBJECTS_ONLY
+else:
+ from sqlalchemy.cyextension.resultproxy import BaseRow
+ from sqlalchemy.cyextension.resultproxy import KEY_INTEGER_ONLY
+ from sqlalchemy.cyextension.resultproxy import KEY_OBJECTS_ONLY
class Row(BaseRow, collections_abc.Sequence):
import collections.abc as collections_abc
import re
+from typing import Dict
+from typing import NamedTuple
+from typing import Optional
+from typing import Tuple
+from typing import Union
from urllib.parse import parse_qsl
from urllib.parse import quote_plus
from urllib.parse import unquote
from ..dialects import registry
-class URL(
- util.namedtuple(
- "URL",
- [
- "drivername",
- "username",
- "password",
- "host",
- "port",
- "database",
- "query",
- ],
- )
-):
+class URL(NamedTuple):
"""
Represent the components of a URL used to connect to a database.
"""
- def __new__(self, *arg, **kw):
- if kw.pop("_new_ok", False):
- return super(URL, self).__new__(self, *arg, **kw)
- else:
- util.warn_deprecated(
- "Calling URL() directly is deprecated and will be disabled "
- "in a future release. The public constructor for URL is "
- "now the URL.create() method.",
- "1.4",
- )
- return URL.create(*arg, **kw)
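+    # individual URL components; new URL objects should be produced via
+    # the URL.create() method rather than by calling URL() directly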
+ drivername: str
+ username: Optional[str]
+ password: Optional[str]
+ host: Optional[str]
+ port: Optional[int]
+ database: Optional[str]
+    query: Dict[str, Union[str, Tuple[str, ...]]]
@classmethod
def create(
cls._assert_port(port),
cls._assert_none_str(database, "database"),
cls._str_dict(query),
- _new_ok=True,
)
@classmethod
if query is not None:
kw["query"] = query
- return self._replace(**kw)
+ return self._assert_replace(**kw)
- def _replace(self, **kw):
- """Override ``namedtuple._replace()`` to provide argument checking."""
+ def _assert_replace(self, **kw):
+ """argument checks before calling _replace()"""
if "drivername" in kw:
self._assert_str(kw["drivername"], "drivername")
if "query" in kw:
kw["query"] = self._str_dict(kw["query"])
- return super(URL, self)._replace(**kw)
+ return self._replace(**kw)
def update_query_string(self, query_string, append=False):
"""Return a new :class:`_engine.URL` object with the :attr:`_engine.URL.query`
for key in set(self.query).difference(names)
}
),
- _new_ok=True,
)
@util.memoized_property
things it can be used for.
""" # noqa
+from typing import Any
+from typing import TypeVar
+
from .. import util
from ..orm import attributes
from ..orm import interfaces
+_T = TypeVar("_T", bound=Any)
+
HYBRID_METHOD = util.symbol("HYBRID_METHOD")
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.hybrid_method`.
return expr_comparator
-class Comparator(interfaces.PropComparator):
+class Comparator(interfaces.PropComparator[_T]):
"""A helper class that allows easy construction of custom
:class:`~.orm.interfaces.PropComparator`
classes for usage with hybrids."""
return self
-class ExprComparator(Comparator):
+class ExprComparator(Comparator[_T]):
def __init__(self, cls, expression, hybrid):
self.cls = cls
self.expression = expression
},
),
"TypeEngine": (TYPEENGINE, {"sqlalchemy.sql.type_api.TypeEngine"}),
- "Mapped": (MAPPED, {"sqlalchemy.orm.attributes.Mapped"}),
+ "Mapped": (MAPPED, {NAMED_TYPE_SQLA_MAPPED}),
"declarative_base": (
DECLARATIVE_BASE,
{
self, file: MypyFile
) -> List[Tuple[int, str, int]]:
return [
+ #
+ (10, "sqlalchemy.orm", -1),
(10, "sqlalchemy.orm.attributes", -1),
(10, "sqlalchemy.orm.decl_api", -1),
]
"""
- util.add_global(ctx, "sqlalchemy.orm.attributes", "Mapped", "__sa_Mapped")
+ util.add_global(ctx, "sqlalchemy.orm", "Mapped", "__sa_Mapped")
def _set_declarative_metaclass(
from ._orm_constructors import create_session as create_session
from ._orm_constructors import deferred as deferred
from ._orm_constructors import dynamic_loader as dynamic_loader
+from ._orm_constructors import mapped_column as mapped_column
from ._orm_constructors import query_expression as query_expression
from ._orm_constructors import relationship as relationship
from ._orm_constructors import synonym as synonym
from ._orm_constructors import with_loader_criteria as with_loader_criteria
from .attributes import AttributeEvent as AttributeEvent
from .attributes import InstrumentedAttribute as InstrumentedAttribute
-from .attributes import Mapped as Mapped
from .attributes import QueryableAttribute as QueryableAttribute
+from .base import Mapped as Mapped
from .context import QueryContext as QueryContext
+from .decl_api import add_mapped_attribute as add_mapped_attribute
from .decl_api import as_declarative as as_declarative
from .decl_api import declarative_base as declarative_base
from .decl_api import declarative_mixin as declarative_mixin
+from .decl_api import DeclarativeBase as DeclarativeBase
+from .decl_api import DeclarativeBaseNoMeta as DeclarativeBaseNoMeta
from .decl_api import DeclarativeMeta as DeclarativeMeta
from .decl_api import declared_attr as declared_attr
from .decl_api import has_inherited_table as has_inherited_table
# the MIT License: https://www.opensource.org/licenses/mit-license.php
import typing
+from typing import Any
from typing import Callable
+from typing import Collection
+from typing import Optional
+from typing import overload
from typing import Type
from typing import Union
from . import mapper as mapperlib
+from .base import Mapped
from .descriptor_props import CompositeProperty
from .descriptor_props import SynonymProperty
from .properties import ColumnProperty
from .. import sql
from .. import util
from ..exc import InvalidRequestError
+from ..sql.schema import Column
+from ..sql.schema import SchemaEventTarget
+from ..sql.type_api import TypeEngine
+from ..util.typing import Literal
+
_RC = typing.TypeVar("_RC")
_T = typing.TypeVar("_T")
return AliasOption(alias)
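+# The overloads below sketch the intended typing for mapped_column():
+# variants accept an optional leading name and/or type argument, and the
+# combination of ``nullable`` and ``primary_key`` determines whether the
+# attribute is typed as Mapped[_T] or Mapped[Optional[_T]].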
+@overload
+def mapped_column(
+ *args: SchemaEventTarget,
+ nullable: bool = ...,
+ primary_key: bool = ...,
+ **kw: Any,
+) -> "Mapped":
+ ...
+
+
+@overload
+def mapped_column(
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Union[Literal[None], Literal[True]] = ...,
+ primary_key: Union[Literal[None], Literal[False]] = ...,
+ **kw: Any,
+) -> "Mapped[Optional[_T]]":
+ ...
+
+
+@overload
+def mapped_column(
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Union[Literal[None], Literal[False]] = ...,
+ primary_key: Literal[True] = True,
+ **kw: Any,
+) -> "Mapped[_T]":
+ ...
+
+
+@overload
+def mapped_column(
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Literal[False] = ...,
+ primary_key: bool = ...,
+ **kw: Any,
+) -> "Mapped[_T]":
+ ...
+
+
+@overload
+def mapped_column(
+ __name: str,
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Union[Literal[None], Literal[True]] = ...,
+ primary_key: Union[Literal[None], Literal[False]] = ...,
+ **kw: Any,
+) -> "Mapped[Optional[_T]]":
+ ...
+
+
+@overload
+def mapped_column(
+ __name: str,
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Union[Literal[None], Literal[False]] = ...,
+ primary_key: Literal[True] = True,
+ **kw: Any,
+) -> "Mapped[_T]":
+ ...
+
+
+@overload
+def mapped_column(
+ __name: str,
+ __type: Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"],
+ *args: SchemaEventTarget,
+ nullable: Literal[False] = ...,
+ primary_key: bool = ...,
+ **kw: Any,
+) -> "Mapped[_T]":
+ ...
+
+
+def mapped_column(*args, **kw) -> "Mapped":
+ """construct a new ORM-mapped :class:`_schema.Column` construct.
+
+ The :func:`_orm.mapped_column` function is shorthand for the construction
+ of a Core :class:`_schema.Column` object delivered within a
+ :func:`_orm.column_property` construct, which provides for consistent
+ typing information to be delivered to the class so that it works under
+    static type checkers such as mypy and delivers useful information to
+    IDE-oriented type checkers such as pylance. The function can be used
+ in declarative mappings anywhere that :class:`_schema.Column` is normally
+ used::
+
+ from sqlalchemy.orm import mapped_column
+
+ class User(Base):
+ __tablename__ = 'user'
+
+            id = mapped_column(Integer, primary_key=True)
+ name = mapped_column(String)
+
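+    The return typing reflects the given type together with the
+    ``nullable`` and ``primary_key`` parameters, per the overloads above;
+    as a rough sketch of the intended behavior, a type checker would
+    infer::
+
+            # typed as Mapped[Optional[str]] - nullable by default
+            name = mapped_column(String)
+
+            # typed as Mapped[int] - primary key implies non-optional
+            id = mapped_column(Integer, primary_key=True)
+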
+
+ .. versionadded:: 2.0
+
+ """
+ return column_property(Column(*args, **kw))
+
+
def column_property(
column: sql.ColumnElement[_T], *additional_columns, **kwargs
-) -> "ColumnProperty[_T]":
+) -> "Mapped[_T]":
r"""Provide a column-level property for use with a mapping.
Column-based properties can normally be applied to the mapper's
return ColumnProperty(column, *additional_columns, **kwargs)
-def composite(class_: Type[_T], *attrs, **kwargs) -> "CompositeProperty[_T]":
+def composite(class_: Type[_T], *attrs, **kwargs) -> "Mapped[_T]":
r"""Return a composite column-based property for use with a Mapper.
See the mapping documentation section :ref:`mapper_composite` for a
)
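+# Overloads sketching the typing behavior of relationship(): an explicit
+# uselist=True is typed as a collection of the related class; otherwise a
+# scalar Mapped[_RC] is assumed.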
+@overload
+def relationship(
+ argument: Union[str, Type[_RC], Callable[[], Type[_RC]]],
+ secondary=None,
+ *,
+    uselist: Literal[True] = ...,
+ primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ overlaps=None,
+ post_update=False,
+ cascade=False,
+ viewonly=False,
+ lazy="select",
+ collection_class=None,
+ passive_deletes=RelationshipProperty._persistence_only["passive_deletes"],
+ passive_updates=RelationshipProperty._persistence_only["passive_updates"],
+ remote_side=None,
+ enable_typechecks=RelationshipProperty._persistence_only[
+ "enable_typechecks"
+ ],
+ join_depth=None,
+ comparator_factory=None,
+ single_parent=False,
+ innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=RelationshipProperty._persistence_only["active_history"],
+ cascade_backrefs=RelationshipProperty._persistence_only[
+ "cascade_backrefs"
+ ],
+ load_on_pending=False,
+ bake_queries=True,
+ _local_remote_pairs=None,
+ query_class=None,
+ info=None,
+ omit_join=None,
+ sync_backref=None,
+ _legacy_inactive_history_style=False,
+) -> Mapped[Collection[_RC]]:
+ ...
+
+
+@overload
+def relationship(
+ argument: Union[str, Type[_RC], Callable[[], Type[_RC]]],
+ secondary=None,
+ *,
+ uselist: Optional[bool] = None,
+ primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ overlaps=None,
+ post_update=False,
+ cascade=False,
+ viewonly=False,
+ lazy="select",
+ collection_class=None,
+ passive_deletes=RelationshipProperty._persistence_only["passive_deletes"],
+ passive_updates=RelationshipProperty._persistence_only["passive_updates"],
+ remote_side=None,
+ enable_typechecks=RelationshipProperty._persistence_only[
+ "enable_typechecks"
+ ],
+ join_depth=None,
+ comparator_factory=None,
+ single_parent=False,
+ innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=RelationshipProperty._persistence_only["active_history"],
+ cascade_backrefs=RelationshipProperty._persistence_only[
+ "cascade_backrefs"
+ ],
+ load_on_pending=False,
+ bake_queries=True,
+ _local_remote_pairs=None,
+ query_class=None,
+ info=None,
+ omit_join=None,
+ sync_backref=None,
+ _legacy_inactive_history_style=False,
+) -> Mapped[_RC]:
+ ...
+
+
def relationship(
argument: Union[str, Type[_RC], Callable[[], Type[_RC]]],
secondary=None,
+ *,
primaryjoin=None,
secondaryjoin=None,
foreign_keys=None,
- uselist=None,
+ uselist: Optional[bool] = None,
order_by=False,
backref=None,
back_populates=None,
omit_join=None,
sync_backref=None,
_legacy_inactive_history_style=False,
-) -> RelationshipProperty[_RC]:
+) -> Mapped[_RC]:
"""Provide a relationship between two mapped classes.
This corresponds to a parent-child or associative table relationship.
comparator_factory=None,
doc=None,
info=None,
-) -> "SynonymProperty":
+) -> "Mapped":
"""Denote an attribute name as a synonym to a mapped property,
in that the attribute will mirror the value and expression behavior
of another attribute.
"""
-
+from collections import namedtuple
import operator
-from typing import Generic
+from typing import Any
+from typing import List
+from typing import NamedTuple
+from typing import Tuple
from typing import TypeVar
+from typing import Union
from . import collections
from . import exc as orm_exc
from ..sql import traversals
from ..sql import visitors
+_T = TypeVar("_T")
+
class NoKey(str):
pass
@inspection._self_inspects
class QueryableAttribute(
- interfaces._MappedAttribute,
+ interfaces._MappedAttribute[_T],
interfaces.InspectionAttr,
- interfaces.PropComparator,
+ interfaces.PropComparator[_T],
traversals.HasCopyInternals,
roles.JoinTargetRole,
roles.OnClauseRole,
return getattr(entity, key)
-_T = TypeVar("_T")
-_Generic_T = Generic[_T]
-
-
-class Mapped(QueryableAttribute, _Generic_T):
- """Represent an ORM mapped :term:`descriptor` attribute for typing purposes.
-
- This class represents the complete descriptor interface for any class
- attribute that will have been :term:`instrumented` by the ORM
- :class:`_orm.Mapper` class. When used with typing stubs, it is the final
- type that would be used by a type checker such as mypy to provide the full
- behavioral contract for the attribute.
-
- .. tip::
-
- The :class:`_orm.Mapped` class represents attributes that are handled
- directly by the :class:`_orm.Mapper` class. It does not include other
- Python descriptor classes that are provided as extensions, including
- :ref:`hybrids_toplevel` and the :ref:`associationproxy_toplevel`.
- While these systems still make use of ORM-specific superclasses
- and structures, they are not :term:`instrumented` by the
- :class:`_orm.Mapper` and instead provide their own functionality
- when they are accessed on a class.
-
- When using the :ref:`SQLAlchemy Mypy plugin <mypy_toplevel>`, the
- :class:`_orm.Mapped` construct is used in typing annotations to indicate to
- the plugin those attributes that are expected to be mapped; the plugin also
- applies :class:`_orm.Mapped` as an annotation automatically when it scans
- through declarative mappings in :ref:`orm_declarative_table` style. For
- more indirect mapping styles such as
- :ref:`imperative table <orm_imperative_table_configuration>` it is
- typically applied explicitly to class level attributes that expect
- to be mapped based on a given :class:`_schema.Table` configuration.
-
- :class:`_orm.Mapped` is defined in the
- `sqlalchemy2-stubs <https://pypi.org/project/sqlalchemy2-stubs>`_ project
- as a :pep:`484` generic class which may subscribe to any arbitrary Python
- type, which represents the Python type handled by the attribute::
-
- class MyMappedClass(Base):
- __table_ = Table(
- "some_table", Base.metadata,
- Column("id", Integer, primary_key=True),
- Column("data", String(50)),
- Column("created_at", DateTime)
- )
-
- id : Mapped[int]
- data: Mapped[str]
- created_at: Mapped[datetime]
-
- For complete background on how to use :class:`_orm.Mapped` with
- pep-484 tools like Mypy, see the link below for background on SQLAlchemy's
- Mypy plugin.
-
- .. versionadded:: 1.4
-
- .. seealso::
-
- :ref:`mypy_toplevel` - complete background on Mypy integration
-
- """
-
- def __get__(self, instance, owner):
- raise NotImplementedError()
-
- def __set__(self, instance, value):
- raise NotImplementedError()
-
- def __delete__(self, instance):
- raise NotImplementedError()
-
-
-class InstrumentedAttribute(Mapped):
+class InstrumentedAttribute(QueryableAttribute[_T]):
"""Class bound instrumented attribute which adds basic
:term:`descriptor` methods.
return self.impl.get(state, dict_)
-HasEntityNamespace = util.namedtuple(
- "HasEntityNamespace", ["entity_namespace"]
-)
+HasEntityNamespace = namedtuple("HasEntityNamespace", ["entity_namespace"])
HasEntityNamespace.is_mapper = HasEntityNamespace.is_aliased_class = False
_NO_STATE_SYMBOLS = frozenset([id(PASSIVE_NO_RESULT), id(NO_VALUE)])
-class History(util.namedtuple("History", ["added", "unchanged", "deleted"])):
+class History(NamedTuple):
"""A 3-tuple of added, unchanged and deleted values,
representing the changes which have occurred on an instrumented
attribute.
"""
+ added: Union[Tuple[()], List[Any]]
+ unchanged: Union[Tuple[()], List[Any]]
+ deleted: Union[Tuple[()], List[Any]]
+
def __bool__(self):
return self != HISTORY_BLANK
- __nonzero__ = __bool__
-
def empty(self):
"""Return True if this :class:`.History` has no changes
and no existing, unchanged state.
)
-HISTORY_BLANK = History(None, None, None)
+HISTORY_BLANK = History((), (), ())
def get_history(obj, key, passive=PASSIVE_OFF):
import typing
from typing import Any
from typing import Generic
+from typing import overload
from typing import TypeVar
+from typing import Union
from . import exc
from .. import exc as sa_exc
from .. import inspection
from .. import util
+from ..sql.elements import SQLCoreOperations
from ..util import typing as compat_typing
+from ..util.langhelpers import TypingOnly
+if typing.TYPE_CHECKING:
+ from .attributes import InstrumentedAttribute
+
_T = TypeVar("_T", bound=Any)
+
PASSIVE_NO_RESULT = util.symbol(
"PASSIVE_NO_RESULT",
"""Symbol returned by a loader callable or other attribute/history
return {}
-class _MappedAttribute(Generic[_T]):
+class SQLORMOperations(SQLCoreOperations[_T], TypingOnly):
+ __slots__ = ()
+
+ if typing.TYPE_CHECKING:
+
+ def of_type(self, class_):
+ ...
+
+ def and_(self, *criteria):
+ ...
+
+        def any(self, criterion=None, **kwargs):  # noqa: A001
+ ...
+
+ def has(self, criterion=None, **kwargs):
+ ...
+
+
+class Mapped(Generic[_T], util.TypingOnly):
+ """Represent an ORM mapped attribute for typing purposes.
+
+ This class represents the complete descriptor interface for any class
+ attribute that will have been :term:`instrumented` by the ORM
+ :class:`_orm.Mapper` class. Provides appropriate information to type
+ checkers such as pylance and mypy so that ORM-mapped attributes
+ are correctly typed.
+
+ .. tip::
+
+ The :class:`_orm.Mapped` class represents attributes that are handled
+ directly by the :class:`_orm.Mapper` class. It does not include other
+ Python descriptor classes that are provided as extensions, including
+ :ref:`hybrids_toplevel` and the :ref:`associationproxy_toplevel`.
+ While these systems still make use of ORM-specific superclasses
+ and structures, they are not :term:`instrumented` by the
+ :class:`_orm.Mapper` and instead provide their own functionality
+ when they are accessed on a class.
+
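+    For example, within an imperative table style mapping,
+    :class:`_orm.Mapped` may be used explicitly as a class-level annotation
+    to indicate the Python type of each attribute expected to be mapped
+    (an illustrative sketch; the table and column names are arbitrary)::
+
+        class MyMappedClass(Base):
+            __table__ = Table(
+                "some_table", Base.metadata,
+                Column("id", Integer, primary_key=True),
+                Column("data", String(50)),
+                Column("created_at", DateTime),
+            )
+
+            id: Mapped[int]
+            data: Mapped[str]
+            created_at: Mapped[datetime]
+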
+ .. versionadded:: 1.4
+
+
+ """
+
+ __slots__ = ()
+
+ if typing.TYPE_CHECKING:
+
+ @overload
+ def __get__(
+ self, instance: None, owner: Any
+ ) -> "InstrumentedAttribute[_T]":
+ ...
+
+ @overload
+ def __get__(self, instance: object, owner: Any) -> _T:
+ ...
+
+ def __get__(
+ self, instance: object, owner: Any
+ ) -> Union["InstrumentedAttribute[_T]", _T]:
+ ...
+
+ @classmethod
+ def _empty_constructor(cls, arg1: Any) -> "SQLORMOperations[_T]":
+ ...
+
+ @overload
+ def __set__(self, instance: Any, value: _T) -> None:
+ ...
+
+ @overload
+ def __set__(self, instance: Any, value: SQLCoreOperations) -> None:
+ ...
+
+ def __set__(self, instance, value):
+ ...
+
+ def __delete__(self, instance: Any):
+ ...
+
+
+class _MappedAttribute(Mapped[_T], TypingOnly):
"""Mixin for attributes which should be replaced by mapper-assigned
attributes.
"""Public API functions and helpers for declarative."""
import itertools
import re
+import typing
+from typing import Any
+from typing import Callable
+from typing import ClassVar
+from typing import Optional
+from typing import TypeVar
+from typing import Union
import weakref
from . import attributes
from . import instrumentation
from . import interfaces
from . import mapperlib
+from .attributes import InstrumentedAttribute
from .base import _inspect_mapped_class
+from .base import Mapped
from .decl_base import _add_attribute
from .decl_base import _as_declarative
from .decl_base import _declarative_constructor
from .decl_base import _del_attribute
from .decl_base import _mapper
from .descriptor_props import SynonymProperty as _orm_synonym
+from .mapper import Mapper
from .. import exc
from .. import inspection
from .. import util
+from ..sql.elements import SQLCoreOperations
from ..sql.schema import MetaData
+from ..sql.selectable import FromClause
from ..util import hybridmethod
from ..util import hybridproperty
+_T = TypeVar("_T", bound=Any)
+
def has_inherited_table(cls):
"""Given a class, return True if any of the classes it inherits from has a
return False
-class DeclarativeMeta(type):
- # DeclarativeMeta could be replaced by __subclass_init__()
- # except for the class-level __setattr__() and __delattr__ hooks,
- # which are still very important.
+class DeclarativeAttributeIntercept(type):
+ """Metaclass that may be used in conjunction with the
+ :class:`_orm.DeclarativeBase` class to support addition of class
+ attributes dynamically.
+
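+    With this metaclass in place, assignment of a new attribute on an
+    already-mapped class is routed into the declarative mapping process,
+    e.g. (an illustrative sketch; ``User`` and ``Address`` are
+    hypothetical mapped classes)::
+
+        User.addresses = relationship(Address)
+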
+ """
+
+ def __setattr__(cls, key, value):
+ _add_attribute(cls, key, value)
+
+ def __delattr__(cls, key):
+ _del_attribute(cls, key)
+
+class DeclarativeMeta(type):
def __init__(cls, classname, bases, dict_, **kw):
# early-consume registry from the initial declarative base,
# assign privately to not conflict with subclass attributes named
return decorate
-class declared_attr(interfaces._MappedAttribute, property):
+class declared_attr(interfaces._MappedAttribute[_T]):
"""Mark a class-level method as representing the definition of
a mapped property or special declarative member name.
""" # noqa E501
- def __init__(self, fget, cascading=False):
- super(declared_attr, self).__init__(fget)
- self.__doc__ = fget.__doc__
+ if typing.TYPE_CHECKING:
+
+ def __set__(self, instance, value):
+ ...
+
+ def __delete__(self, instance: Any):
+ ...
+
+ def __init__(
+ self,
+ fn: Callable[..., Union[Mapped[_T], SQLCoreOperations[_T]]],
+ cascading=False,
+ ):
+ self.fget = fn
self._cascading = cascading
+ self.__doc__ = fn.__doc__
- def __get__(desc, self, cls):
+ def __get__(self, instance, owner) -> InstrumentedAttribute[_T]:
# the declared_attr needs to make use of a cache that exists
# for the span of the declarative scan_attributes() phase.
# to achieve this we look at the class manager that's configured.
+ cls = owner
manager = attributes.manager_of_class(cls)
if manager is None:
- if not re.match(r"^__.+__$", desc.fget.__name__):
+ if not re.match(r"^__.+__$", self.fget.__name__):
# if there is no manager at all, then this class hasn't been
# run through declarative or mapper() at all, emit a warning.
util.warn(
"Unmanaged access of declarative attribute %s from "
- "non-mapped class %s" % (desc.fget.__name__, cls.__name__)
+ "non-mapped class %s" % (self.fget.__name__, cls.__name__)
)
- return desc.fget(cls)
+ return self.fget(cls)
elif manager.is_mapped:
# the class is mapped, which means we're outside of the declarative
# scan setup, just run the function.
- return desc.fget(cls)
+ return self.fget(cls)
# here, we are inside of the declarative scan. use the registry
# that is tracking the values of these attributes.
declarative_scan = manager.declarative_scan
reg = declarative_scan.declared_attr_reg
- if desc in reg:
- return reg[desc]
+ if self in reg:
+ return reg[self]
else:
- reg[desc] = obj = desc.fget(cls)
+ reg[self] = obj = self.fget(cls)
return obj
@hybridmethod
return cls
+def _setup_declarative_base(cls):
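+    # shared __init_subclass__() setup for DeclarativeBase and
+    # DeclarativeBaseNoMeta: locate or create the registry and MetaData
+    # collection used by the new declarative base class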
+ if "metadata" in cls.__dict__:
+ metadata = cls.metadata
+ else:
+ metadata = None
+
+ reg = cls.__dict__.get("registry", None)
+ if reg is not None:
+ if not isinstance(reg, registry):
+ raise exc.InvalidRequestError(
+ "Declarative base class has a 'registry' attribute that is "
+ "not an instance of sqlalchemy.orm.registry()"
+ )
+ else:
+ reg = registry(metadata=metadata)
+ cls.registry = reg
+
+ cls._sa_registry = reg
+
+ if "metadata" not in cls.__dict__:
+ cls.metadata = cls.registry.metadata
+
+
+class DeclarativeBaseNoMeta:
+ """Same as :class:`_orm.DeclarativeBase`, but does not use a metaclass
+ to intercept new attributes.
+
+ The :class:`_orm.DeclarativeBaseNoMeta` base may be used when use of
+ custom metaclasses is desirable.
+
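+    E.g., a declarative base without any metaclass may be created as
+    (a minimal sketch; the ``Base`` name is illustrative)::
+
+        from sqlalchemy.orm import DeclarativeBaseNoMeta
+
+        class Base(DeclarativeBaseNoMeta):
+            pass
+
+    Since class-level attribute assignment is not intercepted without a
+    metaclass, the :func:`_orm.add_mapped_attribute` function may be used
+    to add new mapped attributes to an existing class.
+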
+ .. versionadded:: 2.0
+
+
+ """
+
+ registry: ClassVar["registry"]
+ _sa_registry: ClassVar["registry"]
+ metadata: ClassVar[MetaData]
+ __mapper__: ClassVar[Mapper]
+ __table__: Optional[FromClause]
+
+ if typing.TYPE_CHECKING:
+
+ def __init__(self, **kw: Any):
+ ...
+
+ def __init_subclass__(cls) -> None:
+ if DeclarativeBaseNoMeta in cls.__bases__:
+ _setup_declarative_base(cls)
+ else:
+ cls._sa_registry.map_declaratively(cls)
+
+
+class DeclarativeBase(metaclass=DeclarativeAttributeIntercept):
+ """Base class used for declarative class definitions.
+
+ The :class:`_orm.DeclarativeBase` allows for the creation of new
+ declarative bases in such a way that is compatible with type checkers::
+
+
+ from sqlalchemy.orm import DeclarativeBase
+
+ class Base(DeclarativeBase):
+ pass
+
+
+ The above ``Base`` class is now usable as the base for new declarative
+    mappings.  The superclass makes use of the ``__init_subclass__()``
+    method to set up new classes rather than relying on a metaclass for
+    mapping setup; a minimal metaclass remains only so that class-level
+    attributes may be assigned to mapped classes after the fact (see
+    :class:`_orm.DeclarativeBaseNoMeta` for a version that avoids
+    metaclasses entirely).
+
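+    A complete declarative mapping may then be constructed against the
+    new ``Base``; a minimal sketch, where the ``User`` class and its
+    columns are illustrative only::
+
+        from sqlalchemy import Integer, String
+        from sqlalchemy.orm import mapped_column
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            name = mapped_column(String)
+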
+ .. versionadded:: 2.0
+
+ """
+
+ registry: ClassVar["registry"]
+ _sa_registry: ClassVar["registry"]
+ metadata: ClassVar[MetaData]
+ __mapper__: ClassVar[Mapper]
+ __table__: Optional[FromClause]
+
+ if typing.TYPE_CHECKING:
+
+ def __init__(self, **kw: Any):
+ ...
+
+ def __init_subclass__(cls) -> None:
+ if DeclarativeBase in cls.__bases__:
+ _setup_declarative_base(cls)
+ else:
+ cls._sa_registry.map_declaratively(cls)
+
+
+def add_mapped_attribute(target, key, attr):
+ """Add a new mapped attribute to an ORM mapped class.
+
+ E.g.::
+
+ add_mapped_attribute(User, "addresses", relationship(Address))
+
+ This may be used for ORM mappings that aren't using a declarative
+ metaclass that intercepts attribute set operations.
+
+ .. versionadded:: 2.0
+
+
+ """
+ _add_attribute(target, key, attr)
+
+
def declarative_base(
metadata=None,
mapper=None,
constructor=_declarative_constructor,
class_registry=None,
metaclass=DeclarativeMeta,
-):
+) -> Any:
r"""Construct a base class for declarative class definitions.
The new base class will be given a metaclass that produces
).as_declarative_base(**kw)
-@inspection._inspects(DeclarativeMeta)
+@inspection._inspects(
+ DeclarativeMeta, DeclarativeBase, DeclarativeAttributeIntercept
+)
def _inspect_decl_meta(cls):
mp = _inspect_mapped_class(cls)
if mp is None:
from ..sql import operators
_T = TypeVar("_T", bound=Any)
+_PT = TypeVar("_PT", bound=Any)
class DescriptorProperty(MapperProperty[_T]):
return proc
- class Comparator(PropComparator):
+ class Comparator(PropComparator[_PT]):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
return str(self.parent.class_.__name__) + "." + self.key
-class ConcreteInheritedProperty(DescriptorProperty):
+class ConcreteInheritedProperty(DescriptorProperty[_T]):
"""A 'do nothing' :class:`.MapperProperty` that disables
an attribute on a concrete subclass that is only present
on the inherited mapper, not the concrete classes' mapper.
self.descriptor = NoninheritedConcreteProp()
-class SynonymProperty(DescriptorProperty):
+class SynonymProperty(DescriptorProperty[_T]):
def __init__(
self,
name,
"""
import collections
+import typing
from typing import Any
+from typing import cast
from typing import TypeVar
from . import exc as orm_exc
from .base import MANYTOONE
from .base import NOT_EXTENSION
from .base import ONETOMANY
+from .base import SQLORMOperations
from .. import inspect
from .. import inspection
from .. import util
@inspection._self_inspects
-class PropComparator(operators.ColumnOperators):
- r"""Defines SQL operators for :class:`.MapperProperty` objects.
+class PropComparator(
+ SQLORMOperations[_T], operators.ColumnOperators[SQLORMOperations]
+):
+ r"""Defines SQL operations for ORM mapped attributes.
SQLAlchemy allows for operators to
be redefined at both the Core and ORM level. :class:`.PropComparator`
including those of :class:`.ColumnProperty`,
:class:`.RelationshipProperty`, and :class:`.CompositeProperty`.
- .. note:: With the advent of Hybrid properties introduced in SQLAlchemy
- 0.7, as well as Core-level operator redefinition in
- SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
- instances is extremely rare. See :ref:`hybrids_toplevel` as well
- as :ref:`types_operators`.
-
User-defined subclasses of :class:`.PropComparator` may be created. The
built-in Python comparison and math operator methods, such as
:meth:`.operators.ColumnOperators.__eq__`,
return self.property.info
@staticmethod
- def any_op(a, b, **kwargs):
+ def _any_op(a, b, **kwargs):
return a.any(b, **kwargs)
@staticmethod
- def has_op(a, b, **kwargs):
- return a.has(b, **kwargs)
+ def _has_op(left, other, **kwargs):
+ return left.has(other, **kwargs)
@staticmethod
- def of_type_op(a, class_):
+ def _of_type_op(a, class_):
return a.of_type(class_)
- def of_type(self, class_):
+ any_op = cast(operators.OperatorType, _any_op)
+ has_op = cast(operators.OperatorType, _has_op)
+ of_type_op = cast(operators.OperatorType, _of_type_op)
+
+ if typing.TYPE_CHECKING:
+
+ def operate(
+ self, op: operators.OperatorType, *other: Any, **kwargs: Any
+ ) -> "SQLORMOperations":
+ ...
+
+ def reverse_operate(
+ self, op: operators.OperatorType, other: Any, **kwargs: Any
+ ) -> "SQLORMOperations":
+ ...
+
+ def of_type(self, class_) -> "SQLORMOperations[_T]":
r"""Redefine this object in terms of a polymorphic subclass,
:func:`_orm.with_polymorphic` construct, or :func:`_orm.aliased`
construct.
return self.operate(PropComparator.of_type_op, class_)
- def and_(self, *criteria):
+ def and_(self, *criteria) -> "SQLORMOperations[_T]":
"""Add additional criteria to the ON clause that's represented by this
relationship attribute.
"""
return self.operate(operators.and_, *criteria)
- def any(self, criterion=None, **kwargs):
+ def any(self, criterion=None, **kwargs) -> "SQLORMOperations[_T]":
r"""Return true if this collection contains any member that meets the
given criterion.
return self.operate(PropComparator.any_op, criterion, **kwargs)
- def has(self, criterion=None, **kwargs):
+ def has(self, criterion=None, **kwargs) -> "SQLORMOperations[_T]":
r"""Return true if this element references a member which meets the
given criterion.
return self.operate(PropComparator.has_op, criterion, **kwargs)
-class StrategizedProperty(MapperProperty):
+class StrategizedProperty(MapperProperty[_T]):
"""A MapperProperty which uses selectable strategies to affect
loading behavior.
"""
from typing import Any
-from typing import Generic
from typing import TypeVar
from . import attributes
from ..sql import roles
_T = TypeVar("_T", bound=Any)
+_PT = TypeVar("_PT", bound=Any)
__all__ = [
"ColumnProperty",
@log.class_logger
-class ColumnProperty(StrategizedProperty, Generic[_T]):
+class ColumnProperty(StrategizedProperty[_T]):
"""Describes an object attribute that corresponds to a table column.
Public constructor is the :func:`_orm.column_property` function.
)
for c in columns
]
+ self.parent = self.key = None
self.group = kwargs.pop("group", None)
self.deferred = kwargs.pop("deferred", False)
self.raiseload = kwargs.pop("raiseload", False)
dest_dict, [self.key], no_loader=True
)
- class Comparator(util.MemoizedSlots, PropComparator):
+ class Comparator(util.MemoizedSlots, PropComparator[_PT]):
"""Produce boolean, comparison, and other operators for
:class:`.ColumnProperty` attributes.
return op(col._bind_param(op, other), col, **kwargs)
def __str__(self):
+ if not self.parent or not self.key:
+ return object.__repr__(self)
return str(self.parent.class_.__name__) + "." + self.key
"""
import collections
import re
+from typing import Any
from typing import Callable
-from typing import Generic
from typing import Type
from typing import TypeVar
from typing import Union
from ..sql.util import selectables_overlap
from ..sql.util import visit_binary_product
-_RC = TypeVar("_RC")
+_T = TypeVar("_T", bound=Any)
+_PT = TypeVar("_PT", bound=Any)
def remote(expr):
@log.class_logger
-class RelationshipProperty(StrategizedProperty, Generic[_RC]):
+class RelationshipProperty(StrategizedProperty[_T]):
"""Describes an object property that holds a single item or list
of items that correspond to a related database table.
def __init__(
self,
- argument: Union[str, Type[_RC], Callable[[], Type[_RC]]],
+ argument: Union[str, Type[_T], Callable[[], Type[_T]]],
secondary=None,
primaryjoin=None,
secondaryjoin=None,
doc=self.doc,
)
- class Comparator(PropComparator):
+ class Comparator(PropComparator[_PT]):
"""Produce boolean, comparison, and other operators for
:class:`.RelationshipProperty` attributes.
self.prop.parent._check_configure()
return self.prop
+ comparator: Comparator[_T]
+
def _with_parent(self, instance, alias_secondary=True, from_entity=None):
assert instance is not None
adapt_source = None
from ..util import HasMemoized
from ..util import hybridmethod
from ..util import typing as compat_typing
+from ..util._has_cy import HAS_CYEXTENSION
-try:
- from sqlalchemy.cyextension.util import prefix_anon_map # noqa
-except ImportError:
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
from ._py_util import prefix_anon_map # noqa
+else:
+ from sqlalchemy.cyextension.util import prefix_anon_map # noqa
coercions = None
elements = None
import re
import typing
from typing import Any
-from typing import Callable
+from typing import Any as TODO_Any
from typing import Optional
-from typing import overload
from typing import Type
from typing import TypeVar
-from typing import Union
from . import operators
from . import roles
from . import selectable
from . import traversals
from .elements import ClauseElement
- from .elements import ColumnElement
_SR = TypeVar("_SR", bound=roles.SQLRole)
_StringOnlyR = TypeVar("_StringOnlyR", bound=roles.StringRole)
return args
-@overload
-def expect(
- role: Type[roles.InElementRole],
- element: Any,
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> Union["elements.ColumnElement", "selectable.Select"]:
- ...
-
-
-@overload
-def expect(
- role: Type[roles.HasCTERole],
- element: Any,
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> "selectable.HasCTE":
- ...
+# TODO: would like to have overloads here, however mypy is being extremely
+# pedantic about them.  Not sure why pylance is OK with them.
-@overload
-def expect(
- role: Type[roles.ExpressionElementRole],
- element: Any,
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> "ColumnElement":
- ...
-
-
-@overload
-def expect(
- role: "Type[_StringOnlyR]",
- element: Any,
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> str:
- ...
-
-
-@overload
def expect(
role: Type[_SR],
element: Any,
argname: Optional[str] = None,
post_inspect: bool = False,
**kw: Any,
-) -> _SR:
- ...
-
-
-@overload
-def expect(
- role: Type[_SR],
- element: Callable[..., Any],
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> "lambdas.LambdaElement":
- ...
-
-
-def expect(
- role: Type[_SR],
- element: Any,
- *,
- apply_propagate_attrs: Optional["ClauseElement"] = None,
- argname: Optional[str] = None,
- post_inspect: bool = False,
- **kw: Any,
-) -> Union[str, _SR, "lambdas.LambdaElement"]:
+) -> TODO_Any:
if (
role.allows_lambda
# note callable() will not invoke a __getattr__() method, whereas
self.name = role_class._role_name
self._use_inspection = issubclass(role_class, roles.UsesInspection)
- def _implicit_coercions(self, element, resolved, argname=None, **kw):
+ def _implicit_coercions(
+ self, element, resolved, argname=None, **kw
+ ) -> Any:
self._raise_for_expected(element, argname, resolved)
def _raise_for_expected(
"subquery.",
)
- def _implicit_coercions(
- self, original_element, resolved, argname=None, **kw
- ):
+ def _implicit_coercions(self, element, resolved, argname=None, **kw):
+ original_element = element
if not getattr(resolved, "is_clause_element", False):
self._raise_for_expected(original_element, argname, resolved)
elif resolved._is_select_statement:
from .. import exc
from .. import inspection
from .. import util
+from ..util.langhelpers import TypingOnly
if typing.TYPE_CHECKING:
from decimal import Decimal
__slots__ = ()
-class ColumnElement(
- roles.ColumnArgumentOrKeyRole,
- roles.StatementOptionRole,
- roles.WhereHavingRole,
- roles.BinaryElementRole,
- roles.OrderByRole,
- roles.ColumnsClauseRole,
- roles.LimitOffsetRole,
- roles.DMLColumnRole,
- roles.DDLConstraintColumnRole,
- roles.DDLExpressionRole,
- operators.ColumnOperators["ColumnElement"],
- ClauseElement,
- Generic[_T],
-):
- """Represent a column-oriented SQL expression suitable for usage in the
- "columns" clause, WHERE clause etc. of a statement.
-
- While the most familiar kind of :class:`_expression.ColumnElement` is the
- :class:`_schema.Column` object, :class:`_expression.ColumnElement`
- serves as the basis
- for any unit that may be present in a SQL expression, including
- the expressions themselves, SQL functions, bound parameters,
- literal expressions, keywords such as ``NULL``, etc.
- :class:`_expression.ColumnElement`
- is the ultimate base class for all such elements.
-
- A wide variety of SQLAlchemy Core functions work at the SQL expression
- level, and are intended to accept instances of
- :class:`_expression.ColumnElement` as
- arguments. These functions will typically document that they accept a
- "SQL expression" as an argument. What this means in terms of SQLAlchemy
- usually refers to an input which is either already in the form of a
- :class:`_expression.ColumnElement` object,
- or a value which can be **coerced** into
- one. The coercion rules followed by most, but not all, SQLAlchemy Core
- functions with regards to SQL expressions are as follows:
-
- * a literal Python value, such as a string, integer or floating
- point value, boolean, datetime, ``Decimal`` object, or virtually
- any other Python object, will be coerced into a "literal bound
- value". This generally means that a :func:`.bindparam` will be
- produced featuring the given value embedded into the construct; the
- resulting :class:`.BindParameter` object is an instance of
- :class:`_expression.ColumnElement`.
- The Python value will ultimately be sent
- to the DBAPI at execution time as a parameterized argument to the
- ``execute()`` or ``executemany()`` methods, after SQLAlchemy
- type-specific converters (e.g. those provided by any associated
- :class:`.TypeEngine` objects) are applied to the value.
-
- * any special object value, typically ORM-level constructs, which
- feature an accessor called ``__clause_element__()``. The Core
- expression system looks for this method when an object of otherwise
- unknown type is passed to a function that is looking to coerce the
- argument into a :class:`_expression.ColumnElement` and sometimes a
- :class:`_expression.SelectBase` expression.
- It is used within the ORM to
- convert from ORM-specific objects like mapped classes and
- mapped attributes into Core expression objects.
-
- * The Python ``None`` value is typically interpreted as ``NULL``,
- which in SQLAlchemy Core produces an instance of :func:`.null`.
-
- A :class:`_expression.ColumnElement` provides the ability to generate new
- :class:`_expression.ColumnElement`
- objects using Python expressions. This means that Python operators
- such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
- and allow the instantiation of further :class:`_expression.ColumnElement`
- instances
- which are composed from other, more fundamental
- :class:`_expression.ColumnElement`
- objects. For example, two :class:`.ColumnClause` objects can be added
- together with the addition operator ``+`` to produce
- a :class:`.BinaryExpression`.
- Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
- of :class:`_expression.ColumnElement`::
-
- >>> from sqlalchemy.sql import column
- >>> column('a') + column('b')
- <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
- >>> print(column('a') + column('b'))
- a + b
-
- .. seealso::
-
- :class:`_schema.Column`
-
- :func:`_expression.column`
-
- """
-
- __visit_name__ = "column_element"
-
- primary_key = False
- foreign_keys = []
- _proxies = ()
-
- _tq_label = None
- """The named label that can be used to target
- this column in a result set in a "table qualified" context.
-
- This label is almost always the label used when
- rendering <expr> AS <label> in a SELECT statement when using
- the LABEL_STYLE_TABLENAME_PLUS_COL label style, which is what the legacy
- ORM ``Query`` object uses as well.
-
- For a regular Column bound to a Table, this is typically the label
- <tablename>_<columnname>. For other constructs, different rules
- may apply, such as anonymized labels and others.
-
- .. versionchanged:: 1.4.21 renamed from ``._label``
-
- """
-
- key = None
- """The 'key' that in some circumstances refers to this object in a
- Python namespace.
-
- This typically refers to the "key" of the column as present in the
- ``.c`` collection of a selectable, e.g. ``sometable.c["somekey"]`` would
- return a :class:`_schema.Column` with a ``.key`` of "somekey".
-
- """
-
- @HasMemoized.memoized_attribute
- def _tq_key_label(self):
- """A label-based version of 'key' that in some circumstances refers
- to this object in a Python namespace.
-
-
- _tq_key_label comes into play when a select() statement is constructed
- with apply_labels(); in this case, all Column objects in the ``.c``
- collection are rendered as <tablename>_<columnname> in SQL; this is
- essentially the value of ._label. But to locate those columns in the
- ``.c`` collection, the name is along the lines of <tablename>_<key>;
- that's the typical value of .key_label.
-
- .. versionchanged:: 1.4.21 renamed from ``._key_label``
-
- """
- return self._proxy_key
-
- @property
- def _key_label(self):
- """legacy; renamed to _tq_key_label"""
- return self._tq_key_label
-
- @property
- def _label(self):
- """legacy; renamed to _tq_label"""
- return self._tq_label
-
- @property
- def _non_anon_label(self):
- """the 'name' that naturally applies this element when rendered in
- SQL.
-
- Concretely, this is the "name" of a column or a label in a
- SELECT statement; ``<columnname>`` and ``<labelname>`` below::
-
- SELECT <columnmame> FROM table
-
- SELECT column AS <labelname> FROM table
-
- Above, the two names noted will be what's present in the DBAPI
- ``cursor.description`` as the names.
-
- If this attribute returns ``None``, it means that the SQL element as
- written does not have a 100% fully predictable "name" that would appear
- in the ``cursor.description``. Examples include SQL functions, CAST
- functions, etc. While such things do return names in
- ``cursor.description``, they are only predictable on a
- database-specific basis; e.g. an expression like ``MAX(table.col)`` may
- appear as the string ``max`` on one database (like PostgreSQL) or may
- appear as the whole expression ``max(table.col)`` on SQLite.
-
- The default implementation looks for a ``.name`` attribute on the
- object, as has been the precedent established in SQLAlchemy for many
- years. An exception is made on the ``FunctionElement`` subclass
- so that the return value is always ``None``.
-
- .. versionadded:: 1.4.21
-
-
-
- """
- return getattr(self, "name", None)
-
- _render_label_in_columns_clause = True
- """A flag used by select._columns_plus_names that helps to determine
- we are actually going to render in terms of "SELECT <col> AS <label>".
- This flag can be returned as False for some Column objects that want
- to be rendered as simple "SELECT <col>"; typically columns that don't have
- any parent table and are named the same as what the label would be
- in any case.
-
- """
-
- _allow_label_resolve = True
- """A flag that can be flipped to prevent a column from being resolvable
- by string label name.
-
- The joined eager loader strategy in the ORM uses this, for example.
-
- """
-
- _is_implicitly_boolean = False
-
- _alt_names = ()
-
- def self_group(self, against=None):
- if (
- against in (operators.and_, operators.or_, operators._asbool)
- and self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity
- ):
- return AsBoolean(self, operators.is_true, operators.is_false)
- elif against in (operators.any_op, operators.all_op):
- return Grouping(self)
- else:
- return self
-
- def _negate(self):
- if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
- return AsBoolean(self, operators.is_false, operators.is_true)
- else:
- return super(ColumnElement, self)._negate()
-
- @util.memoized_property
- def type(self) -> "TypeEngine[_T]":
- return type_api.NULLTYPE
-
- @HasMemoized.memoized_attribute
- def comparator(self) -> "TypeEngine.Comparator[_T]":
- try:
- comparator_factory = self.type.comparator_factory
- except AttributeError as err:
- raise TypeError(
- "Object %r associated with '.type' attribute "
- "is not a TypeEngine class or object" % self.type
- ) from err
- else:
- return comparator_factory(self)
-
- def __getattr__(self, key):
- try:
- return getattr(self.comparator, key)
- except AttributeError as err:
- raise AttributeError(
- "Neither %r object nor %r object has an attribute %r"
- % (
- type(self).__name__,
- type(self.comparator).__name__,
- key,
- )
- ) from err
+class SQLCoreOperations(Generic[_T], TypingOnly):
+ __slots__ = ()
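+    # typing-only collection of the operator method annotations that were
+    # previously declared inline on ColumnElement; both ColumnElement and
+    # the ORM's SQLORMOperations build upon this class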
# annotations for comparison methods
# these are from operators->Operators / ColumnOperators,
...
@overload
- def concat(self, other: Any) -> "BinaryExpression[_ST]":
+ def concat(
+ self: "SQLCoreOperations[_ST]", other: Any
+ ) -> "BinaryExpression[_ST]":
...
@overload
) -> "BinaryExpression[bool]":
...
- def distinct(self: "ColumnElement[_T]") -> "UnaryExpression[_T]":
+ def distinct(self: "SQLCoreOperations[_T]") -> "UnaryExpression[_T]":
...
def any_(self) -> "CollectionAggregate":
...
# numeric overloads. These need more tweaking
+    # in particular they all need to have a variant for Optional[_T]
+ # because Optional only applies to the data side, not the expression
+ # side
@overload
def __add__(
- self: "ColumnElement[_NT]", other: "Union[ColumnElement[_NT], _NT]"
+ self: "Union[_SQO[_NT], _SQO[Optional[_NT]]]",
+ other: "Union[_SQO[Optional[_NT]], _SQO[_NT], _NT]",
) -> "BinaryExpression[_NT]":
...
@overload
def __add__(
- self: "ColumnElement[_NT]", other: Any
+ self: "Union[_SQO[_NT], _SQO[Optional[_NT]]]",
+ other: Any,
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __add__(
- self: "ColumnElement[_ST]", other: Any
+ self: "Union[_SQO[_ST], _SQO[Optional[_ST]]]",
+ other: Any,
) -> "BinaryExpression[_ST]":
...
@overload
def __sub__(
- self: "ColumnElement[_NT]", other: "Union[ColumnElement[_NT], _NT]"
+ self: "SQLCoreOperations[_NT]",
+ other: "Union[SQLCoreOperations[_NT], _NT]",
) -> "BinaryExpression[_NT]":
...
@overload
def __rsub__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __mul__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __rmul__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __mod__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __rmod__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __truediv__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __rtruediv__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __floordiv__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
@overload
def __rfloordiv__(
- self: "ColumnElement[_NT]", other: Any
+ self: "SQLCoreOperations[_NT]", other: Any
) -> "BinaryExpression[_NUMERIC]":
...
def __rfloordiv__(self, other: Any) -> "BinaryExpression":
...
+
+_SQO = SQLCoreOperations
+
+
+class ColumnElement(
+ roles.ColumnArgumentOrKeyRole,
+ roles.StatementOptionRole,
+ roles.WhereHavingRole,
+ roles.BinaryElementRole,
+ roles.OrderByRole,
+ roles.ColumnsClauseRole,
+ roles.LimitOffsetRole,
+ roles.DMLColumnRole,
+ roles.DDLConstraintColumnRole,
+ roles.DDLExpressionRole,
+ SQLCoreOperations[_T],
+ operators.ColumnOperators[SQLCoreOperations],
+ ClauseElement,
+):
+ """Represent a column-oriented SQL expression suitable for usage in the
+ "columns" clause, WHERE clause etc. of a statement.
+
+ While the most familiar kind of :class:`_expression.ColumnElement` is the
+ :class:`_schema.Column` object, :class:`_expression.ColumnElement`
+ serves as the basis
+ for any unit that may be present in a SQL expression, including
+ the expressions themselves, SQL functions, bound parameters,
+ literal expressions, keywords such as ``NULL``, etc.
+ :class:`_expression.ColumnElement`
+ is the ultimate base class for all such elements.
+
+ A wide variety of SQLAlchemy Core functions work at the SQL expression
+ level, and are intended to accept instances of
+ :class:`_expression.ColumnElement` as
+ arguments. These functions will typically document that they accept a
+ "SQL expression" as an argument. What this means in terms of SQLAlchemy
+ usually refers to an input which is either already in the form of a
+ :class:`_expression.ColumnElement` object,
+ or a value which can be **coerced** into
+ one. The coercion rules followed by most, but not all, SQLAlchemy Core
+ functions with regards to SQL expressions are as follows:
+
+ * a literal Python value, such as a string, integer or floating
+ point value, boolean, datetime, ``Decimal`` object, or virtually
+ any other Python object, will be coerced into a "literal bound
+ value". This generally means that a :func:`.bindparam` will be
+ produced featuring the given value embedded into the construct; the
+ resulting :class:`.BindParameter` object is an instance of
+ :class:`_expression.ColumnElement`.
+ The Python value will ultimately be sent
+ to the DBAPI at execution time as a parameterized argument to the
+ ``execute()`` or ``executemany()`` methods, after SQLAlchemy
+ type-specific converters (e.g. those provided by any associated
+ :class:`.TypeEngine` objects) are applied to the value.
+
+ * any special object value, typically ORM-level constructs, which
+ feature an accessor called ``__clause_element__()``. The Core
+ expression system looks for this method when an object of otherwise
+ unknown type is passed to a function that is looking to coerce the
+ argument into a :class:`_expression.ColumnElement` and sometimes a
+ :class:`_expression.SelectBase` expression.
+ It is used within the ORM to
+ convert from ORM-specific objects like mapped classes and
+ mapped attributes into Core expression objects.
+
+ * The Python ``None`` value is typically interpreted as ``NULL``,
+ which in SQLAlchemy Core produces an instance of :func:`.null`.
+
+ A :class:`_expression.ColumnElement` provides the ability to generate new
+ :class:`_expression.ColumnElement`
+ objects using Python expressions. This means that Python operators
+ such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
+ and allow the instantiation of further :class:`_expression.ColumnElement`
+ instances
+ which are composed from other, more fundamental
+ :class:`_expression.ColumnElement`
+ objects. For example, two :class:`.ColumnClause` objects can be added
+ together with the addition operator ``+`` to produce
+ a :class:`.BinaryExpression`.
+ Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
+ of :class:`_expression.ColumnElement`::
+
+ >>> from sqlalchemy.sql import column
+ >>> column('a') + column('b')
+ <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
+ >>> print(column('a') + column('b'))
+ a + b
+
+ .. seealso::
+
+ :class:`_schema.Column`
+
+ :func:`_expression.column`
+
+ """
+
+ __visit_name__ = "column_element"
+
+ primary_key = False
+ foreign_keys = []
+ _proxies = ()
+
+ _tq_label = None
+ """The named label that can be used to target
+ this column in a result set in a "table qualified" context.
+
+ This label is almost always the label used when
+ rendering <expr> AS <label> in a SELECT statement when using
+ the LABEL_STYLE_TABLENAME_PLUS_COL label style, which is what the legacy
+ ORM ``Query`` object uses as well.
+
+ For a regular Column bound to a Table, this is typically the label
+ <tablename>_<columnname>. For other constructs, different rules
+ may apply, such as anonymized labels and others.
+
+ .. versionchanged:: 1.4.21 renamed from ``._label``
+
+ """
+
+ key = None
+ """The 'key' that in some circumstances refers to this object in a
+ Python namespace.
+
+ This typically refers to the "key" of the column as present in the
+ ``.c`` collection of a selectable, e.g. ``sometable.c["somekey"]`` would
+ return a :class:`_schema.Column` with a ``.key`` of "somekey".
+
+ """
+
+ @HasMemoized.memoized_attribute
+ def _tq_key_label(self):
+ """A label-based version of 'key' that in some circumstances refers
+ to this object in a Python namespace.
+
+
+ _tq_key_label comes into play when a select() statement is constructed
+ with apply_labels(); in this case, all Column objects in the ``.c``
+ collection are rendered as <tablename>_<columnname> in SQL; this is
+ essentially the value of ._label. But to locate those columns in the
+ ``.c`` collection, the name is along the lines of <tablename>_<key>;
+ that's the typical value of .key_label.
+
+ .. versionchanged:: 1.4.21 renamed from ``._key_label``
+
+ """
+ return self._proxy_key
+
+ @property
+ def _key_label(self):
+ """legacy; renamed to _tq_key_label"""
+ return self._tq_key_label
+
+ @property
+ def _label(self):
+ """legacy; renamed to _tq_label"""
+ return self._tq_label
+
+ @property
+ def _non_anon_label(self):
+ """the 'name' that naturally applies this element when rendered in
+ SQL.
+
+ Concretely, this is the "name" of a column or a label in a
+ SELECT statement; ``<columnname>`` and ``<labelname>`` below::
+
+ SELECT <columnname> FROM table
+
+ SELECT column AS <labelname> FROM table
+
+ Above, the two names noted will be what's present in the DBAPI
+ ``cursor.description`` as the names.
+
+ If this attribute returns ``None``, it means that the SQL element as
+ written does not have a fully predictable "name" that would appear
+ in the ``cursor.description``. Examples include SQL functions, CAST
+ expressions, etc. While such constructs do return names in
+ ``cursor.description``, they are only predictable on a
+ database-specific basis; e.g. an expression like ``MAX(table.col)`` may
+ appear as the string ``max`` on one database (like PostgreSQL) or may
+ appear as the whole expression ``max(table.col)`` on SQLite.
+
+ The default implementation looks for a ``.name`` attribute on the
+ object, as has been the precedent established in SQLAlchemy for many
+ years. An exception is made on the ``FunctionElement`` subclass
+ so that the return value is always ``None``.
+
+ .. versionadded:: 1.4.21
+
+ """
+ return getattr(self, "name", None)
+
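As a small editorial sketch of the default behavior described above (``_non_anon_label`` is a private attribute, shown here only for illustration)::

    from sqlalchemy import column, func

    print(column("x")._non_anon_label)            # 'x' - a plain column has a stable name
    print(func.max(column("x"))._non_anon_label)  # None - function names vary per backend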
+ _render_label_in_columns_clause = True
+ """A flag used by select._columns_plus_names that helps to determine
+ whether we are actually going to render in terms of "SELECT <col> AS <label>".
+ This flag can be returned as False for some Column objects that want
+ to be rendered as simple "SELECT <col>"; typically columns that don't have
+ any parent table and are named the same as what the label would be
+ in any case.
+
+ """
+
+ _allow_label_resolve = True
+ """A flag that can be flipped to prevent a column from being resolvable
+ by string label name.
+
+ The joined eager loader strategy in the ORM uses this, for example.
+
+ """
+
+ _is_implicitly_boolean = False
+
+ _alt_names = ()
+
+ def self_group(self, against=None):
+ if (
+ against in (operators.and_, operators.or_, operators._asbool)
+ and self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity
+ ):
+ return AsBoolean(self, operators.is_true, operators.is_false)
+ elif against in (operators.any_op, operators.all_op):
+ return Grouping(self)
+ else:
+ return self
+
+ def _negate(self):
+ if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ return AsBoolean(self, operators.is_false, operators.is_true)
+ else:
+ return super(ColumnElement, self)._negate()
+
+ @util.memoized_property
+ def type(self) -> "TypeEngine[_T]":
+ return type_api.NULLTYPE
+
+ @HasMemoized.memoized_attribute
+ def comparator(self) -> "TypeEngine.Comparator[_T]":
+ try:
+ comparator_factory = self.type.comparator_factory
+ except AttributeError as err:
+ raise TypeError(
+ "Object %r associated with '.type' attribute "
+ "is not a TypeEngine class or object" % self.type
+ ) from err
+ else:
+ return comparator_factory(self)
+
+ def __getattr__(self, key):
+ try:
+ return getattr(self.comparator, key)
+ except AttributeError as err:
+ raise AttributeError(
+ "Neither %r object nor %r object has an attribute %r"
+ % (
+ type(self).__name__,
+ type(self.comparator).__name__,
+ key,
+ )
+ ) from err
+
def operate(
self,
op: operators.OperatorType,
"""
+from typing import Any
+from typing import TypeVar
+
from . import annotation
from . import coercions
from . import operators
from .. import util
+_T = TypeVar("_T", bound=Any)
+
_registry = util.defaultdict(dict)
reg[identifier] = fn
-class FunctionElement(Executable, ColumnElement, FromClause, Generative):
+class FunctionElement(Executable, ColumnElement[_T], FromClause, Generative):
"""Base for SQL function-oriented constructs.
.. seealso::
import itertools
from operator import attrgetter
import typing
+from typing import Any as TODO_Any
+from typing import Optional
+from typing import Tuple
from typing import Type
from typing import Union
from .elements import ClauseElement
from .elements import ClauseList
from .elements import ColumnClause
+from .elements import ColumnElement
from .elements import GroupedElement
from .elements import Grouping
from .elements import literal_column
"""
return self.selected_columns
- @property
- @util.deprecated(
+ @util.deprecated_property(
"1.4",
"The :attr:`_expression.SelectBase.c` and "
":attr:`_expression.SelectBase.columns` attributes "
__visit_name__ = "select"
- _setup_joins = ()
- _memoized_select_entities = ()
+ _setup_joins: Tuple[TODO_Any, ...] = ()
+ _memoized_select_entities: Tuple[TODO_Any, ...] = ()
_distinct = False
- _distinct_on = ()
- _correlate = ()
- _correlate_except = None
- _where_criteria = ()
- _having_criteria = ()
- _from_obj = ()
+ _distinct_on: Tuple[ColumnElement, ...] = ()
+ _correlate: Tuple[FromClause, ...] = ()
+ _correlate_except: Optional[Tuple[FromClause, ...]] = None
+ _where_criteria: Tuple[ColumnElement, ...] = ()
+ _having_criteria: Tuple[ColumnElement, ...] = ()
+ _from_obj: Tuple[FromClause, ...] = ()
_auto_correlate = True
_compile_options = SelectState.default_select_compile_options
"""
return self._compile_state_factory(self, None)._get_display_froms()
- @property
- @util.deprecated(
+ @util.deprecated_property(
"1.4.23",
"The :attr:`_expression.Select.froms` attribute is moved to "
"the :meth:`_expression.Select.get_final_froms` method.",
def _expression_adaptations(self):
raise NotImplementedError()
- class Comparator(TypeEngine.Comparator):
+ class Comparator(TypeEngine.Comparator[_T]):
_blank_dict = util.immutabledict()
def _adapt_expression(self, op, other_comparator):
"""A mixin that marks a type as supporting 'concatenation',
typically strings."""
- class Comparator(TypeEngine.Comparator):
+ class Comparator(TypeEngine.Comparator[_T]):
def _adapt_expression(self, op, other_comparator):
if op is operators.add and isinstance(
other_comparator,
"""
- class Comparator(TypeEngine.Comparator):
+ class Comparator(TypeEngine.Comparator[_T]):
def _setup_getitem(self, index):
raise NotImplementedError()
)
) from err
- class Comparator(String.Comparator):
+ class Comparator(String.Comparator[_T]):
def _adapt_expression(self, op, other_comparator):
op, typ = super(Enum.Comparator, self)._adapt_expression(
op, other_comparator
"""
- class Comparator(Indexable.Comparator, Concatenable.Comparator):
+ class Comparator(Indexable.Comparator[_T], Concatenable.Comparator[_T]):
"""Define comparison operations for :class:`_types.JSON`."""
def _setup_getitem(self, index):
"""If True, Python zero-based indexes should be interpreted as one-based
on the SQL expression side."""
- class Comparator(Indexable.Comparator, Concatenable.Comparator):
+ class Comparator(Indexable.Comparator[_T], Concatenable.Comparator[_T]):
"""Define comparison operations for :class:`_types.ARRAY`.
return process
- class Comparator(TypeEngine.Comparator):
+ class Comparator(TypeEngine.Comparator[_T]):
def _adapt_expression(self, op, other_comparator):
if isinstance(
other_comparator, NullType.Comparator
import typing
from typing import Any
+from typing import Callable
from typing import Generic
from typing import Tuple
from typing import Type
"""
- class Comparator(TypeEngine.Comparator):
+ class Comparator(TypeEngine.Comparator[_CT]):
"""A :class:`.TypeEngine.Comparator` that is specific to
:class:`.TypeDecorator`.
)
@property
- def comparator_factory(self):
+ def comparator_factory(self) -> Callable[..., TypeEngine.Comparator[_T]]:
if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__:
return self.impl.comparator_factory
else:
from .compat import has_refcount_gc
from .compat import inspect_getfullargspec
from .compat import local_dataclass_fields
-from .compat import namedtuple
from .compat import next
from .compat import osx
from .compat import py38
from .deprecations import deprecated
from .deprecations import deprecated_cls
from .deprecations import deprecated_params
+from .deprecations import deprecated_property
from .deprecations import inject_docstring_text
from .deprecations import moved_20
from .deprecations import warn_deprecated
from .langhelpers import set_creation_order
from .langhelpers import string_or_unprintable
from .langhelpers import symbol
+from .langhelpers import TypingOnly
from .langhelpers import unbound_method_to_callable
from .langhelpers import walk_subclasses
from .langhelpers import warn
--- /dev/null
+import typing
+
+if not typing.TYPE_CHECKING:
+ try:
+ from ..cyextension import util # noqa
+ except ImportError:
+ HAS_CYEXTENSION = False
+ else:
+ HAS_CYEXTENSION = True
+else:
+ HAS_CYEXTENSION = False
has_refcount_gc = bool(cpython)
dottedgetter = operator.attrgetter
-namedtuple = collections.namedtuple
next = next # noqa
FullArgSpec = collections.namedtuple(
functionality."""
import re
+from typing import Any
+from typing import Callable
+from typing import TypeVar
from . import compat
from .langhelpers import _hash_limit_string
from .langhelpers import decorator
from .langhelpers import inject_docstring_text
from .langhelpers import inject_param_text
+from .typing import ReadOnlyInstanceDescriptor
from .. import exc
+_T = TypeVar("_T", bound=Any)
+
def _warn_with_version(msg, version, type_, stacklevel, code=None):
warn = type_(msg, code=code)
return decorate
+def deprecated_property(
+ version,
+ message=None,
+ add_deprecation_to_docstring=True,
+ warning=None,
+ enable_warnings=True,
+) -> Callable[[Callable[..., _T]], ReadOnlyInstanceDescriptor[_T]]:
+ """The @deprecated decorator combined with @property.
+
+ E.g.::
+
+ class Foo:
+ @deprecated_property("1.4", "thing is deprecated")
+ def thing(self):
+ return "thing"
+
+ is equivalent to::
+
+ class Foo:
+ @property
+ @deprecated("1.4", "thing is deprecated")
+ def thing(self):
+ return "thing"
+
+ The combined decorator exists because Mypy does not support an
+ additional decorator underneath ``@property``, reporting::
+
+ mypy: error: Decorated property not supported
+
+ Using this single decorator avoids that error.
+
+ """
+ return lambda fn: property(
+ deprecated(
+ version,
+ message=message,
+ add_deprecation_to_docstring=add_deprecation_to_docstring,
+ warning=warning,
+ enable_warnings=enable_warnings,
+ )(fn)
+ )
+
+
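A minimal usage sketch of the new helper; the ``Thing`` class and attribute names are hypothetical, and ``deprecated_property`` is imported via ``sqlalchemy.util`` per the re-export added above::

    import warnings

    from sqlalchemy.util import deprecated_property

    class Thing:
        @deprecated_property("1.4", "old_attr is deprecated; use new_attr")
        def old_attr(self):
            return "value"

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        assert Thing().old_attr == "value"   # still works, but emits a deprecation warning
    assert any("deprecated" in str(w.message) for w in caught)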
def deprecated(
version,
message=None,
return env["set"]
+class TypingOnly:
+ """A mixin class that marks a class as 'typing only', meaning it has
+ absolutely no methods, attributes, or runtime functionality whatsoever.
+
+ """
+
+ __slots__ = ()
+
+ def __init_subclass__(cls) -> None:
+ if TypingOnly in cls.__bases__:
+ remaining = set(cls.__dict__).difference(
+ {
+ "__module__",
+ "__doc__",
+ "__slots__",
+ "__orig_bases__",
+ }
+ )
+ if remaining:
+ raise AssertionError(
+ f"Class {cls} directly inherits TypingOnly but has "
+ f"additional attributes {remaining}."
+ )
+ super().__init_subclass__()
+
+
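A short editorial sketch of the guard implemented above (class names are hypothetical): a subclass that is purely for typing passes the check, while one that adds runtime members is rejected at class-creation time::

    class MarkerOnly(TypingOnly):
        __slots__ = ()              # no runtime members: accepted silently

    try:
        class Sneaky(TypingOnly):
            __slots__ = ()

            def helper(self):       # a real method: rejected
                return 1
    except AssertionError as err:
        print(err)
        # Class <class '...Sneaky'> directly inherits TypingOnly but has
        # additional attributes {'helper'}.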
class EnsureKWArg:
r"""Apply translation of functions to accept \**kw arguments if they
don't already.
from typing import overload
from typing import Type
from typing import TypeVar
+from typing import Union
from . import compat
+_T = TypeVar("_T", bound=Any)
+
if compat.py38:
from typing import Literal
from typing import Protocol
@overload
def __set__(self, instance: object, value: _T) -> None:
...
+
+
+class ReadOnlyInstanceDescriptor(Protocol[_T]):
+ """Protocol representing a read-only, instance-only descriptor."""
+
+ @overload
+ def __get__(
+ self, instance: None, owner: Any
+ ) -> "ReadOnlyInstanceDescriptor[_T]":
+ ...
+
+ @overload
+ def __get__(self, instance: object, owner: Any) -> _T:
+ ...
+
+ def __get__(
+ self, instance: object, owner: Any
+ ) -> Union["ReadOnlyInstanceDescriptor[_T]", _T]:
+ ...
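Editorially, a short sketch of what the protocol promises to a type checker; the ``Thing``/``label`` names are hypothetical, and at runtime a plain ``property`` satisfies the protocol, which is how ``deprecated_property`` above is annotated::

    from sqlalchemy.util import deprecated_property

    class Thing:
        @deprecated_property("1.4", "label is deprecated")
        def label(self) -> str:
            return "x"

    # to a type checker (normal descriptor handling):
    #   Thing.label    -> ReadOnlyInstanceDescriptor[str]   (class access)
    #   Thing().label  -> str                                (instance access)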
lib/sqlalchemy/sql/expression.py:F401
[mypy]
-# min mypy version 0.800
+mypy_path = ./lib/
strict = True
incremental = True
#plugins = sqlalchemy.ext.mypy.plugin
from sqlalchemy.testing import is_true
from sqlalchemy.testing import mock
from sqlalchemy.testing import ne_
-from sqlalchemy.testing.assertions import expect_deprecated
from sqlalchemy.testing.assertions import expect_raises_message
TypeError, "drivername must be a string", url.URL.create, value
)
- @testing.combinations((35.8), (True,), (None,), argnames="value")
- def test_only_str_drivername_no_none_legacy(self, value):
- with expect_deprecated(r"Calling URL\(\) directly"):
- assert_raises_message(
- TypeError, "drivername must be a string", url.URL, value
- )
-
@testing.combinations(
"username",
"host",
{"foo": 35.8},
)
- def test_deprecated_constructor(self):
- with testing.expect_deprecated(
- r"Calling URL\(\) directly is deprecated and will be "
- "disabled in a future release."
- ):
- u1 = url.URL(
- drivername="somedriver",
- username="user",
- port=52,
- host="hostname",
- )
- eq_(u1, url.make_url("somedriver://user@hostname:52"))
+ def test_constructor_is_nt_so_all_args_work(self):
+ """test #7130
- def test_deprecated_constructor_all_args(self):
- """test #7130"""
- with testing.expect_deprecated(
- r"Calling URL\(\) directly is deprecated and will be "
- "disabled in a future release."
- ):
- u1 = url.URL(
- "somedriver",
- "user",
- "secret",
- "10.20.30.40",
- 1234,
- "DB",
- {"key": "value"},
- )
+ For typing support, we can't override the __new__ method, so
+ URL now accepts the plain namedtuple constructor, which users
+ generally shouldn't call directly.
+
+ """
+ u1 = url.URL(
+ "somedriver",
+ "user",
+ "secret",
+ "10.20.30.40",
+ 1234,
+ "DB",
+ {"key": "value"},
+ )
eq_(
u1,
url.make_url(
),
)
+ @testing.fails()
def test_arg_validation_all_seven_posn(self):
- """test #7130"""
- with testing.expect_deprecated(
- r"Calling URL\(\) directly is deprecated and will be "
- "disabled in a future release."
- ):
+ """test #7130
- assert_raises_message(
- TypeError,
- "drivername must be a string",
- url.URL,
- b"somedriver",
- "user",
- "secret",
- "10.20.30.40",
- 1234,
- "DB",
- {"key": "value"},
- )
+ this can't work right now with typing.NamedTuple; we'd have to
+ convert URL to a dataclass.
+
+ """
+ assert_raises_message(
+ TypeError,
+ "drivername must be a string",
+ url.URL,
+ b"somedriver",
+ "user",
+ "secret",
+ "10.20.30.40",
+ 1234,
+ "DB",
+ {"key": "value"},
+ )
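As an editorial sketch of the distinction these tests cover: :meth:`.URL.create` validates and coerces its arguments, while the plain NamedTuple constructor (now permitted for typing reasons) performs no validation::

    from sqlalchemy.engine import url

    # preferred public API - arguments are checked and coerced
    validated = url.URL.create(
        "postgresql", username="scott", host="db", database="test"
    )

    # plain NamedTuple constructor - accepted, but no validation applied
    raw = url.URL("postgresql", "scott", None, "db", None, "test", {})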
def test_deprecated_translate_connect_args_names(self):
u = url.make_url("somedriver://user@hostname:52")
from sqlalchemy.orm import configure_mappers
from sqlalchemy.orm import decl_base
from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm import declared_attr
from sqlalchemy.orm import deferred
from sqlalchemy.orm import descriptor_props
from sqlalchemy.orm import exc as orm_exc
from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import mapped_column
from sqlalchemy.orm import Mapper
from sqlalchemy.orm import registry
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import synonym_for
+from sqlalchemy.orm.decl_api import add_mapped_attribute
+from sqlalchemy.orm.decl_api import DeclarativeBaseNoMeta
from sqlalchemy.orm.decl_api import DeclarativeMeta
from sqlalchemy.orm.decl_base import _DeferredMapperConfig
from sqlalchemy.orm.events import InstrumentationEvents
User = Address = None
-class DeclarativeTestBase(
+class DeclarativeBaseSetupsTest(fixtures.TestBase):
+ def test_unbound_declarative_base(self):
+ Base = declarative_base()
+
+ class User(Base):
+ __tablename__ = "user"
+ id = Column(Integer, primary_key=True)
+
+ s = Session()
+
+ with testing.expect_raises(exc.UnboundExecutionError):
+ s.get_bind(User)
+
+ def test_unbound_cls_registry(self):
+ reg = registry()
+
+ Base = reg.generate_base()
+
+ class User(Base):
+ __tablename__ = "user"
+ id = Column(Integer, primary_key=True)
+
+ s = Session()
+
+ with testing.expect_raises(exc.UnboundExecutionError):
+ s.get_bind(User)
+
+ def test_dispose_attrs(self):
+ reg = registry()
+
+ class Foo:
+ __tablename__ = "some_table"
+
+ id = Column(Integer, primary_key=True)
+
+ reg.mapped(Foo)
+
+ is_(Foo.__mapper__, class_mapper(Foo))
+ is_(Foo.__table__, class_mapper(Foo).local_table)
+
+ clear_mappers()
+
+ assert not hasattr(Foo, "__mapper__")
+ assert not hasattr(Foo, "__table__")
+
+ from sqlalchemy.orm import clsregistry
+
+ assert clsregistry._key_is_empty(
+ "Foo", reg._class_registry, lambda cls: cls is Foo
+ )
+
+ def test_difficult_class(self, metadata):
+ """test no getattr() errors with a customized class"""
+
+ # metaclass to mock the way zope.interface breaks getattr()
+ class BrokenMeta(type):
+ def __getattribute__(self, attr):
+ if attr == "xyzzy":
+ raise AttributeError("xyzzy")
+ else:
+ return object.__getattribute__(self, attr)
+
+ # even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
+ # fails
+ class BrokenParent(metaclass=BrokenMeta):
+ xyzzy = "magic"
+
+ # _as_declarative() inspects obj.__class__.__bases__
+ class User(BrokenParent, fixtures.ComparableEntity):
+ __tablename__ = "users"
+ id = Column(
+ "id", Integer, primary_key=True, test_needs_autoincrement=True
+ )
+ name = Column("name", String(50))
+
+ reg = registry(metadata=metadata)
+
+ reg.map_declaratively(User)
+
+ def test_undefer_column_name(self):
+ # TODO: not sure if there was an explicit
+ # test for this elsewhere
+ foo = Column(Integer)
+ eq_(str(foo), "(no name)")
+ eq_(foo.key, None)
+ eq_(foo.name, None)
+ decl_base._undefer_column_name("foo", foo)
+ eq_(str(foo), "foo")
+ eq_(foo.key, "foo")
+ eq_(foo.name, "foo")
+
+ def test_string_dependency_resolution_schemas(self):
+ Base = declarative_base()
+
+ class User(Base):
+
+ __tablename__ = "users"
+ __table_args__ = {"schema": "fooschema"}
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ props = relationship(
+ "Prop",
+ secondary="fooschema.user_to_prop",
+ primaryjoin="User.id==fooschema.user_to_prop.c.user_id",
+ secondaryjoin="fooschema.user_to_prop.c.prop_id==Prop.id",
+ backref="users",
+ )
+
+ class Prop(Base):
+
+ __tablename__ = "props"
+ __table_args__ = {"schema": "fooschema"}
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ user_to_prop = Table(
+ "user_to_prop",
+ Base.metadata,
+ Column("user_id", Integer, ForeignKey("fooschema.users.id")),
+ Column("prop_id", Integer, ForeignKey("fooschema.props.id")),
+ schema="fooschema",
+ )
+ configure_mappers()
+
+ assert (
+ class_mapper(User).get_property("props").secondary is user_to_prop
+ )
+
+ def test_string_dependency_resolution_annotations(self):
+ Base = declarative_base()
+
+ class Parent(Base):
+ __tablename__ = "parent"
+ id = Column(Integer, primary_key=True)
+ name = Column(String)
+ children = relationship(
+ "Child",
+ primaryjoin="Parent.name=="
+ "remote(foreign(func.lower(Child.name_upper)))",
+ )
+
+ class Child(Base):
+ __tablename__ = "child"
+ id = Column(Integer, primary_key=True)
+ name_upper = Column(String)
+
+ configure_mappers()
+ eq_(
+ Parent.children.property._calculated_foreign_keys,
+ set([Child.name_upper.property.columns[0]]),
+ )
+
+ def test_class_has_registry_attr(self, registry):
+ existing_registry = registry
+
+ Base = registry.generate_base()
+
+ class A(Base):
+ __tablename__ = "a"
+
+ registry = {"foo": "bar"}
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ class SubA(A):
+ pass
+
+ is_(Base.registry, existing_registry)
+ is_(inspect(A).registry, existing_registry)
+ eq_(A.registry, {"foo": "bar"})
+
+ is_(inspect(SubA).registry, existing_registry)
+ eq_(SubA.registry, {"foo": "bar"})
+
+ def test_class_does_not_have_registry_attr(self):
+ with assertions.expect_raises_message(
+ exc.InvalidRequestError,
+ r"Declarative base class has no 'registry' attribute, or "
+ r"registry is not a sqlalchemy.orm.registry\(\) object",
+ ):
+
+ class Base(metaclass=DeclarativeMeta):
+ metadata = sa.MetaData()
+
+ def test_shared_class_registry(self):
+ reg = {}
+ Base1 = declarative_base(class_registry=reg)
+ Base2 = declarative_base(class_registry=reg)
+
+ class A(Base1):
+ __tablename__ = "a"
+ id = Column(Integer, primary_key=True)
+
+ class B(Base2):
+ __tablename__ = "b"
+ id = Column(Integer, primary_key=True)
+ aid = Column(Integer, ForeignKey(A.id))
+ as_ = relationship("A")
+
+ assert B.as_.property.mapper.class_ is A
+
+ def test_custom_base(self):
+ class MyBase:
+ def foobar(self):
+ return "foobar"
+
+ Base = declarative_base(cls=MyBase)
+ assert hasattr(Base, "metadata")
+ assert Base().foobar() == "foobar"
+
+ def test_as_declarative(self, metadata):
+ class User(fixtures.ComparableEntity):
+
+ __tablename__ = "users"
+ id = Column(
+ "id", Integer, primary_key=True, test_needs_autoincrement=True
+ )
+ name = Column("name", String(50))
+ addresses = relationship("Address", backref="user")
+
+ class Address(fixtures.ComparableEntity):
+
+ __tablename__ = "addresses"
+ id = Column(
+ "id", Integer, primary_key=True, test_needs_autoincrement=True
+ )
+ email = Column("email", String(50))
+ user_id = Column("user_id", Integer, ForeignKey("users.id"))
+
+ reg = registry(metadata=metadata)
+ reg.mapped(User)
+ reg.mapped(Address)
+ reg.metadata.create_all(testing.db)
+ u1 = User(
+ name="u1", addresses=[Address(email="one"), Address(email="two")]
+ )
+ with Session(testing.db) as sess:
+ sess.add(u1)
+ sess.commit()
+ with Session(testing.db) as sess:
+ eq_(
+ sess.query(User).all(),
+ [
+ User(
+ name="u1",
+ addresses=[Address(email="one"), Address(email="two")],
+ )
+ ],
+ )
+
+ def test_custom_mapper_attribute(self):
+ def mymapper(cls, tbl, **kwargs):
+ m = sa.orm.Mapper(cls, tbl, **kwargs)
+ m.CHECK = True
+ return m
+
+ base = declarative_base()
+
+ class Foo(base):
+ __tablename__ = "foo"
+ __mapper_cls__ = mymapper
+ id = Column(Integer, primary_key=True)
+
+ eq_(Foo.__mapper__.CHECK, True)
+
+ def test_custom_mapper_argument(self):
+ def mymapper(cls, tbl, **kwargs):
+ m = sa.orm.Mapper(cls, tbl, **kwargs)
+ m.CHECK = True
+ return m
+
+ base = declarative_base(mapper=mymapper)
+
+ class Foo(base):
+ __tablename__ = "foo"
+ id = Column(Integer, primary_key=True)
+
+ eq_(Foo.__mapper__.CHECK, True)
+
+ def test_no_change_to_all_descriptors(self):
+ base = declarative_base()
+
+ class Foo(base):
+ __tablename__ = "foo"
+ id = Column(Integer, primary_key=True)
+
+ eq_(Foo.__mapper__.all_orm_descriptors.keys(), ["id"])
+
+ def test_cls_docstring(self):
+ class MyBase:
+ """MyBase Docstring"""
+
+ Base = declarative_base(cls=MyBase)
+
+ eq_(Base.__doc__, MyBase.__doc__)
+
+ def test_delattr_mapped_raises(self):
+ Base = declarative_base()
+
+ class Foo(Base):
+ __tablename__ = "foo"
+
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ def go():
+ del Foo.data
+
+ assert_raises_message(
+ NotImplementedError,
+ "Can't un-map individual mapped attributes on a mapped class.",
+ go,
+ )
+
+ def test_delattr_hybrid_fine(self):
+ Base = declarative_base()
+
+ class Foo(Base):
+ __tablename__ = "foo"
+
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ @hybrid_property
+ def data_hybrid(self):
+ return self.data
+
+ assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()
+
+ del Foo.data_hybrid
+
+ assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
+
+ assert not hasattr(Foo, "data_hybrid")
+
+ def test_setattr_hybrid_updates_descriptors(self):
+ Base = declarative_base()
+
+ class Foo(Base):
+ __tablename__ = "foo"
+
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
+
+ @hybrid_property
+ def data_hybrid(self):
+ return self.data
+
+ Foo.data_hybrid = data_hybrid
+ assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()
+
+ del Foo.data_hybrid
+
+ assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
+
+ assert not hasattr(Foo, "data_hybrid")
+
+ def test_kw_support_in_declarative_meta_init(self, registry):
+ # This will not fail if DeclarativeMeta __init__ supports **kw
+
+ reg = registry
+
+ class Base(metaclass=DeclarativeMeta):
+ __abstract__ = True
+ registry = reg
+ metadata = reg.metadata
+
+ class BaseUser(Base):
+ __tablename__ = "base"
+ id_ = Column(Integer, primary_key=True)
+
+ @classmethod
+ def __init_subclass__(cls, random_keyword=False, **kw):
+ super().__init_subclass__(**kw)
+ cls._set_random_keyword_used_here = random_keyword
+
+ class User(BaseUser):
+ __tablename__ = "user"
+ id_ = Column(Integer, ForeignKey("base.id_"), primary_key=True)
+
+ # Check the default option
+ eq_(User._set_random_keyword_used_here, False)
+
+ # Build the metaclass with a keyword!
+ bases = (BaseUser,)
+ UserType = DeclarativeMeta("UserType", bases, {}, random_keyword=True)
+
+ # Check to see if __init_subclass__ works in supported versions
+ eq_(UserType._set_random_keyword_used_here, True)
+
+ def test_declarative_base_bad_registry(self):
+ with assertions.expect_raises_message(
+ exc.InvalidRequestError,
+ "Declarative base class has a 'registry' attribute "
+ "that is not an instance",
+ ):
+
+ class MyClass(DeclarativeBase):
+ registry = {"foo": "bar"}
+
+
+@testing.combinations(
+ ("declarative_base_nometa_superclass",),
+ ("declarative_base_superclass",),
+ ("dynamic",),
+ ("explicit",),
+ argnames="base_style",
+ id_="s",
+)
+class DeclarativeMultiBaseTest(
fixtures.TestBase,
testing.AssertsExecutionResults,
testing.AssertsCompiledSQL,
registry = mapper_registry
metadata = mapper_registry.metadata
- def teardown_test(self):
- close_all_sessions()
- clear_mappers()
- Base.metadata.drop_all(testing.db)
-
-
-@testing.combinations(
- ("dynamic",), ("explicit",), argnames="base_style", id_="s"
-)
-class DeclarativeTest(DeclarativeTestBase):
- def test_unbound_declarative_base(self):
- Base = declarative_base()
-
- class User(Base):
- __tablename__ = "user"
- id = Column(Integer, primary_key=True)
-
- s = Session()
+ elif self.base_style == "declarative_base_superclass":
- with testing.expect_raises(exc.UnboundExecutionError):
- s.get_bind(User)
+ class Base(DeclarativeBase):
+ pass
- def test_unbound_cls_registry(self):
- reg = registry()
+ elif self.base_style == "declarative_base_nometa_superclass":
- Base = reg.generate_base()
+ class Base(DeclarativeBaseNoMeta):
+ pass
- class User(Base):
- __tablename__ = "user"
- id = Column(Integer, primary_key=True)
+ else:
+ assert False
- s = Session()
+ def teardown_test(self):
+ close_all_sessions()
+ clear_mappers()
+ Base.metadata.drop_all(testing.db)
- with testing.expect_raises(exc.UnboundExecutionError):
- s.get_bind(User)
+ @testing.fixture
+ def require_metaclass(self):
+ if type(Base) is type:
+ testing.config.skip_test("current base has no metaclass")
def test_basic(self):
class User(Base, fixtures.ComparableEntity):
eq_(a1, Address(email="two"))
eq_(a1.user, User(name="u1"))
+ def test_mapped_column_construct(self):
+ class User(Base, fixtures.ComparableEntity):
+ __tablename__ = "users"
+
+ id = mapped_column("id", Integer, primary_key=True)
+ name = mapped_column(String(50))
+
+ Base.metadata.create_all(testing.db)
+
+ u1 = User(id=1, name="u1")
+ sess = fixture_session()
+ sess.add(u1)
+ sess.flush()
+ sess.expunge_all()
+
+ eq_(
+ sess.query(User).all(),
+ [User(name="u1", id=1)],
+ )
+
def test_back_populates_setup(self):
class User(Base):
__tablename__ = "users"
in Address.__mapper__.attrs.user._reverse_property
)
- def test_dispose_attrs(self):
- reg = registry()
-
- class Foo:
- __tablename__ = "some_table"
-
- id = Column(Integer, primary_key=True)
-
- reg.mapped(Foo)
-
- is_(Foo.__mapper__, class_mapper(Foo))
- is_(Foo.__table__, class_mapper(Foo).local_table)
-
- clear_mappers()
-
- assert not hasattr(Foo, "__mapper__")
- assert not hasattr(Foo, "__table__")
-
- from sqlalchemy.orm import clsregistry
-
- assert clsregistry._key_is_empty(
- "Foo", reg._class_registry, lambda cls: cls is Foo
- )
-
- def test_deferred_reflection_default_error(self):
+ def test_deferred_reflection_default_error(self, require_metaclass):
class MyExt:
@classmethod
def prepare(cls):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
+ # TODO: this use case only works when using
+ # DeclarativeMeta or DeclarativeAttributeIntercept
assert_raises_message(
orm_exc.UnmappedClassError,
"Class .*User has a deferred "
configure_mappers,
)
- def test_difficult_class(self):
- """test no getattr() errors with a customized class"""
-
- # metaclass to mock the way zope.interface breaks getattr()
- class BrokenMeta(type):
- def __getattribute__(self, attr):
- if attr == "xyzzy":
- raise AttributeError("xyzzy")
- else:
- return object.__getattribute__(self, attr)
-
- # even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
- # fails
- class BrokenParent(metaclass=BrokenMeta):
- xyzzy = "magic"
-
- # _as_declarative() inspects obj.__class__.__bases__
- class User(BrokenParent, fixtures.ComparableEntity):
- __tablename__ = "users"
- id = Column(
- "id", Integer, primary_key=True, test_needs_autoincrement=True
- )
- name = Column("name", String(50))
-
- reg = registry(metadata=Base.metadata)
-
- reg.map_declaratively(User)
-
def test_reserved_identifiers(self):
def go1():
class User1(Base):
go,
)
- def test_undefer_column_name(self):
- # TODO: not sure if there was an explicit
- # test for this elsewhere
- foo = Column(Integer)
- eq_(str(foo), "(no name)")
- eq_(foo.key, None)
- eq_(foo.name, None)
- decl_base._undefer_column_name("foo", foo)
- eq_(str(foo), "foo")
- eq_(foo.key, "foo")
- eq_(foo.name, "foo")
-
def test_recompile_on_othermapper(self):
"""declarative version of the same test in mappers.py"""
assert (
class_mapper(User).get_property("props").secondary
is Secondary.__table__
- )
-
- def test_string_dependency_resolution_class_over_table(self):
- # test for second half of #5774
- class User(Base, fixtures.ComparableEntity):
-
- __tablename__ = "users"
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
- secondary = relationship(
- "Secondary",
- )
-
- # class name and table name match
- class Secondary(Base):
- __tablename__ = "Secondary"
- user_id = Column(Integer, ForeignKey("users.id"), primary_key=True)
-
- configure_mappers()
- assert (
- class_mapper(User).get_property("secondary").mapper
- is Secondary.__mapper__
- )
-
- def test_string_dependency_resolution_schemas(self):
- Base = declarative_base()
-
- class User(Base):
-
- __tablename__ = "users"
- __table_args__ = {"schema": "fooschema"}
-
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
- props = relationship(
- "Prop",
- secondary="fooschema.user_to_prop",
- primaryjoin="User.id==fooschema.user_to_prop.c.user_id",
- secondaryjoin="fooschema.user_to_prop.c.prop_id==Prop.id",
- backref="users",
- )
-
- class Prop(Base):
-
- __tablename__ = "props"
- __table_args__ = {"schema": "fooschema"}
-
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
-
- user_to_prop = Table(
- "user_to_prop",
- Base.metadata,
- Column("user_id", Integer, ForeignKey("fooschema.users.id")),
- Column("prop_id", Integer, ForeignKey("fooschema.props.id")),
- schema="fooschema",
- )
- configure_mappers()
-
- assert (
- class_mapper(User).get_property("props").secondary is user_to_prop
- )
-
- def test_string_dependency_resolution_annotations(self):
- Base = declarative_base()
-
- class Parent(Base):
- __tablename__ = "parent"
- id = Column(Integer, primary_key=True)
- name = Column(String)
- children = relationship(
- "Child",
- primaryjoin="Parent.name=="
- "remote(foreign(func.lower(Child.name_upper)))",
- )
-
- class Child(Base):
- __tablename__ = "child"
- id = Column(Integer, primary_key=True)
- name_upper = Column(String)
-
- configure_mappers()
- eq_(
- Parent.children.property._calculated_foreign_keys,
- set([Child.name_upper.property.columns[0]]),
- )
-
- def test_class_has_registry_attr(self):
- existing_registry = Base.registry
-
- class A(Base):
- __tablename__ = "a"
-
- registry = {"foo": "bar"}
- id = Column(Integer, primary_key=True)
- data = Column(String)
-
- class SubA(A):
- pass
-
- is_(Base.registry, existing_registry)
- is_(inspect(A).registry, existing_registry)
- eq_(A.registry, {"foo": "bar"})
-
- is_(inspect(SubA).registry, existing_registry)
- eq_(SubA.registry, {"foo": "bar"})
-
- def test_class_does_not_have_registry_attr(self):
- with assertions.expect_raises_message(
- exc.InvalidRequestError,
- r"Declarative base class has no 'registry' attribute, or "
- r"registry is not a sqlalchemy.orm.registry\(\) object",
- ):
-
- class Base(metaclass=DeclarativeMeta):
- metadata = sa.MetaData()
+ )
- def test_shared_class_registry(self):
- reg = {}
- Base1 = declarative_base(class_registry=reg)
- Base2 = declarative_base(class_registry=reg)
+ def test_string_dependency_resolution_class_over_table(self):
+ # test for second half of #5774
+ class User(Base, fixtures.ComparableEntity):
- class A(Base1):
- __tablename__ = "a"
+ __tablename__ = "users"
id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ secondary = relationship(
+ "Secondary",
+ )
- class B(Base2):
- __tablename__ = "b"
- id = Column(Integer, primary_key=True)
- aid = Column(Integer, ForeignKey(A.id))
- as_ = relationship("A")
+ # class name and table name match
+ class Secondary(Base):
+ __tablename__ = "Secondary"
+ user_id = Column(Integer, ForeignKey("users.id"), primary_key=True)
- assert B.as_.property.mapper.class_ is A
+ configure_mappers()
+ assert (
+ class_mapper(User).get_property("secondary").mapper
+ is Secondary.__mapper__
+ )
def test_uncompiled_attributes_in_relationship(self):
class Address(Base, fixtures.ComparableEntity):
configure_mappers,
)
- def test_custom_base(self):
- class MyBase:
- def foobar(self):
- return "foobar"
-
- Base = declarative_base(cls=MyBase)
- assert hasattr(Base, "metadata")
- assert Base().foobar() == "foobar"
-
def test_uses_get_on_class_col_fk(self):
# test [ticket:1492]
# tables create fine
Base.metadata.create_all(testing.db)
- def test_add_prop(self):
+ @testing.fixture
+ def assert_user_address_mapping(self, connection):
+ def go(User, Address):
+ User.metadata.create_all(connection)
+
+ u1 = User(
+ name="u1",
+ addresses=[Address(email="one"), Address(email="two")],
+ )
+
+ with Session(connection) as sess:
+ sess.add(u1)
+ sess.flush()
+ sess.expunge_all()
+ eq_(
+ sess.query(User).all(),
+ [
+ User(
+ name="u1",
+ addresses=[
+ Address(email="one"),
+ Address(email="two"),
+ ],
+ )
+ ],
+ )
+ a1 = sess.query(Address).filter(Address.email == "two").one()
+ eq_(a1, Address(email="two"))
+ eq_(a1.user, User(name="u1"))
+
+ yield go
+
+ def test_add_prop_auto(
+ self, require_metaclass, assert_user_address_mapping
+ ):
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
Address.user_id = Column(
"user_id", Integer, ForeignKey("users.id"), key="_user_id"
)
- Base.metadata.create_all(testing.db)
+
eq_(Address.__table__.c["id"].name, "id")
eq_(Address.__table__.c["_email"].name, "email")
eq_(Address.__table__.c["_user_id"].name, "user_id")
- u1 = User(
- name="u1", addresses=[Address(email="one"), Address(email="two")]
+
+ assert_user_address_mapping(User, Address)
+
+ def test_add_prop_manual(self, assert_user_address_mapping):
+ class User(Base, fixtures.ComparableEntity):
+
+ __tablename__ = "users"
+ id = Column(
+ "id", Integer, primary_key=True, test_needs_autoincrement=True
+ )
+
+ add_mapped_attribute(User, "name", Column("name", String(50)))
+ add_mapped_attribute(
+ User, "addresses", relationship("Address", backref="user")
)
- sess = fixture_session()
- sess.add(u1)
- sess.flush()
- sess.expunge_all()
- eq_(
- sess.query(User).all(),
- [
- User(
- name="u1",
- addresses=[Address(email="one"), Address(email="two")],
- )
- ],
+
+ class Address(Base, fixtures.ComparableEntity):
+
+ __tablename__ = "addresses"
+ id = Column(
+ Integer, primary_key=True, test_needs_autoincrement=True
+ )
+
+ add_mapped_attribute(
+ Address, "email", Column(String(50), key="_email")
)
- a1 = sess.query(Address).filter(Address.email == "two").one()
- eq_(a1, Address(email="two"))
- eq_(a1.user, User(name="u1"))
+ add_mapped_attribute(
+ Address,
+ "user_id",
+ Column("user_id", Integer, ForeignKey("users.id"), key="_user_id"),
+ )
+
+ eq_(Address.__table__.c["id"].name, "id")
+ eq_(Address.__table__.c["_email"].name, "email")
+ eq_(Address.__table__.c["_user_id"].name, "user_id")
+
+ assert_user_address_mapping(User, Address)
def test_alt_name_attr_subclass_column_inline(self):
# [ticket:2900]
)
ASub(brap=B())
- def test_alt_name_attr_subclass_column_attrset(self):
+ def test_alt_name_attr_subclass_column_attrset(self, require_metaclass):
# [ticket:2900]
class A(Base):
__tablename__ = "a"
A.brap.original_property, descriptor_props.SynonymProperty
)
- def test_alt_name_attr_subclass_relationship_attrset(self):
+ def test_alt_name_attr_subclass_relationship_attrset(
+ self, require_metaclass
+ ):
# [ticket:2900]
class A(Base):
__tablename__ = "a"
u = sess.query(User).filter(User.name == "u1").one()
u.addresses
- def test_as_declarative(self):
- class User(fixtures.ComparableEntity):
-
- __tablename__ = "users"
- id = Column(
- "id", Integer, primary_key=True, test_needs_autoincrement=True
- )
- name = Column("name", String(50))
- addresses = relationship("Address", backref="user")
-
- class Address(fixtures.ComparableEntity):
-
- __tablename__ = "addresses"
- id = Column(
- "id", Integer, primary_key=True, test_needs_autoincrement=True
- )
- email = Column("email", String(50))
- user_id = Column("user_id", Integer, ForeignKey("users.id"))
-
- reg = registry(metadata=Base.metadata)
- reg.mapped(User)
- reg.mapped(Address)
- reg.metadata.create_all(testing.db)
- u1 = User(
- name="u1", addresses=[Address(email="one"), Address(email="two")]
- )
- with Session(testing.db) as sess:
- sess.add(u1)
- sess.commit()
- with Session(testing.db) as sess:
- eq_(
- sess.query(User).all(),
- [
- User(
- name="u1",
- addresses=[Address(email="one"), Address(email="two")],
- )
- ],
- )
-
- def test_custom_mapper_attribute(self):
- def mymapper(cls, tbl, **kwargs):
- m = sa.orm.Mapper(cls, tbl, **kwargs)
- m.CHECK = True
- return m
-
- base = declarative_base()
-
- class Foo(base):
- __tablename__ = "foo"
- __mapper_cls__ = mymapper
- id = Column(Integer, primary_key=True)
-
- eq_(Foo.__mapper__.CHECK, True)
-
- def test_custom_mapper_argument(self):
- def mymapper(cls, tbl, **kwargs):
- m = sa.orm.Mapper(cls, tbl, **kwargs)
- m.CHECK = True
- return m
-
- base = declarative_base(mapper=mymapper)
-
- class Foo(base):
- __tablename__ = "foo"
- id = Column(Integer, primary_key=True)
-
- eq_(Foo.__mapper__.CHECK, True)
-
- def test_no_change_to_all_descriptors(self):
- base = declarative_base()
-
- class Foo(base):
- __tablename__ = "foo"
- id = Column(Integer, primary_key=True)
-
- eq_(Foo.__mapper__.all_orm_descriptors.keys(), ["id"])
-
def test_oops(self):
with testing.expect_warnings(
is_(inspect(Employee).local_table, Person.__table__)
- def test_expression(self):
+ def test_expression(self, require_metaclass):
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
def somecol(cls):
return Column(Integer)
- def test_column(self):
+ def test_column(self, require_metaclass):
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
sess.flush()
eq_(sess.query(User).filter(User.name == "someuser").one(), u1)
- def test_synonym_added(self):
+ def test_synonym_added(self, require_metaclass):
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
],
)
- def test_relationship_reference(self):
+ def test_relationship_reference(self, require_metaclass):
class Address(Base, fixtures.ComparableEntity):
__tablename__ = "addresses"
],
)
- def test_cls_docstring(self):
- class MyBase:
- """MyBase Docstring"""
-
- Base = declarative_base(cls=MyBase)
-
- eq_(Base.__doc__, MyBase.__doc__)
-
- def test_delattr_mapped_raises(self):
- Base = declarative_base()
-
- class Foo(Base):
- __tablename__ = "foo"
-
- id = Column(Integer, primary_key=True)
- data = Column(String)
-
- def go():
- del Foo.data
-
- assert_raises_message(
- NotImplementedError,
- "Can't un-map individual mapped attributes on a mapped class.",
- go,
- )
-
- def test_delattr_hybrid_fine(self):
- Base = declarative_base()
-
- class Foo(Base):
- __tablename__ = "foo"
-
- id = Column(Integer, primary_key=True)
- data = Column(String)
-
- @hybrid_property
- def data_hybrid(self):
- return self.data
-
- assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()
-
- del Foo.data_hybrid
-
- assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
-
- assert not hasattr(Foo, "data_hybrid")
-
- def test_setattr_hybrid_updates_descriptors(self):
- Base = declarative_base()
-
- class Foo(Base):
- __tablename__ = "foo"
-
- id = Column(Integer, primary_key=True)
- data = Column(String)
-
- assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
-
- @hybrid_property
- def data_hybrid(self):
- return self.data
-
- Foo.data_hybrid = data_hybrid
- assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()
-
- del Foo.data_hybrid
-
- assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
-
- assert not hasattr(Foo, "data_hybrid")
-
def test_classes_can_override_new(self):
class MyTable(Base):
__tablename__ = "my_table"
mt = MyTable(id=5)
eq_(mt.id, 5)
-
- def test_kw_support_in_declarative_meta_init(self):
- # This will not fail if DeclarativeMeta __init__ supports **kw
-
- class BaseUser(Base):
- __tablename__ = "base"
- id_ = Column(Integer, primary_key=True)
-
- @classmethod
- def __init_subclass__(cls, random_keyword=False, **kw):
- super().__init_subclass__(**kw)
- cls._set_random_keyword_used_here = random_keyword
-
- class User(BaseUser):
- __tablename__ = "user"
- id_ = Column(Integer, ForeignKey("base.id_"), primary_key=True)
-
- # Check the default option
- eq_(User._set_random_keyword_used_here, False)
-
- # Build the metaclass with a keyword!
- bases = (BaseUser,)
- UserType = DeclarativeMeta("UserType", bases, {}, random_keyword=True)
-
- # Check to see if __init_subclass__ works in supported versions
- eq_(UserType._set_random_keyword_used_here, True)
def test_collection_no_value(self):
Foo = self._fixture(uselist=True, useobject=True, active_history=True)
f = Foo()
- eq_(self._someattr_history(f, passive=True), (None, None, None))
+ eq_(self._someattr_history(f, passive=True), ((), (), ()))
def test_scalar_obj_no_value(self):
Foo = self._fixture(uselist=False, useobject=True, active_history=True)
f = Foo()
- eq_(self._someattr_history(f, passive=True), (None, None, None))
+ eq_(self._someattr_history(f, passive=True), ((), (), ()))
def test_scalar_no_value(self):
Foo = self._fixture(
uselist=False, useobject=False, active_history=True
)
f = Foo()
- eq_(self._someattr_history(f, passive=True), (None, None, None))
+ eq_(self._someattr_history(f, passive=True), ((), (), ()))
def test_scalar_active_set(self):
Foo = self._fixture(
u1 = User(name="ed")
insp = inspect(u1)
hist = insp.attrs.addresses.history
- eq_(hist.unchanged, None)
+ eq_(hist.unchanged, ())
u1.addresses
hist = insp.attrs.addresses.history
# stays, this is #4519
- eq_(hist.unchanged, None)
+ eq_(hist.unchanged, ())
def test_instance_state_scalar_attr_hist(self):
User = self.classes.User
assert "name" not in u1.__dict__
insp = inspect(u1)
hist = insp.attrs.name.history
- eq_(hist.unchanged, None)
+ eq_(hist.unchanged, ())
assert "name" not in u1.__dict__
def test_instance_state_collection_attr_load_hist(self):
)
u1 = User()
- eq_(attributes.instance_state(u1).attrs.x.history, (None, None, None))
- eq_(attributes.instance_state(u1).attrs.y.history, (None, None, None))
+ eq_(
+ attributes.instance_state(u1).attrs.x.history,
+ attributes.HISTORY_BLANK,
+ )
+ eq_(
+ attributes.instance_state(u1).attrs.y.history,
+ attributes.HISTORY_BLANK,
+ )
u1.y = 5
eq_(attributes.instance_state(u1).attrs.x.history, ([5], (), ()))