git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Build out new declarative systems; deprecate mapper()
author Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 31 Aug 2020 15:46:55 +0000 (11:46 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Thu, 10 Sep 2020 21:53:53 +0000 (17:53 -0400)
The ORM Declarative system is now unified into the ORM itself, with new
import spaces under ``sqlalchemy.orm`` and new kinds of mappings.  Support
for decorator-based mappings without using a base class, support for
classical-style mapper() calls that have access to the declarative class
registry for relationships, and full integration of Declarative with 3rd
party class attribute systems like ``dataclasses`` and ``attrs`` is now
supported.

Fixes: #5508
Change-Id: I130b2b6edff6450bfe8a3e6baa099ff04b5471ff

66 files changed:
doc/build/changelog/migration_14.rst
doc/build/changelog/unreleased_14/5027.rst
doc/build/changelog/unreleased_14/5508.rst [new file with mode: 0644]
doc/build/core/engines_connections.rst
doc/build/core/expression_api.rst
doc/build/core/index.rst
doc/build/core/schema.rst
doc/build/core/types.rst
doc/build/faq/index.rst
doc/build/orm/basic_relationships.rst
doc/build/orm/declarative_config.rst [new file with mode: 0644]
doc/build/orm/declarative_mapping.rst [new file with mode: 0644]
doc/build/orm/declarative_mixins.rst [new file with mode: 0644]
doc/build/orm/declarative_tables.rst [new file with mode: 0644]
doc/build/orm/extensions/declarative/api.rst
doc/build/orm/extensions/declarative/basic_use.rst
doc/build/orm/extensions/declarative/index.rst
doc/build/orm/extensions/declarative/inheritance.rst
doc/build/orm/extensions/declarative/mixins.rst
doc/build/orm/extensions/declarative/relationships.rst
doc/build/orm/extensions/declarative/table_config.rst
doc/build/orm/inheritance.rst
doc/build/orm/internals.rst
doc/build/orm/loading_objects.rst
doc/build/orm/mapper_config.rst
doc/build/orm/mapping_api.rst
doc/build/orm/mapping_columns.rst
doc/build/orm/mapping_styles.rst
doc/build/orm/nonstandard_mappings.rst
doc/build/orm/relationships.rst
doc/build/orm/scalar_mapping.rst
doc/build/orm/session.rst
doc/build/orm/tutorial.rst
lib/sqlalchemy/exc.py
lib/sqlalchemy/ext/automap.py
lib/sqlalchemy/ext/declarative/__init__.py
lib/sqlalchemy/ext/declarative/api.py [deleted file]
lib/sqlalchemy/ext/declarative/extensions.py [new file with mode: 0644]
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/clsregistry.py [moved from lib/sqlalchemy/ext/declarative/clsregistry.py with 72% similarity]
lib/sqlalchemy/orm/decl_api.py [new file with mode: 0644]
lib/sqlalchemy/orm/decl_base.py [moved from lib/sqlalchemy/ext/declarative/base.py with 73% similarity]
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/testing/entities.py
lib/sqlalchemy/testing/fixtures.py
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/deprecations.py
test/ext/declarative/test_inheritance.py
test/ext/declarative/test_reflection.py
test/orm/declarative/test_basic.py [moved from test/ext/declarative/test_basic.py with 95% similarity]
test/orm/declarative/test_clsregistry.py [moved from test/ext/declarative/test_clsregistry.py with 77% similarity]
test/orm/declarative/test_concurrency.py [moved from test/ext/declarative/test_concurrency.py with 95% similarity]
test/orm/declarative/test_inheritance.py [new file with mode: 0644]
test/orm/declarative/test_mixin.py [moved from test/ext/declarative/test_mixin.py with 93% similarity]
test/orm/declarative/test_reflection.py [new file with mode: 0644]
test/orm/inheritance/_poly_fixtures.py
test/orm/test_dataclasses_py3k.py
test/orm/test_default_strategies.py
test/orm/test_deferred.py
test/orm/test_eager_relations.py
test/orm/test_events.py
test/orm/test_mapper.py
test/orm/test_options.py
test/orm/test_relationships.py

index 91b85660599df767cf506c8f30c639860441440f..ca51ea26a6cacdd839f111ab852bef3a4b1e57d4 100644 (file)
@@ -224,6 +224,117 @@ driven in order to support this new feature.
 :ticket:`4808`
 :ticket:`5004`
 
+.. _change_5508:
+
+Declarative is now integrated into the ORM with new features
+-------------------------------------------------------------
+
+After ten years or so of popularity, the ``sqlalchemy.ext.declarative``
+package is now integrated into the ``sqlalchemy.orm`` namespace, with the
+exception of the declarative "extension" classes which remain as Declarative
+extensions.
+
+The new classes added to ``sqlalchemy.orm`` include:
+
+* :class:`_orm.registry` - a new class that supersedes the role of the
+  "declarative base" class, serving as a registry of mapped classes which
+  can be referenced via string name within :func:`_orm.relationship` calls
+  and is agnostic of the style in which any particular class was mapped.
+
+* :func:`_orm.declarative_base` - this is the same declarative base class that
+  has been in use throughout the span of the declarative system, except it now
+  references a :class:`_orm.registry` object internally and is implemented
+  by the :meth:`_orm.registry.generate_base` method which can be invoked
+  from a :class:`_orm.registry` directly.   The :func:`_orm.declarative_base`
+  function creates this registry automatically so there is no impact on
+  existing code.    The ``sqlalchemy.ext.declarative.declarative_base`` name
+  is still present, emitting a 2.0 deprecation warning when
+  :ref:`2.0 deprecations mode <deprecation_20_mode>` is enabled.
+
+* :func:`_orm.declared_attr` - the same "declared attr" function call now
+  part of ``sqlalchemy.orm``.  The ``sqlalchemy.ext.declarative.declared_attr``
+  name is still present, emitting a 2.0 deprecation warning when
+  :ref:`2.0 deprecations mode <deprecation_20_mode>` is enabled.
+
+* Other names moved into ``sqlalchemy.orm`` include :func:`_orm.has_inherited_table`,
+  :func:`_orm.synonym_for`, :class:`_orm.DeclarativeMeta`, :func:`_orm.as_declarative`.
+
+In addition, the :func:`_declarative.instrument_declarative` function is
+deprecated, superseded by :meth:`_orm.registry.map_declaratively`.  The
+:class:`_declarative.ConcreteBase`, :class:`_declarative.AbstractConcreteBase`,
+and :class:`_declarative.DeferredReflection` classes remain as extensions in the
+:ref:`declarative_toplevel` package.
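+
+For example, the new registry-centric pattern might be sketched as follows
+(the ``mapper_registry`` name here is illustrative only)::
+
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    # equivalent to Base = declarative_base()
+    Base = mapper_registry.generate_base()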
+
+Mapping styles have now been organized such that they all extend from
+the :class:`_orm.registry` object, and fall into these categories:
+
+* :ref:`orm_declarative_mapping`
+    * Using :func:`_orm.declarative_base` Base class w/ metaclass
+        * :ref:`orm_declarative_table`
+        * :ref:`Imperative Table (a.k.a. "hybrid table") <orm_imperative_table_configuration>`
+    * Using :meth:`_orm.registry.mapped` Declarative Decorator
+        * Declarative Table
+        * Imperative Table (Hybrid)
+            * :ref:`orm_declarative_dataclasses`
+* :ref:`Imperative (a.k.a. "classical" mapping) <classical_mapping>`
+    * Using :meth:`_orm.registry.map_imperatively`
+        * :ref:`orm_imperative_dataclasses`
+
+The existing classical mapping function :func:`_orm.mapper` remains; however,
+calling :func:`_orm.mapper` directly is now deprecated.  The new
+:meth:`_orm.registry.map_imperatively` method routes the request through
+the :class:`_orm.registry` so that it integrates with other declarative
+mappings unambiguously.
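+
+As a brief sketch, an imperative mapping routed through the registry might
+look like the following (the table, class and ``mapper_registry`` names are
+illustrative)::
+
+    from sqlalchemy import Column, Integer, String, Table
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    user_table = Table(
+        "user",
+        mapper_registry.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+    )
+
+    class User(object):
+        pass
+
+    # previously: mapper(User, user_table)
+    mapper_registry.map_imperatively(User, user_table)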
+
+The new approach interoperates with 3rd party class instrumentation systems
+that necessarily must take place on the class before the mapping process
+does, allowing declarative mapping to work via a decorator instead of a
+declarative base, so that packages like dataclasses_ and attrs_ can be
+used with declarative mappings, in addition to working with classical
+mappings.
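+
+For example, a decorator-based declarative mapping might be sketched as
+follows (names are illustrative)::
+
+    from sqlalchemy import Column, Integer, String
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    @mapper_registry.mapped
+    class User(object):
+        __tablename__ = "user"
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))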
+
+Declarative documentation has now been fully integrated into the ORM mapper
+configuration documentation and includes examples for all styles of mappings
+organized into one place. See the section
+:ref:`orm_mapping_classes_toplevel` for the start of the newly reorganized
+documentation.
+
+.. _dataclasses: https://docs.python.org/3/library/dataclasses.html
+.. _attrs: https://pypi.org/project/attrs/
+
+.. seealso::
+
+  :ref:`orm_mapping_classes_toplevel`
+
+  :ref:`change_5027`
+
+:ticket:`5508`
+
+
+.. _change_5027:
+
+Python Dataclasses, attrs Supported w/ Declarative, Imperative Mappings
+-----------------------------------------------------------------------
+
+Along with the new declarative decorator styles introduced in :ref:`change_5508`,
+the :class:`_orm.Mapper` is now explicitly aware of the Python ``dataclasses``
+module and will recognize attributes that are configured in this way, and
+proceed to map them without skipping them as was the case previously.  In the
+case of the ``attrs`` module, ``attrs`` already removes its own attributes
+from the class so was already compatible with SQLAlchemy classical mappings.
+With the addition of the :meth:`_orm.registry.mapped` decorator, both
+attribute systems can now interoperate with Declarative mappings as well.
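+
+As a brief sketch, a dataclass might be mapped declaratively using the
+:meth:`_orm.registry.mapped` decorator together with an imperative
+``__table__`` (names are illustrative; see the linked sections below for
+the complete patterns)::
+
+    from dataclasses import dataclass, field
+    from typing import Optional
+
+    from sqlalchemy import Column, Integer, String, Table
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    @mapper_registry.mapped
+    @dataclass
+    class User:
+        __table__ = Table(
+            "user",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50)),
+        )
+
+        id: int = field(init=False)
+        name: Optional[str] = None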
+
+.. seealso::
+
+  :ref:`orm_declarative_dataclasses`
+
+  :ref:`orm_imperative_dataclasses`
+
+
+:ticket:`5027`
+
+
 .. _change_3414:
 
 Asynchronous IO Support for Core and ORM
index 6fd2bc9b253460cb276aa171fa28f67893321f82..fba8c6ba3bc6a8a008358b1691dd6bc2626a3f53 100644 (file)
@@ -3,6 +3,13 @@
     :tickets: 5027
 
     Added support for direct mapping of Python classes that are defined using
-    the Python ``dataclasses`` decorator.    See the section
-    :ref:`mapping_dataclasses` for background.  Pull request courtesy Václav
-    Klusák.
\ No newline at end of file
+    the Python ``dataclasses`` decorator.    Pull request courtesy Václav
+    Klusák.  The new feature integrates into new support at the Declarative
+    level for systems such as ``dataclasses`` and ``attrs``.
+
+    .. seealso::
+
+        :ref:`change_5027`
+
+        :ref:`change_5508`
+
diff --git a/doc/build/changelog/unreleased_14/5508.rst b/doc/build/changelog/unreleased_14/5508.rst
new file mode 100644 (file)
index 0000000..d1304c7
--- /dev/null
@@ -0,0 +1,17 @@
+.. change::
+    :tags: change, orm
+    :tickets: 5508
+
+    The ORM Declarative system is now unified into the ORM itself, with new
+    import spaces under ``sqlalchemy.orm`` and new kinds of mappings.  Support
+    for decorator-based mappings without using a base class, support for
+    classical-style mapper() calls that have access to the declarative class
+    registry for relationships, and full integration of Declarative with 3rd
+    party class attribute systems like ``dataclasses`` and ``attrs`` is now
+    supported.
+
+    .. seealso::
+
+        :ref:`change_5508`
+
+        :ref:`change_5027`
index f163a7629d6f0b74eb1676ce2535b9afc13c0e7d..70ece2ca5801d320e8e59983d02160599e38f6e6 100644 (file)
@@ -3,7 +3,7 @@ Engine and Connection Use
 =========================
 
 .. toctree::
-       :maxdepth: 2
+       :maxdepth: 3
 
        engines
        connections
index c080b3a6335bbcb01c79142b09d0d529620e01fe..944222fbd6964d213596710fb6e72d9056a61bab 100644 (file)
@@ -10,7 +10,7 @@ see :ref:`sqlexpression_toplevel`.
 
 
 .. toctree::
-    :maxdepth: 1
+    :maxdepth: 3
 
     sqlelement
     selectable
index a3574341a4cef9f7515a7c8e5d87ed3ece31bc32..aaa63ca26543ecb2fb7180207374e06a237aaa72 100644 (file)
@@ -17,4 +17,4 @@ Language provides a schema-centric usage paradigm.
     types
     engines_connections
     api_basics
-    future
\ No newline at end of file
+    future
index 5de685c7f24840f04ed368e14724595f8929fd96..5a4f939bf7ef83563a269d53d6b02897c23dce67 100644 (file)
@@ -33,7 +33,7 @@ real DDL. They are therefore most intuitive to those who have some background
 in creating real schema generation scripts.
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     metadata
     reflection
index ab761a1cb0999f384144a7d290fc71a956b53d19..762105646cb83f23100b2c40e221551daf25c21a 100644 (file)
@@ -4,7 +4,7 @@ Column and Data Types
 =====================
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     type_basics
     custom_types
index 5238490a4070b2022ba72541fc07c6ffa5f755b6..810a040115727eb618354a134934629820941a02 100644 (file)
@@ -8,7 +8,7 @@ The Frequently Asked Questions section is a growing collection of commonly
 observed questions to well-known issues.
 
 .. toctree::
-    :maxdepth: 1
+    :maxdepth: 2
 
     connections
     metadata_schema
index b05701802b7579a3837b3613d5c0ccef2a2c80f5..0ea699180be343ae11487be2e088411d413699cc 100644 (file)
@@ -456,3 +456,200 @@ associated object, and a second to a target attribute.
   two-object ``Parent->Child`` relationship while still using the association
   object pattern, use the association proxy extension
   as documented at :ref:`associationproxy_toplevel`.
+
+.. _orm_declarative_relationship_eval:
+
+Late-Evaluation of Relationship Arguments
+-----------------------------------------
+
+Many of the examples in the preceding sections illustrate mappings
+where the various :func:`_orm.relationship` constructs refer to their target
+classes using a string name, rather than the class itself::
+
+    class Parent(Base):
+        # ...
+
+        children = relationship("Child", back_populates="parent")
+
+    class Child(Base):
+        # ...
+
+        parent = relationship("Parent", back_populates="children")
+
+These string names are resolved into classes in the mapper resolution stage,
+which is an internal process that occurs typically after all mappings have
+been defined and is normally triggered by the first usage of the mappings
+themselves.     The :class:`_orm.registry` object is the container in which
+these names are stored and resolved to the mapped classes they refer towards.
+
+In addition to the main class argument for :func:`_orm.relationship`,
+other arguments which depend upon the columns present on an as-yet
+undefined class may also be specified either as Python functions, or more
+commonly as strings.   For most of these arguments, with the exception of
+the main class argument, string inputs are
+**evaluated as Python expressions using Python's built-in eval() function**,
+as they are intended to receive complete SQL expressions.
+
+.. warning:: As the Python ``eval()`` function is used to interpret the
+   late-evaluated string arguments passed to :func:`_orm.relationship` mapper
+   configuration construct, these arguments should **not** be repurposed
+   such that they would receive untrusted user input; ``eval()`` is
+   **not secure** against untrusted user input.
+
+The full namespace available within this evaluation includes all classes mapped
+for this declarative base, as well as the contents of the ``sqlalchemy``
+package, including expression functions like :func:`_sql.desc` and
+:attr:`_functions.func`::
+
+    class Parent(Base):
+        # ...
+
+        children = relationship(
+            "Child",
+            order_by="desc(Child.email_address)",
+            primaryjoin="Parent.id == Child.parent_id"
+        )
+
+For the case where more than one module contains a class of the same name,
+string class names can also be specified as module-qualified paths
+within any of these string expressions::
+
+    class Parent(Base):
+        # ...
+
+        children = relationship(
+            "myapp.mymodel.Child",
+            order_by="desc(myapp.mymodel.Child.email_address)",
+            primaryjoin="myapp.mymodel.Parent.id == myapp.mymodel.Child.parent_id"
+        )
+
+The qualified path can be any partial path that removes ambiguity between
+the names.  For example, to disambiguate between
+``myapp.model1.Child`` and ``myapp.model2.Child``,
+we can specify ``model1.Child`` or ``model2.Child``::
+
+    class Parent(Base):
+        # ...
+
+        children = relationship(
+            "model1.Child",
+            order_by="desc(model1.Child.email_address)",
+            primaryjoin="Parent.id == model1.Child.parent_id"
+        )
+
+The :func:`_orm.relationship` construct also accepts Python functions or
+lambdas as input for these arguments.   This has the advantage of providing
+more compile-time safety and better support for IDEs and :pep:`484` scenarios.
+
+A Python functional approach might look like the following::
+
+    from sqlalchemy import desc
+
+    def _resolve_child_model():
+        from myapplication import Child
+        return Child
+
+    class Parent(Base):
+        # ...
+
+        children = relationship(
+            _resolve_child_model,
+            order_by=lambda: desc(_resolve_child_model().email_address),
+            primaryjoin=lambda: Parent.id == _resolve_child_model().parent_id
+        )
+
+The full list of parameters which accept Python functions/lambdas or strings
+that will be passed to ``eval()`` is:
+
+* :paramref:`_orm.relationship.order_by`
+
+* :paramref:`_orm.relationship.primaryjoin`
+
+* :paramref:`_orm.relationship.secondaryjoin`
+
+* :paramref:`_orm.relationship.secondary`
+
+* :paramref:`_orm.relationship.remote_side`
+
+* :paramref:`_orm.relationship.foreign_keys`
+
+* :paramref:`_orm.relationship._user_defined_foreign_keys`
+
+.. versionchanged:: 1.3.16
+
+    Prior to SQLAlchemy 1.3.16, the main :paramref:`_orm.relationship.argument`
+    to :func:`_orm.relationship` was also evaluated through ``eval()``.  As of
+    1.3.16 the string name is resolved from the class resolver directly, without
+    supporting custom Python expressions.
+
+.. warning::
+
+    As stated previously, the above parameters to :func:`_orm.relationship`
+    are **evaluated as Python code expressions using eval().  DO NOT PASS
+    UNTRUSTED INPUT TO THESE ARGUMENTS.**
+
+It should also be noted that in a similar way as described at
+:ref:`orm_declarative_table_adding_columns`, any :class:`_orm.MapperProperty`
+construct can be added to a declarative base mapping at any time.  If
+we wanted to implement this :func:`_orm.relationship` after the ``Child``
+class is available, we could also apply it afterwards::
+
+    # first, module A, where Child has not been created yet,
+    # we create a Parent class which knows nothing about Child
+
+    class Parent(Base):
+        # ...
+
+
+    #... later, in Module B, which is imported after module A:
+
+    class Child(Base):
+        # ...
+
+    from module_a import Parent
+
+    # assign the Parent.children relationship as a class variable.  The
+    # declarative base class will intercept this and map the relationship.
+    Parent.children = relationship(
+        Child,
+        primaryjoin=Child.parent_id==Parent.id
+    )
+
+.. note:: Assignment of mapped properties to a declaratively mapped class will only
+    function correctly if the "declarative base" class is used, which also
+    provides for a metaclass-driven ``__setattr__()`` method which will
+    intercept these operations. It will **not** work if the declarative
+    decorator provided by :meth:`_orm.registry.mapped` is used, nor will it
+    work for an imperatively mapped class mapped by
+    :meth:`_orm.registry.map_imperatively`.
+
+
+.. _orm_declarative_relationship_secondary_eval:
+
+Late-Evaluation for a many-to-many relationship
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Many-to-many relationships include a reference to an additional, non-mapped
+:class:`_schema.Table` object that is typically present in the :class:`_schema.MetaData`
+collection referred towards by the :class:`_orm.registry`.   The late-evaluation
+system includes support for having this attribute also be specified as a
+string argument which will be resolved from this :class:`_schema.MetaData`
+collection.  Below we specify an association table ``keyword_author``,
+sharing the :class:`_schema.MetaData` collection associated with our
+declarative base and its :class:`_orm.registry`.  We can then refer to this
+:class:`_schema.Table` by name in the :paramref:`_orm.relationship.secondary`
+parameter::
+
+    keyword_author = Table(
+        'keyword_author', Base.metadata,
+        Column('author_id', Integer, ForeignKey('authors.id')),
+        Column('keyword_id', Integer, ForeignKey('keywords.id'))
+        )
+
+    class Author(Base):
+        __tablename__ = 'authors'
+        id = Column(Integer, primary_key=True)
+        keywords = relationship("Keyword", secondary="keyword_author")
+
+For additional detail on many-to-many relationships see the section
+:ref:`relationships_many_to_many`.
diff --git a/doc/build/orm/declarative_config.rst b/doc/build/orm/declarative_config.rst
new file mode 100644 (file)
index 0000000..bf5bd14
--- /dev/null
@@ -0,0 +1,335 @@
+.. _orm_declarative_mapper_config_toplevel:
+
+=============================================
+Mapper Configuration with Declarative
+=============================================
+
+The section :ref:`orm_mapper_configuration_overview` discusses the general
+configurational elements of a :class:`_orm.Mapper` construct, which is the
+structure that defines how a particular user defined class is mapped to a
+database table or other SQL construct.    The following sections describe
+specific details about how the declarative system goes about constructing
+the :class:`_orm.Mapper`.
+
+.. _orm_declarative_properties:
+
+Defining Mapped Properties with Declarative
+--------------------------------------------
+
+The examples given at :ref:`orm_declarative_table_config_toplevel`
+illustrate mappings against table-bound columns;
+the mapping of an individual column to an ORM class attribute is represented
+internally by the :class:`_orm.ColumnProperty` construct.   There are many
+other varieties of mapper properties, the most common being the
+:func:`_orm.relationship` construct.  Other kinds of properties include
+synonyms to columns which are defined using the :func:`_orm.synonym`
+construct, SQL expressions that are defined using the :func:`_orm.column_property`
+construct, and deferred columns and SQL expressions which load only when
+accessed, defined using the :func:`_orm.deferred` construct.
+
+While an :ref:`imperative mapping <orm_imperative_mapping>` makes use of
+the :ref:`properties <orm_mapping_properties>` dictionary to establish
+all the mapped class attributes, in the declarative
+mapping, these properties are all specified inline with the class definition,
+which in the case of a declarative table mapping are inline with the
+:class:`_schema.Column` objects that will be used to generate a
+:class:`_schema.Table` object.
+
+Working with the example mapping of ``User`` and ``Address``, we may illustrate
+a declarative table mapping that includes not just :class:`_schema.Column`
+objects but also relationships and SQL expressions::
+
+    # mapping attributes using declarative with declarative table
+    # i.e. __tablename__
+
+    from sqlalchemy import Column, Integer, String, Text, ForeignKey
+    from sqlalchemy.orm import column_property, relationship, deferred
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base()
+
+    class User(Base):
+        __tablename__ = 'user'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+        firstname = Column(String(50))
+        lastname = Column(String(50))
+
+        fullname = column_property(firstname + " " + lastname)
+
+        addresses = relationship("Address", back_populates="user")
+
+    class Address(Base):
+        __tablename__ = 'address'
+
+        id = Column(Integer, primary_key=True)
+        user_id = Column(ForeignKey("user.id"))
+        email_address = Column(String)
+        address_statistics = deferred(Column(Text))
+
+        user = relationship("User", back_populates="addresses")
+
+The above declarative table mapping features two tables, each with a
+:func:`_orm.relationship` referring to the other, as well as a simple
+SQL expression mapped by :func:`_orm.column_property`, and an additional
+:class:`_schema.Column` that will be loaded on a "deferred" basis as defined
+by the :func:`_orm.deferred` construct.    More documentation
+on these particular concepts may be found at :ref:`relationship_patterns`,
+:ref:`mapper_column_property_sql_expressions`, and :ref:`deferred`.
+
+Properties may be specified with a declarative mapping as above using
+"hybrid table" style as well; the :class:`_schema.Column` objects that
+are directly part of a table move into the :class:`_schema.Table` definition
+but everything else, including composed SQL expressions, would still be
+inline with the class definition.  Constructs that need to refer to a
+:class:`_schema.Column` directly would reference it in terms of the
+:class:`_schema.Table` object.  To illustrate the above mapping using
+hybrid table style::
+
+    # mapping attributes using declarative with imperative table
+    # i.e. __table__
+
+    from sqlalchemy import Table
+    from sqlalchemy import Column, Integer, String, Text, ForeignKey
+    from sqlalchemy.orm import column_property, relationship, deferred
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base()
+
+    class User(Base):
+        __table__ = Table(
+            "user",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String),
+            Column("firstname", String(50)),
+            Column("lastname", String(50))
+        )
+
+        fullname = column_property(__table__.c.firstname + " " + __table__.c.lastname)
+
+        addresses = relationship("Address", back_populates="user")
+
+    class Address(Base):
+        __table__ = Table(
+            "address",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("user_id", ForeignKey("user.id")),
+            Column("email_address", String),
+            Column("address_statistics", Text)
+        )
+
+        address_statistics = deferred(__table__.c.address_statistics)
+
+        user = relationship("User", back_populates="addresses")
+
+Things to note above:
+
+* The address :class:`_schema.Table` contains a column called ``address_statistics``;
+  however, we re-map this column under the same attribute name to be under
+  the control of a :func:`_orm.deferred` construct.
+
+* With both declarative table and hybrid table mappings, when we define a
+  :class:`_schema.ForeignKey` construct, we always name the target table
+  using the **table name**, and not the mapped class name.
+
+* When we define :func:`_orm.relationship` constructs, as these constructs
+  create a linkage between two mapped classes where one necessarily is defined
+  before the other, we can refer to the remote class using its string name.
+  This functionality also extends into the area of other arguments specified
+  on the :func:`_orm.relationship` such as the "primary join" and "order by"
+  arguments.   See the next section for details on this.
+
+
+.. _orm_declarative_mapper_options:
+
+Mapper Configuration Options with Declarative
+----------------------------------------------
+
+With all mapping forms, the mapping of the class is configured through
+parameters that become part of the :class:`_orm.Mapper` object.
+The function which ultimately receives these arguments is the
+:func:`_orm.mapper` function; they are delivered to it from one of
+the front-facing mapping functions defined on the :class:`_orm.registry`
+object.
+
+For the declarative form of mapping, mapper arguments are specified
+using the ``__mapper_args__`` declarative class variable, which is a dictionary
+that is passed as keyword arguments to the :func:`_orm.mapper` function.
+Some examples:
+
+**Version ID Column**
+
+The :paramref:`_orm.mapper.version_id_col` and
+:paramref:`_orm.mapper.version_id_generator` parameters::
+
+    from datetime import datetime
+
+    class Widget(Base):
+        __tablename__ = 'widgets'
+
+        id = Column(Integer, primary_key=True)
+        timestamp = Column(DateTime, nullable=False)
+
+        __mapper_args__ = {
+            'version_id_col': timestamp,
+            'version_id_generator': lambda v: datetime.now()
+        }
+
+**Single Table Inheritance**
+
+The :paramref:`_orm.mapper.polymorphic_on` and
+:paramref:`_orm.mapper.polymorphic_identity` parameters::
+
+    class Person(Base):
+        __tablename__ = 'person'
+
+        person_id = Column(Integer, primary_key=True)
+        type = Column(String, nullable=False)
+
+        __mapper_args__ = dict(
+            polymorphic_on=type,
+            polymorphic_identity="person"
+        )
+
+    class Employee(Person):
+        __mapper_args__ = dict(
+            polymorphic_identity="employee"
+        )
+
+The ``__mapper_args__`` dictionary may be generated from a class-bound
+descriptor method rather than from a fixed dictionary by making use of the
+:func:`_orm.declared_attr` construct.   The section :ref:`orm_mixins_toplevel`
+discusses this concept further.
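+
+For example, a hypothetical mixin (a minimal sketch; the name and the
+specific mapper argument are illustrative) might derive a mapper argument
+from the class being mapped::
+
+    from sqlalchemy.orm import declared_attr
+
+    class HasIdentityMixin(object):
+        @declared_attr
+        def __mapper_args__(cls):
+            # derive the polymorphic identity from the class name
+            return {"polymorphic_identity": cls.__name__.lower()}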
+
+.. seealso::
+
+    :ref:`orm_mixins_toplevel`
+
+Other Declarative Mapping Directives
+--------------------------------------
+
+``__declare_last__()``
+~~~~~~~~~~~~~~~~~~~~~~
+
+The ``__declare_last__()`` hook allows definition of
+a class level function that is automatically called by the
+:meth:`.MapperEvents.after_configured` event, which occurs after mappings are
+assumed to be completed and the 'configure' step has finished::
+
+    class MyClass(Base):
+        @classmethod
+        def __declare_last__(cls):
+            ""
+            # do something with mappings
+
+``__declare_first__()``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Like ``__declare_last__()``, but is called at the beginning of mapper
+configuration via the :meth:`.MapperEvents.before_configured` event::
+
+    class MyClass(Base):
+        @classmethod
+        def __declare_first__(cls):
+            ""
+            # do something before mappings are configured
+
+.. versionadded:: 0.9.3
+
+.. _declarative_abstract:
+
+``__abstract__``
+~~~~~~~~~~~~~~~~
+
+``__abstract__`` causes declarative to skip the production
+of a table or mapper for the class entirely.  A class can be added within a
+hierarchy in the same way as a mixin (see :ref:`declarative_mixins`), allowing
+subclasses to extend just from the special class::
+
+    class SomeAbstractBase(Base):
+        __abstract__ = True
+
+        def some_helpful_method(self):
+            ""
+
+        @declared_attr
+        def __mapper_args__(cls):
+            return {"helpful mapper arguments":True}
+
+    class MyMappedClass(SomeAbstractBase):
+        ""
+
+One possible use of ``__abstract__`` is to use a distinct
+:class:`_schema.MetaData` for different bases::
+
+    Base = declarative_base()
+
+    class DefaultBase(Base):
+        __abstract__ = True
+        metadata = MetaData()
+
+    class OtherBase(Base):
+        __abstract__ = True
+        metadata = MetaData()
+
+Above, classes which inherit from ``DefaultBase`` will use one
+:class:`_schema.MetaData` as the registry of tables, and those which inherit from
+``OtherBase`` will use a different one. The tables themselves can then be
+created perhaps within distinct databases::
+
+    DefaultBase.metadata.create_all(some_engine)
+    OtherBase.metadata.create_all(some_other_engine)
+
+
+``__table_cls__``
+~~~~~~~~~~~~~~~~~
+
+Allows the callable / class used to generate a :class:`_schema.Table` to be customized.
+This is a very open-ended hook that can allow special customizations
+to a :class:`_schema.Table` that one generates here::
+
+    class MyMixin(object):
+        @classmethod
+        def __table_cls__(cls, name, metadata, *arg, **kw):
+            return Table(
+                "my_" + name,
+                metadata, *arg, **kw
+            )
+
+The above mixin would cause all :class:`_schema.Table` objects generated to include
+the prefix ``"my_"``, followed by the name normally specified using the
+``__tablename__`` attribute.
+
+``__table_cls__`` also supports the case of returning ``None``, which
+causes the class to be mapped with single-table inheritance against its
+superclass.  This may be useful in some customization schemes to determine
+that single-table inheritance should take place based on the arguments for
+the table itself, such as to use single-inheritance if there is no primary
+key present::
+
+    class AutoTable(object):
+        @declared_attr
+        def __tablename__(cls):
+            return cls.__name__
+
+        @classmethod
+        def __table_cls__(cls, *arg, **kw):
+            for obj in arg[1:]:
+                if (isinstance(obj, Column) and obj.primary_key) or \
+                        isinstance(obj, PrimaryKeyConstraint):
+                    return Table(*arg, **kw)
+
+            return None
+
+    class Person(AutoTable, Base):
+        id = Column(Integer, primary_key=True)
+
+    class Employee(Person):
+        employee_name = Column(String)
+
+The above ``Employee`` class would be mapped as single-table inheritance
+against ``Person``; the ``employee_name`` column would be added as a member
+of the ``Person`` table.
+
diff --git a/doc/build/orm/declarative_mapping.rst b/doc/build/orm/declarative_mapping.rst
new file mode 100644 (file)
index 0000000..9d2f3af
--- /dev/null
@@ -0,0 +1,17 @@
+.. _declarative_config_toplevel:
+
+================================
+Mapping Classes with Declarative
+================================
+
+The Declarative mapping style is the primary style of mapping that is used
+with SQLAlchemy.   See the section :ref:`orm_declarative_mapping` for the
+top level introduction.
+
+
+.. toctree::
+    :maxdepth: 3
+
+    declarative_tables
+    declarative_config
+    declarative_mixins
diff --git a/doc/build/orm/declarative_mixins.rst b/doc/build/orm/declarative_mixins.rst
new file mode 100644 (file)
index 0000000..c591218
--- /dev/null
@@ -0,0 +1,548 @@
+.. _orm_mixins_toplevel:
+
+Composing Mapped Hierarchies with Mixins
+========================================
+
+A common need when mapping classes using the :ref:`Declarative
+<orm_declarative_mapping>` style is to share some functionality, such as a set
+of common columns, some common table options, or other mapped properties,
+across many classes.  The standard Python idiom for this is to have the
+classes inherit from a superclass which includes these common features.
+
+When using declarative mappings, this idiom is supported via the use
+of mixin classes, as well as via augmenting the declarative base
+produced by either the :meth:`_orm.registry.generate_base` method
+or the :func:`_orm.declarative_base` function.
+
+An example of some commonly mixed-in idioms is below::
+
+    from sqlalchemy.orm import declared_attr
+
+    class MyMixin(object):
+
+        @declared_attr
+        def __tablename__(cls):
+            return cls.__name__.lower()
+
+        __table_args__ = {'mysql_engine': 'InnoDB'}
+        __mapper_args__= {'always_refresh': True}
+
+        id =  Column(Integer, primary_key=True)
+
+    class MyModel(MyMixin, Base):
+        name = Column(String(1000))
+
+Where above, the class ``MyModel`` will contain an "id" column
+as the primary key, a ``__tablename__`` attribute that derives
+from the name of the class itself, as well as ``__table_args__``
+and ``__mapper_args__`` defined by the ``MyMixin`` mixin class.
+
+There's no fixed convention over whether ``MyMixin`` precedes
+``Base`` or not.  Normal Python method resolution rules apply, and
+the above example would work just as well with::
+
+    class MyModel(Base, MyMixin):
+        name = Column(String(1000))
+
+This works because ``Base`` here doesn't define any of the
+variables that ``MyMixin`` defines, i.e. ``__tablename__``,
+``__table_args__``, ``id``, etc.   If the ``Base`` did define
+an attribute of the same name, the class placed first in the
+inherits list would determine which attribute is used on the
+newly defined class.
+
+Augmenting the Base
+~~~~~~~~~~~~~~~~~~~
+
+In addition to using a pure mixin, most of the techniques in this
+section can also be applied to the base class itself, for patterns that
+should apply to all classes derived from a particular base.  This is achieved
+using the ``cls`` argument of the :func:`_orm.declarative_base` function::
+
+    from sqlalchemy.orm import declared_attr
+
+    class Base(object):
+        @declared_attr
+        def __tablename__(cls):
+            return cls.__name__.lower()
+
+        __table_args__ = {'mysql_engine': 'InnoDB'}
+
+        id =  Column(Integer, primary_key=True)
+
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base(cls=Base)
+
+    class MyModel(Base):
+        name = Column(String(1000))
+
+Where above, ``MyModel`` and all other classes that derive from ``Base`` will
+have a table name derived from the class name, an ``id`` primary key column,
+as well as the "InnoDB" engine for MySQL.
+
+Mixing in Columns
+~~~~~~~~~~~~~~~~~
+
+The most basic way to specify a column on a mixin is by simple
+declaration::
+
+    class TimestampMixin(object):
+        created_at = Column(DateTime, default=func.now())
+
+    class MyModel(TimestampMixin, Base):
+        __tablename__ = 'test'
+
+        id =  Column(Integer, primary_key=True)
+        name = Column(String(1000))
+
+Where above, all declarative classes that include ``TimestampMixin``
+will also have a column ``created_at`` that applies a timestamp to
+all row insertions.
+
+Those familiar with the SQLAlchemy expression language know that
+the object identity of clause elements defines their role in a schema.
+Two ``Table`` objects ``a`` and ``b`` may both have a column called
+``id``, but the way these are differentiated is that ``a.c.id``
+and ``b.c.id`` are two distinct Python objects, referencing their
+parent tables ``a`` and ``b`` respectively.
+
+In the case of the mixin column, it seems that only one
+:class:`_schema.Column` object is explicitly created, yet the ultimate
+``created_at`` column above must exist as a distinct Python object
+for each separate destination class.  To accomplish this, the declarative
+extension creates a **copy** of each :class:`_schema.Column` object encountered on
+a class that is detected as a mixin.
+
+This copy mechanism is limited to simple columns that have no foreign
+keys, as a :class:`_schema.ForeignKey` itself contains references to columns
+which can't be properly recreated at this level.  For columns that
+have foreign keys, as well as for the variety of mapper-level constructs
+that require destination-explicit context, the
+:class:`_orm.declared_attr` decorator is provided so that
+patterns common to many classes can be defined as callables::
+
+    from sqlalchemy.orm import declared_attr
+
+    class ReferenceAddressMixin(object):
+        @declared_attr
+        def address_id(cls):
+            return Column(Integer, ForeignKey('address.id'))
+
+    class User(ReferenceAddressMixin, Base):
+        __tablename__ = 'user'
+        id = Column(Integer, primary_key=True)
+
+Where above, the ``address_id`` class-level callable is executed at the
+point at which the ``User`` class is constructed, and the declarative
+extension can use the resulting :class:`_schema.Column` object as returned by
+the method without the need to copy it.
+
+Columns generated by :class:`_orm.declared_attr` can also be
+referenced by ``__mapper_args__`` to a limited degree, currently
+by ``polymorphic_on`` and ``version_id_col``; the declarative extension
+will resolve them at class construction time::
+
+    class MyMixin:
+        @declared_attr
+        def type_(cls):
+            return Column(String(50))
+
+        __mapper_args__= {'polymorphic_on':type_}
+
+    class MyModel(MyMixin, Base):
+        __tablename__='test'
+        id =  Column(Integer, primary_key=True)
+
+
+Mixing in Relationships
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Relationships created by :func:`~sqlalchemy.orm.relationship` are provided
+with declarative mixin classes exclusively using the
+:class:`_orm.declared_attr` approach, eliminating any ambiguity
+which could arise when copying a relationship and its possibly column-bound
+contents. Below is an example which combines a foreign key column and a
+relationship so that two classes ``Foo`` and ``Bar`` can both be configured to
+reference a common target class via many-to-one::
+
+    class RefTargetMixin(object):
+        @declared_attr
+        def target_id(cls):
+            return Column('target_id', ForeignKey('target.id'))
+
+        @declared_attr
+        def target(cls):
+            return relationship("Target")
+
+    class Foo(RefTargetMixin, Base):
+        __tablename__ = 'foo'
+        id = Column(Integer, primary_key=True)
+
+    class Bar(RefTargetMixin, Base):
+        __tablename__ = 'bar'
+        id = Column(Integer, primary_key=True)
+
+    class Target(Base):
+        __tablename__ = 'target'
+        id = Column(Integer, primary_key=True)
+
+
+Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:func:`~sqlalchemy.orm.relationship` definitions which require explicit
+primaryjoin, order_by etc. expressions should in all but the most
+simplistic cases use **late bound** forms
+for these arguments, meaning, using either the string form or a lambda.
+The reason for this is that the related :class:`_schema.Column` objects which are to
+be configured using ``@declared_attr`` are not available to another
+``@declared_attr`` attribute; while the methods will work and return new
+:class:`_schema.Column` objects, those are not the :class:`_schema.Column` objects that
+Declarative will be using as it calls the methods on its own, thus using
+*different* :class:`_schema.Column` objects.
+
+The canonical example is the primaryjoin condition that depends upon
+another mixed-in column::
+
+    class RefTargetMixin(object):
+        @declared_attr
+        def target_id(cls):
+            return Column('target_id', ForeignKey('target.id'))
+
+        @declared_attr
+        def target(cls):
+            return relationship(Target,
+                primaryjoin=Target.id==cls.target_id   # this is *incorrect*
+            )
+
+Mapping a class using the above mixin, we will get an error like::
+
+    sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
+    yet associated with a Table.
+
+This is because the ``target_id`` :class:`_schema.Column` we've called upon in our
+``target()`` method is not the same :class:`_schema.Column` that declarative is
+actually going to map to our table.
+
+The condition above is resolved using a lambda::
+
+    class RefTargetMixin(object):
+        @declared_attr
+        def target_id(cls):
+            return Column('target_id', ForeignKey('target.id'))
+
+        @declared_attr
+        def target(cls):
+            return relationship(Target,
+                primaryjoin=lambda: Target.id==cls.target_id
+            )
+
+or alternatively, the string form (which ultimately generates a lambda)::
+
+    class RefTargetMixin(object):
+        @declared_attr
+        def target_id(cls):
+            return Column('target_id', ForeignKey('target.id'))
+
+        @declared_attr
+        def target(cls):
+            return relationship("Target",
+                primaryjoin="Target.id==%s.target_id" % cls.__name__
+            )
+
+.. seealso::
+
+    :ref:`orm_declarative_relationship_eval`
+
+Mixing in deferred(), column_property(), and other MapperProperty classes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Like :func:`~sqlalchemy.orm.relationship`, all
+:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as
+:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`,
+etc. ultimately involve references to columns, and therefore, when
+used with declarative mixins, have the :class:`_orm.declared_attr`
+requirement so that no reliance on copying is needed::
+
+    class SomethingMixin(object):
+
+        @declared_attr
+        def dprop(cls):
+            return deferred(Column(Integer))
+
+    class Something(SomethingMixin, Base):
+        __tablename__ = "something"
+
+        id = Column(Integer, primary_key=True)
+
+The :func:`.column_property` or other construct may refer
+to other columns from the mixin.  These are copied ahead of time before
+the :class:`_orm.declared_attr` is invoked::
+
+    class SomethingMixin(object):
+        x = Column(Integer)
+
+        y = Column(Integer)
+
+        @declared_attr
+        def x_plus_y(cls):
+            return column_property(cls.x + cls.y)
+
+
+.. versionchanged:: 1.0.0 mixin columns are copied to the final mapped class
+   so that :class:`_orm.declared_attr` methods can access the actual column
+   that will be mapped.
+
+Mixing in Association Proxy and Other Attributes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Mixins can specify user-defined attributes as well as other extension
+units such as :func:`.association_proxy`.   The usage of
+:class:`_orm.declared_attr` is required in those cases where the attribute must
+be tailored specifically to the target subclass.   An example is when
+constructing multiple :func:`.association_proxy` attributes which each
+target a different type of child object.  Below is an
+:func:`.association_proxy` / mixin example which provides a scalar list of
+string values to an implementing class::
+
+    from sqlalchemy import Column, Integer, ForeignKey, String
+    from sqlalchemy.orm import relationship
+    from sqlalchemy.ext.associationproxy import association_proxy
+    from sqlalchemy.orm import declarative_base, declared_attr
+
+    Base = declarative_base()
+
+    class HasStringCollection(object):
+        @declared_attr
+        def _strings(cls):
+            class StringAttribute(Base):
+                __tablename__ = cls.string_table_name
+                id = Column(Integer, primary_key=True)
+                value = Column(String(50), nullable=False)
+                parent_id = Column(Integer,
+                                ForeignKey('%s.id' % cls.__tablename__),
+                                nullable=False)
+                def __init__(self, value):
+                    self.value = value
+
+            return relationship(StringAttribute)
+
+        @declared_attr
+        def strings(cls):
+            return association_proxy('_strings', 'value')
+
+    class TypeA(HasStringCollection, Base):
+        __tablename__ = 'type_a'
+        string_table_name = 'type_a_strings'
+        id = Column(Integer(), primary_key=True)
+
+    class TypeB(HasStringCollection, Base):
+        __tablename__ = 'type_b'
+        string_table_name = 'type_b_strings'
+        id = Column(Integer(), primary_key=True)
+
+Above, the ``HasStringCollection`` mixin produces a :func:`_orm.relationship`
+which refers to a newly generated class called ``StringAttribute``.  The
+``StringAttribute`` class is generated with its own :class:`_schema.Table`
+definition which is local to the parent class making usage of the
+``HasStringCollection`` mixin.  It also produces an :func:`.association_proxy`
+object which proxies references to the ``strings`` attribute onto the ``value``
+attribute of each ``StringAttribute`` instance.
+
+``TypeA`` or ``TypeB`` can be instantiated given the constructor
+argument ``strings``, a list of strings::
+
+    ta = TypeA(strings=['foo', 'bar'])
+    tb = TypeB(strings=['bat', 'bar'])
+
+This list will generate a collection
+of ``StringAttribute`` objects, which are persisted into the
+``type_a_strings`` or ``type_b_strings`` table, respectively::
+
+    >>> print(ta._strings)
+    [<__main__.StringAttribute object at 0x10151cd90>,
+        <__main__.StringAttribute object at 0x10151ce10>]
+
+When constructing the :func:`.association_proxy`, the
+:class:`_orm.declared_attr` decorator must be used so that a distinct
+:func:`.association_proxy` object is created for each of the ``TypeA``
+and ``TypeB`` classes.
+
+.. _decl_mixin_inheritance:
+
+Controlling table inheritance with mixins
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``__tablename__`` attribute may be used to provide a function that
+will determine the name of the table used for each class in an inheritance
+hierarchy, as well as whether a class has its own distinct table.
+
+This is achieved using the :class:`_orm.declared_attr` indicator in conjunction
+with a method named ``__tablename__()``.   Declarative will always
+invoke a :class:`_orm.declared_attr` function for the special names
+``__tablename__``, ``__mapper_args__`` and ``__table_args__``
+**for each mapped class in the hierarchy, except if overridden
+in a subclass**.   The function therefore
+needs to expect to receive each class individually and to provide the
+correct answer for each.
+
+For example, to create a mixin that gives every class a simple table
+name based on class name::
+
+    from sqlalchemy.orm import declared_attr
+
+    class Tablename:
+        @declared_attr
+        def __tablename__(cls):
+            return cls.__name__.lower()
+
+    class Person(Tablename, Base):
+        id = Column(Integer, primary_key=True)
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        __tablename__ = None
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        primary_language = Column(String(50))
+
+Alternatively, we can modify our ``__tablename__`` function to return
+``None`` for subclasses, using :func:`.has_inherited_table`.  This has
+the effect of those subclasses being mapped with single table inheritance
+against the parent::
+
+    from sqlalchemy.orm import declared_attr
+    from sqlalchemy.orm import has_inherited_table
+
+    class Tablename(object):
+        @declared_attr
+        def __tablename__(cls):
+            if has_inherited_table(cls):
+                return None
+            return cls.__name__.lower()
+
+    class Person(Tablename, Base):
+        id = Column(Integer, primary_key=True)
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        primary_language = Column(String(50))
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+.. _mixin_inheritance_columns:
+
+Mixing in Columns in Inheritance Scenarios
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In contrast to how ``__tablename__`` and other special names are handled when
+used with :class:`_orm.declared_attr`, when we mix in columns and properties (e.g.
+relationships, column properties, etc.), the function is
+invoked for the **base class only** in the hierarchy.  Below, only the
+``Person`` class will receive a column
+called ``id``; the mapping will fail on ``Engineer``, which is not given
+a primary key::
+
+    class HasId(object):
+        @declared_attr
+        def id(cls):
+            return Column('id', Integer, primary_key=True)
+
+    class Person(HasId, Base):
+        __tablename__ = 'person'
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        __tablename__ = 'engineer'
+        primary_language = Column(String(50))
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+It is usually the case in joined-table inheritance that we want distinctly
+named columns on each subclass.  However in this case, we may want to have
+an ``id`` column on every table, and have them refer to each other via
+foreign key.  We can achieve this as a mixin by using the
+:attr:`.declared_attr.cascading` modifier, which indicates that the
+function should be invoked **for each class in the hierarchy**, in *almost*
+(see warning below) the same way as it does for ``__tablename__``::
+
+    class HasIdMixin(object):
+        @declared_attr.cascading
+        def id(cls):
+            if has_inherited_table(cls):
+                return Column(ForeignKey('person.id'), primary_key=True)
+            else:
+                return Column(Integer, primary_key=True)
+
+    class Person(HasIdMixin, Base):
+        __tablename__ = 'person'
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        __tablename__ = 'engineer'
+        primary_language = Column(String(50))
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+.. warning::
+
+    The :attr:`.declared_attr.cascading` feature currently does
+    **not** allow for a subclass to override the attribute with a different
+    function or value.  This is a current limitation in the mechanics of
+    how ``@declared_attr`` is resolved, and a warning is emitted if
+    this condition is detected.   This limitation does **not**
+    exist for the special attribute names such as ``__tablename__``, which
+    resolve in a different way internally than that of
+    :attr:`.declared_attr.cascading`.
+
+
+.. versionadded:: 1.0.0 added :attr:`.declared_attr.cascading`.
+
+Combining Table/Mapper Arguments from Multiple Mixins
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the case of ``__table_args__`` or ``__mapper_args__``
+specified with declarative mixins, you may want to combine
+some parameters from several mixins with those you wish to
+define on the class itself. The
+:class:`_orm.declared_attr` decorator can be used
+here to create user-defined collation routines that pull
+from multiple collections::
+
+    from sqlalchemy.orm import declared_attr
+
+    class MySQLSettings(object):
+        __table_args__ = {'mysql_engine':'InnoDB'}
+
+    class MyOtherMixin(object):
+        __table_args__ = {'info':'foo'}
+
+    class MyModel(MySQLSettings, MyOtherMixin, Base):
+        __tablename__='my_model'
+
+        @declared_attr
+        def __table_args__(cls):
+            args = dict()
+            args.update(MySQLSettings.__table_args__)
+            args.update(MyOtherMixin.__table_args__)
+            return args
+
+        id =  Column(Integer, primary_key=True)
+
+Creating Indexes with Mixins
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To define a named, potentially multicolumn :class:`.Index` that applies to all
+tables derived from a mixin, use the "inline" form of :class:`.Index` and
+establish it as part of ``__table_args__``::
+
+    class MyMixin(object):
+        a =  Column(Integer)
+        b =  Column(Integer)
+
+        @declared_attr
+        def __table_args__(cls):
+            return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),)
+
+    class MyModel(MyMixin, Base):
+        __tablename__ = 'atable'
+        c =  Column(Integer,primary_key=True)
diff --git a/doc/build/orm/declarative_tables.rst b/doc/build/orm/declarative_tables.rst
new file mode 100644 (file)
index 0000000..bbad7a3
--- /dev/null
@@ -0,0 +1,372 @@
+
+.. _orm_declarative_table_config_toplevel:
+
+=============================================
+Table Configuration with Declarative
+=============================================
+
+As introduced at :ref:`orm_declarative_mapping`, the Declarative style
+includes the ability to generate a mapped :class:`_schema.Table` object
+at the same time, or to accommodate a :class:`_schema.Table` or other
+:class:`_sql.FromClause` object directly.
+
+The following examples assume a declarative base class as::
+
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base()
+
+All of the examples that follow illustrate a class inheriting from the above
+``Base``.  The decorator style introduced at :ref:`orm_declarative_decorator`
+is fully supported with all the following examples as well.
+
+.. _orm_declarative_table:
+
+Declarative Table
+-----------------
+
+With the declarative base class, the typical form of mapping includes an
+attribute ``__tablename__`` that indicates the name of a :class:`_schema.Table`
+that should be generated along with the mapping::
+
+    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base()
+
+    class User(Base):
+        __tablename__ = 'user'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+        fullname = Column(String)
+        nickname = Column(String)
+
+Above, :class:`_schema.Column` objects are placed inline with the class
+definition.   The declarative mapping process will generate a new
+:class:`_schema.Table` object against the :class:`_schema.MetaData` collection
+associated with the declarative base, and each specified
+:class:`_schema.Column` object will become part of the :attr:`_schema.Table.columns`
+collection of this :class:`_schema.Table` object.   The :class:`_schema.Column`
+objects can omit their "name" field, which is usually the first positional
+argument to the :class:`_schema.Column` constructor; the declarative system
+will assign the key associated with each :class:`_schema.Column` as the name,
+to produce a :class:`_schema.Table` that is equivalent to::
+
+    # equivalent Table object produced
+    user_table = Table(
+        "user",
+        Base.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String),
+        Column("fullname", String),
+        Column("nickname", String),
+    )
+
+.. _orm_declarative_metadata:
+
+Accessing Table and Metadata
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A declaratively mapped class will always include an attribute called
+``__table__``; when the above configuration using ``__tablename__`` is
+complete, the declarative process makes the :class:`_schema.Table`
+available via the ``__table__`` attribute::
+
+
+    # access the Table
+    user_table = User.__table__
+
+The above table is ultimately the same one that corresponds to the
+:attr:`_orm.Mapper.local_table` attribute, which we can see through the
+:ref:`runtime inspection system <inspection_toplevel>`::
+
+    from sqlalchemy import inspect
+
+    user_table = inspect(User).local_table
+
+The :class:`_schema.MetaData` collection associated with both the declarative
+:class:`_orm.registry` as well as the base class is frequently necessary in
+order to run DDL operations such as CREATE, as well as for use with migration
+tools such as Alembic.   This object is available via the ``.metadata``
+attribute of :class:`_orm.registry` as well as the declarative base class.
+For example, in a small script we may wish to emit CREATE statements for all
+tables against a SQLite database::
+
+    engine = create_engine("sqlite://")
+
+    Base.metadata.create_all(engine)
+
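+The same :class:`_schema.MetaData` is also reachable from the
+:class:`_orm.registry` associated with the base class; as a brief sketch,
+assuming the ``Base`` constructed above::
+
+    # the registry attached to the Base refers to the same MetaData
+    Base.registry.metadata.create_all(engine)
+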
+.. _orm_declarative_table_configuration:
+
+Declarative Table Configuration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When using Declarative Table configuration with the ``__tablename__``
+declarative class attribute, additional arguments to be supplied to the
+:class:`_schema.Table` constructor should be provided using the
+``__table_args__`` declarative class attribute.
+
+This attribute accommodates both positional as well as keyword
+arguments that are normally sent to the
+:class:`_schema.Table` constructor.
+The attribute can be specified in one of two forms. One is as a
+dictionary::
+
+    class MyClass(Base):
+        __tablename__ = 'sometable'
+        __table_args__ = {'mysql_engine': 'InnoDB'}
+
+The other, a tuple, where each argument is positional
+(usually constraints)::
+
+    class MyClass(Base):
+        __tablename__ = 'sometable'
+        __table_args__ = (
+            ForeignKeyConstraint(['id'], ['remote_table.id']),
+            UniqueConstraint('foo'),
+        )
+
+Keyword arguments can be specified with the above form by
+specifying the last argument as a dictionary::
+
+    class MyClass(Base):
+        __tablename__ = 'sometable'
+        __table_args__ = (
+            ForeignKeyConstraint(['id'], ['remote_table.id']),
+            UniqueConstraint('foo'),
+            {'autoload': True},
+        )
+
+A class may also specify the ``__table_args__`` declarative attribute,
+as well as the ``__tablename__`` attribute, in a dynamic style using the
+:func:`_orm.declared_attr` method decorator.   See the section
+:ref:`declarative_mixins` for examples on how this is often used.
+
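+As a minimal sketch of that dynamic style (the class name ``MyDynamicClass``
+below is illustrative only), both attributes may be computed from the class
+being mapped::
+
+    from sqlalchemy import Column, Integer
+    from sqlalchemy.orm import declared_attr
+
+    class MyDynamicClass(Base):
+        @declared_attr
+        def __tablename__(cls):
+            # derive the table name from the class name
+            return cls.__name__.lower()
+
+        @declared_attr
+        def __table_args__(cls):
+            # per-class table options
+            return {'mysql_engine': 'InnoDB'}
+
+        id = Column(Integer, primary_key=True)
+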
+.. _orm_declarative_table_adding_columns:
+
+Adding New Columns
+^^^^^^^^^^^^^^^^^^^
+
+The declarative table configuration allows the addition of new
+:class:`_schema.Column` objects under two scenarios.  The most basic
+is that of simply assigning new :class:`_schema.Column` objects to the
+class::
+
+    MyClass.some_new_column = Column('data', Unicode)
+
+The above operation performed against a declarative class that has been
+mapped using the declarative base (note, not the decorator form of declarative)
+will add the above :class:`_schema.Column` to the :class:`_schema.Table`
+using the :meth:`_schema.Table.append_column` method and will also add the
+column to the :class:`_orm.Mapper` to be fully mapped.
+
+.. note:: assignment of new columns to an existing declaratively mapped class
+   will only function correctly if the "declarative base" class is used, which
+   also provides for a metaclass-driven ``__setattr__()`` method which will
+   intercept these operations.   It will **not** work if the declarative
+   decorator provided by
+   :meth:`_orm.registry.mapped` is used, nor will it work for an imperatively
+   mapped class mapped by :meth:`_orm.registry.map_imperatively`.
+
+
+The other scenario where a :class:`_schema.Column` is added on the fly is
+when an inheriting subclass that has no table of its own indicates
+additional columns; these columns will be added to the superclass table.
+The section :ref:`single_inheritance` discusses single table inheritance.
+
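+As a brief sketch of that single table inheritance scenario (the class and
+column names below are illustrative only), a subclass that omits
+``__tablename__`` contributes its columns to the parent class' table::
+
+    class Employee(Base):
+        __tablename__ = 'employee'
+
+        id = Column(Integer, primary_key=True)
+        type = Column(String(50))
+
+        __mapper_args__ = {
+            "polymorphic_on": type,
+            "polymorphic_identity": "employee"
+        }
+
+    class Manager(Employee):
+        # no __tablename__; this column is added to the 'employee' table
+        manager_name = Column(String(50))
+
+        __mapper_args__ = {"polymorphic_identity": "manager"}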
+
+.. _orm_imperative_table_configuration:
+
+Declarative with Imperative Table (a.k.a. Hybrid Declarative)
+-------------------------------------------------------------
+
+Declarative mappings may also be provided with a pre-existing
+:class:`_schema.Table` object, or any other arbitrary
+:class:`_sql.FromClause` construct (such as a :class:`_sql.Join`
+or :class:`_sql.Subquery`) that is constructed separately.
+
+This is referred to as a "hybrid declarative"
+mapping, as the class is mapped using the declarative style for everything
+involving the mapper configuration, however the mapped :class:`_schema.Table`
+object is produced separately and passed to the declarative process
+directly::
+
+
+    from sqlalchemy.orm import declarative_base
+    from sqlalchemy import Table, Column, Integer, String, ForeignKey
+
+
+    Base = declarative_base()
+
+    # construct a Table directly.  The Base.metadata collection is
+    # usually a good choice for MetaData but any MetaData
+    # collection may be used.
+
+    user_table = Table(
+        "user",
+        Base.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String),
+        Column("fullname", String),
+        Column("nickname", String),
+    )
+
+    # construct the User class using this table.
+    class User(Base):
+        __table__ = user_table
+
+Above, a :class:`_schema.Table` object is constructed using the approach
+described at :ref:`metadata_describing`.   It can then be applied directly
+to a class that is declaratively mapped.  The ``__tablename__`` and
+``__table_args__`` declarative class attributes are not used in this form.
+The above configuration is often more readable as an inline definition::
+
+    class User(Base):
+        __table__ = Table(
+            "user",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String),
+            Column("fullname", String),
+            Column("nickname", String),
+        )
+
+A natural effect of the above style is that the ``__table__`` attribute is
+itself defined within the class definition block.   As such it may be
+immediately referred to within subsequent attributes, such as the example
+below which illustrates referring to the ``type`` column in a polymorphic
+mapper configuration::
+
+    class Person(Base):
+        __table__ = Table(
+            'person',
+            Base.metadata,
+            Column('id', Integer, primary_key=True),
+            Column('name', String(50)),
+            Column('type', String(50))
+        )
+
+        __mapper_args__ = {
+            "polymorphic_on": __table__.c.type,
+            "polymorhpic_identity": "person"
+        }
+
+The "imperative table" form is also used when a non-:class:`_schema.Table`
+construct, such as a :class:`_sql.Join` or :class:`_sql.Subquery` object,
+is to be mapped.  An example below::
+
+    from sqlalchemy import select, func
+
+    subq = select(
+        func.count(orders.c.id).label('order_count'),
+        func.max(orders.c.price).label('highest_order'),
+        orders.c.customer_id
+    ).group_by(orders.c.customer_id).subquery()
+
+    customer_select = select(customers, subq).join_from(
+        customers, subq, customers.c.id == subq.c.customer_id
+    ).subquery()
+
+    class Customer(Base):
+        __table__ = customer_select
+
+For background on mapping to non-:class:`_schema.Table` constructs see
+the sections :ref:`orm_mapping_joins` and :ref:`orm_mapping_arbitrary_subqueries`.
+
+The "imperative table" form is of particular use when the class itself
+is using an alternative form of attribute declaration, such as Python
+dataclasses.   See the section :ref:`orm_declarative_dataclasses` for detail.
+
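+As a rough, hypothetical sketch of that combination (the names below are
+illustrative only; the linked section covers the complete patterns), a Python
+dataclass may supply the attribute behavior while an imperative ``__table__``
+supplies the mapping::
+
+    from dataclasses import dataclass, field
+    from sqlalchemy import Column, Integer, String, Table
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    @mapper_registry.mapped
+    @dataclass
+    class User:
+        __table__ = Table(
+            "user",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50)),
+        )
+
+        id: int = field(init=False)
+        name: str = None
+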
+.. seealso::
+
+    :ref:`metadata_describing`
+
+    :ref:`orm_declarative_dataclasses`
+
+.. _orm_declarative_reflected:
+
+Mapping Declaratively with Reflected Tables
+--------------------------------------------
+
+There are several patterns available which provide for producing mapped
+classes against a series of :class:`_schema.Table` objects that were
+introspected from the database, using the reflection process described at
+:ref:`metadata_reflection`.
+
+A very simple way to map a class to a table reflected from the database is to
+use a declarative hybrid mapping, passing the
+:paramref:`_schema.Table.autoload_with` parameter to the
+:class:`_schema.Table`::
+
+    engine = create_engine("postgresql://user:pass@hostname/my_existing_database")
+
+    class MyClass(Base):
+        __table__ = Table(
+            'mytable',
+            Base.metadata,
+            autoload_with=engine
+        )
+
+A major downside of the above approach, however, is that it requires the database
+connectivity source to be present while the application classes are being
+declared; it's typical that classes are declared as the modules of an
+application are being imported, but database connectivity isn't available
+until the application starts running code so that it can consume configuration
+information and create an engine.
+
+Using DeferredReflection
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To accommodate this case, a simple extension called the
+:class:`.DeferredReflection` mixin is available, which delays the declarative
+mapping process until a special class-level
+:meth:`.DeferredReflection.prepare` method is called.  This method performs
+the reflection process against a target database and integrates the
+results with the declarative table mapping process, that is, classes which
+use the ``__tablename__`` attribute::
+
+    from sqlalchemy import Column, ForeignKey, Integer
+    from sqlalchemy.orm import declarative_base, relationship
+    from sqlalchemy.ext.declarative import DeferredReflection
+
+    Base = declarative_base()
+
+    class Reflected(DeferredReflection):
+        __abstract__ = True
+
+    class Foo(Reflected, Base):
+        __tablename__ = 'foo'
+        bars = relationship("Bar")
+
+    class Bar(Reflected, Base):
+        __tablename__ = 'bar'
+
+        foo_id = Column(Integer, ForeignKey('foo.id'))
+
+Above, we create a mixin class ``Reflected`` that will serve as a base
+for classes in our declarative hierarchy that should become mapped when
+the ``Reflected.prepare`` method is called.   The above mapping is not
+complete until we do so, given an :class:`_engine.Engine`::
+
+
+    engine = create_engine("postgresql://user:pass@hostname/my_existing_database")
+    Reflected.prepare(engine)
+
+The purpose of the ``Reflected`` class is to define the scope at which
+classes should be reflectively mapped.   The extension will search through the
+subclass tree of the target against which ``.prepare()`` is called and reflect
+all tables.
+
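+For example, as a hypothetical sketch (the names ``engine_one`` and
+``engine_two`` below are illustrative only), two separate
+:class:`.DeferredReflection` bases may be prepared against two different
+engines, each mapping only its own subclasses::
+
+    class ReflectedOne(DeferredReflection):
+        __abstract__ = True
+
+    class ReflectedTwo(DeferredReflection):
+        __abstract__ = True
+
+    class MyClass(ReflectedOne, Base):
+        __tablename__ = 'mytable'
+
+    class MyOtherClass(ReflectedOne, Base):
+        __tablename__ = 'myothertable'
+
+    class YetAnotherClass(ReflectedTwo, Base):
+        __tablename__ = 'yetanothertable'
+
+    ReflectedOne.prepare(engine_one)
+    ReflectedTwo.prepare(engine_two)
+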
+Using Automap
+^^^^^^^^^^^^^^
+
+A more automated solution for mapping against an existing database with
+table reflection is the :ref:`automap_toplevel`
+extension.  This extension will generate entire mapped classes from a
+database schema, and allows several hooks for customization including the
+ability to explicitly map some or all classes while still making use of
+reflection to fill in the remaining columns.
+
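+As a minimal sketch, assuming the target database contains a table named
+``user``, automap can produce a mapped class for it without any class
+declaration at all::
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.ext.automap import automap_base
+
+    AutomapBase = automap_base()
+
+    engine = create_engine("postgresql://user:pass@hostname/my_existing_database")
+
+    # reflect tables and generate mapped classes
+    AutomapBase.prepare(engine, reflect=True)
+
+    # generated classes are available keyed on the table name
+    User = AutomapBase.classes.user
+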
+.. seealso::
+
+    :ref:`automap_toplevel`
index be97604d3b62a9eb426443367caae80bb623b1a0..6e413a07e151858a51d67e49bc42711726e091a2 100644 (file)
@@ -7,18 +7,19 @@ Declarative API
 API Reference
 =============
 
-.. autofunction:: declarative_base
+.. versionchanged:: 1.4  The fundamental structures of the declarative
+   system are now part of SQLAlchemy ORM directly.   For these components
+   see:
 
-.. autofunction:: as_declarative
+   * :func:`_orm.declarative_base`
 
-.. autoclass:: declared_attr
-    :members:
+   * :class:`_orm.declared_attr`
 
-.. autofunction:: sqlalchemy.ext.declarative.api._declarative_constructor
+   * :func:`_orm.has_inherited_table`
 
-.. autofunction:: has_inherited_table
+   * :func:`_orm.synonym_for`
 
-.. autofunction:: synonym_for
+   * :func:`_orm.as_declarative`
 
 .. autofunction:: instrument_declarative
 
@@ -30,130 +31,3 @@ API Reference
    :members:
 
 
-Special Directives
-------------------
-
-``__declare_last__()``
-~~~~~~~~~~~~~~~~~~~~~~
-
-The ``__declare_last__()`` hook allows definition of
-a class level function that is automatically called by the
-:meth:`.MapperEvents.after_configured` event, which occurs after mappings are
-assumed to be completed and the 'configure' step has finished::
-
-    class MyClass(Base):
-        @classmethod
-        def __declare_last__(cls):
-            ""
-            # do something with mappings
-
-``__declare_first__()``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Like ``__declare_last__()``, but is called at the beginning of mapper
-configuration via the :meth:`.MapperEvents.before_configured` event::
-
-    class MyClass(Base):
-        @classmethod
-        def __declare_first__(cls):
-            ""
-            # do something before mappings are configured
-
-.. versionadded:: 0.9.3
-
-.. _declarative_abstract:
-
-``__abstract__``
-~~~~~~~~~~~~~~~~
-
-``__abstract__`` causes declarative to skip the production
-of a table or mapper for the class entirely.  A class can be added within a
-hierarchy in the same way as mixin (see :ref:`declarative_mixins`), allowing
-subclasses to extend just from the special class::
-
-    class SomeAbstractBase(Base):
-        __abstract__ = True
-
-        def some_helpful_method(self):
-            ""
-
-        @declared_attr
-        def __mapper_args__(cls):
-            return {"helpful mapper arguments":True}
-
-    class MyMappedClass(SomeAbstractBase):
-        ""
-
-One possible use of ``__abstract__`` is to use a distinct
-:class:`_schema.MetaData` for different bases::
-
-    Base = declarative_base()
-
-    class DefaultBase(Base):
-        __abstract__ = True
-        metadata = MetaData()
-
-    class OtherBase(Base):
-        __abstract__ = True
-        metadata = MetaData()
-
-Above, classes which inherit from ``DefaultBase`` will use one
-:class:`_schema.MetaData` as the registry of tables, and those which inherit from
-``OtherBase`` will use a different one. The tables themselves can then be
-created perhaps within distinct databases::
-
-    DefaultBase.metadata.create_all(some_engine)
-    OtherBase.metadata.create_all(some_other_engine)
-
-
-``__table_cls__``
-~~~~~~~~~~~~~~~~~
-
-Allows the callable / class used to generate a :class:`_schema.Table` to be customized.
-This is a very open-ended hook that can allow special customizations
-to a :class:`_schema.Table` that one generates here::
-
-    class MyMixin(object):
-        @classmethod
-        def __table_cls__(cls, name, metadata, *arg, **kw):
-            return Table(
-                "my_" + name,
-                metadata, *arg, **kw
-            )
-
-The above mixin would cause all :class:`_schema.Table` objects generated to include
-the prefix ``"my_"``, followed by the name normally specified using the
-``__tablename__`` attribute.
-
-``__table_cls__`` also supports the case of returning ``None``, which
-causes the class to be considered as single-table inheritance vs. its subclass.
-This may be useful in some customization schemes to determine that single-table
-inheritance should take place based on the arguments for the table itself,
-such as, define as single-inheritance if there is no primary key present::
-
-    class AutoTable(object):
-        @declared_attr
-        def __tablename__(cls):
-            return cls.__name__
-
-        @classmethod
-        def __table_cls__(cls, *arg, **kw):
-            for obj in arg[1:]:
-                if (isinstance(obj, Column) and obj.primary_key) or \
-                        isinstance(obj, PrimaryKeyConstraint):
-                    return Table(*arg, **kw)
-
-            return None
-
-    class Person(AutoTable, Base):
-        id = Column(Integer, primary_key=True)
-
-    class Employee(Person):
-        employee_name = Column(String)
-
-The above ``Employee`` class would be mapped as single-table inheritance
-against ``Person``; the ``employee_name`` column would be added as a member
-of the ``Person`` table.
-
-
-.. versionadded:: 1.0.0
index b939f7e3931a29f95cec312e31eb412a2c1fa298..f1ce1d4a026502859d6c15d1bbdea3e619f776c9 100644 (file)
 Basic Use
 =========
 
-.. seealso::
-
-    This section describes specifics about how the Declarative system
-    interacts with the SQLAlchemy ORM.  For a general introduction
-    to class mapping, see :ref:`ormtutorial_toplevel` as well as
-    :ref:`mapper_config_toplevel`.
-
-SQLAlchemy object-relational configuration involves the
-combination of :class:`_schema.Table`, :func:`.mapper`, and class
-objects to define a mapped class.
-:mod:`~sqlalchemy.ext.declarative` allows all three to be
-expressed at once within the class declaration. As much as
-possible, regular SQLAlchemy schema and ORM constructs are
-used directly, so that configuration between "classical" ORM
-usage and declarative remain highly similar.
-
-As a simple example::
-
-    from sqlalchemy import Column, Integer, String
-    from sqlalchemy.ext.declarative import declarative_base
-
-    Base = declarative_base()
-
-    class SomeClass(Base):
-        __tablename__ = 'some_table'
-        id = Column(Integer, primary_key=True)
-        name =  Column(String(50))
-
-Above, the :func:`declarative_base` callable returns a new base class from
-which all mapped classes should inherit. When the class definition is
-completed, a new :class:`_schema.Table` and :func:`.mapper` will have been generated.
-
-The resulting table and mapper are accessible via
-``__table__`` and ``__mapper__`` attributes on the
-``SomeClass`` class::
-
-    # access the mapped Table
-    SomeClass.__table__
-
-    # access the Mapper
-    SomeClass.__mapper__
+This section has moved to :ref:`orm_declarative_mapping`.
 
 Defining Attributes
 ===================
 
-In the previous example, the :class:`_schema.Column` objects are
-automatically named with the name of the attribute to which they are
-assigned.
-
-To name columns explicitly with a name distinct from their mapped attribute,
-just give the column a name.  Below, column "some_table_id" is mapped to the
-"id" attribute of `SomeClass`, but in SQL will be represented as
-"some_table_id"::
-
-    class SomeClass(Base):
-        __tablename__ = 'some_table'
-        id = Column("some_table_id", Integer, primary_key=True)
-
-Attributes may be added to the class after its construction, and they will be
-added to the underlying :class:`_schema.Table` and
-:func:`.mapper` definitions as appropriate::
+This section is covered by :ref:`mapping_columns_toplevel`.
 
-    SomeClass.data = Column('data', Unicode)
-    SomeClass.related = relationship(RelatedInfo)
 
-Classes which are constructed using declarative can interact freely
-with classes that are mapped explicitly with :func:`.mapper`.
-
-
-.. sidebar:: Using MyPy with SQLAlchemy models
-
-    If you are using PEP 484 static type checkers for Python, a `MyPy <http://mypy-lang.org/>`_
-    plugin is included with
-    `type stubs for SQLAlchemy <https://github.com/dropbox/sqlalchemy-stubs>`_.  The plugin
-    is tailored towards SQLAlchemy declarative models.
-
-
-It is recommended, though not required, that all tables
-share the same underlying :class:`~sqlalchemy.schema.MetaData` object,
-so that string-configured :class:`~sqlalchemy.schema.ForeignKey`
-references can be resolved without issue.
 
 Accessing the MetaData
 ======================
 
-The :func:`declarative_base` base class contains a
-:class:`_schema.MetaData` object where newly defined
-:class:`_schema.Table` objects are collected. This object is
-intended to be accessed directly for
-:class:`_schema.MetaData`-specific operations. Such as, to issue
-CREATE statements for all tables::
-
-    engine = create_engine('sqlite://')
-    Base.metadata.create_all(engine)
-
-:func:`declarative_base` can also receive a pre-existing
-:class:`_schema.MetaData` object, which allows a
-declarative setup to be associated with an already
-existing traditional collection of :class:`~sqlalchemy.schema.Table`
-objects::
-
-    mymetadata = MetaData()
-    Base = declarative_base(metadata=mymetadata)
+This section has moved to :ref:`orm_declarative_metadata`.
 
 
 Class Constructor
@@ -119,25 +29,7 @@ to the named attributes::
 Mapper Configuration
 ====================
 
-Declarative makes use of the :func:`_orm.mapper` function internally
-when it creates the mapping to the declared table.   The options
-for :func:`_orm.mapper` are passed directly through via the
-``__mapper_args__`` class attribute.  As always, arguments which reference
-locally mapped columns can reference them directly from within the
-class declaration::
-
-    from datetime import datetime
-
-    class Widget(Base):
-        __tablename__ = 'widgets'
-
-        id = Column(Integer, primary_key=True)
-        timestamp = Column(DateTime, nullable=False)
-
-        __mapper_args__ = {
-                        'version_id_col': timestamp,
-                        'version_id_generator': lambda v:datetime.now()
-                    }
+This section has moved to :ref:`orm_declarative_mapper_options`.
 
 
 .. _declarative_sql_expressions:
index 43972b03e1e3d301e1bd40e8aa753332e31790e6..36700f8127d8e9c416c9211b9dc3b917835fef58 100644 (file)
@@ -1,32 +1,23 @@
 .. _declarative_toplevel:
 
-===========
-Declarative
-===========
+.. currentmodule:: sqlalchemy.ext.declarative
 
-The Declarative system is the typically used system provided by the SQLAlchemy
-ORM in order to define classes mapped to relational database tables.  However,
-as noted in :ref:`classical_mapping`, Declarative is in fact a series of
-extensions that ride on top of the SQLAlchemy :func:`.mapper` construct.
-
-While the documentation typically refers to Declarative for most examples,
-the following sections will provide detailed information on how the
-Declarative API interacts with the basic :func:`.mapper` and Core :class:`_schema.Table`
-systems, as well as how sophisticated patterns can be built using systems
-such as mixins.
-
-
-.. toctree::
-       :maxdepth: 2
-
-       basic_use
-       relationships
-       table_config
-       inheritance
-       mixins
-       api
+======================
+Declarative Extensions
+======================
 
+Extensions specific to the :ref:`Declarative <orm_declarative_mapping>`
+mapping API.
 
+.. versionchanged:: 1.4  The vast majority of the Declarative extension is now
+   integrated into the SQLAlchemy ORM and is importable from the
+   ``sqlalchemy.orm`` namespace.  See the documentation at
+   :ref:`orm_declarative_mapping` for new documentation.
+   For an overview of the change, see :ref:`change_5508`.
 
+.. autoclass:: AbstractConcreteBase
 
+.. autoclass:: ConcreteBase
 
+.. autoclass:: DeferredReflection
+   :members:
index fcbdc0a949dddaca6b7dda1eec96a1b0cef18d30..70148986bc2e45f7b453dca05ae7c04d4dcde408 100644 (file)
@@ -1,250 +1,3 @@
 .. _declarative_inheritance:
 
-Inheritance Configuration
-=========================
-
-Declarative supports all three forms of inheritance as intuitively
-as possible.  The ``inherits`` mapper keyword argument is not needed
-as declarative will determine this from the class itself.   The various
-"polymorphic" keyword arguments are specified using ``__mapper_args__``.
-
-.. seealso::
-
-    This section describes some specific details on how the Declarative system
-    interacts with SQLAlchemy ORM inheritance configuration.  See
-    :ref:`inheritance_toplevel` for a general introduction to inheritance
-    mapping.
-
-Joined Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Joined table inheritance is defined as a subclass that defines its own
-table::
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __tablename__ = 'engineers'
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-        id = Column(Integer, ForeignKey('people.id'), primary_key=True)
-        primary_language = Column(String(50))
-
-Note that above, the ``Engineer.id`` attribute, since it shares the
-same attribute name as the ``Person.id`` attribute, will in fact
-represent the ``people.id`` and ``engineers.id`` columns together,
-with the "Engineer.id" column taking precedence if queried directly.
-To provide the ``Engineer`` class with an attribute that represents
-only the ``engineers.id`` column, give it a different attribute name::
-
-    class Engineer(Person):
-        __tablename__ = 'engineers'
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-        engineer_id = Column('id', Integer, ForeignKey('people.id'),
-                                                    primary_key=True)
-        primary_language = Column(String(50))
-
-
-.. _declarative_single_table:
-
-Single Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Single table inheritance is defined as a subclass that does not have
-its own table; you just leave out the ``__table__`` and ``__tablename__``
-attributes::
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-        primary_language = Column(String(50))
-
-When the above mappers are configured, the ``Person`` class is mapped
-to the ``people`` table *before* the ``primary_language`` column is
-defined, and this column will not be included in its own mapping.
-When ``Engineer`` then defines the ``primary_language`` column, the
-column is added to the ``people`` table so that it is included in the
-mapping for ``Engineer`` and is also part of the table's full set of
-columns.  Columns which are not mapped to ``Person`` are also excluded
-from any other single or joined inheriting classes using the
-``exclude_properties`` mapper argument.  Below, ``Manager`` will have
-all the attributes of ``Person`` and ``Manager`` but *not* the
-``primary_language`` attribute of ``Engineer``::
-
-    class Manager(Person):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
-        golf_swing = Column(String(50))
-
-The attribute exclusion logic is provided by the
-``exclude_properties`` mapper argument, and declarative's default
-behavior can be disabled by passing an explicit ``exclude_properties``
-collection (empty or otherwise) to the ``__mapper_args__``.
-
-.. _declarative_column_conflicts:
-
-Resolving Column Conflicts
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Note above that the ``primary_language`` and ``golf_swing`` columns
-are "moved up" to be applied to ``Person.__table__``, as a result of their
-declaration on a subclass that has no table of its own.   A tricky case
-comes up when two subclasses want to specify *the same* column, as below::
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-        start_date = Column(DateTime)
-
-    class Manager(Person):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
-        start_date = Column(DateTime)
-
-Above, the ``start_date`` column declared on both ``Engineer`` and ``Manager``
-will result in an error::
-
-    sqlalchemy.exc.ArgumentError: Column 'start_date' on class
-    <class '__main__.Manager'> conflicts with existing
-    column 'people.start_date'
-
-In a situation like this, Declarative can't be sure
-of the intent, especially if the ``start_date`` columns had, for example,
-different types.   A situation like this can be resolved by using
-:class:`.declared_attr` to define the :class:`_schema.Column` conditionally, taking
-care to return the **existing column** via the parent ``__table__`` if it
-already exists::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-        @declared_attr
-        def start_date(cls):
-            "Start date column, if not present already."
-            return Person.__table__.c.get('start_date', Column(DateTime))
-
-    class Manager(Person):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
-
-        @declared_attr
-        def start_date(cls):
-            "Start date column, if not present already."
-            return Person.__table__.c.get('start_date', Column(DateTime))
-
-Above, when ``Manager`` is mapped, the ``start_date`` column is
-already present on the ``Person`` class.  Declarative lets us return
-that :class:`_schema.Column` as a result in this case, where it knows to skip
-re-assigning the same column. If the mapping is mis-configured such
-that the ``start_date`` column is accidentally re-assigned to a
-different table (such as, if we changed ``Manager`` to be joined
-inheritance without fixing ``start_date``), an error is raised which
-indicates an existing :class:`_schema.Column` is trying to be re-assigned to
-a different owning :class:`_schema.Table`.
-
-The same concept can be used with mixin classes (see
-:ref:`declarative_mixins`)::
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class HasStartDate(object):
-        @declared_attr
-        def start_date(cls):
-            return cls.__table__.c.get('start_date', Column(DateTime))
-
-    class Engineer(HasStartDate, Person):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-    class Manager(HasStartDate, Person):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
-
-The above mixin checks the local ``__table__`` attribute for the column.
-Because we're using single table inheritance, we're sure that in this case,
-``cls.__table__`` refers to ``Person.__table__``.  If we were mixing joined-
-and single-table inheritance, we might want our mixin to check more carefully
-if ``cls.__table__`` is really the :class:`_schema.Table` we're looking for.
-
-.. _declarative_concrete_table:
-
-Concrete Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Concrete is defined as a subclass which has its own table and sets the
-``concrete`` keyword argument to ``True``::
-
-    class Person(Base):
-        __tablename__ = 'people'
-        id = Column(Integer, primary_key=True)
-        name = Column(String(50))
-
-    class Engineer(Person):
-        __tablename__ = 'engineers'
-        __mapper_args__ = {'concrete':True}
-        id = Column(Integer, primary_key=True)
-        primary_language = Column(String(50))
-        name = Column(String(50))
-
-Usage of an abstract base class is a little less straightforward as it
-requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`,
-which needs to be created with the :class:`_schema.Table` objects
-before the class is built::
-
-    engineers = Table('engineers', Base.metadata,
-                    Column('id', Integer, primary_key=True),
-                    Column('name', String(50)),
-                    Column('primary_language', String(50))
-                )
-    managers = Table('managers', Base.metadata,
-                    Column('id', Integer, primary_key=True),
-                    Column('name', String(50)),
-                    Column('golf_swing', String(50))
-                )
-
-    punion = polymorphic_union({
-        'engineer':engineers,
-        'manager':managers
-    }, 'type', 'punion')
-
-    class Person(Base):
-        __table__ = punion
-        __mapper_args__ = {'polymorphic_on':punion.c.type}
-
-    class Engineer(Person):
-        __table__ = engineers
-        __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True}
-
-    class Manager(Person):
-        __table__ = managers
-        __mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
-
-The helper classes :class:`.AbstractConcreteBase` and :class:`.ConcreteBase`
-provide automation for the above system of creating a polymorphic union.
-See the documentation for these helpers as well as the main ORM documentation
-on concrete inheritance for details.
-
-.. seealso::
-
-    :ref:`concrete_inheritance`
-
+See :ref:`inheritance_toplevel` for this section.
index 509b1d34c687a65e40d8fe5b9c7cd37660d941a4..221e8f8f8c92dda0d107441dce42a0dcc041c827 100644 (file)
@@ -3,542 +3,4 @@
 Mixin and Custom Base Classes
 =============================
 
-A common need when using :mod:`~sqlalchemy.ext.declarative` is to
-share some functionality, such as a set of common columns, some common
-table options, or other mapped properties, across many
-classes.  The standard Python idioms for this is to have the classes
-inherit from a base which includes these common features.
-
-When using :mod:`~sqlalchemy.ext.declarative`, this idiom is allowed
-via the usage of a custom declarative base class, as well as a "mixin" class
-which is inherited from in addition to the primary base.  Declarative
-includes several helper features to make this work in terms of how
-mappings are declared.   An example of some commonly mixed-in
-idioms is below::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class MyMixin(object):
-
-        @declared_attr
-        def __tablename__(cls):
-            return cls.__name__.lower()
-
-        __table_args__ = {'mysql_engine': 'InnoDB'}
-        __mapper_args__= {'always_refresh': True}
-
-        id =  Column(Integer, primary_key=True)
-
-    class MyModel(MyMixin, Base):
-        name = Column(String(1000))
-
-Where above, the class ``MyModel`` will contain an "id" column
-as the primary key, a ``__tablename__`` attribute that derives
-from the name of the class itself, as well as ``__table_args__``
-and ``__mapper_args__`` defined by the ``MyMixin`` mixin class.
-
-There's no fixed convention over whether ``MyMixin`` precedes
-``Base`` or not.  Normal Python method resolution rules apply, and
-the above example would work just as well with::
-
-    class MyModel(Base, MyMixin):
-        name = Column(String(1000))
-
-This works because ``Base`` here doesn't define any of the
-variables that ``MyMixin`` defines, i.e. ``__tablename__``,
-``__table_args__``, ``id``, etc.   If the ``Base`` did define
-an attribute of the same name, the class placed first in the
-inherits list would determine which attribute is used on the
-newly defined class.
-
-Augmenting the Base
-~~~~~~~~~~~~~~~~~~~
-
-In addition to using a pure mixin, most of the techniques in this
-section can also be applied to the base class itself, for patterns that
-should apply to all classes derived from a particular base.  This is achieved
-using the ``cls`` argument of the :func:`.declarative_base` function::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class Base(object):
-        @declared_attr
-        def __tablename__(cls):
-            return cls.__name__.lower()
-
-        __table_args__ = {'mysql_engine': 'InnoDB'}
-
-        id =  Column(Integer, primary_key=True)
-
-    from sqlalchemy.ext.declarative import declarative_base
-
-    Base = declarative_base(cls=Base)
-
-    class MyModel(Base):
-        name = Column(String(1000))
-
-Where above, ``MyModel`` and all other classes that derive from ``Base`` will
-have a table name derived from the class name, an ``id`` primary key column,
-as well as the "InnoDB" engine for MySQL.
-
-Mixing in Columns
-~~~~~~~~~~~~~~~~~
-
-The most basic way to specify a column on a mixin is by simple
-declaration::
-
-    class TimestampMixin(object):
-        created_at = Column(DateTime, default=func.now())
-
-    class MyModel(TimestampMixin, Base):
-        __tablename__ = 'test'
-
-        id =  Column(Integer, primary_key=True)
-        name = Column(String(1000))
-
-Where above, all declarative classes that include ``TimestampMixin``
-will also have a column ``created_at`` that applies a timestamp to
-all row insertions.
-
-Those familiar with the SQLAlchemy expression language know that
-the object identity of clause elements defines their role in a schema.
-Two ``Table`` objects ``a`` and ``b`` may both have a column called
-``id``, but the way these are differentiated is that ``a.c.id``
-and ``b.c.id`` are two distinct Python objects, referencing their
-parent tables ``a`` and ``b`` respectively.
-
-In the case of the mixin column, it seems that only one
-:class:`_schema.Column` object is explicitly created, yet the ultimate
-``created_at`` column above must exist as a distinct Python object
-for each separate destination class.  To accomplish this, the declarative
-extension creates a **copy** of each :class:`_schema.Column` object encountered on
-a class that is detected as a mixin.
-
-This copy mechanism is limited to simple columns that have no foreign
-keys, as a :class:`_schema.ForeignKey` itself contains references to columns
-which can't be properly recreated at this level.  For columns that
-have foreign keys, as well as for the variety of mapper-level constructs
-that require destination-explicit context, the
-:class:`~.declared_attr` decorator is provided so that
-patterns common to many classes can be defined as callables::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class ReferenceAddressMixin(object):
-        @declared_attr
-        def address_id(cls):
-            return Column(Integer, ForeignKey('address.id'))
-
-    class User(ReferenceAddressMixin, Base):
-        __tablename__ = 'user'
-        id = Column(Integer, primary_key=True)
-
-Where above, the ``address_id`` class-level callable is executed at the
-point at which the ``User`` class is constructed, and the declarative
-extension can use the resulting :class:`_schema.Column` object as returned by
-the method without the need to copy it.
-
-Columns generated by :class:`~.declared_attr` can also be
-referenced by ``__mapper_args__`` to a limited degree, currently
-by ``polymorphic_on`` and ``version_id_col``; the declarative extension
-will resolve them at class construction time::
-
-    class MyMixin:
-        @declared_attr
-        def type_(cls):
-            return Column(String(50))
-
-        __mapper_args__= {'polymorphic_on':type_}
-
-    class MyModel(MyMixin, Base):
-        __tablename__='test'
-        id =  Column(Integer, primary_key=True)
-
-
-Mixing in Relationships
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Relationships created by :func:`~sqlalchemy.orm.relationship` are provided
-with declarative mixin classes exclusively using the
-:class:`.declared_attr` approach, eliminating any ambiguity
-which could arise when copying a relationship and its possibly column-bound
-contents. Below is an example which combines a foreign key column and a
-relationship so that two classes ``Foo`` and ``Bar`` can both be configured to
-reference a common target class via many-to-one::
-
-    class RefTargetMixin(object):
-        @declared_attr
-        def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
-
-        @declared_attr
-        def target(cls):
-            return relationship("Target")
-
-    class Foo(RefTargetMixin, Base):
-        __tablename__ = 'foo'
-        id = Column(Integer, primary_key=True)
-
-    class Bar(RefTargetMixin, Base):
-        __tablename__ = 'bar'
-        id = Column(Integer, primary_key=True)
-
-    class Target(Base):
-        __tablename__ = 'target'
-        id = Column(Integer, primary_key=True)
-
-
-Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-:func:`~sqlalchemy.orm.relationship` definitions which require explicit
-primaryjoin, order_by etc. expressions should in all but the most
-simplistic cases use **late bound** forms
-for these arguments, meaning, using either the string form or a lambda.
-The reason for this is that the related :class:`_schema.Column` objects which are to
-be configured using ``@declared_attr`` are not available to another
-``@declared_attr`` attribute; while the methods will work and return new
-:class:`_schema.Column` objects, those are not the :class:`_schema.Column` objects that
-Declarative will be using as it calls the methods on its own, thus using
-*different* :class:`_schema.Column` objects.
-
-The canonical example is the primaryjoin condition that depends upon
-another mixed-in column::
-
-    class RefTargetMixin(object):
-        @declared_attr
-        def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
-
-        @declared_attr
-        def target(cls):
-            return relationship(Target,
-                primaryjoin=Target.id==cls.target_id   # this is *incorrect*
-            )
-
-Mapping a class using the above mixin, we will get an error like::
-
-    sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
-    yet associated with a Table.
-
-This is because the ``target_id`` :class:`_schema.Column` we've called upon in our
-``target()`` method is not the same :class:`_schema.Column` that declarative is
-actually going to map to our table.
-
-The condition above is resolved using a lambda::
-
-    class RefTargetMixin(object):
-        @declared_attr
-        def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
-
-        @declared_attr
-        def target(cls):
-            return relationship(Target,
-                primaryjoin=lambda: Target.id==cls.target_id
-            )
-
-or alternatively, the string form (which ultimately generates a lambda)::
-
-    class RefTargetMixin(object):
-        @declared_attr
-        def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
-
-        @declared_attr
-        def target(cls):
-            return relationship("Target",
-                primaryjoin="Target.id==%s.target_id" % cls.__name__
-            )
-
-Mixing in deferred(), column_property(), and other MapperProperty classes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Like :func:`~sqlalchemy.orm.relationship`, all
-:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as
-:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`,
-etc. ultimately involve references to columns, and therefore, when
-used with declarative mixins, have the :class:`.declared_attr`
-requirement so that no reliance on copying is needed::
-
-    class SomethingMixin(object):
-
-        @declared_attr
-        def dprop(cls):
-            return deferred(Column(Integer))
-
-    class Something(SomethingMixin, Base):
-        __tablename__ = "something"
-
-The :func:`.column_property` or other construct may refer
-to other columns from the mixin.  These are copied ahead of time before
-the :class:`.declared_attr` is invoked::
-
-    class SomethingMixin(object):
-        x = Column(Integer)
-
-        y = Column(Integer)
-
-        @declared_attr
-        def x_plus_y(cls):
-            return column_property(cls.x + cls.y)
-
-
-.. versionchanged:: 1.0.0 mixin columns are copied to the final mapped class
-   so that :class:`.declared_attr` methods can access the actual column
-   that will be mapped.
-
-Mixing in Association Proxy and Other Attributes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Mixins can specify user-defined attributes as well as other extension
-units such as :func:`.association_proxy`.   The usage of
-:class:`.declared_attr` is required in those cases where the attribute must
-be tailored specifically to the target subclass.   An example is when
-constructing multiple :func:`.association_proxy` attributes which each
-target a different type of child object.  Below is an
-:func:`.association_proxy` / mixin example which provides a scalar list of
-string values to an implementing class::
-
-    from sqlalchemy import Column, Integer, ForeignKey, String
-    from sqlalchemy.orm import relationship
-    from sqlalchemy.ext.associationproxy import association_proxy
-    from sqlalchemy.ext.declarative import declarative_base, declared_attr
-
-    Base = declarative_base()
-
-    class HasStringCollection(object):
-        @declared_attr
-        def _strings(cls):
-            class StringAttribute(Base):
-                __tablename__ = cls.string_table_name
-                id = Column(Integer, primary_key=True)
-                value = Column(String(50), nullable=False)
-                parent_id = Column(Integer,
-                                ForeignKey('%s.id' % cls.__tablename__),
-                                nullable=False)
-                def __init__(self, value):
-                    self.value = value
-
-            return relationship(StringAttribute)
-
-        @declared_attr
-        def strings(cls):
-            return association_proxy('_strings', 'value')
-
-    class TypeA(HasStringCollection, Base):
-        __tablename__ = 'type_a'
-        string_table_name = 'type_a_strings'
-        id = Column(Integer(), primary_key=True)
-
-    class TypeB(HasStringCollection, Base):
-        __tablename__ = 'type_b'
-        string_table_name = 'type_b_strings'
-        id = Column(Integer(), primary_key=True)
-
-Above, the ``HasStringCollection`` mixin produces a :func:`_orm.relationship`
-which refers to a newly generated class called ``StringAttribute``.  The
-``StringAttribute`` class is generated with its own :class:`_schema.Table`
-definition which is local to the parent class making usage of the
-``HasStringCollection`` mixin.  It also produces an :func:`.association_proxy`
-object which proxies references to the ``strings`` attribute onto the ``value``
-attribute of each ``StringAttribute`` instance.
-
-``TypeA`` or ``TypeB`` can be instantiated given the constructor
-argument ``strings``, a list of strings::
-
-    ta = TypeA(strings=['foo', 'bar'])
-    tb = TypeA(strings=['bat', 'bar'])
-
-This list will generate a collection
-of ``StringAttribute`` objects, which are persisted into a table that's
-local to either the ``type_a_strings`` or ``type_b_strings`` table::
-
-    >>> print(ta._strings)
-    [<__main__.StringAttribute object at 0x10151cd90>,
-        <__main__.StringAttribute object at 0x10151ce10>]
-
-When constructing the :func:`.association_proxy`, the
-:class:`.declared_attr` decorator must be used so that a distinct
-:func:`.association_proxy` object is created for each of the ``TypeA``
-and ``TypeB`` classes.
-
-.. _decl_mixin_inheritance:
-
-Controlling table inheritance with mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The ``__tablename__`` attribute may be used to provide a function that
-will determine the name of the table used for each class in an inheritance
-hierarchy, as well as whether a class has its own distinct table.
-
-This is achieved using the :class:`.declared_attr` indicator in conjunction
-with a method named ``__tablename__()``.   Declarative will always
-invoke :class:`.declared_attr` for the special names
-``__tablename__``, ``__mapper_args__`` and ``__table_args__``
-function **for each mapped class in the hierarchy, except if overridden
-in a subclass**.   The function therefore
-needs to expect to receive each class individually and to provide the
-correct answer for each.
-
-For example, to create a mixin that gives every class a simple table
-name based on class name::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class Tablename:
-        @declared_attr
-        def __tablename__(cls):
-            return cls.__name__.lower()
-
-    class Person(Tablename, Base):
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __tablename__ = None
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-        primary_language = Column(String(50))
-
-Alternatively, we can modify our ``__tablename__`` function to return
-``None`` for subclasses, using :func:`.has_inherited_table`.  This has
-the effect of those subclasses being mapped with single table inheritance
-against the parent::
-
-    from sqlalchemy.ext.declarative import declared_attr
-    from sqlalchemy.ext.declarative import has_inherited_table
-
-    class Tablename(object):
-        @declared_attr
-        def __tablename__(cls):
-            if has_inherited_table(cls):
-                return None
-            return cls.__name__.lower()
-
-    class Person(Tablename, Base):
-        id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-.. _mixin_inheritance_columns:
-
-Mixing in Columns in Inheritance Scenarios
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In contrast to how ``__tablename__`` and other special names are handled when
-used with :class:`.declared_attr`, when we mix in columns and properties (e.g.
-relationships, column properties, etc.), the function is
-invoked for the **base class only** in the hierarchy.  Below, only the
-``Person`` class will receive a column
-called ``id``; the mapping will fail on ``Engineer``, which is not given
-a primary key::
-
-    class HasId(object):
-        @declared_attr
-        def id(cls):
-            return Column('id', Integer, primary_key=True)
-
-    class Person(HasId, Base):
-        __tablename__ = 'person'
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __tablename__ = 'engineer'
-        primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-It is usually the case in joined-table inheritance that we want distinctly
-named columns on each subclass.  However in this case, we may want to have
-an ``id`` column on every table, and have them refer to each other via
-foreign key.  We can achieve this as a mixin by using the
-:attr:`.declared_attr.cascading` modifier, which indicates that the
-function should be invoked **for each class in the hierarchy**, in *almost*
-(see warning below) the same way as it does for ``__tablename__``::
-
-    class HasIdMixin(object):
-        @declared_attr.cascading
-        def id(cls):
-            if has_inherited_table(cls):
-                return Column(ForeignKey('person.id'), primary_key=True)
-            else:
-                return Column(Integer, primary_key=True)
-
-    class Person(HasIdMixin, Base):
-        __tablename__ = 'person'
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
-
-    class Engineer(Person):
-        __tablename__ = 'engineer'
-        primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-.. warning::
-
-    The :attr:`.declared_attr.cascading` feature currently does
-    **not** allow for a subclass to override the attribute with a different
-    function or value.  This is a current limitation in the mechanics of
-    how ``@declared_attr`` is resolved, and a warning is emitted if
-    this condition is detected.   This limitation does **not**
-    exist for the special attribute names such as ``__tablename__``, which
-    resolve in a different way internally than that of
-    :attr:`.declared_attr.cascading`.
-
-
-.. versionadded:: 1.0.0 added :attr:`.declared_attr.cascading`.
-
-Combining Table/Mapper Arguments from Multiple Mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In the case of ``__table_args__`` or ``__mapper_args__``
-specified with declarative mixins, you may want to combine
-some parameters from several mixins with those you wish to
-define on the class itself. The
-:class:`.declared_attr` decorator can be used
-here to create user-defined collation routines that pull
-from multiple collections::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class MySQLSettings(object):
-        __table_args__ = {'mysql_engine':'InnoDB'}
-
-    class MyOtherMixin(object):
-        __table_args__ = {'info':'foo'}
-
-    class MyModel(MySQLSettings, MyOtherMixin, Base):
-        __tablename__='my_model'
-
-        @declared_attr
-        def __table_args__(cls):
-            args = dict()
-            args.update(MySQLSettings.__table_args__)
-            args.update(MyOtherMixin.__table_args__)
-            return args
-
-        id =  Column(Integer, primary_key=True)
-
-Creating Indexes with Mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To define a named, potentially multicolumn :class:`.Index` that applies to all
-tables derived from a mixin, use the "inline" form of :class:`.Index` and
-establish it as part of ``__table_args__``::
-
-    class MyMixin(object):
-        a =  Column(Integer)
-        b =  Column(Integer)
-
-        @declared_attr
-        def __table_args__(cls):
-            return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),)
-
-    class MyModel(MyMixin, Base):
-        __tablename__ = 'atable'
-        c =  Column(Integer,primary_key=True)
+See :ref:`orm_mixins_toplevel` for this section.
\ No newline at end of file
index ac2671c52e07183011fd92299eaf8b305efe211a..a5884ef108c706a49b3e6fbe2e0c1f3d26662f5b 100644 (file)
 Configuring Relationships
 =========================
 
-.. seealso::
-
-    This section describes specifics about how the Declarative system
-    interacts with SQLAlchemy ORM relationship constructs.  For general
-    information about setting up relationships between mappings,
-    see :ref:`ormtutorial_toplevel` and :ref:`relationship_patterns`.
-
-Relationships to other classes are done in the usual way, with the added
-feature that the class specified to :func:`~sqlalchemy.orm.relationship`
-may be a string name.  The "class registry" associated with ``Base``
-is used at mapper compilation time to resolve the name into the actual
-class object, which is expected to have been defined once the mapper
-configuration is used::
-
-    class User(Base):
-        __tablename__ = 'users'
-
-        id = Column(Integer, primary_key=True)
-        name = Column(String(50))
-        addresses = relationship("Address", backref="user")
-
-    class Address(Base):
-        __tablename__ = 'addresses'
-
-        id = Column(Integer, primary_key=True)
-        email = Column(String(50))
-        user_id = Column(Integer, ForeignKey('users.id'))
-
-Column constructs, since they are just that, are immediately usable,
-as below where we define a primary join condition on the ``Address``
-class using them::
-
-    class Address(Base):
-        __tablename__ = 'addresses'
-
-        id = Column(Integer, primary_key=True)
-        email = Column(String(50))
-        user_id = Column(Integer, ForeignKey('users.id'))
-        user = relationship(User, primaryjoin=user_id == User.id)
+This section is covered by :ref:`orm_declarative_properties`.
 
 .. _declarative_relationship_eval:
 
 Evaluation of relationship arguments
 =====================================
 
-In addition to the main argument for :func:`~sqlalchemy.orm.relationship`,
-other arguments which depend upon the columns present on an as-yet
-undefined class may also be specified as strings.   For most of these
-arguments except that of the main argument, these strings are
-**evaluated as Python expressions using Python's built-in eval() function.**
-
-The full namespace available within this evaluation includes all classes mapped
-for this declarative base, as well as the contents of the ``sqlalchemy``
-package, including expression functions like
-:func:`~sqlalchemy.sql.expression.desc` and
-:attr:`~sqlalchemy.sql.expression.func`::
-
-    class User(Base):
-        # ....
-        addresses = relationship("Address",
-                             order_by="desc(Address.email)",
-                             primaryjoin="Address.user_id==User.id")
-
-.. warning::
-
-    The strings accepted by the following parameters:
-
-        :paramref:`_orm.relationship.order_by`
-
-        :paramref:`_orm.relationship.primaryjoin`
-
-        :paramref:`_orm.relationship.secondaryjoin`
-
-        :paramref:`_orm.relationship.secondary`
-
-        :paramref:`_orm.relationship.remote_side`
-
-        :paramref:`_orm.relationship.foreign_keys`
-
-        :paramref:`_orm.relationship._user_defined_foreign_keys`
-
-    Are **evaluated as Python code expressions using eval().  DO NOT PASS
-    UNTRUSTED INPUT TO THESE ARGUMENTS.**
-
-    In addition, prior to version 1.3.16 of SQLAlchemy, the main
-    "argument" to :func:`_orm.relationship` is also evaluated as Python
-    code.  **DO NOT PASS UNTRUSTED INPUT TO THIS ARGUMENT.**
-
-.. versionchanged:: 1.3.16
-
-    The string evaluation of the main "argument" no longer accepts an open
-    ended Python expression, instead only accepting a string class name
-    or dotted package-qualified name.
-
-For the case where more than one module contains a class of the same name,
-string class names can also be specified as module-qualified paths
-within any of these string expressions::
-
-    class User(Base):
-        # ....
-        addresses = relationship("myapp.model.address.Address",
-                             order_by="desc(myapp.model.address.Address.email)",
-                             primaryjoin="myapp.model.address.Address.user_id=="
-                                            "myapp.model.user.User.id")
-
-The qualified path can be any partial path that removes ambiguity between
-the names.  For example, to disambiguate between
-``myapp.model.address.Address`` and ``myapp.model.lookup.Address``,
-we can specify ``address.Address`` or ``lookup.Address``::
-
-    class User(Base):
-        # ....
-        addresses = relationship("address.Address",
-                             order_by="desc(address.Address.email)",
-                             primaryjoin="address.Address.user_id=="
-                                            "User.id")
-
-Two alternatives also exist to using string-based attributes.  A lambda
-can also be used, which will be evaluated after all mappers have been
-configured::
-
-    class User(Base):
-        # ...
-        addresses = relationship(lambda: Address,
-                             order_by=lambda: desc(Address.email),
-                             primaryjoin=lambda: Address.user_id==User.id)
-
-Or, the relationship can be added to the class explicitly after the classes
-are available::
-
-    User.addresses = relationship(Address,
-                              primaryjoin=Address.user_id==User.id)
-
+This section is moved to :ref:`orm_declarative_relationship_eval`.
 
 
 .. _declarative_many_to_many:
@@ -144,37 +19,5 @@ are available::
 Configuring Many-to-Many Relationships
 ======================================
 
-Many-to-many relationships are also declared in the same way
-with declarative as with traditional mappings. The
-``secondary`` argument to
-:func:`_orm.relationship` is as usual passed a
-:class:`_schema.Table` object, which is typically declared in the
-traditional way.  The :class:`_schema.Table` usually shares
-the :class:`_schema.MetaData` object used by the declarative base::
-
-    keyword_author = Table(
-        'keyword_author', Base.metadata,
-        Column('author_id', Integer, ForeignKey('authors.id')),
-        Column('keyword_id', Integer, ForeignKey('keywords.id'))
-        )
-
-    class Author(Base):
-        __tablename__ = 'authors'
-        id = Column(Integer, primary_key=True)
-        keywords = relationship("Keyword", secondary=keyword_author)
-
-Like other :func:`~sqlalchemy.orm.relationship` arguments, a string is accepted
-as well, passing the string name of the table as defined in the
-``Base.metadata.tables`` collection::
-
-    class Author(Base):
-        __tablename__ = 'authors'
-        id = Column(Integer, primary_key=True)
-        keywords = relationship("Keyword", secondary="keyword_author")
-
-As with traditional mapping, its generally not a good idea to use
-a :class:`_schema.Table` as the "secondary" argument which is also mapped to
-a class, unless the :func:`_orm.relationship` is declared with ``viewonly=True``.
-Otherwise, the unit-of-work system may attempt duplicate INSERT and
-DELETE statements against the underlying table.
+This section is moved to :ref:`orm_declarative_relationship_secondary_eval`.
 
index b35f54d7d4d437b3158210abc641df13fa486ba6..d51fb1831d71e9f7332d2eb428587833b327763b 100644 (file)
 Table Configuration
 ===================
 
-.. seealso::
+This section has moved; see :ref:`orm_declarative_table_configuration`.
 
-    This section describes specifics about how the Declarative system
-    defines :class:`_schema.Table` objects that are to be mapped with the
-    SQLAlchemy ORM.  For general information on :class:`_schema.Table` objects
-    see :ref:`metadata_describing_toplevel`.
 
-Table arguments other than the name, metadata, and mapped Column
-arguments are specified using the ``__table_args__`` class attribute.
-This attribute accommodates both positional as well as keyword
-arguments that are normally sent to the
-:class:`~sqlalchemy.schema.Table` constructor.
-The attribute can be specified in one of two forms. One is as a
-dictionary::
-
-    class MyClass(Base):
-        __tablename__ = 'sometable'
-        __table_args__ = {'mysql_engine':'InnoDB'}
-
-The other, a tuple, where each argument is positional
-(usually constraints)::
-
-    class MyClass(Base):
-        __tablename__ = 'sometable'
-        __table_args__ = (
-                ForeignKeyConstraint(['id'], ['remote_table.id']),
-                UniqueConstraint('foo'),
-                )
-
-Keyword arguments can be specified with the above form by
-specifying the last argument as a dictionary::
-
-    class MyClass(Base):
-        __tablename__ = 'sometable'
-        __table_args__ = (
-                ForeignKeyConstraint(['id'], ['remote_table.id']),
-                UniqueConstraint('foo'),
-                {'autoload':True}
-                )
+.. _declarative_hybrid_table:
 
 Using a Hybrid Approach with __table__
 ======================================
 
-As an alternative to ``__tablename__``, a direct
-:class:`~sqlalchemy.schema.Table` construct may be used.  The
-:class:`~sqlalchemy.schema.Column` objects, which in this case require
-their names, will be added to the mapping just like a regular mapping
-to a table::
-
-    class MyClass(Base):
-        __table__ = Table('my_table', Base.metadata,
-            Column('id', Integer, primary_key=True),
-            Column('name', String(50))
-        )
-
-``__table__`` provides a more focused point of control for establishing
-table metadata, while still getting most of the benefits of using declarative.
-An application that uses reflection might want to load table metadata elsewhere
-and pass it to declarative classes::
-
-    from sqlalchemy.ext.declarative import declarative_base
-
-    Base = declarative_base()
-    Base.metadata.reflect(some_engine)
-
-    class User(Base):
-        __table__ = metadata.tables['user']
-
-    class Address(Base):
-        __table__ = metadata.tables['address']
-
-Some configuration schemes may find it more appropriate to use ``__table__``,
-such as those which already take advantage of the data-driven nature of
-:class:`_schema.Table` to customize and/or automate schema definition.
-
-Note that when the ``__table__`` approach is used, the object is immediately
-usable as a plain :class:`_schema.Table` within the class declaration body itself,
-as a Python class is only another syntactical block.  Below this is illustrated
-by using the ``id`` column in the ``primaryjoin`` condition of a
-:func:`_orm.relationship`::
-
-    class MyClass(Base):
-        __table__ = Table('my_table', Base.metadata,
-            Column('id', Integer, primary_key=True),
-            Column('name', String(50))
-        )
+This section has moved; see :ref:`orm_imperative_table_configuration`.
 
-        widgets = relationship(Widget,
-                    primaryjoin=Widget.myclass_id==__table__.c.id)
-
-Similarly, mapped attributes which refer to ``__table__`` can be placed inline,
-as below where we assign the ``name`` column to the attribute ``_name``,
-generating a synonym for ``name``::
-
-    from sqlalchemy.ext.declarative import synonym_for
-
-    class MyClass(Base):
-        __table__ = Table('my_table', Base.metadata,
-            Column('id', Integer, primary_key=True),
-            Column('name', String(50))
-        )
-
-        _name = __table__.c.name
-
-        @synonym_for("_name")
-        def name(self):
-            return "Name: %s" % _name
 
 Using Reflection with Declarative
 =================================
 
-It's easy to set up a :class:`_schema.Table` that uses ``autoload=True``
-in conjunction with a mapped class::
-
-    class MyClass(Base):
-        __table__ = Table('mytable', Base.metadata,
-                        autoload=True, autoload_with=some_engine)
-
-However, one improvement that can be made here is to not
-require the :class:`_engine.Engine` to be available when classes are
-being first declared.   To achieve this, use the
-:class:`.DeferredReflection` mixin, which sets up mappings
-only after a special ``prepare(engine)`` step is called::
-
-    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection
-
-    Base = declarative_base(cls=DeferredReflection)
-
-    class Foo(Base):
-        __tablename__ = 'foo'
-        bars = relationship("Bar")
-
-    class Bar(Base):
-        __tablename__ = 'bar'
-
-        # illustrate overriding of "bar.foo_id" to have
-        # a foreign key constraint otherwise not
-        # reflected, such as when using MySQL
-        foo_id = Column(Integer, ForeignKey('foo.id'))
-
-    Base.prepare(e)
+This section has moved to :ref:`orm_declarative_reflected`.
 
index ccda5f20b20715072b207dd7ee77e3e3d0921e7c..12f18c04ad68818ddbadff2e88f240f9fcd5e684 100644 (file)
@@ -291,6 +291,108 @@ Note that the mappers for the derived classes Manager and Engineer omit the
 ``__tablename__``, indicating they do not have a mapped table of
 their own.
 
+.. _orm_inheritance_column_conflicts:
+
+Resolving Column Conflicts
++++++++++++++++++++++++++++
+
+Note in the previous section that the ``manager_name`` and ``engineer_info`` columns
+are "moved up" to be applied to ``Employee.__table__``, as a result of their
+declaration on a subclass that has no table of its own.   A tricky case
+comes up when two subclasses want to specify *the same* column, as below::
+
+    class Employee(Base):
+        __tablename__ = 'employee'
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))
+        type = Column(String(20))
+
+        __mapper_args__ = {
+            'polymorphic_on':type,
+            'polymorphic_identity':'employee'
+        }
+
+    class Engineer(Employee):
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        start_date = Column(DateTime)
+
+    class Manager(Employee):
+        __mapper_args__ = {'polymorphic_identity': 'manager'}
+        start_date = Column(DateTime)
+
+Above, the ``start_date`` column declared on both ``Engineer`` and ``Manager``
+will result in an error::
+
+    sqlalchemy.exc.ArgumentError: Column 'start_date' on class
+    <class '__main__.Manager'> conflicts with existing
+    column 'employee.start_date'
+
+The above scenario presents an ambiguity to the Declarative mapping system that
+may be resolved by using
+:class:`.declared_attr` to define the :class:`_schema.Column` conditionally,
+taking care to return the **existing column** via the parent ``__table__``
+if it already exists::
+
+    from sqlalchemy.orm import declared_attr
+
+    class Employee(Base):
+        __tablename__ = 'employee'
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))
+        type = Column(String(20))
+
+        __mapper_args__ = {
+            'polymorphic_on':type,
+            'polymorphic_identity':'employee'
+        }
+
+    class Engineer(Employee):
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+        @declared_attr
+        def start_date(cls):
+            "Start date column, if not present already."
+            return Employee.__table__.c.get('start_date', Column(DateTime))
+
+    class Manager(Employee):
+        __mapper_args__ = {'polymorphic_identity': 'manager'}
+
+        @declared_attr
+        def start_date(cls):
+            "Start date column, if not present already."
+            return Employee.__table__.c.get('start_date', Column(DateTime))
+
+Above, when ``Manager`` is mapped, the ``start_date`` column is
+already present on the ``Employee`` class; by returning the existing
+:class:`_schema.Column` object, the declarative system recognizes that this
+is the same column to be mapped to the two different subclasses separately.
+
+A similar concept can be used with mixin classes (see :ref:`orm_mixins_toplevel`)
+to define a particular series of columns and/or other mapped attributes
+from a reusable mixin class::
+
+    class Employee(Base):
+        __tablename__ = 'employee'
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))
+        type = Column(String(20))
+
+        __mapper_args__ = {
+            'polymorphic_on':type,
+            'polymorphic_identity':'employee'
+        }
+
+    class HasStartDate:
+        @declared_attr
+        def start_date(cls):
+            return cls.__table__.c.get('start_date', Column(DateTime))
+
+    class Engineer(HasStartDate, Employee):
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+    class Manager(HasStartDate, Employee):
+        __mapper_args__ = {'polymorphic_identity': 'manager'}
+
 Relationships with Single Table Inheritance
 +++++++++++++++++++++++++++++++++++++++++++
 
@@ -379,6 +481,7 @@ Above, the ``Manager`` class will have a ``Manager.company`` attribute;
 loads against the ``employee`` with an additional WHERE clause that
 limits rows to those with ``type = 'manager'``.
 
+
 Loading Single Inheritance Mappings
 +++++++++++++++++++++++++++++++++++
 
@@ -680,9 +783,6 @@ With a mapping like the above, only instances of ``Manager`` and ``Engineer``
 may be persisted; querying against the ``Employee`` class will always produce
 ``Manager`` and ``Engineer`` objects.
 
-.. seealso::
-
-    :ref:`declarative_concrete_table` - in the Declarative reference documentation
 
 Classical and Semi-Classical Concrete Polymorphic Configuration
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
index 1a06b73b85973072fa759bf7e5b028f9936411fc..3fdfe500ee0254b394ea74b17313f5644c645715 100644 (file)
@@ -56,11 +56,11 @@ sections, are listed here.
     :members: __get__, __set__, __delete__
     :undoc-members:
 
-.. autodata:: sqlalchemy.orm.interfaces.MANYTOONE
+.. autodata:: sqlalchemy.orm.MANYTOONE
 
-.. autodata:: sqlalchemy.orm.interfaces.MANYTOMANY
+.. autodata:: sqlalchemy.orm.MANYTOMANY
 
-.. autoclass:: sqlalchemy.orm.interfaces.MapperProperty
+.. autoclass:: sqlalchemy.orm.MapperProperty
     :members:
 
     .. py:attribute:: info
@@ -90,7 +90,7 @@ sections, are listed here.
 .. autofunction:: sqlalchemy.orm.loading.merge_frozen_result
 
 
-.. autodata:: sqlalchemy.orm.interfaces.ONETOMANY
+.. autodata:: sqlalchemy.orm.ONETOMANY
 
 .. autoclass:: sqlalchemy.orm.PropComparator
     :members:
index 64dce643c7c2e671634cda2373228fd267826e8c..b26b32087b326e72d90ba1f20714efddbdf56e93 100644 (file)
@@ -7,7 +7,7 @@ Notes and features regarding the general loading of mapped objects.
 For an in-depth introduction to querying with the SQLAlchemy ORM, please see the :ref:`ormtutorial_toplevel`.
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     loading_columns
     loading_relationships
index 60ad7f5f9a3ed8feda88f16673e4a098399d08d1..4de08690329782518496c200857123b095c13d4e 100644 (file)
@@ -11,9 +11,10 @@ know how to construct and use rudimentary mappers and relationships.
 
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     mapping_styles
+    declarative_mapping
     scalar_mapping
     inheritance
     nonstandard_mappings
index 250bd26a485b565c17586cc9fd92008d6cbb523d..6aa08114dab41a7ee92344aa9e9e7ba6ce0af635 100644 (file)
@@ -1,8 +1,23 @@
+
 .. currentmodule:: sqlalchemy.orm
 
 Class Mapping API
 =================
 
+.. autoclass:: registry
+    :members:
+
+.. autofunction:: declarative_base
+
+.. autofunction:: as_declarative
+
+.. autoclass:: declared_attr
+    :members:
+
+.. autofunction:: has_inherited_table
+
+.. autofunction:: synonym_for
+
 .. autofunction:: mapper
 
 .. autofunction:: object_mapper
index 5423a84eb79caadd3c0cf439b4560b0182c3d88d..29794ce9ef4661e9ae593f3d04e5824b73db505c 100644 (file)
@@ -1,3 +1,5 @@
+.. _mapping_columns_toplevel:
+
 .. currentmodule:: sqlalchemy.orm
 
 Mapping Table Columns
@@ -20,8 +22,9 @@ it matches the :attr:`_schema.Column.key` attribute on :class:`_schema.Column`,
 by default is the same as the :attr:`_schema.Column.name`.
 
 The name assigned to the Python attribute which maps to
-:class:`_schema.Column` can be different from either :attr:`_schema.Column.name` or :attr:`_schema.Column.key`
-just by assigning it that way, as we illustrate here in a Declarative mapping::
+:class:`_schema.Column` can be different from either
+:attr:`_schema.Column.name` or :attr:`_schema.Column.key` just by assigning
+it that way, as we illustrate here in a Declarative mapping::
 
     class User(Base):
         __tablename__ = 'user'
@@ -39,10 +42,11 @@ can be referenced directly::
         id = user_table.c.user_id
         name = user_table.c.user_name
 
-Or in a classical mapping, placed in the ``properties`` dictionary
-with the desired key::
+The corresponding technique for an :term:`imperative` mapping is
+to place the column under the desired key in the
+:paramref:`_orm.mapper.properties` dictionary::
 
-    mapper(User, user_table, properties={
+    registry.mapper(User, user_table, properties={
        'id': user_table.c.user_id,
        'name': user_table.c.user_name,
     })
index c156f08f1992c1876f3e7f5dd913fe80446ef8c7..29045dbb73811dcacbc9928ab1aae62212b33a72 100644 (file)
@@ -1,29 +1,62 @@
-=================
-Types of Mappings
-=================
-
-Modern SQLAlchemy features two distinct styles of mapper configuration.
-The "Classical" style is SQLAlchemy's original mapping API, whereas
-"Declarative" is the richer and more succinct system that builds on top
-of "Classical".   Both styles may be used interchangeably, as the end
-result of each is exactly the same - a user-defined class mapped by the
-:func:`.mapper` function onto a selectable unit, typically a :class:`_schema.Table`.
+.. _orm_mapping_classes_toplevel:
+
+=======================
+Mapping Python Classes
+=======================
+
+SQLAlchemy historically features two distinct styles of mapper configuration.
+The original mapping API is commonly referred to as "classical" style,
+whereas the more automated style of mapping is known as "declarative" style.
+SQLAlchemy now refers to these two mapping styles as **imperative mapping**
+and **declarative mapping**.
+
+Both styles may be used interchangeably, as the end result of each is exactly
+the same - a user-defined class that has a :class:`_orm.Mapper` configured
+against a selectable unit, typically represented by a :class:`_schema.Table`
+object.
+
+Both imperative and declarative mapping begin with an ORM :class:`_orm.registry`
+object, which maintains a set of classes that are mapped.    This registry
+is present for all mappings.
+
+.. versionchanged:: 1.4  Declarative and classical mapping are now referred
+   to as "declarative" and "imperative" mapping, and are unified internally,
+   all originating from the :class:`_orm.registry` construct that represents
+   a collection of related mappings.
+
+The full suite of styles can be hierarchically organized as follows:
+
+* :ref:`orm_declarative_mapping`
+    * Using :func:`_orm.declarative_base` Base class w/ metaclass
+        * :ref:`orm_declarative_table`
+        * :ref:`Imperative Table (a.k.a. "hybrid table") <orm_imperative_table_configuration>`
+    * Using :meth:`_orm.registry.mapped` Declarative Decorator
+        * Declarative Table
+        * Imperative Table (Hybrid)
+            * :ref:`orm_declarative_dataclasses`
+* :ref:`Imperative (a.k.a. "classical" mapping) <orm_imperative_mapping>`
+    * Using :meth:`_orm.registry.map_imperatively`
+        * :ref:`orm_imperative_dataclasses`
+
+.. _orm_declarative_mapping:
 
 Declarative Mapping
 ===================
 
-The *Declarative Mapping* is the typical way that
-mappings are constructed in modern SQLAlchemy.
-Making use of the :ref:`declarative_toplevel`
-system, the components of the user-defined class as well as the
-:class:`_schema.Table` metadata to which the class is mapped are defined
-at once::
+The **Declarative Mapping** is the typical way that
+mappings are constructed in modern SQLAlchemy.   The most common pattern
+is to first construct a base class using the :func:`_orm.declarative_base`
+function, which will apply the declarative mapping process to all subclasses
+that derive from it.  Below features a declarative base which is then
+used in a declarative table mapping::
 
-    from sqlalchemy.ext.declarative import declarative_base
     from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy.orm import declarative_base
 
+    # declarative base class
     Base = declarative_base()
 
+    # an example mapping using the base
     class User(Base):
         __tablename__ = 'user'
 
@@ -32,64 +65,313 @@ at once::
         fullname = Column(String)
         nickname = Column(String)
 
-Above, a basic single-table mapping with four columns.   Additional
-attributes, such as relationships to other mapped classes, are also
-declared inline within the class definition::
+Above, the :func:`_orm.declarative_base` callable returns a new base class
+from which new mapped classes may inherit, as illustrated above where the
+new mapped class ``User`` is constructed.
 
-    class User(Base):
+The base class refers to a
+:class:`_orm.registry` object that maintains a collection of related mapped
+classes.   The :func:`_orm.declarative_base` function is in fact shorthand
+for first creating the registry with the :class:`_orm.registry`
+constructor, and then generating a base class using the
+:meth:`_orm.registry.generate_base` method::
+
+    from sqlalchemy.orm import registry
+
+    # equivalent to Base = declarative_base()
+
+    mapper_registry = registry()
+    Base = mapper_registry.generate_base()
+
+The :class:`_orm.registry` is used directly in order to access a variety
+of mapping styles to suit different use cases:
+
+* :ref:`orm_declarative_decorator` - declarative mapping using a decorator,
+  rather than a base class.
+
+* :ref:`orm_imperative_mapping` - imperative mapping, specifying all mapping
+  arguments directly rather than scanning a class.
+
+Documentation for Declarative mapping continues at :ref:`declarative_config_toplevel`.
+
+.. seealso::
+
+    :ref:`declarative_config_toplevel`
+
+
+.. _orm_declarative_decorator:
+
+Declarative Mapping using a Decorator (no declarative base)
+------------------------------------------------------------
+
+An alternative to using the "declarative base" class is to apply
+declarative mapping to a class explicitly, using either an imperative technique
+similar to that of a "classical" mapping, or more succinctly by using
+a decorator.  The :meth:`_orm.registry.mapped` function is a class decorator
+that can be applied to any Python class with no hierarchy in place.  The
+Python class otherwise is configured in declarative style normally::
+
+    from sqlalchemy import Column, Integer, String, Text, ForeignKey
+
+    from sqlalchemy.orm import registry
+    from sqlalchemy.orm import relationship
+
+    mapper_registry = registry()
+
+    @mapper_registry.mapped
+    class User:
         __tablename__ = 'user'
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
-        fullname = Column(String)
-        nickname = Column(String)
 
-        addresses = relationship("Address", backref="user", order_by="Address.id")
+        addresses = relationship("Address", back_populates="user")
 
-    class Address(Base):
+    @mapper_registry.mapped
+    class Address:
         __tablename__ = 'address'
 
         id = Column(Integer, primary_key=True)
-        user_id = Column(ForeignKey('user.id'))
+        user_id = Column(ForeignKey("user.id"))
         email_address = Column(String)
 
-The declarative mapping system is introduced in the
-:ref:`ormtutorial_toplevel`.  For additional details on how this system
-works, see :ref:`declarative_toplevel`.
+        user = relationship("User", back_populates="addresses")
+
+Above, the same :class:`_orm.registry` that we'd use to generate a declarative
+base class via its :meth:`_orm.registry.generate_base` method may also apply
+a declarative-style mapping to a class without using a base.   When using
+the above style, the mapping of a particular class will **only** proceed
+if the decorator is applied to that class directly.   For inheritance
+mappings, the decorator should be applied to each subclass::
+
+    from sqlalchemy.orm import registry
+    mapper_registry = registry()
+
+    @mapper_registry.mapped
+    class Person:
+        __tablename__ = "person"
+
+        person_id = Column(Integer, primary_key=True)
+        type = Column(String, nullable=False)
+
+        __mapper_args__ = {
+
+            "polymorphic_on": type,
+            "polymorphic_identity": "person"
+        }
+
+
+    @mapper_registry.mapped
+    class Employee(Person):
+        __tablename__ = "employee"
+
+        person_id = Column(ForeignKey("person.person_id"), primary_key=True)
+
+        __mapper_args__ = {
+            "polymorphic_identity": "employee"
+        }
+
+Both the "declarative table" and "imperative table" styles of declarative
+mapping may be used with the above mapping style.
+
+The decorator form of mapping is particularly useful when combining a
+SQLAlchemy declarative mapping with other forms of class declaration, notably
+the Python ``dataclasses`` module.  See the next section.
+
+.. _orm_declarative_dataclasses:
+
+Declarative Mapping with Dataclasses and Attrs
+----------------------------------------------
+
+The dataclasses_ module, added in Python 3.7, provides a ``@dataclass`` class
+decorator to automatically generate boilerplate definitions of ``__init__()``,
+``__eq__()``, ``__repr__()``, etc. methods. Another very popular library that does
+the same, and much more, is attrs_.  Both libraries make use of class
+decorators in order to scan a class for attributes that define the class'
+behavior, which are then used to generate methods, documentation, and annotations.
+
+The :meth:`_orm.registry.mapped` class decorator allows the declarative mapping
+of a class to occur after the class has been fully constructed, allowing the
+class to be processed by other class decorators first.  The ``@dataclass``
+and ``@attr.s`` decorators may therefore be applied first before the
+ORM mapping process proceeds via the :meth:`_orm.registry.mapped` decorator
+or via the :meth:`_orm.registry.map_imperatively` method discussed in a
+later section.
+
+As the attributes set up for ``@dataclass`` or ``@attr.s`` are typically those
+which will be matched up to the :class:`_schema.Column` objects that are
+mapped, it is usually required that the
+:ref:`orm_imperative_table_configuration` style is used in order to configure
+the :class:`_schema.Table`, which means that it is defined separately and
+associated with the class via the ``__table__``.
+
+
+When attributes are defined using ``dataclasses``, the ``@dataclass``
+decorator consumes them but leaves them in place on the class.
+SQLAlchemy's mapping process, when it encounters an attribute that normally
+is to be mapped to a :class:`_schema.Column`, checks explicitly if the
+attribute is part of a Dataclasses setup, and if so will **replace**
+the class-bound dataclass attribute with its usual mapped
+properties.  The ``__init__`` method created by ``@dataclass`` is left
+intact.   In contrast, the ``@attr.s`` decorator actually removes its
+own class-bound attributes after the decorator runs, so that SQLAlchemy's
+mapping process takes over these attributes without any issue.
+
+.. versionadded:: 1.4 Added support for direct mapping of Python dataclasses,
+   where the :class:`_orm.Mapper` will now detect attributes that are specific
+   to the ``dataclasses`` module and replace them at mapping time, rather
+   than skipping them as is the default behavior for any class attribute
+   that's not part of the mapping.
+
+An example of a mapping using ``@dataclass`` is as follows::
+
+    from __future__ import annotations
+
+    from dataclasses import dataclass
+    from dataclasses import field
+    from typing import List
+
+    from sqlalchemy import Column
+    from sqlalchemy import ForeignKey
+    from sqlalchemy import Integer
+    from sqlalchemy import String
+    from sqlalchemy import Table
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+
+    @mapper_registry.mapped
+    @dataclass
+    class User:
+        __table__ = Table(
+            "user",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50)),
+            Column("fullname", String(50)),
+            Column("nickname", String(12)),
+        )
+        id: int = field(init=False)
+        name: str = None
+        fullname: str = None
+        nickname: str = None
+        addresses: List[Address] = field(default_factory=list)
+
+
+    @mapper_registry.mapped
+    @dataclass
+    class Address:
+        __table__ = Table(
+            "address",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("user_id", Integer, ForeignKey("user.id")),
+            Column("email_address", String(50)),
+        )
+        id: int = field(init=False)
+        user_id: int = field(init=False)
+        email_address: str = None
+
+In the above example, the ``User.id``, ``Address.id``, and ``Address.user_id``
+attributes are defined as ``field(init=False)``. This means that parameters for
+these won't be added to ``__init__()`` methods, but
+:class:`.Session` will still be able to set them after getting their values
+during flush from autoincrement or other default value generator.   To
+allow them to be specified in the constructor explicitly, they would instead
+be given a default value of ``None``.
+
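+As a brief sketch of that alternative, a hypothetical ``Keyword`` class
+below declares ``id`` with a ``None`` default, so that it may be passed
+to the generated ``__init__()`` while still being generated by the
+database when left unset::
+
+    from dataclasses import dataclass
+
+    from sqlalchemy import Column, Integer, String, Table
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+
+    @mapper_registry.mapped
+    @dataclass
+    class Keyword:
+        __table__ = Table(
+            "keyword",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("keyword", String(50)),
+        )
+
+        # a None default places "id" in the generated __init__(); the
+        # database may still generate the value when it is left as None
+        id: int = None
+        keyword: str = None
+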
+Similarly, a mapping using ``@attr.s``::
+
+    import attr
+
+    # other imports
+
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+
+    @mapper_registry.mapped
+    @attr.s
+    class User:
+        __table__ = Table(
+            "user",
+            mapper_registry.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50)),
+            Column("fullname", String(50)),
+            Column("nickname", String(12)),
+        )
+        id = attr.ib()
+        name = attr.ib()
+        fullname = attr.ib()
+        nickname = attr.ib()
+        addresses = attr.ib()
+
+    # other classes...
+
+.. sidebar:: Using MyPy with SQLAlchemy models
+
+    If you are using PEP 484 static type checkers for Python, a `MyPy
+    <http://mypy-lang.org/>`_ plugin is included with `type stubs for
+    SQLAlchemy <https://github.com/dropbox/sqlalchemy-stubs>`_.  The plugin is
+    tailored towards SQLAlchemy declarative models.   SQLAlchemy hopes to include
+    more comprehensive PEP 484 support in future releases.
+
+
+``@dataclass`` and attrs_ classes may also be used with imperative
+("classical") mappings, i.e. with the :meth:`_orm.registry.map_imperatively`
+method.  See the section :ref:`orm_imperative_dataclasses` for a similar
+example.
+
+.. _dataclasses: https://docs.python.org/3/library/dataclasses.html
+.. _attrs: https://pypi.org/project/attrs/
+
+.. _orm_imperative_mapping:
 
 .. _classical_mapping:
 
-Classical Mappings
-==================
+Imperative (a.k.a. Classical) Mappings
+======================================
+
+An **imperative** or **classical** mapping refers to the configuration of a
+mapped class using the :meth:`_orm.registry.map_imperatively` method,
+where the target class does not include any declarative class attributes.
+The "map imperative" style has historically been achieved using the
+:func:`_orm.mapper` function directly; however, this function now expects
+that a :class:`_orm.registry` is present.
+
+.. deprecated:: 1.4  Using the :func:`_orm.mapper` function directly to
+   achieve a classical mapping is deprecated.   The
+   :meth:`_orm.registry.map_imperatively` method retains the identical
+   functionality while also allowing for string-based resolution of
+   other mapped classes from within the registry.
 
-A *Classical Mapping* refers to the configuration of a mapped class using the
-:func:`.mapper` function, without using the Declarative system.  This is
-SQLAlchemy's original class mapping API, and is still the base mapping
-system provided by the ORM.
 
 In "classical" form, the table metadata is created separately with the
 :class:`_schema.Table` construct, then associated with the ``User`` class via
 the :func:`.mapper` function::
 
-    from sqlalchemy import Table, MetaData, Column, Integer, String, ForeignKey
-    from sqlalchemy.orm import mapper
+    from sqlalchemy import Table, Column, Integer, String, ForeignKey
+    from sqlalchemy.orm import registry
 
-    metadata = MetaData()
+    mapper_registry = registry()
 
-    user = Table('user', metadata,
-                Column('id', Integer, primary_key=True),
-                Column('name', String(50)),
-                Column('fullname', String(50)),
-                Column('nickname', String(12))
-            )
+    user_table = Table(
+        'user',
+        mapper_registry.metadata,
+        Column('id', Integer, primary_key=True),
+        Column('name', String(50)),
+        Column('fullname', String(50)),
+        Column('nickname', String(12))
+    )
+
+    class User:
+        pass
+
+    mapper_registry.map_imperatively(User, user_table)
 
-    class User(object):
-        def __init__(self, name, fullname, nickname):
-            self.name = name
-            self.fullname = fullname
-            self.nickname = nickname
 
-    mapper(User, user)
 
 Information about mapped attributes, such as relationships to other classes, are provided
 via the ``properties`` dictionary.  The example below illustrates a second :class:`_schema.Table`
@@ -120,39 +402,38 @@ user-defined class, linked together with a :func:`.mapper`.  When we talk about
 "the behavior of :func:`.mapper`", this includes when using the Declarative system
 as well - it's still used, just behind the scenes.
 
-.. _mapping_dataclasses:
-
-Mapping dataclasses and attrs
------------------------------
-
-The dataclasses_ module, added in Python 3.7, provides a ``dataclass`` class
-decorator to automatically generate boilerplate definitions of ``__init__()``,
-``__eq__()``, ``__repr()__``, etc. methods. Another very popular library that does
-the same, and much more, is attrs_. Classes defined using either of these can
-be mapped with the following caveats.
 
-.. versionadded:: 1.4 Added support for direct mapping of Python dataclasses.
 
-The declarative "base" can't be used directly; a mapping function such as
-:func:`_declarative.instrument_declarative` or :func:`_orm.mapper` may be
-used.
 
-The ``dataclass`` decorator adds class attributes corresponding to simple default values.
-This is done mostly as documentation, these attributes are not necessary for the function
-of any of the generated methods. Mapping replaces these class attributes with property
-descriptors.
+.. _orm_imperative_dataclasses:
 
-Mapping of frozen ``dataclass`` and ``attrs`` classes is not possible, because the
-machinery used to enforce immutability interferes with loading.
+Imperative Mapping with Dataclasses and Attrs
+---------------------------------------------
 
-Example using classical mapping::
+As described in the section :ref:`orm_declarative_dataclasses`, the
+``@dataclass`` decorator and the attrs_ library both work as class
+decorators that are applied to a class first, before it is passed to
+SQLAlchemy for mapping.   Just like we can use the
+:meth:`_orm.registry.mapped` decorator in order to apply declarative-style
+mapping to the class, we can also pass the class to the
+:meth:`_orm.registry.map_imperatively`
+method so that we may pass all :class:`_schema.Table` and :class:`_orm.Mapper`
+configuration imperatively to the function rather than having them defined
+on the class itself as declarative class variables::
 
     from __future__ import annotations
-    from dataclasses import dataclass, field
+
+    from dataclasses import dataclass
+    from dataclasses import field
     from typing import List
 
-    from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
-    from sqlalchemy.orm import mapper, relationship
+    from sqlalchemy import Column
+    from sqlalchemy import ForeignKey
+    from sqlalchemy import Integer
+    from sqlalchemy import MetaData
+    from sqlalchemy import String
+    from sqlalchemy import Table
+    from sqlalchemy.orm import mapper
+    from sqlalchemy.orm import relationship
 
     @dataclass
     class User:
@@ -193,20 +474,221 @@ Example using classical mapping::
 
     mapper(Address, address)
 
-Note that ``User.id``, ``Address.id``, and ``Address.user_id`` are defined as ``field(init=False)``.
-This means that parameters for these won't be added to ``__init__()`` methods, but
-:class:`.Session` will still be able to set them after getting their values during flush
-from autoincrement or other default value generator. You can also give them a
-``None`` default value instead if you want to be able to specify their values in the constructor.
+.. _orm_mapper_configuration_overview:
 
-.. _dataclasses: https://docs.python.org/3/library/dataclasses.html
-.. _attrs: https://www.attrs.org/en/stable/
+Mapper Configuration Overview
+=============================
+
+With all mapping forms, the mapping of the class can be
+configured in many ways by passing construction arguments that become
+part of the :class:`_orm.Mapper` object.   The function which ultimately
+receives these arguments is the :func:`_orm.mapper` function, which are delivered
+to it originating from one of the front-facing mapping functions defined
+on the :class:`_orm.registry` object.
+
+There are four general classes of configuration information that the
+:func:`_orm.mapper` function looks for:
+
+The class to be mapped
+-----------------------
+
+This is a class that we construct in our application.
+There are generally no restrictions on the structure of this class. [1]_
+When a Python class is mapped, there can only be **one** :class:`_orm.Mapper`
+object for the class. [2]_
+
+When mapping with the :ref:`declarative <orm_declarative_mapping>` mapping
+style, the class to be mapped is either a subclass of the declarative base class,
+or is handled by a decorator or function such as :meth:`_orm.registry.mapped`.
+
+When mapping with the :ref:`imperative <orm_imperative_mapping>` style, the
+class is passed directly as the
+:paramref:`_orm.registry.map_imperatively.class_` argument.
+
+The table, or other from clause object
+--------------------------------------
+
+In the vast majority of common cases this is an instance of
+:class:`_schema.Table`.  For more advanced use cases, it may also refer
+to any kind of :class:`_sql.FromClause` object, the most common
+alternative objects being the :class:`_sql.Subquery` and :class:`_sql.Join`
+object.
+
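+For instance, a declarative class may be mapped against a :class:`_sql.Join`
+of two tables; the following is a minimal sketch of this technique, where
+the ``user`` / ``address`` tables and the ``AddressUser`` class are
+illustrative only::
+
+    from sqlalchemy import Column, ForeignKey, Integer, String, Table, join
+    from sqlalchemy.orm import column_property, declarative_base
+
+    Base = declarative_base()
+
+    user_table = Table(
+        "user",
+        Base.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+    )
+
+    address_table = Table(
+        "address",
+        Base.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("user_id", ForeignKey("user.id")),
+        Column("email_address", String(50)),
+    )
+
+    class AddressUser(Base):
+        # the mapped selectable is the JOIN of the two tables, rather
+        # than a single Table
+        __table__ = join(user_table, address_table)
+
+        # "user.id" and "address.user_id" are equated by the join;
+        # map them under a single attribute
+        id = column_property(user_table.c.id, address_table.c.user_id)
+
+        # disambiguate the remaining "id" column of the address table
+        address_id = address_table.c.id
+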
+When mapping with the :ref:`declarative <orm_declarative_mapping>` mapping
+style, the subject table is either generated by the declarative system based
+on the ``__tablename__`` attribute and the :class:`_schema.Column` objects
+presented, or it is established via the ``__table__`` attribute.  These
+two styles of configuration are presented at
+:ref:`orm_declarative_table` and :ref:`orm_imperative_table_configuration`.
+
+When mapping with the :ref:`imperative <orm_imperative_mapping>` style, the
+subject table is passed positionally as the
+:paramref:`_orm.registry.map_imperatively.local_table` argument.
+
+In contrast to the "one mapper per class" requirement of a mapped class,
+the :class:`_schema.Table` or other :class:`_sql.FromClause` object that
+is the subject of the mapping may be associated with any number of mappings.
+The :class:`_orm.Mapper` applies modifications directly to the user-defined
+class, but does not modify the given :class:`_schema.Table` or other
+:class:`_sql.FromClause` in any way.
+
+.. _orm_mapping_properties:
+
+The properties dictionary
+--------------------------
+
+This is a dictionary of all of the attributes
+that will be associated with the mapped class.    By default, the
+:class:`_orm.Mapper` generates entries for this dictionary derived from the
+given :class:`_schema.Table`, in the form of :class:`_orm.ColumnProperty`
+objects which each refer to an individual :class:`_schema.Column` of the
+mapped table.  The properties dictionary will also contain all the other
+kinds of :class:`_orm.MapperProperty` objects to be configured, most
+commonly instances generated by the :func:`_orm.relationship` construct.
+
+When mapping with the :ref:`declarative <orm_declarative_mapping>` mapping
+style, the properties dictionary is generated by the declarative system
+by scanning the class to be mapped for appropriate attributes.  See
+the section :ref:`orm_declarative_properties` for notes on this process.
+
+When mapping with the :ref:`imperative <orm_imperative_mapping>` style, the
+properties dictionary is passed directly as the ``properties`` argument
+to :meth:`_orm.registry.map_imperatively`, which will pass it along to the
+:paramref:`_orm.mapper.properties` parameter.
+
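+As a brief sketch, an imperative mapping might supply a
+:func:`_orm.relationship` within this dictionary; the table and class names
+below are illustrative only::
+
+    from sqlalchemy import Column, ForeignKey, Integer, String, Table
+    from sqlalchemy.orm import registry, relationship
+
+    mapper_registry = registry()
+
+    user_table = Table(
+        "user",
+        mapper_registry.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+    )
+
+    address_table = Table(
+        "address",
+        mapper_registry.metadata,
+        Column("id", Integer, primary_key=True),
+        Column("user_id", ForeignKey("user.id")),
+    )
+
+    class User:
+        pass
+
+    class Address:
+        pass
+
+    mapper_registry.map_imperatively(Address, address_table)
+
+    mapper_registry.map_imperatively(
+        User,
+        user_table,
+        properties={
+            # a relationship() supplied alongside the ColumnProperty
+            # objects generated automatically from user_table
+            "addresses": relationship(Address, backref="user")
+        },
+    )
+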
+Other mapper configuration parameters
+---------------------------------------
+
+These parameters are documented at :func:`_orm.mapper`.
+
+When mapping with the :ref:`declarative <orm_declarative_mapping>` mapping
+style, additional mapper configuration arguments are configured via the
+``__mapper_args__`` class attribute, documented at
+:ref:`orm_declarative_mapper_options`.
+
+When mapping with the :ref:`imperative <orm_imperative_mapping>` style,
+keyword arguments are passed to the :meth:`_orm.registry.map_imperatively`
+method which passes them along to the :func:`_orm.mapper` function.
+
+
+.. [1] When running under Python 2, a Python 2 "old style" class is the only
+       kind of class that isn't compatible.    When running code on Python 2,
+       all classes must extend from the Python ``object`` class.  Under
+       Python 3 this is always the case.
+
+.. [2] There is a legacy feature known as a "non primary mapper", where
+       additional :class:`_orm.Mapper` objects may be associated with a class
+       that's already mapped, however they don't apply instrumentation
+       to the class.  This feature is deprecated as of SQLAlchemy 1.3.
+
+
+Mapped Class Behavior
+=====================
+
+Across all styles of mapping using the :class:`_orm.registry` object,
+the following behaviors are common:
+
+Default Constructor
+-------------------
+
+The :class:`_orm.registry` applies a default constructor, i.e. ``__init__``
+method, to all mapped classes that don't explicitly have their own
+``__init__`` method.   The behavior of this method is such that it provides
+a convenient keyword constructor that will accept as keywords the attributes
+that are named.   E.g.::
+
+    from sqlalchemy.orm import declarative_base
+
+    Base = declarative_base()
+
+    class User(Base):
+        __tablename__ = 'user'
+
+        id = Column(...)
+        name = Column(...)
+        fullname = Column(...)
+
+An object of type ``User`` above will have a constructor which allows
+``User`` objects to be created as::
+
+    u1 = User(name='some name', fullname='some fullname')
+
+The above constructor may be customized by passing a Python callable to
+the :paramref:`_orm.registry.constructor` parameter which provides the
+desired default ``__init__()`` behavior.
+
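+As a brief sketch, a replacement constructor that silently ignores unknown
+keyword arguments might be supplied as below; the ``_permissive_constructor``
+name is illustrative only::
+
+    from sqlalchemy.orm import registry
+
+    def _permissive_constructor(self, **kwargs):
+        # assign only those keywords that correspond to attributes
+        # present on the class; silently skip the rest rather than
+        # raising TypeError as the default constructor does
+        for key, value in kwargs.items():
+            if hasattr(type(self), key):
+                setattr(self, key, value)
+
+    mapper_registry = registry(constructor=_permissive_constructor)
+    Base = mapper_registry.generate_base()
+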
+The constructor also applies to imperative mappings::
+
+    from sqlalchemy.orm import registry
+
+    mapper_registry = registry()
+
+    user_table = Table(
+        'user',
+        mapper_registry.metadata,
+        Column('id', Integer, primary_key=True),
+        Column('name', String(50))
+    )
+
+    class User:
+        pass
+
+    mapper_registry.map_imperatively(User, user_table)
+
+The above class, mapped imperatively as described at :ref:`classical_mapping`,
+will also feature the default constructor associated with the :class:`_orm.registry`.
+
+.. versionadded:: 1.4  classical mappings now support a standard configuration-level
+   constructor when they are mapped via the :meth:`_orm.registry.map_imperatively`
+   method.
+
+Runtime Introspection of Mapped classes and Mappers
+---------------------------------------------------
+
+A class that is mapped using :class:`_orm.registry` will also feature a few
+attributes that are common to all mappings:
+
+* The ``__mapper__`` attribute will refer to the :class:`_orm.Mapper` that
+  is associated with the class::
+
+    mapper = User.__mapper__
+
+  This :class:`_orm.Mapper` is also what's returned when using the
+  :func:`_sa.inspect` function against the mapped class::
+
+    from sqlalchemy import inspect
+
+    mapper = inspect(User)
+
+  ..
+
+* The ``__table__`` attribute will refer to the :class:`_schema.Table`, or
+  more generically to the :class:`_schema.FromClause` object, to which the
+  class is mapped::
+
+    table = User.__table__
+
+  This :class:`_schema.FromClause` is also what's returned when using the
+  :attr:`_orm.Mapper.local_table` attribute of the :class:`_orm.Mapper`::
+
+    table = inspect(User).local_table
+
+  For a single-table inheritance mapping, where the class is a subclass that
+  does not have a table of its own, the :attr:`_orm.Mapper.local_table` attribute as well
+  as the ``.__table__`` attribute will be ``None``.   To retrieve the
+  "selectable" that is actually selected from during a query for this class,
+  this is available via the :attr:`_orm.Mapper.selectable` attribute::
+
+    table = inspect(User).selectable
+
+  ..
 
-Runtime Introspection of Mappings, Objects
-==========================================
+Mapper Inspection Features
+--------------------------
 
-The :class:`_orm.Mapper` object is available from any mapped class, regardless
-of method, using the :ref:`core_inspection_toplevel` system.  Using the
+As illustrated in the previous section, the :class:`_orm.Mapper` object is
+available from any mapped class, regardless of method, using the
+:ref:`core_inspection_toplevel` system.  Using the
 :func:`_sa.inspect` function, one can acquire the :class:`_orm.Mapper` from a
 mapped class::
 
index 94892ffbeb48fe8ee84d9b2589bc60e523b964b4..16473e26c0ba39ca8a346e8d90847f51c716aff9 100644 (file)
@@ -2,6 +2,8 @@
 Non-Traditional Mappings
 ========================
 
+.. _orm_mapping_joins:
+
 .. _maptojoin:
 
 Mapping a Class against Multiple Tables
@@ -114,26 +116,27 @@ may be used::
     that the LEFT OUTER JOIN from "p" to "q" does not have an entry for the "q"
     side.
 
+.. _orm_mapping_arbitrary_subqueries:
 
-Mapping a Class against Arbitrary Selects
-=========================================
+Mapping a Class against Arbitrary Subqueries
+============================================
 
-Similar to mapping against a join, a plain :func:`_expression.select` object can be used with a
-mapper as well.  The example fragment below illustrates mapping a class
-called ``Customer`` to a :func:`_expression.select` which includes a join to a
-subquery::
+Similar to mapping against a join, a plain :func:`_expression.select` object
+can be used with a mapper as well.  The example fragment below illustrates
+mapping a class called ``Customer`` to a :func:`_expression.select` which
+includes a join to a subquery::
 
     from sqlalchemy import select, func
 
     subq = select(
-                func.count(orders.c.id).label('order_count'),
-                func.max(orders.c.price).label('highest_order'),
-                orders.c.customer_id
-                ).group_by(orders.c.customer_id).alias()
-
-    customer_select = select(customers, subq).select_from(
-        join(customers, subq, customers.c.id == subq.c.customer_id)
-    ).alias()
+        func.count(orders.c.id).label('order_count'),
+        func.max(orders.c.price).label('highest_order'),
+        orders.c.customer_id
+    ).group_by(orders.c.customer_id).subquery()
+
+    customer_select = select(customers, subq).join_from(
+        customers, subq, customers.c.id == subq.c.customer_id
+    ).subquery()
 
     class Customer(Base):
         __table__ = customer_select
index 37f59d34523473b02253423728ccdfd0260ee98e..8a4fe36a1d8287c4f86c6bbb6673cc41bdd7cf2a 100644 (file)
@@ -10,7 +10,7 @@ of its usage.   For an introduction to relationships, start with the
 :ref:`ormtutorial_toplevel` and head into :ref:`orm_tutorial_relationship`.
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     basic_relationships
     self_referential
index e8829af49a61166f04cd6101fdc3f1437c928c5b..001745e9eddbf6d39b53c491418955797e73c3d8 100644 (file)
@@ -8,7 +8,7 @@ The following sections discuss how table columns and SQL expressions are
 mapped to individual object attributes.
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     mapping_columns
     mapped_sql_expr
index db52fd3d1555cb18942ec48eebe863bd0d93c887..8e14942c4e95c8ff551d2afd40421490e6ee9b81 100644 (file)
@@ -13,7 +13,7 @@ persistence operations is the
 :class:`.Session`.
 
 .. toctree::
-    :maxdepth: 2
+    :maxdepth: 3
 
     session_basics
     session_state_management
index 8c148ac32a6c93aac58ba0af31f0632cdc27ec7c..c04caf9e6fc8cff7b2df83b3ec81df2e6f39b5cc 100644 (file)
@@ -104,7 +104,7 @@ application will usually have just one instance of this base in a commonly
 imported module.   We create the base class using the :func:`.declarative_base`
 function, as follows::
 
-    >>> from sqlalchemy.ext.declarative import declarative_base
+    >>> from sqlalchemy.orm import declarative_base
 
     >>> Base = declarative_base()
 
index a17bb5cece8574cfeae1a821030af2ea4655f02c..1a38fc7562a4e72940e6667f991dfc148bf7ab97 100644 (file)
@@ -644,6 +644,10 @@ class RemovedIn20Warning(SADeprecationWarning):
     "Indicates the version that started raising this deprecation warning"
 
 
+class MovedIn20Warning(RemovedIn20Warning):
+    """subtype of RemovedIn20Warning to indicate an API that moved only."""
+
+
 class SAPendingDeprecationWarning(PendingDeprecationWarning):
     """A similar warning as :class:`_exc.SADeprecationWarning`, this warning
     is not used in modern versions of SQLAlchemy.
index 4ae3a415e434fe284eff2ee5f2f29b7dd34ac546..2dc7d54debc7498e3d1c254e9c0657d4bfd10fa7 100644 (file)
@@ -531,12 +531,12 @@ we've declared are in an un-mapped state.
 
 """  # noqa
 from .declarative import declarative_base as _declarative_base
-from .declarative.base import _DeferredMapperConfig
 from .. import util
 from ..orm import backref
 from ..orm import exc as orm_exc
 from ..orm import interfaces
 from ..orm import relationship
+from ..orm.decl_base import _DeferredMapperConfig
 from ..orm.mapper import _CONFIGURE_MUTEX
 from ..schema import ForeignKeyConstraint
 from ..sql import and_
index 6dc4d23c800457d8e81d551e079a15d6be149ba1..8b38945b2c018fcc716640a53ac0c1508ed04d21 100644 (file)
@@ -5,16 +5,49 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from .api import AbstractConcreteBase
-from .api import as_declarative
-from .api import ConcreteBase
-from .api import declarative_base
-from .api import DeclarativeMeta
-from .api import declared_attr
-from .api import DeferredReflection
-from .api import has_inherited_table
-from .api import instrument_declarative
-from .api import synonym_for
+from .extensions import AbstractConcreteBase
+from .extensions import ConcreteBase
+from .extensions import DeferredReflection
+from .extensions import instrument_declarative
+from ... import util
+from ...orm.decl_api import as_declarative as _as_declarative
+from ...orm.decl_api import declarative_base as _declarative_base
+from ...orm.decl_api import DeclarativeMeta
+from ...orm.decl_api import declared_attr
+from ...orm.decl_api import has_inherited_table as _has_inherited_table
+from ...orm.decl_api import synonym_for as _synonym_for
+
+
+@util.moved_20(
+    "The ``declarative_base()`` function is now available as "
+    ":func:`sqlalchemy.orm.declarative_base`."
+)
+def declarative_base(*arg, **kw):
+    return _declarative_base(*arg, **kw)
+
+
+@util.moved_20(
+    "The ``as_declarative()`` function is now available as "
+    ":func:`sqlalchemy.orm.as_declarative`"
+)
+def as_declarative(*arg, **kw):
+    return _as_declarative(*arg, **kw)
+
+
+@util.moved_20(
+    "The ``has_inherited_table()`` function is now available as "
+    ":func:`sqlalchemy.orm.has_inherited_table`."
+)
+def has_inherited_table(*arg, **kw):
+    return _has_inherited_table(*arg, **kw)
+
+
+@util.moved_20(
+    "The ``synonym_for()`` function is now available as "
+    ":func:`sqlalchemy.orm.synonym_for`"
+)
+def synonym_for(*arg, **kw):
+    return _synonym_for(*arg, **kw)
 
 
 __all__ = [
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
deleted file mode 100644 (file)
index 076fa11..0000000
+++ /dev/null
@@ -1,823 +0,0 @@
-# ext/declarative/api.py
-# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Public API functions and helpers for declarative."""
-
-
-import re
-import weakref
-
-from .base import _add_attribute
-from .base import _as_declarative
-from .base import _declarative_constructor
-from .base import _DeferredMapperConfig
-from .base import _del_attribute
-from .base import _get_immediate_cls_attr
-from .clsregistry import _class_resolver
-from ... import exc
-from ... import inspection
-from ... import util
-from ...orm import attributes
-from ...orm import exc as orm_exc
-from ...orm import interfaces
-from ...orm import relationships
-from ...orm import synonym as _orm_synonym
-from ...orm.base import _inspect_mapped_class
-from ...orm.base import _mapper_or_none
-from ...orm.util import polymorphic_union
-from ...schema import MetaData
-from ...schema import Table
-from ...util import hybridmethod
-from ...util import hybridproperty
-from ...util import OrderedDict
-
-
-def instrument_declarative(cls, registry, metadata):
-    """Given a class, configure the class declaratively,
-    using the given registry, which can be any dictionary, and
-    MetaData object.
-
-    """
-    if "_decl_class_registry" in cls.__dict__:
-        raise exc.InvalidRequestError(
-            "Class %r already has been " "instrumented declaratively" % cls
-        )
-    cls._decl_class_registry = registry
-    cls.metadata = metadata
-    _as_declarative(cls, cls.__name__, cls.__dict__)
-
-
-def has_inherited_table(cls):
-    """Given a class, return True if any of the classes it inherits from has a
-    mapped table, otherwise return False.
-
-    This is used in declarative mixins to build attributes that behave
-    differently for the base class vs. a subclass in an inheritance
-    hierarchy.
-
-    .. seealso::
-
-        :ref:`decl_mixin_inheritance`
-
-    """
-    for class_ in cls.__mro__[1:]:
-        if getattr(class_, "__table__", None) is not None:
-            return True
-    return False
-
-
-class DeclarativeMeta(type):
-    def __init__(cls, classname, bases, dict_, **kw):
-        if "_decl_class_registry" not in cls.__dict__:
-            _as_declarative(cls, classname, cls.__dict__)
-        type.__init__(cls, classname, bases, dict_)
-
-    def __setattr__(cls, key, value):
-        _add_attribute(cls, key, value)
-
-    def __delattr__(cls, key):
-        _del_attribute(cls, key)
-
-
-def synonym_for(name, map_column=False):
-    """Decorator that produces an :func:`_orm.synonym`
-    attribute in conjunction
-    with a Python descriptor.
-
-    The function being decorated is passed to :func:`_orm.synonym` as the
-    :paramref:`.orm.synonym.descriptor` parameter::
-
-        class MyClass(Base):
-            __tablename__ = 'my_table'
-
-            id = Column(Integer, primary_key=True)
-            _job_status = Column("job_status", String(50))
-
-            @synonym_for("job_status")
-            @property
-            def job_status(self):
-                return "Status: %s" % self._job_status
-
-    The :ref:`hybrid properties <mapper_hybrids>` feature of SQLAlchemy
-    is typically preferred instead of synonyms, which is a more legacy
-    feature.
-
-    .. seealso::
-
-        :ref:`synonyms` - Overview of synonyms
-
-        :func:`_orm.synonym` - the mapper-level function
-
-        :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
-        updated approach to augmenting attribute behavior more flexibly than
-        can be achieved with synonyms.
-
-    """
-
-    def decorate(fn):
-        return _orm_synonym(name, map_column=map_column, descriptor=fn)
-
-    return decorate
-
-
-class declared_attr(interfaces._MappedAttribute, property):
-    """Mark a class-level method as representing the definition of
-    a mapped property or special declarative member name.
-
-    @declared_attr turns the attribute into a scalar-like
-    property that can be invoked from the uninstantiated class.
-    Declarative treats attributes specifically marked with
-    @declared_attr as returning a construct that is specific
-    to mapping or declarative table configuration.  The name
-    of the attribute is that of what the non-dynamic version
-    of the attribute would be.
-
-    @declared_attr is more often than not applicable to mixins,
-    to define relationships that are to be applied to different
-    implementors of the class::
-
-        class ProvidesUser(object):
-            "A mixin that adds a 'user' relationship to classes."
-
-            @declared_attr
-            def user(self):
-                return relationship("User")
-
-    It also can be applied to mapped classes, such as to provide
-    a "polymorphic" scheme for inheritance::
-
-        class Employee(Base):
-            id = Column(Integer, primary_key=True)
-            type = Column(String(50), nullable=False)
-
-            @declared_attr
-            def __tablename__(cls):
-                return cls.__name__.lower()
-
-            @declared_attr
-            def __mapper_args__(cls):
-                if cls.__name__ == 'Employee':
-                    return {
-                            "polymorphic_on":cls.type,
-                            "polymorphic_identity":"Employee"
-                    }
-                else:
-                    return {"polymorphic_identity":cls.__name__}
-
-    """
-
-    def __init__(self, fget, cascading=False):
-        super(declared_attr, self).__init__(fget)
-        self.__doc__ = fget.__doc__
-        self._cascading = cascading
-
-    def __get__(desc, self, cls):
-        reg = cls.__dict__.get("_sa_declared_attr_reg", None)
-        if reg is None:
-            if (
-                not re.match(r"^__.+__$", desc.fget.__name__)
-                and attributes.manager_of_class(cls) is None
-            ):
-                util.warn(
-                    "Unmanaged access of declarative attribute %s from "
-                    "non-mapped class %s" % (desc.fget.__name__, cls.__name__)
-                )
-            return desc.fget(cls)
-        elif desc in reg:
-            return reg[desc]
-        else:
-            reg[desc] = obj = desc.fget(cls)
-            return obj
-
-    @hybridmethod
-    def _stateful(cls, **kw):
-        return _stateful_declared_attr(**kw)
-
-    @hybridproperty
-    def cascading(cls):
-        """Mark a :class:`.declared_attr` as cascading.
-
-        This is a special-use modifier which indicates that a column
-        or MapperProperty-based declared attribute should be configured
-        distinctly per mapped subclass, within a mapped-inheritance scenario.
-
-        .. warning::
-
-            The :attr:`.declared_attr.cascading` modifier has several
-            limitations:
-
-            * The flag **only** applies to the use of :class:`.declared_attr`
-              on declarative mixin classes and ``__abstract__`` classes; it
-              currently has no effect when used on a mapped class directly.
-
-            * The flag **only** applies to normally-named attributes, e.g.
-              not any special underscore attributes such as ``__tablename__``.
-              On these attributes it has **no** effect.
-
-            * The flag currently **does not allow further overrides** down
-              the class hierarchy; if a subclass tries to override the
-              attribute, a warning is emitted and the overridden attribute
-              is skipped.  This is a limitation that it is hoped will be
-              resolved at some point.
-
-        Below, both MyClass as well as MySubClass will have a distinct
-        ``id`` Column object established::
-
-            class HasIdMixin(object):
-                @declared_attr.cascading
-                def id(cls):
-                    if has_inherited_table(cls):
-                        return Column(
-                            ForeignKey('myclass.id'), primary_key=True
-                        )
-                    else:
-                        return Column(Integer, primary_key=True)
-
-            class MyClass(HasIdMixin, Base):
-                __tablename__ = 'myclass'
-                # ...
-
-            class MySubClass(MyClass):
-                ""
-                # ...
-
-        The behavior of the above configuration is that ``MySubClass``
-        will refer to both its own ``id`` column as well as that of
-        ``MyClass`` underneath the attribute named ``some_id``.
-
-        .. seealso::
-
-            :ref:`declarative_inheritance`
-
-            :ref:`mixin_inheritance_columns`
-
-
-        """
-        return cls._stateful(cascading=True)
-
-
-class _stateful_declared_attr(declared_attr):
-    def __init__(self, **kw):
-        self.kw = kw
-
-    def _stateful(self, **kw):
-        new_kw = self.kw.copy()
-        new_kw.update(kw)
-        return _stateful_declared_attr(**new_kw)
-
-    def __call__(self, fn):
-        return declared_attr(fn, **self.kw)
-
-
-def declarative_base(
-    bind=None,
-    metadata=None,
-    mapper=None,
-    cls=object,
-    name="Base",
-    constructor=_declarative_constructor,
-    class_registry=None,
-    metaclass=DeclarativeMeta,
-):
-    r"""Construct a base class for declarative class definitions.
-
-    The new base class will be given a metaclass that produces
-    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
-    the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
-    information provided declaratively in the class and any subclasses
-    of the class.
-
-    :param bind: An optional
-      :class:`~sqlalchemy.engine.Connectable`, will be assigned
-      the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
-      instance.
-
-    :param metadata:
-      An optional :class:`~sqlalchemy.schema.MetaData` instance.  All
-      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
-      subclasses of the base will share this MetaData.  A MetaData instance
-      will be created if none is provided.  The
-      :class:`~sqlalchemy.schema.MetaData` instance will be available via the
-      `metadata` attribute of the generated declarative base class.
-
-    :param mapper:
-      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
-      be used to map subclasses to their Tables.
-
-    :param cls:
-      Defaults to :class:`object`. A type to use as the base for the generated
-      declarative base class. May be a class or tuple of classes.
-
-    :param name:
-      Defaults to ``Base``.  The display name for the generated
-      class.  Customizing this is not required, but can improve clarity in
-      tracebacks and debugging.
-
-    :param constructor:
-      Defaults to
-      :func:`~sqlalchemy.ext.declarative.base._declarative_constructor`, an
-      __init__ implementation that assigns \**kwargs for declared
-      fields and relationships to an instance.  If ``None`` is supplied,
-      no __init__ will be provided and construction will fall back to
-      cls.__init__ by way of the normal Python semantics.
-
-    :param class_registry: optional dictionary that will serve as the
-      registry of class names-> mapped classes when string names
-      are used to identify classes inside of :func:`_orm.relationship`
-      and others.  Allows two or more declarative base classes
-      to share the same registry of class names for simplified
-      inter-base relationships.
-
-    :param metaclass:
-      Defaults to :class:`.DeclarativeMeta`.  A metaclass or __metaclass__
-      compatible callable to use as the meta type of the generated
-      declarative base class.
-
-    .. versionchanged:: 1.1 if :paramref:`.declarative_base.cls` is a
-         single class (rather than a tuple), the constructed base class will
-         inherit its docstring.
-
-    .. seealso::
-
-        :func:`.as_declarative`
-
-    """
-    lcl_metadata = metadata or MetaData()
-    if bind:
-        lcl_metadata.bind = bind
-
-    if class_registry is None:
-        class_registry = weakref.WeakValueDictionary()
-
-    bases = not isinstance(cls, tuple) and (cls,) or cls
-    class_dict = dict(
-        _decl_class_registry=class_registry, metadata=lcl_metadata
-    )
-
-    if isinstance(cls, type):
-        class_dict["__doc__"] = cls.__doc__
-
-    if constructor:
-        class_dict["__init__"] = constructor
-    if mapper:
-        class_dict["__mapper_cls__"] = mapper
-
-    return metaclass(name, bases, class_dict)
-
-
-def as_declarative(**kw):
-    """
-    Class decorator for :func:`.declarative_base`.
-
-    Provides a syntactical shortcut to the ``cls`` argument
-    sent to :func:`.declarative_base`, allowing the base class
-    to be converted in-place to a "declarative" base::
-
-        from sqlalchemy.ext.declarative import as_declarative
-
-        @as_declarative()
-        class Base(object):
-            @declared_attr
-            def __tablename__(cls):
-                return cls.__name__.lower()
-            id = Column(Integer, primary_key=True)
-
-        class MyMappedClass(Base):
-            # ...
-
-    All keyword arguments passed to :func:`.as_declarative` are passed
-    along to :func:`.declarative_base`.
-
-    .. seealso::
-
-        :func:`.declarative_base`
-
-    """
-
-    def decorate(cls):
-        kw["cls"] = cls
-        kw["name"] = cls.__name__
-        return declarative_base(**kw)
-
-    return decorate
-
-
-class ConcreteBase(object):
-    """A helper class for 'concrete' declarative mappings.
-
-    :class:`.ConcreteBase` will use the :func:`.polymorphic_union`
-    function automatically, against all tables mapped as a subclass
-    to this class.   The function is called via the
-    ``__declare_last__()`` function, which is essentially
-    a hook for the :meth:`.after_configured` event.
-
-    :class:`.ConcreteBase` produces a mapped
-    table for the class itself.  Compare to :class:`.AbstractConcreteBase`,
-    which does not.
-
-    Example::
-
-        from sqlalchemy.ext.declarative import ConcreteBase
-
-        class Employee(ConcreteBase, Base):
-            __tablename__ = 'employee'
-            employee_id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            __mapper_args__ = {
-                            'polymorphic_identity':'employee',
-                            'concrete':True}
-
-        class Manager(Employee):
-            __tablename__ = 'manager'
-            employee_id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            manager_data = Column(String(40))
-            __mapper_args__ = {
-                            'polymorphic_identity':'manager',
-                            'concrete':True}
-
-
-    The name of the discriminator column used by :func:`.polymorphic_union`
-    defaults to the name ``type``.  To suit the use case of a mapping where an
-    actual column in a mapped table is already named ``type``, the
-    discriminator name can be configured by setting the
-    ``_concrete_discriminator_name`` attribute::
-
-        class Employee(ConcreteBase, Base):
-            _concrete_discriminator_name = '_concrete_discriminator'
-
-    .. versionadded:: 1.3.19 Added the ``_concrete_discriminator_name``
-       attribute to :class:`_declarative.ConcreteBase` so that the
-       virtual discriminator column name can be customized.
-
-    .. seealso::
-
-        :class:`.AbstractConcreteBase`
-
-        :ref:`concrete_inheritance`
-
-
-    """
-
-    @classmethod
-    def _create_polymorphic_union(cls, mappers, discriminator_name):
-        return polymorphic_union(
-            OrderedDict(
-                (mp.polymorphic_identity, mp.local_table) for mp in mappers
-            ),
-            discriminator_name,
-            "pjoin",
-        )
-
-    @classmethod
-    def __declare_first__(cls):
-        m = cls.__mapper__
-        if m.with_polymorphic:
-            return
-
-        discriminator_name = (
-            _get_immediate_cls_attr(cls, "_concrete_discriminator_name")
-            or "type"
-        )
-
-        mappers = list(m.self_and_descendants)
-        pjoin = cls._create_polymorphic_union(mappers, discriminator_name)
-        m._set_with_polymorphic(("*", pjoin))
-        m._set_polymorphic_on(pjoin.c[discriminator_name])
-
-
-class AbstractConcreteBase(ConcreteBase):
-    """A helper class for 'concrete' declarative mappings.
-
-    :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
-    function automatically, against all tables mapped as a subclass
-    to this class.   The function is called via the
-    ``__declare_last__()`` function, which is essentially
-    a hook for the :meth:`.after_configured` event.
-
-    :class:`.AbstractConcreteBase` does produce a mapped class
-    for the base class, however it is not persisted to any table; it
-    is instead mapped directly to the "polymorphic" selectable directly
-    and is only used for selecting.  Compare to :class:`.ConcreteBase`,
-    which does create a persisted table for the base class.
-
-    .. note::
-
-        The :class:`.AbstractConcreteBase` class does not intend to set up  the
-        mapping for the base class until all the subclasses have been defined,
-        as it needs to create a mapping against a selectable that will include
-        all subclass tables.  In order to achieve this, it waits for the
-        **mapper configuration event** to occur, at which point it scans
-        through all the configured subclasses and sets up a mapping that will
-        query against all subclasses at once.
-
-        While this event is normally invoked automatically, in the case of
-        :class:`.AbstractConcreteBase`, it may be necessary to invoke it
-        explicitly after **all** subclass mappings are defined, if the first
-        operation is to be a query against this base class.  To do so, invoke
-        :func:`.configure_mappers` once all the desired classes have been
-        configured::
-
-            from sqlalchemy.orm import configure_mappers
-
-            configure_mappers()
-
-        .. seealso::
-
-            :func:`_orm.configure_mappers`
-
-
-    Example::
-
-        from sqlalchemy.ext.declarative import AbstractConcreteBase
-
-        class Employee(AbstractConcreteBase, Base):
-            pass
-
-        class Manager(Employee):
-            __tablename__ = 'manager'
-            employee_id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            manager_data = Column(String(40))
-
-            __mapper_args__ = {
-                'polymorphic_identity':'manager',
-                'concrete':True}
-
-        configure_mappers()
-
-    The abstract base class is handled by declarative in a special way;
-    at class configuration time, it behaves like a declarative mixin
-    or an ``__abstract__`` base class.   Once classes are configured
-    and mappings are produced, it then gets mapped itself, but
-    after all of its descendants.  This is a very unique system of mapping
-    not found in any other SQLAlchemy system.
-
-    Using this approach, we can specify columns and properties
-    that will take place on mapped subclasses, in the way that
-    we normally do as in :ref:`declarative_mixins`::
-
-        class Company(Base):
-            __tablename__ = 'company'
-            id = Column(Integer, primary_key=True)
-
-        class Employee(AbstractConcreteBase, Base):
-            employee_id = Column(Integer, primary_key=True)
-
-            @declared_attr
-            def company_id(cls):
-                return Column(ForeignKey('company.id'))
-
-            @declared_attr
-            def company(cls):
-                return relationship("Company")
-
-        class Manager(Employee):
-            __tablename__ = 'manager'
-
-            name = Column(String(50))
-            manager_data = Column(String(40))
-
-            __mapper_args__ = {
-                'polymorphic_identity':'manager',
-                'concrete':True}
-
-        configure_mappers()
-
-    When we make use of our mappings however, both ``Manager`` and
-    ``Employee`` will have an independently usable ``.company`` attribute::
-
-        session.query(Employee).filter(Employee.company.has(id=5))
-
-    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
-       have been reworked to support relationships established directly
-       on the abstract base, without any special configurational steps.
-
-    .. seealso::
-
-        :class:`.ConcreteBase`
-
-        :ref:`concrete_inheritance`
-
-    """
-
-    __no_table__ = True
-
-    @classmethod
-    def __declare_first__(cls):
-        cls._sa_decl_prepare_nocascade()
-
-    @classmethod
-    def _sa_decl_prepare_nocascade(cls):
-        if getattr(cls, "__mapper__", None):
-            return
-
-        to_map = _DeferredMapperConfig.config_for_cls(cls)
-
-        # can't rely on 'self_and_descendants' here
-        # since technically an immediate subclass
-        # might not be mapped, but a subclass
-        # may be.
-        mappers = []
-        stack = list(cls.__subclasses__())
-        while stack:
-            klass = stack.pop()
-            stack.extend(klass.__subclasses__())
-            mn = _mapper_or_none(klass)
-            if mn is not None:
-                mappers.append(mn)
-
-        discriminator_name = (
-            _get_immediate_cls_attr(cls, "_concrete_discriminator_name")
-            or "type"
-        )
-        pjoin = cls._create_polymorphic_union(mappers, discriminator_name)
-
-        # For columns that were declared on the class, these
-        # are normally ignored with the "__no_table__" mapping,
-        # unless they have a different attribute key vs. col name
-        # and are in the properties argument.
-        # In that case, ensure we update the properties entry
-        # to the correct column from the pjoin target table.
-        declared_cols = set(to_map.declared_columns)
-        for k, v in list(to_map.properties.items()):
-            if v in declared_cols:
-                to_map.properties[k] = pjoin.c[v.key]
-
-        to_map.local_table = pjoin
-
-        m_args = to_map.mapper_args_fn or dict
-
-        def mapper_args():
-            args = m_args()
-            args["polymorphic_on"] = pjoin.c[discriminator_name]
-            return args
-
-        to_map.mapper_args_fn = mapper_args
-
-        m = to_map.map()
-
-        for scls in cls.__subclasses__():
-            sm = _mapper_or_none(scls)
-            if sm and sm.concrete and cls in scls.__bases__:
-                sm._set_concrete_base(m)
-
-    @classmethod
-    def _sa_raise_deferred_config(cls):
-        raise orm_exc.UnmappedClassError(
-            cls,
-            msg="Class %s is a subclass of AbstractConcreteBase and "
-            "has a mapping pending until all subclasses are defined. "
-            "Call the sqlalchemy.orm.configure_mappers() function after "
-            "all subclasses have been defined to "
-            "complete the mapping of this class."
-            % orm_exc._safe_cls_name(cls),
-        )
-
-
-class DeferredReflection(object):
-    """A helper class for construction of mappings based on
-    a deferred reflection step.
-
-    Normally, declarative can be used with reflection by
-    setting a :class:`_schema.Table` object using autoload=True
-    as the ``__table__`` attribute on a declarative class.
-    The caveat is that the :class:`_schema.Table` must be fully
-    reflected, or at the very least have a primary key column,
-    at the point at which a normal declarative mapping is
-    constructed, meaning the :class:`_engine.Engine` must be available
-    at class declaration time.
-
-    The :class:`.DeferredReflection` mixin moves the construction
-    of mappers to be at a later point, after a specific
-    method is called which first reflects all :class:`_schema.Table`
-    objects created so far.   Classes can define it as such::
-
-        from sqlalchemy.ext.declarative import declarative_base
-        from sqlalchemy.ext.declarative import DeferredReflection
-        Base = declarative_base()
-
-        class MyClass(DeferredReflection, Base):
-            __tablename__ = 'mytable'
-
-    Above, ``MyClass`` is not yet mapped.   After a series of
-    classes have been defined in the above fashion, all tables
-    can be reflected and mappings created using
-    :meth:`.prepare`::
-
-        engine = create_engine("someengine://...")
-        DeferredReflection.prepare(engine)
-
-    The :class:`.DeferredReflection` mixin can be applied to individual
-    classes, used as the base for the declarative base itself,
-    or used in a custom abstract class.   Using an abstract base
-    allows that only a subset of classes to be prepared for a
-    particular prepare step, which is necessary for applications
-    that use more than one engine.  For example, if an application
-    has two engines, you might use two bases, and prepare each
-    separately, e.g.::
-
-        class ReflectedOne(DeferredReflection, Base):
-            __abstract__ = True
-
-        class ReflectedTwo(DeferredReflection, Base):
-            __abstract__ = True
-
-        class MyClass(ReflectedOne):
-            __tablename__ = 'mytable'
-
-        class MyOtherClass(ReflectedOne):
-            __tablename__ = 'myothertable'
-
-        class YetAnotherClass(ReflectedTwo):
-            __tablename__ = 'yetanothertable'
-
-        # ... etc.
-
-    Above, the class hierarchies for ``ReflectedOne`` and
-    ``ReflectedTwo`` can be configured separately::
-
-        ReflectedOne.prepare(engine_one)
-        ReflectedTwo.prepare(engine_two)
-
-    """
-
-    @classmethod
-    def prepare(cls, engine):
-        """Reflect all :class:`_schema.Table` objects for all current
-        :class:`.DeferredReflection` subclasses"""
-
-        to_map = _DeferredMapperConfig.classes_for_base(cls)
-        for thingy in to_map:
-            cls._sa_decl_prepare(thingy.local_table, engine)
-            thingy.map()
-            mapper = thingy.cls.__mapper__
-            metadata = mapper.class_.metadata
-            for rel in mapper._props.values():
-                if (
-                    isinstance(rel, relationships.RelationshipProperty)
-                    and rel.secondary is not None
-                ):
-                    if isinstance(rel.secondary, Table):
-                        cls._reflect_table(rel.secondary, engine)
-                    elif isinstance(rel.secondary, _class_resolver):
-                        rel.secondary._resolvers += (
-                            cls._sa_deferred_table_resolver(engine, metadata),
-                        )
-
-    @classmethod
-    def _sa_deferred_table_resolver(cls, engine, metadata):
-        def _resolve(key):
-            t1 = Table(key, metadata)
-            cls._reflect_table(t1, engine)
-            return t1
-
-        return _resolve
-
-    @classmethod
-    def _sa_decl_prepare(cls, local_table, engine):
-        # autoload Table, which is already
-        # present in the metadata.  This
-        # will fill in db-loaded columns
-        # into the existing Table object.
-        if local_table is not None:
-            cls._reflect_table(local_table, engine)
-
-    @classmethod
-    def _sa_raise_deferred_config(cls):
-        raise orm_exc.UnmappedClassError(
-            cls,
-            msg="Class %s is a subclass of DeferredReflection.  "
-            "Mappings are not produced until the .prepare() "
-            "method is called on the class hierarchy."
-            % orm_exc._safe_cls_name(cls),
-        )
-
-    @classmethod
-    def _reflect_table(cls, table, engine):
-        Table(
-            table.name,
-            table.metadata,
-            extend_existing=True,
-            autoload_replace=False,
-            autoload=True,
-            autoload_with=engine,
-            schema=table.schema,
-        )
-
-
-@inspection._inspects(DeclarativeMeta)
-def _inspect_decl_meta(cls):
-    mp = _inspect_mapped_class(cls)
-    if mp is None:
-        if _DeferredMapperConfig.has_cls(cls):
-            _DeferredMapperConfig.raise_unmapped_for_cls(cls)
-            raise orm_exc.UnmappedClassError(
-                cls,
-                msg="Class %s has a deferred mapping on it.  It is not yet "
-                "usable as a mapped class." % orm_exc._safe_cls_name(cls),
-            )
-    return mp
diff --git a/lib/sqlalchemy/ext/declarative/extensions.py b/lib/sqlalchemy/ext/declarative/extensions.py
new file mode 100644 (file)
index 0000000..0b9a6f7
--- /dev/null
@@ -0,0 +1,455 @@
+# ext/declarative/extensions.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Public API functions and helpers for declarative."""
+
+
+from ... import inspection
+from ... import util
+from ...orm import exc as orm_exc
+from ...orm import registry
+from ...orm import relationships
+from ...orm.base import _mapper_or_none
+from ...orm.clsregistry import _resolver
+from ...orm.decl_base import _DeferredMapperConfig
+from ...orm.decl_base import _get_immediate_cls_attr
+from ...orm.util import polymorphic_union
+from ...schema import Table
+from ...util import OrderedDict
+
+
+@util.deprecated(
+    "2.0",
+    "the instrument_declarative function is deprecated "
+    "and will be removed in SQLAlhcemy 2.0.  Please use "
+    ":meth:`_orm.registry.map_declaratively",
+)
+def instrument_declarative(cls, cls_registry, metadata):
+    """Given a class, configure the class declaratively,
+    using the given class registry, which can be any dictionary, and
+    the given MetaData object.
+
+    """
+    return registry(
+        metadata=metadata, class_registry=cls_registry
+    ).instrument_declarative(cls)
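As the deprecation message above indicates, the :class:`_orm.registry` object is now the supported entry point for instrumenting a plain class declaratively; a minimal sketch of the replacement call, with illustrative class and column names::

    from sqlalchemy import Column, Integer, MetaData, String
    from sqlalchemy.orm import registry

    mapper_registry = registry(metadata=MetaData())

    class User(object):
        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # replaces instrument_declarative(User, {}, metadata)
    mapper_registry.map_declaratively(User)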
+
+
+class ConcreteBase(object):
+    """A helper class for 'concrete' declarative mappings.
+
+    :class:`.ConcreteBase` will use the :func:`.polymorphic_union`
+    function automatically, against all tables mapped as a subclass
+    to this class.   The function is called via the
+    ``__declare_first__()`` function, which is essentially
+    a hook for the :meth:`.before_configured` event.
+
+    :class:`.ConcreteBase` produces a mapped
+    table for the class itself.  Compare to :class:`.AbstractConcreteBase`,
+    which does not.
+
+    Example::
+
+        from sqlalchemy.ext.declarative import ConcreteBase
+
+        class Employee(ConcreteBase, Base):
+            __tablename__ = 'employee'
+            employee_id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            __mapper_args__ = {
+                            'polymorphic_identity':'employee',
+                            'concrete':True}
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+            employee_id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            manager_data = Column(String(40))
+            __mapper_args__ = {
+                            'polymorphic_identity':'manager',
+                            'concrete':True}
+
+
+    The name of the discriminator column used by :func:`.polymorphic_union`
+    defaults to the name ``type``.  To suit the use case of a mapping where an
+    actual column in a mapped table is already named ``type``, the
+    discriminator name can be configured by setting the
+    ``_concrete_discriminator_name`` attribute::
+
+        class Employee(ConcreteBase, Base):
+            _concrete_discriminator_name = '_concrete_discriminator'
+
+    .. versionadded:: 1.3.19 Added the ``_concrete_discriminator_name``
+       attribute to :class:`_declarative.ConcreteBase` so that the
+       virtual discriminator column name can be customized.
+
+    .. seealso::
+
+        :class:`.AbstractConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+
+    """
+
+    @classmethod
+    def _create_polymorphic_union(cls, mappers, discriminator_name):
+        return polymorphic_union(
+            OrderedDict(
+                (mp.polymorphic_identity, mp.local_table) for mp in mappers
+            ),
+            discriminator_name,
+            "pjoin",
+        )
+
+    @classmethod
+    def __declare_first__(cls):
+        m = cls.__mapper__
+        if m.with_polymorphic:
+            return
+
+        discriminator_name = (
+            _get_immediate_cls_attr(cls, "_concrete_discriminator_name")
+            or "type"
+        )
+
+        mappers = list(m.self_and_descendants)
+        pjoin = cls._create_polymorphic_union(mappers, discriminator_name)
+        m._set_with_polymorphic(("*", pjoin))
+        m._set_polymorphic_on(pjoin.c[discriminator_name])
+
+
+class AbstractConcreteBase(ConcreteBase):
+    """A helper class for 'concrete' declarative mappings.
+
+    :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
+    function automatically, against all tables mapped as a subclass
+    to this class.   The function is called via the
+    ``__declare_first__()`` function, which is essentially
+    a hook for the :meth:`.before_configured` event.
+
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class, however it is not persisted to any table; it
+    is instead mapped directly to the "polymorphic" selectable
+    and is used only for selecting.  Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.
+
+    .. note::
+
+        The :class:`.AbstractConcreteBase` class does not intend to set up the
+        mapping for the base class until all the subclasses have been defined,
+        as it needs to create a mapping against a selectable that will include
+        all subclass tables.  In order to achieve this, it waits for the
+        **mapper configuration event** to occur, at which point it scans
+        through all the configured subclasses and sets up a mapping that will
+        query against all subclasses at once.
+
+        While this event is normally invoked automatically, in the case of
+        :class:`.AbstractConcreteBase`, it may be necessary to invoke it
+        explicitly after **all** subclass mappings are defined, if the first
+        operation is to be a query against this base class.  To do so, invoke
+        :func:`.configure_mappers` once all the desired classes have been
+        configured::
+
+            from sqlalchemy.orm import configure_mappers
+
+            configure_mappers()
+
+        .. seealso::
+
+            :func:`_orm.configure_mappers`
+
+
+    Example::
+
+        from sqlalchemy.ext.declarative import AbstractConcreteBase
+
+        class Employee(AbstractConcreteBase, Base):
+            pass
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+            employee_id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+        configure_mappers()
+
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class.   Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its descendants.  This is a system of mapping not found
+    in any other part of SQLAlchemy.
+
+    Using this approach, we can specify columns and properties
+    that will take place on mapped subclasses, in the way that
+    we normally do as in :ref:`declarative_mixins`::
+
+        class Company(Base):
+            __tablename__ = 'company'
+            id = Column(Integer, primary_key=True)
+
+        class Employee(AbstractConcreteBase, Base):
+            employee_id = Column(Integer, primary_key=True)
+
+            @declared_attr
+            def company_id(cls):
+                return Column(ForeignKey('company.id'))
+
+            @declared_attr
+            def company(cls):
+                return relationship("Company")
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+        configure_mappers()
+
+    When we make use of our mappings, however, both ``Manager`` and
+    ``Employee`` will have an independently usable ``.company`` attribute::
+
+        session.query(Employee).filter(Employee.company.has(id=5))
+
+    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+       have been reworked to support relationships established directly
+       on the abstract base, without any special configurational steps.
+
+    .. seealso::
+
+        :class:`.ConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+    """
+
+    __no_table__ = True
+
+    @classmethod
+    def __declare_first__(cls):
+        cls._sa_decl_prepare_nocascade()
+
+    @classmethod
+    def _sa_decl_prepare_nocascade(cls):
+        if getattr(cls, "__mapper__", None):
+            return
+
+        to_map = _DeferredMapperConfig.config_for_cls(cls)
+
+        # can't rely on 'self_and_descendants' here
+        # since technically an immediate subclass
+        # might not be mapped, but a subclass
+        # may be.
+        mappers = []
+        stack = list(cls.__subclasses__())
+        while stack:
+            klass = stack.pop()
+            stack.extend(klass.__subclasses__())
+            mn = _mapper_or_none(klass)
+            if mn is not None:
+                mappers.append(mn)
+
+        discriminator_name = (
+            _get_immediate_cls_attr(cls, "_concrete_discriminator_name")
+            or "type"
+        )
+        pjoin = cls._create_polymorphic_union(mappers, discriminator_name)
+
+        # For columns that were declared on the class, these
+        # are normally ignored with the "__no_table__" mapping,
+        # unless they have a different attribute key vs. col name
+        # and are in the properties argument.
+        # In that case, ensure we update the properties entry
+        # to the correct column from the pjoin target table.
+        declared_cols = set(to_map.declared_columns)
+        for k, v in list(to_map.properties.items()):
+            if v in declared_cols:
+                to_map.properties[k] = pjoin.c[v.key]
+
+        to_map.local_table = pjoin
+
+        m_args = to_map.mapper_args_fn or dict
+
+        def mapper_args():
+            args = m_args()
+            args["polymorphic_on"] = pjoin.c[discriminator_name]
+            return args
+
+        to_map.mapper_args_fn = mapper_args
+
+        m = to_map.map()
+
+        for scls in cls.__subclasses__():
+            sm = _mapper_or_none(scls)
+            if sm and sm.concrete and cls in scls.__bases__:
+                sm._set_concrete_base(m)
+
+    @classmethod
+    def _sa_raise_deferred_config(cls):
+        raise orm_exc.UnmappedClassError(
+            cls,
+            msg="Class %s is a subclass of AbstractConcreteBase and "
+            "has a mapping pending until all subclasses are defined. "
+            "Call the sqlalchemy.orm.configure_mappers() function after "
+            "all subclasses have been defined to "
+            "complete the mapping of this class."
+            % orm_exc._safe_cls_name(cls),
+        )
+
+
+class DeferredReflection(object):
+    """A helper class for construction of mappings based on
+    a deferred reflection step.
+
+    Normally, declarative can be used with reflection by
+    setting a :class:`_schema.Table` object using autoload=True
+    as the ``__table__`` attribute on a declarative class.
+    The caveat is that the :class:`_schema.Table` must be fully
+    reflected, or at the very least have a primary key column,
+    at the point at which a normal declarative mapping is
+    constructed, meaning the :class:`_engine.Engine` must be available
+    at class declaration time.
+
+    The :class:`.DeferredReflection` mixin moves the construction
+    of mappers to be at a later point, after a specific
+    method is called which first reflects all :class:`_schema.Table`
+    objects created so far.   Classes can define it as such::
+
+        from sqlalchemy.ext.declarative import declarative_base
+        from sqlalchemy.ext.declarative import DeferredReflection
+        Base = declarative_base()
+
+        class MyClass(DeferredReflection, Base):
+            __tablename__ = 'mytable'
+
+    Above, ``MyClass`` is not yet mapped.   After a series of
+    classes have been defined in the above fashion, all tables
+    can be reflected and mappings created using
+    :meth:`.prepare`::
+
+        engine = create_engine("someengine://...")
+        DeferredReflection.prepare(engine)
+
+    The :class:`.DeferredReflection` mixin can be applied to individual
+    classes, used as the base for the declarative base itself,
+    or used in a custom abstract class.   Using an abstract base
+    allows only a subset of classes to be prepared for a
+    particular prepare step, which is necessary for applications
+    that use more than one engine.  For example, if an application
+    has two engines, you might use two bases, and prepare each
+    separately, e.g.::
+
+        class ReflectedOne(DeferredReflection, Base):
+            __abstract__ = True
+
+        class ReflectedTwo(DeferredReflection, Base):
+            __abstract__ = True
+
+        class MyClass(ReflectedOne):
+            __tablename__ = 'mytable'
+
+        class MyOtherClass(ReflectedOne):
+            __tablename__ = 'myothertable'
+
+        class YetAnotherClass(ReflectedTwo):
+            __tablename__ = 'yetanothertable'
+
+        # ... etc.
+
+    Above, the class hierarchies for ``ReflectedOne`` and
+    ``ReflectedTwo`` can be configured separately::
+
+        ReflectedOne.prepare(engine_one)
+        ReflectedTwo.prepare(engine_two)
+
+    """
+
+    @classmethod
+    def prepare(cls, engine):
+        """Reflect all :class:`_schema.Table` objects for all current
+        :class:`.DeferredReflection` subclasses"""
+
+        to_map = _DeferredMapperConfig.classes_for_base(cls)
+
+        with inspection.inspect(engine)._inspection_context() as insp:
+            for thingy in to_map:
+                cls._sa_decl_prepare(thingy.local_table, insp)
+                thingy.map()
+                mapper = thingy.cls.__mapper__
+                metadata = mapper.class_.metadata
+                for rel in mapper._props.values():
+                    if (
+                        isinstance(rel, relationships.RelationshipProperty)
+                        and rel.secondary is not None
+                    ):
+                        if isinstance(rel.secondary, Table):
+                            cls._reflect_table(rel.secondary, insp)
+                        elif isinstance(rel.secondary, str):
+
+                            _, resolve_arg = _resolver(rel.parent.class_, rel)
+
+                            rel.secondary = resolve_arg(rel.secondary)
+                            rel.secondary._resolvers += (
+                                cls._sa_deferred_table_resolver(
+                                    insp, metadata
+                                ),
+                            )
+
+                            # controversy!  do we resolve it here? or leave
+                            # it deferred?   I think doing it here is necessary
+                            # so the connection does not leak.
+                            rel.secondary = rel.secondary()
+
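The reworked ``prepare()`` now reflects through a single Inspector and also resolves string-based ``secondary`` arguments, reflecting the named association table in the same pass; a hedged sketch of the configuration this branch targets, with illustrative names::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.declarative import DeferredReflection
    from sqlalchemy.orm import declarative_base, relationship

    Base = declarative_base()

    class Book(DeferredReflection, Base):
        __tablename__ = 'book'
        # "book_author" is only a name here; the Table itself is
        # reflected when prepare() runs
        authors = relationship("Author", secondary="book_author")

    class Author(DeferredReflection, Base):
        __tablename__ = 'author'

    engine = create_engine("someengine://...")
    DeferredReflection.prepare(engine)   # reflects book, author, book_author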
+    @classmethod
+    def _sa_deferred_table_resolver(cls, inspector, metadata):
+        def _resolve(key):
+            t1 = Table(key, metadata)
+            cls._reflect_table(t1, inspector)
+            return t1
+
+        return _resolve
+
+    @classmethod
+    def _sa_decl_prepare(cls, local_table, inspector):
+        # autoload Table, which is already
+        # present in the metadata.  This
+        # will fill in db-loaded columns
+        # into the existing Table object.
+        if local_table is not None:
+            cls._reflect_table(local_table, inspector)
+
+    @classmethod
+    def _sa_raise_deferred_config(cls):
+        raise orm_exc.UnmappedClassError(
+            cls,
+            msg="Class %s is a subclass of DeferredReflection.  "
+            "Mappings are not produced until the .prepare() "
+            "method is called on the class hierarchy."
+            % orm_exc._safe_cls_name(cls),
+        )
+
+    @classmethod
+    def _reflect_table(cls, table, inspector):
+        Table(
+            table.name,
+            table.metadata,
+            extend_existing=True,
+            autoload_replace=False,
+            autoload=True,
+            autoload_with=inspector,
+            schema=table.schema,
+        )
index 199ae11e5147a53628f4666d19bed2795254dc3f..13626fb2179287a3d5123bbaf1b1d63754f93482 100644 (file)
@@ -16,11 +16,21 @@ documentation for an overview of how this module is used.
 from . import exc  # noqa
 from . import mapper as mapperlib  # noqa
 from . import strategy_options
+from .decl_api import as_declarative  # noqa
+from .decl_api import declarative_base  # noqa
+from .decl_api import declared_attr  # noqa
+from .decl_api import has_inherited_table  # noqa
+from .decl_api import registry  # noqa
+from .decl_api import synonym_for  # noqa
 from .descriptor_props import CompositeProperty  # noqa
 from .descriptor_props import SynonymProperty  # noqa
 from .interfaces import EXT_CONTINUE  # noqa
 from .interfaces import EXT_SKIP  # noqa
 from .interfaces import EXT_STOP  # noqa
+from .interfaces import MANYTOMANY  # noqa
+from .interfaces import MANYTOONE  # noqa
+from .interfaces import MapperProperty  # noqa
+from .interfaces import ONETOMANY  # noqa
 from .interfaces import PropComparator  # noqa
 from .mapper import _mapper_registry
 from .mapper import class_mapper  # noqa
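With these re-exports, the declarative entry points and the relationship direction constants are importable directly from ``sqlalchemy.orm``, e.g.::

    from sqlalchemy.orm import (
        MANYTOMANY,
        MANYTOONE,
        ONETOMANY,
        as_declarative,
        declarative_base,
        declared_attr,
        has_inherited_table,
        registry,
        synonym_for,
    )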
similarity index 72%
rename from lib/sqlalchemy/ext/declarative/clsregistry.py
rename to lib/sqlalchemy/orm/clsregistry.py
index 51af6f1b44fa81c1f62a1ba3f9b80b8db5f297d9..07b8afbf924db876a8cf3bca776bd9359be3493a 100644 (file)
@@ -12,16 +12,15 @@ This system allows specification of classes and expressions used in
 """
 import weakref
 
-from ... import exc
-from ... import inspection
-from ... import util
-from ...orm import class_mapper
-from ...orm import ColumnProperty
-from ...orm import interfaces
-from ...orm import RelationshipProperty
-from ...orm import SynonymProperty
-from ...schema import _get_table_key
-
+from . import attributes
+from . import interfaces
+from .descriptor_props import SynonymProperty
+from .properties import ColumnProperty
+from .util import class_mapper
+from .. import exc
+from .. import inspection
+from .. import util
+from ..sql.schema import _get_table_key
 
 # strong references to registries which we place in
 # the _decl_class_registry, which is usually weak referencing.
@@ -30,25 +29,25 @@ from ...schema import _get_table_key
 _registries = set()
 
 
-def add_class(classname, cls):
+def add_class(classname, cls, decl_class_registry):
     """Add a class to the _decl_class_registry associated with the
     given declarative class.
 
     """
-    if classname in cls._decl_class_registry:
+    if classname in decl_class_registry:
         # class already exists.
-        existing = cls._decl_class_registry[classname]
+        existing = decl_class_registry[classname]
         if not isinstance(existing, _MultipleClassMarker):
-            existing = cls._decl_class_registry[
-                classname
-            ] = _MultipleClassMarker([cls, existing])
+            existing = decl_class_registry[classname] = _MultipleClassMarker(
+                [cls, existing]
+            )
     else:
-        cls._decl_class_registry[classname] = cls
+        decl_class_registry[classname] = cls
 
     try:
-        root_module = cls._decl_class_registry["_sa_module_registry"]
+        root_module = decl_class_registry["_sa_module_registry"]
     except KeyError:
-        cls._decl_class_registry[
+        decl_class_registry[
             "_sa_module_registry"
         ] = root_module = _ModuleMarker("_sa_module_registry", None)
 
@@ -70,6 +69,55 @@ def add_class(classname, cls):
         module.add_class(classname, cls)
 
 
+def remove_class(classname, cls, decl_class_registry):
+    if classname in decl_class_registry:
+        existing = decl_class_registry[classname]
+        if isinstance(existing, _MultipleClassMarker):
+            existing.remove_item(cls)
+        else:
+            del decl_class_registry[classname]
+
+    try:
+        root_module = decl_class_registry["_sa_module_registry"]
+    except KeyError:
+        return
+
+    tokens = cls.__module__.split(".")
+
+    while tokens:
+        token = tokens.pop(0)
+        module = root_module.get_module(token)
+        for token in tokens:
+            module = module.get_module(token)
+        module.remove_class(classname, cls)
+
+
+def _key_is_empty(key, decl_class_registry, test):
+    """test if a key is empty of a certain object.
+
+    used for unit tests against the registry to see if garbage collection
+    is working.
+
+    "test" is a callable that will be passed an object should return True
+    if the given object is the one we were looking for.
+
+    We can't pass the actual object itself b.c. this is for testing garbage
+    collection; the caller will have to have removed references to the
+    object itself.
+
+    """
+    if key not in decl_class_registry:
+        return True
+
+    thing = decl_class_registry[key]
+    if isinstance(thing, _MultipleClassMarker):
+        for sub_thing in thing.contents:
+            if test(sub_thing):
+                return False
+    else:
+        return not test(thing)
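A hypothetical test-style use of this helper, assuming a declarative base whose ``registry._class_registry`` is the weak-referencing registry described above (all names illustrative)::

    import gc

    # the test has removed its last strong reference to the mapped class
    # named "User"; after a collection the registry slot should be empty
    gc.collect()
    assert _key_is_empty(
        "User",
        Base.registry._class_registry,
        lambda target: target.__name__ == "User",
    )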
+
+
 class _MultipleClassMarker(object):
     """refers to multiple classes of the same name
     within _decl_class_registry.
@@ -85,6 +133,9 @@ class _MultipleClassMarker(object):
         )
         _registries.add(self)
 
+    def remove_item(self, cls):
+        self._remove_item(weakref.ref(cls))
+
     def __iter__(self):
         return (ref() for ref in self.contents)
 
@@ -104,7 +155,7 @@ class _MultipleClassMarker(object):
             return cls
 
     def _remove_item(self, ref):
-        self.contents.remove(ref)
+        self.contents.discard(ref)
         if not self.contents:
             _registries.discard(self)
             if self.on_remove:
@@ -182,6 +233,11 @@ class _ModuleMarker(object):
                 [cls], on_remove=lambda: self._remove_item(name)
             )
 
+    def remove_class(self, name, cls):
+        if name in self.contents:
+            existing = self.contents[name]
+            existing.remove_item(cls)
+
 
 class _ModNS(object):
     __slots__ = ("__parent",)
@@ -259,27 +315,35 @@ def _determine_container(key, value):
 
 
 class _class_resolver(object):
+    __slots__ = "cls", "prop", "arg", "fallback", "_dict", "_resolvers"
+
     def __init__(self, cls, prop, fallback, arg):
         self.cls = cls
         self.prop = prop
-        self.arg = self._declarative_arg = arg
+        self.arg = arg
         self.fallback = fallback
         self._dict = util.PopulateDict(self._access_cls)
         self._resolvers = ()
 
     def _access_cls(self, key):
         cls = self.cls
-        if key in cls._decl_class_registry:
-            return _determine_container(key, cls._decl_class_registry[key])
-        elif key in cls.metadata.tables:
-            return cls.metadata.tables[key]
-        elif key in cls.metadata._schemas:
+
+        manager = attributes.manager_of_class(cls)
+        decl_base = manager.registry
+        decl_class_registry = decl_base._class_registry
+        metadata = decl_base.metadata
+
+        if key in decl_class_registry:
+            return _determine_container(key, decl_class_registry[key])
+        elif key in metadata.tables:
+            return metadata.tables[key]
+        elif key in metadata._schemas:
             return _GetTable(key, cls.metadata)
         elif (
-            "_sa_module_registry" in cls._decl_class_registry
-            and key in cls._decl_class_registry["_sa_module_registry"]
+            "_sa_module_registry" in decl_class_registry
+            and key in decl_class_registry["_sa_module_registry"]
         ):
-            registry = cls._decl_class_registry["_sa_module_registry"]
+            registry = decl_class_registry["_sa_module_registry"]
             return registry.resolve_attr(key)
         elif self._resolvers:
             for resolv in self._resolvers:
@@ -333,57 +397,25 @@ class _class_resolver(object):
             self._raise_for_name(n.args[0], n)
 
 
-def _resolver(cls, prop):
-    import sqlalchemy
-    from sqlalchemy.orm import foreign, remote
+_fallback_dict = None
 
-    fallback = sqlalchemy.__dict__.copy()
-    fallback.update({"foreign": foreign, "remote": remote})
 
-    def resolve_arg(arg):
-        return _class_resolver(cls, prop, fallback, arg)
+def _resolver(cls, prop):
 
-    def resolve_name(arg):
-        return _class_resolver(cls, prop, fallback, arg)._resolve_name
+    global _fallback_dict
 
-    return resolve_name, resolve_arg
+    if _fallback_dict is None:
+        import sqlalchemy
+        from sqlalchemy.orm import foreign, remote
 
+        _fallback_dict = util.immutabledict(sqlalchemy.__dict__).union(
+            {"foreign": foreign, "remote": remote}
+        )
 
-def _deferred_relationship(cls, prop):
+    def resolve_arg(arg):
+        return _class_resolver(cls, prop, _fallback_dict, arg)
 
-    if isinstance(prop, RelationshipProperty):
-        resolve_name, resolve_arg = _resolver(cls, prop)
+    def resolve_name(arg):
+        return _class_resolver(cls, prop, _fallback_dict, arg)._resolve_name
 
-        for attr in (
-            "order_by",
-            "primaryjoin",
-            "secondaryjoin",
-            "secondary",
-            "_user_defined_foreign_keys",
-            "remote_side",
-        ):
-            v = getattr(prop, attr)
-            if isinstance(v, util.string_types):
-                setattr(prop, attr, resolve_arg(v))
-
-        for attr in ("argument",):
-            v = getattr(prop, attr)
-            if isinstance(v, util.string_types):
-                setattr(prop, attr, resolve_name(v))
-
-        if prop.backref and isinstance(prop.backref, tuple):
-            key, kwargs = prop.backref
-            for attr in (
-                "primaryjoin",
-                "secondaryjoin",
-                "secondary",
-                "foreign_keys",
-                "remote_side",
-                "order_by",
-            ):
-                if attr in kwargs and isinstance(
-                    kwargs[attr], util.string_types
-                ):
-                    kwargs[attr] = resolve_arg(kwargs[attr])
-
-    return prop
+    return resolve_name, resolve_arg
diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py
new file mode 100644 (file)
index 0000000..1df916e
--- /dev/null
@@ -0,0 +1,753 @@
+# orm/decl_api.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Public API functions and helpers for declarative."""
+from __future__ import absolute_import
+
+import re
+import weakref
+
+from . import attributes
+from . import clsregistry
+from . import exc as orm_exc
+from . import interfaces
+from .base import _inspect_mapped_class
+from .decl_base import _add_attribute
+from .decl_base import _as_declarative
+from .decl_base import _declarative_constructor
+from .decl_base import _DeferredMapperConfig
+from .decl_base import _del_attribute
+from .decl_base import _mapper
+from .descriptor_props import SynonymProperty as _orm_synonym
+from .. import inspection
+from .. import util
+from ..sql.schema import MetaData
+from ..util import hybridmethod
+from ..util import hybridproperty
+
+if util.TYPE_CHECKING:
+    from .mapper import Mapper
+
+
+def has_inherited_table(cls):
+    """Given a class, return True if any of the classes it inherits from has a
+    mapped table, otherwise return False.
+
+    This is used in declarative mixins to build attributes that behave
+    differently for the base class vs. a subclass in an inheritance
+    hierarchy.
+
+    .. seealso::
+
+        :ref:`decl_mixin_inheritance`
+
+    """
+    for class_ in cls.__mro__[1:]:
+        if getattr(class_, "__table__", None) is not None:
+            return True
+    return False
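A minimal sketch of the mixin pattern this function supports, where a ``__tablename__`` is generated only for classes that do not already inherit a mapped table (so single-table subclasses return ``None``)::

    from sqlalchemy.orm import declared_attr, has_inherited_table

    class Tablename(object):
        @declared_attr
        def __tablename__(cls):
            # subclasses that inherit a mapped table get no table of their own
            return None if has_inherited_table(cls) else cls.__name__.lower()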
+
+
+class DeclarativeMeta(type):
+    def __init__(cls, classname, bases, dict_, **kw):
+        if not cls.__dict__.get("__abstract__", False):
+            _as_declarative(cls.registry, cls, cls.__dict__)
+        type.__init__(cls, classname, bases, dict_)
+
+    def __setattr__(cls, key, value):
+        _add_attribute(cls, key, value)
+
+    def __delattr__(cls, key):
+        _del_attribute(cls, key)
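Compared to the removed version, the metaclass now keys off a ``registry`` attribute on the base class rather than a ``_decl_class_registry`` dictionary; a base produced by the new :func:`_orm.declarative_base` is assumed to carry that attribute, so ordinary class definitions are still mapped at class-creation time, e.g.::

    from sqlalchemy import Column, Integer
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()   # the generated base carries the registry used above

    class Widget(Base):         # mapped by DeclarativeMeta.__init__
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)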
+
+
+def synonym_for(name, map_column=False):
+    """Decorator that produces an :func:`_orm.synonym`
+    attribute in conjunction with a Python descriptor.
+
+    The function being decorated is passed to :func:`_orm.synonym` as the
+    :paramref:`.orm.synonym.descriptor` parameter::
+
+        class MyClass(Base):
+            __tablename__ = 'my_table'
+
+            id = Column(Integer, primary_key=True)
+            _job_status = Column("job_status", String(50))
+
+            @synonym_for("job_status")
+            @property
+            def job_status(self):
+                return "Status: %s" % self._job_status
+
+    The :ref:`hybrid properties <mapper_hybrids>` feature of SQLAlchemy
+    is typically preferred instead of synonyms, which is a more legacy
+    feature.
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+        :func:`_orm.synonym` - the mapper-level function
+
+        :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
+        updated approach to augmenting attribute behavior more flexibly than
+        can be achieved with synonyms.
+
+    """
+
+    def decorate(fn):
+        return _orm_synonym(name, map_column=map_column, descriptor=fn)
+
+    return decorate
+
+
+class declared_attr(interfaces._MappedAttribute, property):
+    """Mark a class-level method as representing the definition of
+    a mapped property or special declarative member name.
+
+    @declared_attr turns the attribute into a scalar-like
+    property that can be invoked from the uninstantiated class.
+    Declarative treats attributes specifically marked with
+    @declared_attr as returning a construct that is specific
+    to mapping or declarative table configuration.  The attribute
+    receives the same name that the non-dynamic version of the
+    attribute would have.
+
+    @declared_attr is most often applicable to mixins, to define
+    relationships that are to be applied to different implementors
+    of the class::
+
+        class ProvidesUser(object):
+            "A mixin that adds a 'user' relationship to classes."
+
+            @declared_attr
+            def user(cls):
+                return relationship("User")
+
+    It also can be applied to mapped classes, such as to provide
+    a "polymorphic" scheme for inheritance::
+
+        class Employee(Base):
+            id = Column(Integer, primary_key=True)
+            type = Column(String(50), nullable=False)
+
+            @declared_attr
+            def __tablename__(cls):
+                return cls.__name__.lower()
+
+            @declared_attr
+            def __mapper_args__(cls):
+                if cls.__name__ == 'Employee':
+                    return {
+                            "polymorphic_on":cls.type,
+                            "polymorphic_identity":"Employee"
+                    }
+                else:
+                    return {"polymorphic_identity":cls.__name__}
+
+    """
+
+    def __init__(self, fget, cascading=False):
+        super(declared_attr, self).__init__(fget)
+        self.__doc__ = fget.__doc__
+        self._cascading = cascading
+
+    def __get__(desc, self, cls):
+        # the declared_attr needs to make use of a cache that exists
+        # for the span of the declarative scan_attributes() phase.
+        # to achieve this we look at the class manager that's configured.
+        manager = attributes.manager_of_class(cls)
+        if manager is None:
+            if not re.match(r"^__.+__$", desc.fget.__name__):
+                # if there is no manager at all, then this class hasn't been
+                # run through declarative or mapper() at all, emit a warning.
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" % (desc.fget.__name__, cls.__name__)
+                )
+            return desc.fget(cls)
+        elif manager.is_mapped:
+            # the class is mapped, which means we're outside of the declarative
+            # scan setup, just run the function.
+            return desc.fget(cls)
+
+        # here, we are inside of the declarative scan.  use the registry
+        # that is tracking the values of these attributes.
+        declarative_scan = manager.declarative_scan
+        reg = declarative_scan.declared_attr_reg
+
+        if desc in reg:
+            return reg[desc]
+        else:
+            reg[desc] = obj = desc.fget(cls)
+            return obj
+
+    @hybridmethod
+    def _stateful(cls, **kw):
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def cascading(cls):
+        """Mark a :class:`.declared_attr` as cascading.
+
+        This is a special-use modifier which indicates that a column
+        or MapperProperty-based declared attribute should be configured
+        distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+        .. warning::
+
+            The :attr:`.declared_attr.cascading` modifier has several
+            limitations:
+
+            * The flag **only** applies to the use of :class:`.declared_attr`
+              on declarative mixin classes and ``__abstract__`` classes; it
+              currently has no effect when used on a mapped class directly.
+
+            * The flag **only** applies to normally-named attributes, i.e.
+              not any special underscore attributes such as ``__tablename__``.
+              On these attributes it has **no** effect.
+
+            * The flag currently **does not allow further overrides** down
+              the class hierarchy; if a subclass tries to override the
+              attribute, a warning is emitted and the overridden attribute
+              is skipped.  This is a limitation that it is hoped will be
+              resolved at some point.
+
+        Below, both ``MyClass`` and ``MySubClass`` will have a distinct
+        ``id`` Column object established::
+
+            class HasIdMixin(object):
+                @declared_attr.cascading
+                def id(cls):
+                    if has_inherited_table(cls):
+                        return Column(
+                            ForeignKey('myclass.id'), primary_key=True
+                        )
+                    else:
+                        return Column(Integer, primary_key=True)
+
+            class MyClass(HasIdMixin, Base):
+                __tablename__ = 'myclass'
+                # ...
+
+            class MySubClass(MyClass):
+                ""
+                # ...
+
+        The behavior of the above configuration is that ``MySubClass``
+        will refer to both its own ``id`` column as well as that of
+        ``MyClass`` underneath the attribute named ``id``.
+
+        .. seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)
+
+
+def declarative_base(
+    bind=None,
+    metadata=None,
+    mapper=None,
+    cls=object,
+    name="Base",
+    constructor=_declarative_constructor,
+    class_registry=None,
+    metaclass=DeclarativeMeta,
+):
+    r"""Construct a base class for declarative class definitions.
+
+    The new base class will be given a metaclass that produces
+    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
+    the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
+    information provided declaratively in the class and any subclasses
+    of the class.
+
+    The :func:`_orm.declarative_base` function is a shorthand version
+    of using the :meth:`_orm.registry.generate_base`
+    method.  That is, the following::
+
+        from sqlalchemy.orm import declarative_base
+
+        Base = declarative_base()
+
+    Is equivalent to::
+
+        from sqlalchemy.orm import registry
+
+        mapper_registry = registry()
+        Base = mapper_registry.generate_base()
+
+    See the docstring for :class:`_orm.registry`
+    and :meth:`_orm.registry.generate_base`
+    for more details.
+
+    .. versionchanged:: 1.4  The :func:`_orm.declarative_base`
+       function is now a specialization of the more generic
+       :class:`_orm.registry` class.  The function also moves to the
+       ``sqlalchemy.orm`` package from the ``sqlalchemy.ext.declarative``
+       package.
+
+
+    :param bind: An optional
+      :class:`~sqlalchemy.engine.Connectable`, will be assigned
+      the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
+      instance.
+
+      .. deprecated:: 1.4  The "bind" argument to declarative_base is
+         deprecated and will be removed in SQLAlchemy 2.0.
+
+    :param metadata:
+      An optional :class:`~sqlalchemy.schema.MetaData` instance.  All
+      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
+      subclasses of the base will share this MetaData.  A MetaData instance
+      will be created if none is provided.  The
+      :class:`~sqlalchemy.schema.MetaData` instance will be available via the
+      ``metadata`` attribute of the generated declarative base class.
+
+    :param mapper:
+      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
+      be used to map subclasses to their Tables.
+
+    :param cls:
+      Defaults to :class:`object`. A type to use as the base for the generated
+      declarative base class. May be a class or tuple of classes.
+
+    :param name:
+      Defaults to ``Base``.  The display name for the generated
+      class.  Customizing this is not required, but can improve clarity in
+      tracebacks and debugging.
+
+    :param constructor:
+      Specify the implementation for the ``__init__`` function on a mapped
+      class that has no ``__init__`` of its own.  Defaults to an
+      implementation that assigns \**kwargs for declared
+      fields and relationships to an instance.  If ``None`` is supplied,
+      no __init__ will be provided and construction will fall back to
+      cls.__init__ by way of the normal Python semantics.
+
+    :param class_registry: optional dictionary that will serve as the
+      registry of class names -> mapped classes when string names
+      are used to identify classes inside of :func:`_orm.relationship`
+      and others.  Allows two or more declarative base classes
+      to share the same registry of class names for simplified
+      inter-base relationships.
+
+    :param metaclass:
+      Defaults to :class:`.DeclarativeMeta`.  A metaclass or __metaclass__
+      compatible callable to use as the meta type of the generated
+      declarative base class.
+
+    .. seealso::
+
+        :class:`_orm.registry`
+
+    """
+
+    return registry(
+        bind=bind,
+        metadata=metadata,
+        class_registry=class_registry,
+        constructor=constructor,
+    ).generate_base(mapper=mapper, cls=cls, name=name, metaclass=metaclass,)
+
+
+class registry(object):
+    """Generalized registry for mapping classes.
+
+    The :class:`_orm.registry` serves as the basis for maintaining a collection
+    of mappings, and provides configurational hooks used to map classes.
+
+    The three general kinds of mappings supported are Declarative Base,
+    Declarative Decorator, and Imperative Mapping.  All of these mapping
+    styles may be used interchangeably, as sketched in the example
+    following this list:
+
+    * :meth:`_orm.registry.generate_base` returns a new declarative base
+      class, and is the underlying implementation of the
+      :func:`_orm.declarative_base` function.
+
+    * :meth:`_orm.registry.mapped` provides a class decorator that will
+      apply declarative mapping to a class without the use of a declarative
+      base class.
+
+    * :meth:`_orm.registry.map_imperatively` will produce a
+      :class:`_orm.Mapper` for a class without scanning the class for
+      declarative class attributes. This method suits the use case historically
+      provided by the
+      :func:`_orm.mapper` classical mapping function.
+
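+    For example, all three styles may target a single registry; the
+    ``User``, ``Address`` and ``Keyword`` classes below are illustrative
+    only::
+
+        from sqlalchemy import Column, Integer, Table
+        from sqlalchemy.orm import registry
+
+        mapper_registry = registry()
+        Base = mapper_registry.generate_base()
+
+        class User(Base):  # Declarative Base style
+            __tablename__ = 'user'
+            id = Column(Integer, primary_key=True)
+
+        @mapper_registry.mapped  # Declarative Decorator style
+        class Address:
+            __tablename__ = 'address'
+            id = Column(Integer, primary_key=True)
+
+        keyword_table = Table(
+            'keyword',
+            mapper_registry.metadata,
+            Column('id', Integer, primary_key=True),
+        )
+
+        class Keyword:  # Imperative (classical) style
+            pass
+
+        mapper_registry.map_imperatively(Keyword, keyword_table)
+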
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`orm_mapping_classes_toplevel` - overview of class mapping
+        styles.
+
+    """
+
+    def __init__(
+        self,
+        bind=None,
+        metadata=None,
+        class_registry=None,
+        constructor=_declarative_constructor,
+    ):
+        r"""Construct a new :class:`_orm.registry`
+
+        :param metadata:
+          An optional :class:`_schema.MetaData` instance.  All
+          :class:`_schema.Table` objects generated using declarative
+          table mapping will make use of this :class:`_schema.MetaData`
+          collection.  If this argument is left at its default of ``None``,
+          a blank :class:`_schema.MetaData` collection is created.
+
+        :param constructor:
+          Specify the implementation for the ``__init__`` function on a mapped
+          class that has no ``__init__`` of its own.  Defaults to an
+          implementation that assigns \**kwargs for declared
+          fields and relationships to an instance.  If ``None`` is supplied,
+          no __init__ will be provided and construction will fall back to
+          cls.__init__ by way of the normal Python semantics.
+
+        :param class_registry: optional dictionary that will serve as the
+          registry of class names -> mapped classes when string names
+          are used to identify classes inside of :func:`_orm.relationship`
+          and others.  Allows two or more declarative base classes
+          to share the same registry of class names for simplified
+          inter-base relationships.
+
+        :param bind: An optional
+          :class:`~sqlalchemy.engine.Connectable`, will be assigned
+          the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
+          instance.
+
+          .. deprecated:: 1.4  The "bind" argument to registry is
+             deprecated and will be removed in SQLAlchemy 2.0.
+
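+        For example, a registry may be constructed against a pre-existing
+        :class:`_schema.MetaData` collection; the ``"remote_banks"`` schema
+        name below is illustrative only::
+
+            from sqlalchemy import MetaData
+            from sqlalchemy.orm import registry
+
+            metadata_obj = MetaData(schema="remote_banks")
+            mapper_registry = registry(metadata=metadata_obj)
+
+            # Table objects produced by declarative mappings become part
+            # of metadata_obj and use the "remote_banks" schema
+            Base = mapper_registry.generate_base()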
+
+        """
+        lcl_metadata = metadata or MetaData()
+        if bind:
+            lcl_metadata.bind = bind
+
+        if class_registry is None:
+            class_registry = weakref.WeakValueDictionary()
+
+        self._class_registry = class_registry
+        self.metadata = lcl_metadata
+        self.constructor = constructor
+
+    def _dispose_declarative_artifacts(self, cls):
+        clsregistry.remove_class(cls.__name__, cls, self._class_registry)
+
+    def generate_base(
+        self, mapper=None, cls=object, name="Base", metaclass=DeclarativeMeta,
+    ):
+        """Generate a declarative base class.
+
+        Classes that inherit from the returned class object will be
+        automatically mapped using declarative mapping.
+
+        E.g.::
+
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            Base = mapper_registry.generate_base()
+
+            class MyClass(Base):
+                __tablename__ = "my_table"
+                id = Column(Integer, primary_key=True)
+
+        The :meth:`_orm.registry.generate_base` method provides the
+        implementation for the :func:`_orm.declarative_base` function, which
+        creates the :class:`_orm.registry` and base class all at once.
+
+
+        See the section :ref:`orm_declarative_mapping` for background and
+        examples.
+
+        :param mapper:
+          An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`.
+          This function is used to generate new :class:`_orm.Mapper` objects.
+
+        :param cls:
+          Defaults to :class:`object`. A type to use as the base for the
+          generated declarative base class. May be a class or tuple of classes.
+
+        :param name:
+          Defaults to ``Base``.  The display name for the generated
+          class.  Customizing this is not required, but can improve clarity in
+          tracebacks and debugging.
+
+        :param metaclass:
+          Defaults to :class:`.DeclarativeMeta`.  A metaclass or __metaclass__
+          compatible callable to use as the meta type of the generated
+          declarative base class.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :func:`_orm.declarative_base`
+
+        """
+        metadata = self.metadata
+
+        bases = not isinstance(cls, tuple) and (cls,) or cls
+
+        class_dict = dict(registry=self, metadata=metadata)
+        if isinstance(cls, type):
+            class_dict["__doc__"] = cls.__doc__
+
+        if self.constructor:
+            class_dict["__init__"] = self.constructor
+
+        class_dict["__abstract__"] = True
+        if mapper:
+            class_dict["__mapper_cls__"] = mapper
+
+        return metaclass(name, bases, class_dict)
+
+    def mapped(self, cls):
+        """Class decorator that will apply the Declarative mapping process
+        to a given class.
+
+        E.g.::
+
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            @mapper_registry.mapped
+            class Foo:
+                __tablename__ = 'some_table'
+
+                id = Column(Integer, primary_key=True)
+                name = Column(String)
+
+        See the section :ref:`orm_declarative_mapping` for complete
+        details and examples.
+
+        :param cls: class to be mapped.
+
+        :return: the class that was passed.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :meth:`_orm.registry.generate_base` - generates a base class
+            that will apply Declarative mapping to subclasses automatically
+            using a Python metaclass.
+
+        """
+        _as_declarative(self, cls, cls.__dict__)
+        return cls
+
+    def as_declarative_base(self, **kw):
+        """
+        Class decorator which will invoke
+        :meth:`_orm.registry.generate_base`
+        for a given base class.
+
+        E.g.::
+
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            @mapper_registry.as_declarative_base()
+            class Base(object):
+                @declared_attr
+                def __tablename__(cls):
+                    return cls.__name__.lower()
+                id = Column(Integer, primary_key=True)
+
+            class MyMappedClass(Base):
+                # ...
+
+        All keyword arguments passed to
+        :meth:`_orm.registry.as_declarative_base` are passed
+        along to :meth:`_orm.registry.generate_base`.
+
+        """
+
+        def decorate(cls):
+            kw["cls"] = cls
+            kw["name"] = cls.__name__
+            return self.generate_base(**kw)
+
+        return decorate
+
+    def map_declaratively(self, cls):
+        # type: (type) -> Mapper
+        """Map a class declaratively.
+
+        In this form of mapping, the class is scanned for mapping information,
+        including columns to be associated with a table, and/or an
+        actual table object.
+
+        Returns the :class:`_orm.Mapper` object.
+
+        E.g.::
+
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            class Foo:
+                __tablename__ = 'some_table'
+
+                id = Column(Integer, primary_key=True)
+                name = Column(String)
+
+            mapper = mapper_registry.map_declaratively(Foo)
+
+        This function is more conveniently invoked indirectly via either the
+        :meth:`_orm.registry.mapped` class decorator or by subclassing a
+        declarative metaclass generated from
+        :meth:`_orm.registry.generate_base`.
+
+        See the section :ref:`orm_declarative_mapping` for complete
+        details and examples.
+
+        :param cls: class to be mapped.
+
+        :return: a :class:`_orm.Mapper` object.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :meth:`_orm.registry.mapped` - more common decorator interface
+            to this function.
+
+            :meth:`_orm.registry.map_imperatively`
+
+        """
+        return _as_declarative(self, cls, cls.__dict__)
+
+    def map_imperatively(self, class_, local_table=None, **kw):
+        r"""Map a class imperatively.
+
+        In this form of mapping, the class is not scanned for any mapping
+        information.  Instead, all mapping constructs are passed as
+        arguments.
+
+        This method is intended to be fully equivalent to the classic
+        SQLAlchemy :func:`_orm.mapper` function, except that it's in terms of
+        a particular registry.
+
+        E.g.::
+
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            my_table = Table(
+                "my_table",
+                mapper_registry.metadata,
+                Column('id', Integer, primary_key=True)
+            )
+
+            class MyClass:
+                pass
+
+            mapper_registry.map_imperatively(MyClass, my_table)
+
+        See the section :ref:`orm_imperative_mapping` for complete background
+        and usage examples.
+
+        :param class\_: The class to be mapped.  Corresponds to the
+         :paramref:`_orm.mapper.class_` parameter.
+
+        :param local_table: the :class:`_schema.Table` or other
+         :class:`_sql.FromClause` object that is the subject of the mapping.
+         Corresponds to the
+         :paramref:`_orm.mapper.local_table` parameter.
+
+        :param \**kw: all other keyword arguments are passed to the
+         :func:`_orm.mapper` function directly.
+
+        .. seealso::
+
+            :ref:`orm_imperative_mapping`
+
+            :ref:`orm_declarative_mapping`
+
+        """
+        return _mapper(self, class_, local_table, kw)
+
+
+def as_declarative(**kw):
+    """
+    Class decorator which will adapt a given class into a
+    :func:`_orm.declarative_base`.
+
+    This function makes use of the :meth:`_orm.registry.as_declarative_base`
+    method, by first creating a :class:`_orm.registry` automatically
+    and then invoking the decorator.
+
+    E.g.::
+
+        from sqlalchemy.orm import as_declarative
+
+        @as_declarative()
+        class Base(object):
+            @declared_attr
+            def __tablename__(cls):
+                return cls.__name__.lower()
+            id = Column(Integer, primary_key=True)
+
+        class MyMappedClass(Base):
+            # ...
+
+    .. seealso::
+
+        :meth:`_orm.registry.as_declarative_base`
+
+    """
+    bind, metadata, class_registry = (
+        kw.pop("bind", None),
+        kw.pop("metadata", None),
+        kw.pop("class_registry", None),
+    )
+
+    return registry(
+        bind=bind, metadata=metadata, class_registry=class_registry
+    ).as_declarative_base(**kw)
+
+
+@inspection._inspects(DeclarativeMeta)
+def _inspect_decl_meta(cls):
+    mp = _inspect_mapped_class(cls)
+    if mp is None:
+        if _DeferredMapperConfig.has_cls(cls):
+            _DeferredMapperConfig.raise_unmapped_for_cls(cls)
+            raise orm_exc.UnmappedClassError(
+                cls,
+                msg="Class %s has a deferred mapping on it.  It is not yet "
+                "usable as a mapped class." % orm_exc._safe_cls_name(cls),
+            )
+    return mp
similarity index 73%
rename from lib/sqlalchemy/ext/declarative/base.py
rename to lib/sqlalchemy/orm/decl_base.py
index 9b72fe8abff17ecf37e75ab0cee47893e5900411..b9c890429e4f055ac607448b7d49536f6736da76 100644 (file)
@@ -5,33 +5,32 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 """Internal implementation for declarative."""
+from __future__ import absolute_import
 
 import collections
 import weakref
 
+from sqlalchemy.orm import attributes
 from sqlalchemy.orm import instrumentation
 from . import clsregistry
-from ... import event
-from ... import exc
-from ... import util
-from ...orm import class_mapper
-from ...orm import exc as orm_exc
-from ...orm import mapper
-from ...orm import mapperlib
-from ...orm import synonym
-from ...orm.attributes import QueryableAttribute
-from ...orm.base import _is_mapped_class
-from ...orm.base import InspectionAttr
-from ...orm.descriptor_props import CompositeProperty
-from ...orm.interfaces import MapperProperty
-from ...orm.properties import ColumnProperty
-from ...schema import Column
-from ...schema import Table
-from ...sql import expression
-from ...util import topological
-
-
-declared_attr = declarative_props = None
+from . import exc as orm_exc
+from . import mapper as mapperlib
+from .attributes import QueryableAttribute
+from .base import _is_mapped_class
+from .base import InspectionAttr
+from .descriptor_props import CompositeProperty
+from .descriptor_props import SynonymProperty
+from .interfaces import MapperProperty
+from .mapper import Mapper as mapper
+from .properties import ColumnProperty
+from .util import class_mapper
+from .. import event
+from .. import exc
+from .. import util
+from ..sql import expression
+from ..sql.schema import Column
+from ..sql.schema import Table
+from ..util import topological
 
 
 def _declared_mapping_info(cls):
@@ -49,7 +48,7 @@ def _resolve_for_abstract_or_classical(cls):
     if cls is object:
         return None
 
-    if _get_immediate_cls_attr(cls, "__abstract__", strict=True):
+    if cls.__dict__.get("__abstract__", False):
         for sup in cls.__bases__:
             sup = _resolve_for_abstract_or_classical(sup)
             if sup is not None:
@@ -57,31 +56,12 @@ def _resolve_for_abstract_or_classical(cls):
         else:
             return None
     else:
-        classical = _dive_for_classically_mapped_class(cls)
-        if classical is not None:
-            return classical
-        else:
-            return cls
-
-
-def _dive_for_classically_mapped_class(cls):
-    # support issue #4321
-
-    # if we are within a base hierarchy, don't
-    # search at all for classical mappings
-    if hasattr(cls, "_decl_class_registry"):
-        return None
+        clsmanager = _dive_for_cls_manager(cls)
 
-    manager = instrumentation.manager_of_class(cls)
-    if manager is not None:
-        return cls
-    else:
-        for sup in cls.__bases__:
-            mapper = _dive_for_classically_mapped_class(sup)
-            if mapper is not None:
-                return sup
+        if clsmanager:
+            return clsmanager.class_
         else:
-            return None
+            return cls
 
 
 def _get_immediate_cls_attr(cls, attrname, strict=False):
@@ -95,21 +75,24 @@ def _get_immediate_cls_attr(cls, attrname, strict=False):
     inherit from.
 
     """
+
+    # the rules are different for this name than others,
+    # make sure we've moved it out.  transitional
+    assert attrname != "__abstract__"
+
     if not issubclass(cls, object):
         return None
 
-    for base in cls.__mro__:
-        _is_declarative_inherits = hasattr(base, "_decl_class_registry")
-        _is_classicial_inherits = (
-            not _is_declarative_inherits
-            and _dive_for_classically_mapped_class(base) is not None
-        )
+    if attrname in cls.__dict__:
+        return getattr(cls, attrname)
+
+    for base in cls.__mro__[1:]:
+        _is_classicial_inherits = _dive_for_cls_manager(base)
 
         if attrname in base.__dict__ and (
             base is cls
             or (
                 (base in cls.__bases__ if strict else True)
-                and not _is_declarative_inherits
                 and not _is_classicial_inherits
             )
         ):
@@ -118,22 +101,44 @@ def _get_immediate_cls_attr(cls, attrname, strict=False):
         return None
 
 
-def _as_declarative(cls, classname, dict_):
-    global declared_attr, declarative_props
-    if declared_attr is None:
-        from .api import declared_attr
+def _dive_for_cls_manager(cls):
+    # because the class manager registration is pluggable,
+    # we need to do the search for every class in the hierarchy,
+    # rather than just a simple "cls._sa_class_manager"
 
-        declarative_props = (declared_attr, util.classproperty)
+    # python 2 old style class
+    if not hasattr(cls, "__mro__"):
+        return None
 
-    if _get_immediate_cls_attr(cls, "__abstract__", strict=True):
-        return
+    for base in cls.__mro__:
+        manager = attributes.manager_of_class(base)
+        if manager:
+            return manager
+    return None
 
-    _MapperConfig.setup_mapping(cls, classname, dict_)
 
+def _as_declarative(registry, cls, dict_):
+
+    # declarative scans the class for attributes.  no table or mapper
+    # args passed separately.
+
+    return _MapperConfig.setup_mapping(registry, cls, dict_, None, {})
+
+
+def _mapper(registry, cls, table, mapper_kw):
+    _ImperativeMapperConfig(registry, cls, table, mapper_kw)
+    return cls.__mapper__
 
-def _check_declared_props_nocascade(obj, name, cls):
 
-    if isinstance(obj, declarative_props):
+@util.preload_module("sqlalchemy.orm.decl_api")
+def _is_declarative_props(obj):
+    declared_attr = util.preloaded.orm_decl_api.declared_attr
+
+    return isinstance(obj, (declared_attr, util.classproperty))
+
+
+def _check_declared_props_nocascade(obj, name, cls):
+    if _is_declarative_props(obj):
         if getattr(obj, "_cascading", False):
             util.warn(
                 "@declared_attr.cascading is not supported on the %s "
@@ -146,8 +151,19 @@ def _check_declared_props_nocascade(obj, name, cls):
 
 
 class _MapperConfig(object):
+    __slots__ = ("cls", "classname", "properties", "declared_attr_reg")
+
     @classmethod
-    def setup_mapping(cls, cls_, classname, dict_):
+    def setup_mapping(cls, registry, cls_, dict_, table, mapper_kw):
+        manager = attributes.manager_of_class(cls_)
+        if manager and manager.class_ is cls_:
+            raise exc.InvalidRequestError(
+                "Class %r has already been "
+                "instrumented declaratively" % cls_
+            )
+
+        if cls_.__dict__.get("__abstract__", False):
+            return
+
         defer_map = _get_immediate_cls_attr(
             cls_, "_sa_decl_prepare_nocascade", strict=True
         ) or hasattr(cls_, "_sa_decl_prepare")
@@ -155,45 +171,142 @@ class _MapperConfig(object):
         if defer_map:
             cfg_cls = _DeferredMapperConfig
         else:
-            cfg_cls = _MapperConfig
+            cfg_cls = _ClassScanMapperConfig
 
-        cfg_cls(cls_, classname, dict_)
-
-    def __init__(self, cls_, classname, dict_):
+        return cfg_cls(registry, cls_, dict_, table, mapper_kw)
 
+    def __init__(self, registry, cls_):
         self.cls = cls_
+        self.classname = cls_.__name__
+        self.properties = util.OrderedDict()
+        self.declared_attr_reg = {}
+
+        instrumentation.register_class(
+            self.cls,
+            finalize=False,
+            registry=registry,
+            declarative_scan=self,
+            init_method=registry.constructor,
+        )
+
+        event.listen(
+            cls_,
+            "class_uninstrument",
+            registry._dispose_declarative_artifacts,
+        )
+
+    def set_cls_attribute(self, attrname, value):
+
+        manager = instrumentation.manager_of_class(self.cls)
+        manager.install_member(attrname, value)
+        return value
+
+    def _early_mapping(self, mapper_kw):
+        self.map(mapper_kw)
+
+
+class _ImperativeMapperConfig(_MapperConfig):
+    __slots__ = ("dict_", "local_table", "inherits")
+
+    def __init__(
+        self, registry, cls_, table, mapper_kw,
+    ):
+        super(_ImperativeMapperConfig, self).__init__(registry, cls_)
+
+        self.dict_ = {}
+        self.local_table = self.set_cls_attribute("__table__", table)
+
+        with mapperlib._CONFIGURE_MUTEX:
+            clsregistry.add_class(
+                self.classname, self.cls, registry._class_registry
+            )
+
+            self._setup_inheritance(mapper_kw)
+
+            self._early_mapping(mapper_kw)
+
+    def map(self, mapper_kw=util.EMPTY_DICT):
+        mapper_cls = mapper
+
+        return self.set_cls_attribute(
+            "__mapper__", mapper_cls(self.cls, self.local_table, **mapper_kw),
+        )
+
+    def _setup_inheritance(self, mapper_kw):
+        cls = self.cls
 
-        # dict_ will be a dictproxy, which we can't write to, and we need to!
-        self.dict_ = dict(dict_)
-        self.classname = classname
+        inherits = mapper_kw.get("inherits", None)
+
+        if inherits is None:
+            # since we search for classical mappings now, search for
+            # multiple mapped bases as well and raise an error.
+            inherits_search = []
+            for c in cls.__bases__:
+                c = _resolve_for_abstract_or_classical(c)
+                if c is None:
+                    continue
+                if _declared_mapping_info(
+                    c
+                ) is not None and not _get_immediate_cls_attr(
+                    c, "_sa_decl_prepare_nocascade", strict=True
+                ):
+                    inherits_search.append(c)
+
+            if inherits_search:
+                if len(inherits_search) > 1:
+                    raise exc.InvalidRequestError(
+                        "Class %s has multiple mapped bases: %r"
+                        % (cls, inherits_search)
+                    )
+                inherits = inherits_search[0]
+        elif isinstance(inherits, mapper):
+            inherits = inherits.class_
+
+        self.inherits = inherits
+
+
+class _ClassScanMapperConfig(_MapperConfig):
+    __slots__ = (
+        "dict_",
+        "local_table",
+        "persist_selectable",
+        "declared_columns",
+        "column_copies",
+        "table_args",
+        "tablename",
+        "mapper_args",
+        "mapper_args_fn",
+        "inherits",
+    )
+
+    def __init__(
+        self, registry, cls_, dict_, table, mapper_kw,
+    ):
+
+        super(_ClassScanMapperConfig, self).__init__(registry, cls_)
+
+        self.dict_ = dict(dict_) if dict_ else {}
         self.persist_selectable = None
-        self.properties = util.OrderedDict()
         self.declared_columns = set()
         self.column_copies = {}
         self._setup_declared_events()
 
-        # temporary registry.  While early 1.0 versions
-        # set up the ClassManager here, by API contract
-        # we can't do that until there's a mapper.
-        self.cls._sa_declared_attr_reg = {}
-
         self._scan_attributes()
 
         with mapperlib._CONFIGURE_MUTEX:
-            clsregistry.add_class(self.classname, self.cls)
+            clsregistry.add_class(
+                self.classname, self.cls, registry._class_registry
+            )
 
             self._extract_mappable_attributes()
 
             self._extract_declared_columns()
 
-            self._setup_table()
+            self._setup_table(table)
 
-            self._setup_inheritance()
+            self._setup_inheritance(mapper_kw)
 
-            self._early_mapping()
-
-    def _early_mapping(self):
-        self.map()
+            self._early_mapping(mapper_kw)
 
     def _setup_declared_events(self):
         if _get_immediate_cls_attr(self.cls, "__declare_last__"):
@@ -265,7 +378,7 @@ class _MapperConfig(object):
                         if base is not cls:
                             inherited_table_args = True
                 elif class_mapped:
-                    if isinstance(obj, declarative_props):
+                    if _is_declarative_props(obj):
                         util.warn(
                             "Regular (i.e. not __special__) "
                             "attribute '%s.%s' uses @declared_attr, "
@@ -287,7 +400,7 @@ class _MapperConfig(object):
                             "be declared as @declared_attr callables "
                             "on declarative mixin classes."
                         )
-                    elif isinstance(obj, declarative_props):
+                    elif _is_declarative_props(obj):
                         if obj._cascading:
                             if name in dict_:
                                 # unfortunately, while we can use the user-
@@ -395,8 +508,8 @@ class _MapperConfig(object):
                 continue
 
             value = dict_[k]
-            if isinstance(value, declarative_props):
-                if isinstance(value, declared_attr) and value._cascading:
+            if _is_declarative_props(value):
+                if value._cascading:
                     util.warn(
                         "Use of @declared_attr.cascading only applies to "
                         "Declarative 'mixin' and 'abstract' classes.  "
@@ -413,7 +526,7 @@ class _MapperConfig(object):
             ):
                 # detect a QueryableAttribute that's already mapped being
                 # assigned elsewhere in userland, turn into a synonym()
-                value = synonym(value.key)
+                value = SynonymProperty(value.key)
                 setattr(cls, k, value)
 
             if (
@@ -446,8 +559,7 @@ class _MapperConfig(object):
                     "for the MetaData instance when using a "
                     "declarative base class."
                 )
-            prop = clsregistry._deferred_relationship(cls, value)
-            our_stuff[k] = prop
+            our_stuff[k] = value
 
     def _extract_declared_columns(self):
         our_stuff = self.properties
@@ -488,24 +600,25 @@ class _MapperConfig(object):
                     % (self.classname, name, (", ".join(sorted(keys))))
                 )
 
-    def _setup_table(self):
+    def _setup_table(self, table=None):
         cls = self.cls
         tablename = self.tablename
         table_args = self.table_args
         dict_ = self.dict_
         declared_columns = self.declared_columns
 
+        manager = attributes.manager_of_class(cls)
+
         declared_columns = self.declared_columns = sorted(
             declared_columns, key=lambda c: c._creation_order
         )
-        table = None
 
-        if hasattr(cls, "__table_cls__"):
-            table_cls = util.unbound_method_to_callable(cls.__table_cls__)
-        else:
-            table_cls = Table
+        if "__table__" not in dict_ and table is None:
+            if hasattr(cls, "__table_cls__"):
+                table_cls = util.unbound_method_to_callable(cls.__table_cls__)
+            else:
+                table_cls = Table
 
-        if "__table__" not in dict_:
             if tablename is not None:
 
                 args, table_kw = (), {}
@@ -522,14 +635,18 @@ class _MapperConfig(object):
                 if autoload:
                     table_kw["autoload"] = True
 
-                cls.__table__ = table = table_cls(
-                    tablename,
-                    cls.metadata,
-                    *(tuple(declared_columns) + tuple(args)),
-                    **table_kw
+                table = self.set_cls_attribute(
+                    "__table__",
+                    table_cls(
+                        tablename,
+                        manager.registry.metadata,
+                        *(tuple(declared_columns) + tuple(args)),
+                        **table_kw
+                    ),
                 )
         else:
-            table = cls.__table__
+            if table is None:
+                table = cls.__table__
             if declared_columns:
                 for c in declared_columns:
                     if not table.c.contains_column(c):
@@ -539,34 +656,40 @@ class _MapperConfig(object):
                         )
         self.local_table = table
 
-    def _setup_inheritance(self):
+    def _setup_inheritance(self, mapper_kw):
         table = self.local_table
         cls = self.cls
         table_args = self.table_args
         declared_columns = self.declared_columns
 
-        # since we search for classical mappings now, search for
-        # multiple mapped bases as well and raise an error.
-        inherits = []
-        for c in cls.__bases__:
-            c = _resolve_for_abstract_or_classical(c)
-            if c is None:
-                continue
-            if _declared_mapping_info(
-                c
-            ) is not None and not _get_immediate_cls_attr(
-                c, "_sa_decl_prepare_nocascade", strict=True
-            ):
-                inherits.append(c)
+        inherits = mapper_kw.get("inherits", None)
 
-        if inherits:
-            if len(inherits) > 1:
-                raise exc.InvalidRequestError(
-                    "Class %s has multiple mapped bases: %r" % (cls, inherits)
-                )
-            self.inherits = inherits[0]
-        else:
-            self.inherits = None
+        if inherits is None:
+            # since we search for classical mappings now, search for
+            # multiple mapped bases as well and raise an error.
+            inherits_search = []
+            for c in cls.__bases__:
+                c = _resolve_for_abstract_or_classical(c)
+                if c is None:
+                    continue
+                if _declared_mapping_info(
+                    c
+                ) is not None and not _get_immediate_cls_attr(
+                    c, "_sa_decl_prepare_nocascade", strict=True
+                ):
+                    inherits_search.append(c)
+
+            if inherits_search:
+                if len(inherits_search) > 1:
+                    raise exc.InvalidRequestError(
+                        "Class %s has multiple mapped bases: %r"
+                        % (cls, inherits_search)
+                    )
+                inherits = inherits_search[0]
+        elif isinstance(inherits, mapper):
+            inherits = inherits.class_
+
+        self.inherits = inherits
 
         if (
             table is None
@@ -614,13 +737,21 @@ class _MapperConfig(object):
                     ):
                         inherited_persist_selectable._refresh_for_new_column(c)
 
-    def _prepare_mapper_arguments(self):
+    def _prepare_mapper_arguments(self, mapper_kw):
         properties = self.properties
+
         if self.mapper_args_fn:
             mapper_args = self.mapper_args_fn()
         else:
             mapper_args = {}
 
+        if mapper_kw:
+            mapper_args.update(mapper_kw)
+
+        if "properties" in mapper_args:
+            properties = dict(properties)
+            properties.update(mapper_args["properties"])
+
         # make sure that column copies are used rather
         # than the original columns from any mixins
         for k in ("version_id_col", "polymorphic_on"):
@@ -628,9 +759,16 @@ class _MapperConfig(object):
                 v = mapper_args[k]
                 mapper_args[k] = self.column_copies.get(v, v)
 
-        assert (
-            "inherits" not in mapper_args
-        ), "Can't specify 'inherits' explicitly with declarative mappings"
+        if "inherits" in mapper_args:
+            inherits_arg = mapper_args["inherits"]
+            if isinstance(inherits_arg, mapper):
+                inherits_arg = inherits_arg.class_
+
+            if inherits_arg is not self.inherits:
+                raise exc.InvalidRequestError(
+                    "mapper inherits argument given for non-inheriting "
+                    "class %s" % (mapper_args["inherits"])
+                )
 
         if self.inherits:
             mapper_args["inherits"] = self.inherits
@@ -674,8 +812,8 @@ class _MapperConfig(object):
         result_mapper_args["properties"] = properties
         self.mapper_args = result_mapper_args
 
-    def map(self):
-        self._prepare_mapper_arguments()
+    def map(self, mapper_kw=util.EMPTY_DICT):
+        self._prepare_mapper_arguments(mapper_kw)
         if hasattr(self.cls, "__mapper_cls__"):
             mapper_cls = util.unbound_method_to_callable(
                 self.cls.__mapper_cls__
@@ -683,17 +821,16 @@ class _MapperConfig(object):
         else:
             mapper_cls = mapper
 
-        self.cls.__mapper__ = mp_ = mapper_cls(
-            self.cls, self.local_table, **self.mapper_args
+        return self.set_cls_attribute(
+            "__mapper__",
+            mapper_cls(self.cls, self.local_table, **self.mapper_args),
         )
-        del self.cls._sa_declared_attr_reg
-        return mp_
 
 
-class _DeferredMapperConfig(_MapperConfig):
+class _DeferredMapperConfig(_ClassScanMapperConfig):
     _configs = util.OrderedDict()
 
-    def _early_mapping(self):
+    def _early_mapping(self, mapper_kw):
         pass
 
     @property
@@ -751,9 +888,9 @@ class _DeferredMapperConfig(_MapperConfig):
             )
         return list(topological.sort(tuples, classes_for_base))
 
-    def map(self):
+    def map(self, mapper_kw=util.EMPTY_DICT):
         self._configs.pop(self._cls, None)
-        return super(_DeferredMapperConfig, self).map()
+        return super(_DeferredMapperConfig, self).map(mapper_kw)
 
 
 def _add_attribute(cls, key, value):
@@ -776,16 +913,12 @@ def _add_attribute(cls, key, value):
                     cls.__table__.append_column(col)
             cls.__mapper__.add_property(key, value)
         elif isinstance(value, MapperProperty):
-            cls.__mapper__.add_property(
-                key, clsregistry._deferred_relationship(cls, value)
-            )
+            cls.__mapper__.add_property(key, value)
         elif isinstance(value, QueryableAttribute) and value.key != key:
             # detect a QueryableAttribute that's already mapped being
             # assigned elsewhere in userland, turn into a synonym()
-            value = synonym(value.key)
-            cls.__mapper__.add_property(
-                key, clsregistry._deferred_relationship(cls, value)
-            )
+            value = SynonymProperty(value.key)
+            cls.__mapper__.add_property(key, value)
         else:
             type.__setattr__(cls, key, value)
             cls.__mapper__._expire_memoizations()
index f647440831c6b9fb350cbfb5868278e1ad220c70..f390c49a749d1ac44d757bb30e04f1c53eaf7c2e 100644 (file)
@@ -39,6 +39,9 @@ from .. import util
 from ..util import HasMemoized
 
 
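+# Sentinel recorded by ClassManager.install_member() when the class had no
+# pre-existing attribute of the given name, so that uninstall_member() knows
+# to delete the attribute rather than restore a previous value.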
+DEL_ATTR = util.symbol("DEL_ATTR")
+
+
 class ClassManager(HasMemoized, dict):
     """Tracks state information at the class level."""
 
@@ -50,9 +53,12 @@ class ClassManager(HasMemoized, dict):
     expired_attribute_loader = None
     "previously known as deferred_scalar_loader"
 
-    original_init = object.__init__
+    init_method = None
 
     factory = None
+    mapper = None
+    declarative_scan = None
+    registry = None
 
     @property
     @util.deprecated(
@@ -78,6 +84,7 @@ class ClassManager(HasMemoized, dict):
         self.new_init = None
         self.local_attrs = {}
         self.originals = {}
+        self._finalized = False
 
         self._bases = [
             mgr
@@ -93,14 +100,13 @@ class ClassManager(HasMemoized, dict):
             self.update(base_)
 
         self.dispatch._events._new_classmanager_instance(class_, self)
-        # events._InstanceEventsHold.populate(class_, self)
 
         for basecls in class_.__mro__:
             mgr = manager_of_class(basecls)
             if mgr is not None:
                 self.dispatch._update(mgr.dispatch)
+
         self.manage()
-        self._instrument_init()
 
         if "__del__" in class_.__dict__:
             util.warn(
@@ -110,6 +116,52 @@ class ClassManager(HasMemoized, dict):
                 "reference cycles.  Please remove this method." % class_
             )
 
+    def _update_state(
+        self,
+        finalize=False,
+        mapper=None,
+        registry=None,
+        declarative_scan=None,
+        expired_attribute_loader=None,
+        init_method=None,
+    ):
+
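+        # absorb whichever pieces of mapper / registry / declarative-scan
+        # state were passed; when "finalize" is True, additionally instrument
+        # __init__ and emit the class_instrument event (at most once)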
+        if mapper:
+            self.mapper = mapper
+        if registry:
+            self.registry = registry
+        if declarative_scan:
+            self.declarative_scan = declarative_scan
+        if expired_attribute_loader:
+            self.expired_attribute_loader = expired_attribute_loader
+
+        if init_method:
+            assert not self._finalized, (
+                "class is already instrumented, "
+                "init_method %s can't be applied" % init_method
+            )
+            self.init_method = init_method
+
+        if not self._finalized:
+            self.original_init = (
+                self.init_method
+                if self.init_method is not None
+                and self.class_.__init__ is object.__init__
+                else self.class_.__init__
+            )
+
+        if finalize and not self._finalized:
+            self._finalize()
+
+    def _finalize(self):
+        if self._finalized:
+            return
+        self._finalized = True
+
+        self._instrument_init()
+
+        _instrumentation_factory.dispatch.class_instrument(self.class_)
+
     def __hash__(self):
         return id(self)
 
@@ -210,26 +262,12 @@ class ClassManager(HasMemoized, dict):
         can post-configure the auto-generated ClassManager when needed.
 
         """
-        manager = manager_of_class(cls)
-        if manager is None:
-            manager = _instrumentation_factory.create_manager_for_cls(cls)
-        return manager
+        return register_class(cls, finalize=False)
 
     def _instrument_init(self):
-        # TODO: self.class_.__init__ is often the already-instrumented
-        # __init__ from an instrumented superclass.  We still need to make
-        # our own wrapper, but it would
-        # be nice to wrap the original __init__ and not our existing wrapper
-        # of such, since this adds method overhead.
-        self.original_init = self.class_.__init__
-        self.new_init = _generate_init(self.class_, self)
+        self.new_init = _generate_init(self.class_, self, self.original_init)
         self.install_member("__init__", self.new_init)
 
-    def _uninstrument_init(self):
-        if self.new_init:
-            self.uninstall_member("__init__")
-            self.new_init = None
-
     @util.memoized_property
     def _state_constructor(self):
         self.dispatch.first_init(self, self.class_)
@@ -311,9 +349,10 @@ class ClassManager(HasMemoized, dict):
     def unregister(self):
         """remove all instrumentation established by this ClassManager."""
 
-        self._uninstrument_init()
+        for key in list(self.originals):
+            self.uninstall_member(key)
 
-        self.mapper = self.dispatch = None
+        self.mapper = self.dispatch = self.new_init = None
         self.info.clear()
 
         for key in list(self):
@@ -337,13 +376,15 @@ class ClassManager(HasMemoized, dict):
                 "%r: requested attribute name conflicts with "
                 "instrumentation attribute of the same name." % key
             )
-        self.originals.setdefault(key, getattr(self.class_, key, None))
+        self.originals.setdefault(key, self.class_.__dict__.get(key, DEL_ATTR))
         setattr(self.class_, key, implementation)
 
     def uninstall_member(self, key):
         original = self.originals.pop(key, None)
-        if original is not None:
+        if original is not DEL_ATTR:
             setattr(self.class_, key, original)
+        else:
+            delattr(self.class_, key)
 
     def instrument_collection_class(self, key, collection_class):
         return collections.prepare_instrumentation(collection_class)
@@ -484,7 +525,6 @@ class InstrumentationFactory(object):
 
         manager.factory = factory
 
-        self.dispatch.class_instrument(class_)
         return manager
 
     def _locate_extended_factory(self, class_):
@@ -518,7 +558,15 @@ instance_dict = _default_dict_getter = base.instance_dict
 manager_of_class = _default_manager_getter = base.manager_of_class
 
 
-def register_class(class_):
+def register_class(
+    class_,
+    finalize=True,
+    mapper=None,
+    registry=None,
+    declarative_scan=None,
+    expired_attribute_loader=None,
+    init_method=None,
+):
     """Register class instrumentation.
 
     Returns the existing or newly created class manager.
@@ -528,6 +576,15 @@ def register_class(class_):
     manager = manager_of_class(class_)
     if manager is None:
         manager = _instrumentation_factory.create_manager_for_cls(class_)
+    manager._update_state(
+        mapper=mapper,
+        registry=registry,
+        declarative_scan=declarative_scan,
+        expired_attribute_loader=expired_attribute_loader,
+        init_method=init_method,
+        finalize=finalize,
+    )
+
     return manager
 
 
@@ -550,14 +607,15 @@ def is_instrumented(instance, key):
     )
 
 
-def _generate_init(class_, class_manager):
+def _generate_init(class_, class_manager, original_init):
     """Build an __init__ decorator that triggers ClassManager events."""
 
     # TODO: we should use the ClassManager's notion of the
     # original '__init__' method, once ClassManager is fixed
     # to always reference that.
-    original__init__ = class_.__init__
-    assert original__init__
+
+    if original_init is None:
+        original_init = class_.__init__
 
     # Go through some effort here and don't change the user's __init__
     # calling signature, including the unlikely case that it has
@@ -570,23 +628,23 @@ def __init__(%(apply_pos)s):
     if new_state:
         return new_state._initialize_instance(%(apply_kw)s)
     else:
-        return original__init__(%(apply_kw)s)
+        return original_init(%(apply_kw)s)
 """
-    func_vars = util.format_argspec_init(original__init__, grouped=False)
+    func_vars = util.format_argspec_init(original_init, grouped=False)
     func_text = func_body % func_vars
 
     if util.py2k:
-        func = getattr(original__init__, "im_func", original__init__)
+        func = getattr(original_init, "im_func", original_init)
         func_defaults = getattr(func, "func_defaults", None)
     else:
-        func_defaults = getattr(original__init__, "__defaults__", None)
-        func_kw_defaults = getattr(original__init__, "__kwdefaults__", None)
+        func_defaults = getattr(original_init, "__defaults__", None)
+        func_kw_defaults = getattr(original_init, "__kwdefaults__", None)
 
     env = locals().copy()
     exec(func_text, env)
     __init__ = env["__init__"]
-    __init__.__doc__ = original__init__.__doc__
-    __init__._sa_original_init = original__init__
+    __init__.__doc__ = original_init.__doc__
+    __init__._sa_original_init = original_init
 
     if func_defaults:
         __init__.__defaults__ = func_defaults
index 755d4afc79b8858ed79351538c24aaa52469925d..db2b94a4eaffb958f625766936a58b40adae3935 100644 (file)
@@ -160,50 +160,23 @@ class Mapper(
         legacy_is_orphan=False,
         _compiled_cache_size=100,
     ):
-        r"""Return a new :class:`_orm.Mapper` object.
-
-        This function is typically used behind the scenes
-        via the Declarative extension.   When using Declarative,
-        many of the usual :func:`.mapper` arguments are handled
-        by the Declarative extension itself, including ``class_``,
-        ``local_table``, ``properties``, and  ``inherits``.
-        Other options are passed to :func:`.mapper` using
-        the ``__mapper_args__`` class variable::
-
-           class MyClass(Base):
-               __tablename__ = 'my_table'
-               id = Column(Integer, primary_key=True)
-               type = Column(String(50))
-               alt = Column("some_alt", Integer)
-
-               __mapper_args__ = {
-                   'polymorphic_on' : type
-               }
-
-
-        Explicit use of :func:`.mapper`
-        is often referred to as *classical mapping*.  The above
-        declarative example is equivalent in classical form to::
-
-            my_table = Table("my_table", metadata,
-                Column('id', Integer, primary_key=True),
-                Column('type', String(50)),
-                Column("some_alt", Integer)
-            )
-
-            class MyClass(object):
-                pass
+        r"""Direct consructor for a new :class:`_orm.Mapper` object.
 
-            mapper(MyClass, my_table,
-                polymorphic_on=my_table.c.type,
-                properties={
-                    'alt':my_table.c.some_alt
-                })
+        The :func:`_orm.mapper` function is normally invoked through the
+        :class:`_orm.registry` object, using either the
+        :ref:`Declarative <orm_declarative_mapping>` or
+        :ref:`Imperative <orm_imperative_mapping>` mapping styles.
 
-        .. seealso::
+        .. versionchanged:: 1.4 The :func:`_orm.mapper` function should not
+           be called directly for classical mapping; for a classical mapping
+           configuration, use the :meth:`_orm.registry.map_imperatively`
+           method.   The :func:`_orm.mapper` function may become private in a
+           future release.
 
-            :ref:`classical_mapping` - discussion of direct usage of
-            :func:`.mapper`
+        Parameters documented below may be passed either to the
+        :meth:`_orm.registry.map_imperatively` method or within the
+        ``__mapper_args__`` declarative class attribute described at
+        :ref:`orm_declarative_mapper_options`.
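+
+        For example, a declarative mapping may pass the ``polymorphic_on``
+        parameter through ``__mapper_args__``::
+
+            class MyClass(Base):
+                __tablename__ = 'my_table'
+                id = Column(Integer, primary_key=True)
+                type = Column(String(50))
+
+                __mapper_args__ = {
+                    'polymorphic_on': type
+                }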
 
         :param class\_: The class to be mapped.  When using Declarative,
           this argument is automatically passed as the declared class
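
For illustration, a minimal sketch of the declarative side of the docstring
above, showing mapper arguments supplied via ``__mapper_args__`` under the new
``registry`` system; the ``MyClass`` / ``my_table`` names are illustrative
only and are not part of this changeset::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import registry

    mapper_registry = registry()
    Base = mapper_registry.generate_base()

    class MyClass(Base):
        __tablename__ = "my_table"

        id = Column(Integer, primary_key=True)
        type = Column(String(50))

        # arguments accepted by Mapper are passed through the
        # __mapper_args__ class attribute rather than a direct mapper() call
        __mapper_args__ = {"polymorphic_on": type}
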
@@ -342,12 +315,10 @@ class Mapper(
           mapping of the class to an alternate selectable, for loading
           only.
 
-          :paramref:`_orm.Mapper.non_primary` is not an often used option, but
-          is useful in some specific :func:`_orm.relationship` cases.
-
-          .. seealso::
+          .. seealso::
 
-              :ref:`relationship_non_primary_mapper`
+            :ref:`relationship_aliased_class` - the new pattern that removes
+            the need for the :paramref:`_orm.Mapper.non_primary` flag.
 
         :param passive_deletes: Indicates DELETE behavior of foreign key
            columns when a joined-table inheritance entity is being deleted.
@@ -1207,6 +1178,10 @@ class Mapper(
 
         """
 
+        # we expect that declarative has applied the class manager
+        # already and set up a registry.  if this is None, the manager
+        # created by register_class() below will have no registry, and we
+        # emit a deprecation warning for that case.
         manager = attributes.manager_of_class(self.class_)
 
         if self.non_primary:
@@ -1226,9 +1201,6 @@ class Mapper(
             if manager.is_mapped:
                 raise sa_exc.ArgumentError(
                     "Class '%s' already has a primary mapper defined. "
-                    "Use non_primary=True to "
-                    "create a non primary Mapper.  clear_mappers() will "
-                    "remove *all* current mappers from all classes."
                     % self.class_
                 )
             # else:
@@ -1238,19 +1210,36 @@ class Mapper(
 
         _mapper_registry[self] = True
 
-        # note: this *must be called before instrumentation.register_class*
-        # to maintain the documented behavior of instrument_class
         self.dispatch.instrument_class(self, self.class_)
 
-        if manager is None:
-            manager = instrumentation.register_class(self.class_)
+        # this invokes the class_instrument event and sets up
+        # the __init__ method.  documented behavior is that this must
+        # occur after the instrument_class event above.
+        # yes two events with the same two words reversed and different APIs.
+        # :(
+
+        manager = instrumentation.register_class(
+            self.class_,
+            mapper=self,
+            expired_attribute_loader=util.partial(
+                loading.load_scalar_attributes, self
+            ),
+            # finalize flag means instrument the __init__ method
+            # and call the class_instrument event
+            finalize=True,
+        )
+        if not manager.registry:
+            util.warn_deprecated_20(
+                "Calling the mapper() function directly outside of a "
+                "declarative registry is deprecated."
+                " Please use the sqlalchemy.orm.registry.map_imperatively() "
+                "function for a classical mapping."
+            )
+            from . import registry
 
-        self.class_manager = manager
+            manager.registry = registry()
 
-        manager.mapper = self
-        manager.expired_attribute_loader = util.partial(
-            loading.load_scalar_attributes, self
-        )
+        self.class_manager = manager
 
         # The remaining members can be added by any mapper,
         # e_name None or not.
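
A minimal sketch of the imperative pattern that the deprecation warning above
points towards; ``User`` and ``user_table`` are illustrative names, not part
of this changeset::

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.orm import registry

    metadata = MetaData()
    user_table = Table(
        "user",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    class User(object):
        pass

    mapper_registry = registry()

    # replaces a direct mapper(User, user_table) call; the resulting
    # class manager is associated with a registry, so no warning is emitted
    mapper_registry.map_imperatively(User, user_table)
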
@@ -2281,7 +2270,7 @@ class Mapper(
 
     @property
     def selectable(self):
-        """The :func:`_expression.select` construct this
+        """The :class:`_schema.FromClause` construct this
         :class:`_orm.Mapper` selects from by default.
 
         Normally, this is equivalent to :attr:`.persist_selectable`, unless
index 794b9422c4dc01b77440bc89e2eb00144cf7e62a..1c95b6e06472d4230aba136fc4ede30c19964ff8 100644 (file)
@@ -2088,8 +2088,13 @@ class RelationshipProperty(StrategizedProperty):
         class or aliased class that is referred towards.
 
         """
+
         mapperlib = util.preloaded.orm_mapper
-        if callable(self.argument) and not isinstance(
+
+        if isinstance(self.argument, util.string_types):
+            argument = self._clsregistry_resolve_name(self.argument)()
+
+        elif callable(self.argument) and not isinstance(
             self.argument, (type, mapperlib.Mapper)
         ):
             argument = self.argument()
@@ -2124,6 +2129,7 @@ class RelationshipProperty(StrategizedProperty):
         return self.entity.mapper
 
     def do_init(self):
+
         self._check_conflicts()
         self._process_dependent_arguments()
         self._setup_join_conditions()
@@ -2141,6 +2147,7 @@ class RelationshipProperty(StrategizedProperty):
         Callables are resolved, ORM annotations removed.
 
         """
+
         # accept callables for other attributes which may require
         # deferred initialization.  This technique is used
         # by declarative "string configs" and some recipes.
@@ -2153,7 +2160,12 @@ class RelationshipProperty(StrategizedProperty):
             "remote_side",
         ):
             attr_value = getattr(self, attr)
-            if callable(attr_value):
+
+            if isinstance(attr_value, util.string_types):
+                setattr(
+                    self, attr, self._clsregistry_resolve_arg(attr_value)()
+                )
+            elif callable(attr_value):
                 setattr(self, attr, attr_value())
 
         # remove "annotations" which are present if mapped class
@@ -2226,6 +2238,21 @@ class RelationshipProperty(StrategizedProperty):
         self._calculated_foreign_keys = jc.foreign_key_columns
         self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
 
+    @property
+    def _clsregistry_resolve_arg(self):
+        return self._clsregistry_resolvers[1]
+
+    @property
+    def _clsregistry_resolve_name(self):
+        return self._clsregistry_resolvers[0]
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.orm.clsregistry")
+    def _clsregistry_resolvers(self):
+        _resolver = util.preloaded.orm_clsregistry._resolver
+
+        return _resolver(self.parent.class_, self)
+
     @util.preload_module("sqlalchemy.orm.mapper")
     def _check_conflicts(self):
         """Test that this relationship is legal, warn about
index f5d207bc2c158f9fce4ef29b618202b266013708..085c191965b502379fd42e7fac918f093c1fcd34 100644 (file)
@@ -39,10 +39,7 @@ class BasicEntity(object):
 _recursion_stack = set()
 
 
-class ComparableEntity(BasicEntity):
-    def __hash__(self):
-        return hash(self.__class__)
-
+class ComparableMixin(object):
     def __ne__(self, other):
         return not self.__eq__(other)
 
@@ -107,3 +104,8 @@ class ComparableEntity(BasicEntity):
             return True
         finally:
             _recursion_stack.remove(id(self))
+
+
+class ComparableEntity(ComparableMixin, BasicEntity):
+    def __hash__(self):
+        return hash(self.__class__)
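
A sketch of the intended use of the extracted mixin: classes that want
value-based comparison in test assertions, without also inheriting
``BasicEntity``'s keyword constructor, can take just ``ComparableMixin``.
The ``User`` class below is illustrative only::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base
    from sqlalchemy.testing.entities import ComparableMixin

    Base = declarative_base()

    class User(ComparableMixin, Base):
        __tablename__ = "user"

        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # __eq__/__ne__ from ComparableMixin compare attribute state,
    # so two transient instances with the same data compare equal
    assert User(name="ed") == User(name="ed")
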
index 85d3374de182482558be4d69c6a8cd14000699d0..2d3b279172dbe2697fd4bdae28e0e0d8a6a49703 100644 (file)
@@ -15,11 +15,13 @@ from . import schema
 from .engines import drop_all_tables
 from .entities import BasicEntity
 from .entities import ComparableEntity
+from .entities import ComparableMixin  # noqa
 from .util import adict
 from .. import event
 from .. import util
-from ..ext.declarative import declarative_base
-from ..ext.declarative import DeclarativeMeta
+from ..orm import declarative_base
+from ..orm import registry
+from ..orm.decl_api import DeclarativeMeta
 from ..schema import sort_tables_and_constraints
 
 
@@ -383,16 +385,23 @@ class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults):
     @classmethod
     def _setup_once_mappers(cls):
         if cls.run_setup_mappers == "once":
+            cls.mapper = cls._generate_mapper()
             cls._with_register_classes(cls.setup_mappers)
 
     def _setup_each_mappers(self):
         if self.run_setup_mappers == "each":
+            self.mapper = self._generate_mapper()
             self._with_register_classes(self.setup_mappers)
 
     def _setup_each_classes(self):
         if self.run_setup_classes == "each":
             self._with_register_classes(self.setup_classes)
 
+    @classmethod
+    def _generate_mapper(cls):
+        decl = registry()
+        return decl.map_imperatively
+
     @classmethod
     def _with_register_classes(cls, fn):
         """Run a setup method, framing the operation with a Base class
index 1d92084cce41cb8a9c96d662d21bdafce8704201..5fdcdf65423a0845b1e28a7cd8cdc0cb5458fe7c 100644 (file)
@@ -101,6 +101,7 @@ from .deprecations import deprecated_20_cls  # noqa
 from .deprecations import deprecated_cls  # noqa
 from .deprecations import deprecated_params  # noqa
 from .deprecations import inject_docstring_text  # noqa
+from .deprecations import moved_20  # noqa
 from .deprecations import SQLALCHEMY_WARN_20  # noqa
 from .deprecations import warn_deprecated  # noqa
 from .deprecations import warn_deprecated_20  # noqa
index 0a79344c501d0765c152b8f98ec857e9242990c8..eae4be768bef04f0fe6a0278e7af6e4bf9b400a8 100644 (file)
@@ -27,10 +27,12 @@ if os.getenv("SQLALCHEMY_WARN_20", "false").lower() in ("true", "yes", "1"):
 
 
 def _warn_with_version(msg, version, type_, stacklevel):
-    if type_ is exc.RemovedIn20Warning and not SQLALCHEMY_WARN_20:
+    is_20 = issubclass(type_, exc.RemovedIn20Warning)
+
+    if is_20 and not SQLALCHEMY_WARN_20:
         return
 
-    if type_ is exc.RemovedIn20Warning:
+    if is_20:
         msg += " (Background on SQLAlchemy 2.0 at: http://sqlalche.me/e/b8d9)"
 
     warn = type_(msg)
@@ -150,6 +152,12 @@ def deprecated(
     return decorate
 
 
+def moved_20(message, **kw):
+    return deprecated(
+        "2.0", message=message, warning=exc.MovedIn20Warning, **kw
+    )
+
+
 def deprecated_20(api_name, alternative=None, **kw):
     message = (
         "The %s function/method is considered legacy as of the "
@@ -325,19 +333,14 @@ def _decorate_with_warning(
             " (Background on SQLAlchemy 2.0 at: "
             ":ref:`migration_20_toplevel`)"
         )
-        warning_only = (
-            " (Background on SQLAlchemy 2.0 at: http://sqlalche.me/e/b8d9)"
-        )
     else:
-        doc_only = warning_only = ""
+        doc_only = ""
 
     @decorator
     def warned(fn, *args, **kwargs):
         skip_warning = kwargs.pop("_sa_skip_warning", False)
         if not skip_warning:
-            _warn_with_version(
-                message + warning_only, version, wtype, stacklevel=3
-            )
+            _warn_with_version(message, version, wtype, stacklevel=3)
         return fn(*args, **kwargs)
 
     doc = func.__doc__ is not None and func.__doc__ or ""
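
A sketch of how the new ``moved_20`` helper is meant to be applied to a legacy
entry point; the wrapper below is illustrative and not the exact code in this
changeset::

    from sqlalchemy import orm
    from sqlalchemy.util import moved_20

    @moved_20(
        "The ``declarative_base()`` function is now available as "
        ":func:`sqlalchemy.orm.declarative_base`."
    )
    def declarative_base(*arg, **kw):
        # calling the legacy location emits MovedIn20Warning (when 2.0
        # deprecation warnings are enabled) and forwards to the new one
        return orm.declarative_base(*arg, **kw)
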
index 4a4e8a00f5dfba96656d9a3b592e3124545cd916..77d4a80fe0d92015fc2392898d926985b0391fcb 100644 (file)
@@ -1,4 +1,3 @@
-import sqlalchemy as sa
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
 from sqlalchemy import String
@@ -8,24 +7,17 @@ from sqlalchemy.ext.declarative import AbstractConcreteBase
 from sqlalchemy.ext.declarative import ConcreteBase
 from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.ext.declarative import has_inherited_table
-from sqlalchemy.orm import class_mapper
 from sqlalchemy.orm import clear_mappers
 from sqlalchemy.orm import close_all_sessions
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
-from sqlalchemy.orm import deferred
 from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.orm import mapper
 from sqlalchemy.orm import polymorphic_union
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
-from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
-from sqlalchemy.testing import is_
-from sqlalchemy.testing import is_false
-from sqlalchemy.testing import is_true
 from sqlalchemy.testing import mock
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
@@ -45,1278 +37,6 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
         Base.metadata.drop_all()
 
 
-class DeclarativeInheritanceTest(DeclarativeTestBase):
-    def test_we_must_copy_mapper_args(self):
-        class Person(Base):
-
-            __tablename__ = "people"
-            id = Column(Integer, primary_key=True)
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {
-                "polymorphic_on": discriminator,
-                "polymorphic_identity": "person",
-            }
-
-        class Engineer(Person):
-
-            primary_language = Column(String(50))
-
-        assert "inherits" not in Person.__mapper_args__
-        assert class_mapper(Engineer).polymorphic_identity is None
-        assert class_mapper(Engineer).polymorphic_on is Person.__table__.c.type
-
-    def test_we_must_only_copy_column_mapper_args(self):
-        class Person(Base):
-
-            __tablename__ = "people"
-            id = Column(Integer, primary_key=True)
-            a = Column(Integer)
-            b = Column(Integer)
-            c = Column(Integer)
-            d = Column(Integer)
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {
-                "polymorphic_on": discriminator,
-                "polymorphic_identity": "person",
-                "version_id_col": "a",
-                "column_prefix": "bar",
-                "include_properties": ["id", "a", "b"],
-            }
-
-        assert class_mapper(Person).version_id_col == "a"
-        assert class_mapper(Person).include_properties == set(["id", "a", "b"])
-
-    def test_custom_join_condition(self):
-        class Foo(Base):
-
-            __tablename__ = "foo"
-            id = Column("id", Integer, primary_key=True)
-
-        class Bar(Foo):
-
-            __tablename__ = "bar"
-            bar_id = Column("id", Integer, primary_key=True)
-            foo_id = Column("foo_id", Integer)
-            __mapper_args__ = {"inherit_condition": foo_id == Foo.id}
-
-        # compile succeeds because inherit_condition is honored
-
-        configure_mappers()
-
-    def test_joined(self):
-        class Company(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "companies"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-            employees = relationship("Person")
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            company_id = Column(
-                "company_id", Integer, ForeignKey("companies.id")
-            )
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-            primary_language = Column("primary_language", String(50))
-
-        class Manager(Person):
-
-            __tablename__ = "managers"
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-            golf_swing = Column("golf_swing", String(50))
-
-        Base.metadata.create_all()
-        sess = create_session()
-        c1 = Company(
-            name="MegaCorp, Inc.",
-            employees=[
-                Engineer(name="dilbert", primary_language="java"),
-                Engineer(name="wally", primary_language="c++"),
-                Manager(name="dogbert", golf_swing="fore!"),
-            ],
-        )
-
-        c2 = Company(
-            name="Elbonia, Inc.",
-            employees=[Engineer(name="vlad", primary_language="cobol")],
-        )
-        sess.add(c1)
-        sess.add(c2)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Company)
-            .filter(
-                Company.employees.of_type(Engineer).any(
-                    Engineer.primary_language == "cobol"
-                )
-            )
-            .first(),
-            c2,
-        )
-
-        # ensure that the Manager mapper was compiled with the Manager id
-        # column as higher priority. this ensures that "Manager.id"
-        # is appropriately treated as the "id" column in the "manager"
-        # table (reversed from 0.6's behavior.)
-
-        eq_(
-            Manager.id.property.columns,
-            [Manager.__table__.c.id, Person.__table__.c.id],
-        )
-
-        # assert that the "id" column is available without a second
-        # load. as of 0.7, the ColumnProperty tests all columns
-        # in its list to see which is present in the row.
-
-        sess.expunge_all()
-
-        def go():
-            assert (
-                sess.query(Manager).filter(Manager.name == "dogbert").one().id
-            )
-
-        self.assert_sql_count(testing.db, go, 1)
-        sess.expunge_all()
-
-        def go():
-            assert (
-                sess.query(Person).filter(Manager.name == "dogbert").one().id
-            )
-
-        self.assert_sql_count(testing.db, go, 1)
-
-    def test_add_subcol_after_the_fact(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-
-        Engineer.primary_language = Column("primary_language", String(50))
-        Base.metadata.create_all()
-        sess = create_session()
-        e1 = Engineer(primary_language="java", name="dilbert")
-        sess.add(e1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person).first(),
-            Engineer(primary_language="java", name="dilbert"),
-        )
-
-    def test_add_parentcol_after_the_fact(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-
-        Person.name = Column("name", String(50))
-        Base.metadata.create_all()
-        sess = create_session()
-        e1 = Engineer(primary_language="java", name="dilbert")
-        sess.add(e1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person).first(),
-            Engineer(primary_language="java", name="dilbert"),
-        )
-
-    def test_add_sub_parentcol_after_the_fact(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-
-        class Admin(Engineer):
-
-            __tablename__ = "admins"
-            __mapper_args__ = {"polymorphic_identity": "admin"}
-            workstation = Column(String(50))
-            id = Column(
-                "id", Integer, ForeignKey("engineers.id"), primary_key=True
-            )
-
-        Person.name = Column("name", String(50))
-        Base.metadata.create_all()
-        sess = create_session()
-        e1 = Admin(primary_language="java", name="dilbert", workstation="foo")
-        sess.add(e1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person).first(),
-            Admin(primary_language="java", name="dilbert", workstation="foo"),
-        )
-
-    def test_subclass_mixin(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column("id", Integer, primary_key=True)
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class MyMixin(object):
-
-            pass
-
-        class Engineer(MyMixin, Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            id = Column(
-                "id", Integer, ForeignKey("people.id"), primary_key=True
-            )
-            primary_language = Column("primary_language", String(50))
-
-        assert class_mapper(Engineer).inherits is class_mapper(Person)
-
-    def test_intermediate_abstract_class_on_classical(self):
-        class Person(object):
-            pass
-
-        person_table = Table(
-            "people",
-            Base.metadata,
-            Column("id", Integer, primary_key=True),
-            Column("kind", String(50)),
-        )
-
-        mapper(
-            Person,
-            person_table,
-            polymorphic_on="kind",
-            polymorphic_identity="person",
-        )
-
-        class SpecialPerson(Person):
-            __abstract__ = True
-
-        class Manager(SpecialPerson, Base):
-            __tablename__ = "managers"
-            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-
-        from sqlalchemy import inspect
-
-        assert inspect(Manager).inherits is inspect(Person)
-
-        eq_(set(class_mapper(Person).class_manager), {"id", "kind"})
-        eq_(set(class_mapper(Manager).class_manager), {"id", "kind"})
-
-    def test_intermediate_unmapped_class_on_classical(self):
-        class Person(object):
-            pass
-
-        person_table = Table(
-            "people",
-            Base.metadata,
-            Column("id", Integer, primary_key=True),
-            Column("kind", String(50)),
-        )
-
-        mapper(
-            Person,
-            person_table,
-            polymorphic_on="kind",
-            polymorphic_identity="person",
-        )
-
-        class SpecialPerson(Person):
-            pass
-
-        class Manager(SpecialPerson, Base):
-            __tablename__ = "managers"
-            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-
-        from sqlalchemy import inspect
-
-        assert inspect(Manager).inherits is inspect(Person)
-
-        eq_(set(class_mapper(Person).class_manager), {"id", "kind"})
-        eq_(set(class_mapper(Manager).class_manager), {"id", "kind"})
-
-    def test_class_w_invalid_multiple_bases(self):
-        class Person(object):
-            pass
-
-        person_table = Table(
-            "people",
-            Base.metadata,
-            Column("id", Integer, primary_key=True),
-            Column("kind", String(50)),
-        )
-
-        mapper(
-            Person,
-            person_table,
-            polymorphic_on="kind",
-            polymorphic_identity="person",
-        )
-
-        class DeclPerson(Base):
-            __tablename__ = "decl_people"
-            id = Column(Integer, primary_key=True)
-            kind = Column(String(50))
-
-        class SpecialPerson(Person):
-            pass
-
-        def go():
-            class Manager(SpecialPerson, DeclPerson):
-                __tablename__ = "managers"
-                id = Column(
-                    Integer, ForeignKey(DeclPerson.id), primary_key=True
-                )
-                __mapper_args__ = {"polymorphic_identity": "manager"}
-
-        assert_raises_message(
-            sa.exc.InvalidRequestError,
-            r"Class .*Manager.* has multiple mapped "
-            r"bases: \[.*Person.*DeclPerson.*\]",
-            go,
-        )
-
-    def test_with_undefined_foreignkey(self):
-        class Parent(Base):
-
-            __tablename__ = "parent"
-            id = Column("id", Integer, primary_key=True)
-            tp = Column("type", String(50))
-            __mapper_args__ = dict(polymorphic_on=tp)
-
-        class Child1(Parent):
-
-            __tablename__ = "child1"
-            id = Column(
-                "id", Integer, ForeignKey("parent.id"), primary_key=True
-            )
-            related_child2 = Column("c2", Integer, ForeignKey("child2.id"))
-            __mapper_args__ = dict(polymorphic_identity="child1")
-
-        # no exception is raised by the ForeignKey to "child2" even
-        # though child2 doesn't exist yet
-
-        class Child2(Parent):
-
-            __tablename__ = "child2"
-            id = Column(
-                "id", Integer, ForeignKey("parent.id"), primary_key=True
-            )
-            related_child1 = Column("c1", Integer)
-            __mapper_args__ = dict(polymorphic_identity="child2")
-
-        sa.orm.configure_mappers()  # no exceptions here
-
-    def test_foreign_keys_with_col(self):
-        """Test that foreign keys that reference a literal 'id' subclass
-        'id' attribute behave intuitively.
-
-        See [ticket:1892].
-
-        """
-
-        class Booking(Base):
-            __tablename__ = "booking"
-            id = Column(Integer, primary_key=True)
-
-        class PlanBooking(Booking):
-            __tablename__ = "plan_booking"
-            id = Column(Integer, ForeignKey(Booking.id), primary_key=True)
-
-        # referencing PlanBooking.id gives us the column
-        # on plan_booking, not booking
-        class FeatureBooking(Booking):
-            __tablename__ = "feature_booking"
-            id = Column(Integer, ForeignKey(Booking.id), primary_key=True)
-            plan_booking_id = Column(Integer, ForeignKey(PlanBooking.id))
-
-            plan_booking = relationship(
-                PlanBooking, backref="feature_bookings"
-            )
-
-        assert FeatureBooking.__table__.c.plan_booking_id.references(
-            PlanBooking.__table__.c.id
-        )
-
-        assert FeatureBooking.__table__.c.id.references(Booking.__table__.c.id)
-
-    def test_single_colsonbase(self):
-        """test single inheritance where all the columns are on the base
-        class."""
-
-        class Company(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "companies"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-            employees = relationship("Person")
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            company_id = Column(
-                "company_id", Integer, ForeignKey("companies.id")
-            )
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            primary_language = Column("primary_language", String(50))
-            golf_swing = Column("golf_swing", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-
-        class Manager(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-
-        Base.metadata.create_all()
-        sess = create_session()
-        c1 = Company(
-            name="MegaCorp, Inc.",
-            employees=[
-                Engineer(name="dilbert", primary_language="java"),
-                Engineer(name="wally", primary_language="c++"),
-                Manager(name="dogbert", golf_swing="fore!"),
-            ],
-        )
-
-        c2 = Company(
-            name="Elbonia, Inc.",
-            employees=[Engineer(name="vlad", primary_language="cobol")],
-        )
-        sess.add(c1)
-        sess.add(c2)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person)
-            .filter(Engineer.primary_language == "cobol")
-            .first(),
-            Engineer(name="vlad"),
-        )
-        eq_(
-            sess.query(Company)
-            .filter(
-                Company.employees.of_type(Engineer).any(
-                    Engineer.primary_language == "cobol"
-                )
-            )
-            .first(),
-            c2,
-        )
-
-    def test_single_colsonsub(self):
-        """test single inheritance where the columns are local to their
-        class.
-
-        this is a newer usage.
-
-        """
-
-        class Company(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "companies"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-            employees = relationship("Person")
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            company_id = Column(Integer, ForeignKey("companies.id"))
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-
-        class Manager(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-            golf_swing = Column(String(50))
-
-        # we have here a situation that is somewhat unique. the Person
-        # class is mapped to the "people" table, but it was mapped when
-        # the table did not include the "primary_language" or
-        # "golf_swing" columns.  declarative will also manipulate the
-        # exclude_properties collection so that sibling classes don't
-        # cross-pollinate.
-
-        assert Person.__table__.c.company_id is not None
-        assert Person.__table__.c.golf_swing is not None
-        assert Person.__table__.c.primary_language is not None
-        assert Engineer.primary_language is not None
-        assert Manager.golf_swing is not None
-        assert not hasattr(Person, "primary_language")
-        assert not hasattr(Person, "golf_swing")
-        assert not hasattr(Engineer, "golf_swing")
-        assert not hasattr(Manager, "primary_language")
-        Base.metadata.create_all()
-        sess = create_session()
-        e1 = Engineer(name="dilbert", primary_language="java")
-        e2 = Engineer(name="wally", primary_language="c++")
-        m1 = Manager(name="dogbert", golf_swing="fore!")
-        c1 = Company(name="MegaCorp, Inc.", employees=[e1, e2, m1])
-        e3 = Engineer(name="vlad", primary_language="cobol")
-        c2 = Company(name="Elbonia, Inc.", employees=[e3])
-        sess.add(c1)
-        sess.add(c2)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person)
-            .filter(Engineer.primary_language == "cobol")
-            .first(),
-            Engineer(name="vlad"),
-        )
-        eq_(
-            sess.query(Company)
-            .filter(
-                Company.employees.of_type(Engineer).any(
-                    Engineer.primary_language == "cobol"
-                )
-            )
-            .first(),
-            c2,
-        )
-        eq_(
-            sess.query(Engineer).filter_by(primary_language="cobol").one(),
-            Engineer(name="vlad", primary_language="cobol"),
-        )
-
-    def test_single_cols_on_sub_base_of_joined(self):
-        """test [ticket:3895]"""
-
-        class Person(Base):
-            __tablename__ = "person"
-
-            id = Column(Integer, primary_key=True)
-            type = Column(String)
-
-            __mapper_args__ = {"polymorphic_on": type}
-
-        class Contractor(Person):
-            contractor_field = Column(String)
-
-            __mapper_args__ = {"polymorphic_identity": "contractor"}
-
-        class Employee(Person):
-            __tablename__ = "employee"
-
-            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
-
-        class Engineer(Employee):
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-
-        configure_mappers()
-
-        is_false(hasattr(Person, "contractor_field"))
-        is_true(hasattr(Contractor, "contractor_field"))
-        is_false(hasattr(Employee, "contractor_field"))
-        is_false(hasattr(Engineer, "contractor_field"))
-
-    def test_single_cols_on_sub_to_joined(self):
-        """test [ticket:3797]"""
-
-        class BaseUser(Base):
-            __tablename__ = "root"
-
-            id = Column(Integer, primary_key=True)
-            row_type = Column(String)
-
-            __mapper_args__ = {
-                "polymorphic_on": row_type,
-                "polymorphic_identity": "baseuser",
-            }
-
-        class User(BaseUser):
-            __tablename__ = "user"
-
-            __mapper_args__ = {"polymorphic_identity": "user"}
-
-            baseuser_id = Column(
-                Integer, ForeignKey("root.id"), primary_key=True
-            )
-
-        class Bat(Base):
-            __tablename__ = "bat"
-            id = Column(Integer, primary_key=True)
-
-        class Thing(Base):
-            __tablename__ = "thing"
-
-            id = Column(Integer, primary_key=True)
-
-            owner_id = Column(Integer, ForeignKey("user.baseuser_id"))
-            owner = relationship("User")
-
-        class SubUser(User):
-            __mapper_args__ = {"polymorphic_identity": "subuser"}
-
-            sub_user_custom_thing = Column(Integer, ForeignKey("bat.id"))
-
-        eq_(
-            User.__table__.foreign_keys,
-            User.baseuser_id.foreign_keys.union(
-                SubUser.sub_user_custom_thing.foreign_keys
-            ),
-        )
-        is_true(
-            Thing.owner.property.primaryjoin.compare(
-                Thing.owner_id == User.baseuser_id
-            )
-        )
-
-    def test_single_constraint_on_sub(self):
-        """test the somewhat unusual case of [ticket:3341]"""
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-
-            __hack_args_one__ = sa.UniqueConstraint(
-                Person.name, primary_language
-            )
-            __hack_args_two__ = sa.CheckConstraint(
-                Person.name != primary_language
-            )
-
-        uq = [
-            c
-            for c in Person.__table__.constraints
-            if isinstance(c, sa.UniqueConstraint)
-        ][0]
-        ck = [
-            c
-            for c in Person.__table__.constraints
-            if isinstance(c, sa.CheckConstraint)
-        ][0]
-        eq_(
-            list(uq.columns),
-            [Person.__table__.c.name, Person.__table__.c.primary_language],
-        )
-        eq_(
-            list(ck.columns),
-            [Person.__table__.c.name, Person.__table__.c.primary_language],
-        )
-
-    @testing.skip_if(
-        lambda: testing.against("oracle"),
-        "Test has an empty insert in it at the moment",
-    )
-    def test_columns_single_inheritance_conflict_resolution(self):
-        """Test that a declared_attr can return the existing column and it will
-        be ignored.  this allows conditional columns to be added.
-
-        See [ticket:2472].
-
-        """
-
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column(Integer, primary_key=True)
-
-        class Engineer(Person):
-
-            """single table inheritance"""
-
-            @declared_attr
-            def target_id(cls):
-                return cls.__table__.c.get(
-                    "target_id", Column(Integer, ForeignKey("other.id"))
-                )
-
-            @declared_attr
-            def target(cls):
-                return relationship("Other")
-
-        class Manager(Person):
-
-            """single table inheritance"""
-
-            @declared_attr
-            def target_id(cls):
-                return cls.__table__.c.get(
-                    "target_id", Column(Integer, ForeignKey("other.id"))
-                )
-
-            @declared_attr
-            def target(cls):
-                return relationship("Other")
-
-        class Other(Base):
-            __tablename__ = "other"
-            id = Column(Integer, primary_key=True)
-
-        is_(
-            Engineer.target_id.property.columns[0],
-            Person.__table__.c.target_id,
-        )
-        is_(
-            Manager.target_id.property.columns[0], Person.__table__.c.target_id
-        )
-        # do a brief round trip on this
-        Base.metadata.create_all()
-        session = Session()
-        o1, o2 = Other(), Other()
-        session.add_all(
-            [Engineer(target=o1), Manager(target=o2), Manager(target=o1)]
-        )
-        session.commit()
-        eq_(session.query(Engineer).first().target, o1)
-
-    def test_columns_single_inheritance_conflict_resolution_pk(self):
-        """Test #2472 in terms of a primary key column.  This is
-        #4352.
-
-        """
-
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column(Integer, primary_key=True)
-
-            target_id = Column(Integer, primary_key=True)
-
-        class Engineer(Person):
-
-            """single table inheritance"""
-
-            @declared_attr
-            def target_id(cls):
-                return cls.__table__.c.get(
-                    "target_id", Column(Integer, primary_key=True)
-                )
-
-        class Manager(Person):
-
-            """single table inheritance"""
-
-            @declared_attr
-            def target_id(cls):
-                return cls.__table__.c.get(
-                    "target_id", Column(Integer, primary_key=True)
-                )
-
-        is_(
-            Engineer.target_id.property.columns[0],
-            Person.__table__.c.target_id,
-        )
-        is_(
-            Manager.target_id.property.columns[0], Person.__table__.c.target_id
-        )
-
-    def test_columns_single_inheritance_cascading_resolution_pk(self):
-        """An additional test for #4352 in terms of the requested use case.
-
-        """
-
-        class TestBase(Base):
-            __abstract__ = True
-
-            @declared_attr.cascading
-            def id(cls):
-                col_val = None
-                if TestBase not in cls.__bases__:
-                    col_val = cls.__table__.c.get("id")
-                if col_val is None:
-                    col_val = Column(Integer, primary_key=True)
-                return col_val
-
-        class Person(TestBase):
-            """single table base class"""
-
-            __tablename__ = "person"
-
-        class Engineer(Person):
-            """ single table inheritance, no extra cols """
-
-        class Manager(Person):
-            """ single table inheritance, no extra cols """
-
-        is_(Engineer.id.property.columns[0], Person.__table__.c.id)
-        is_(Manager.id.property.columns[0], Person.__table__.c.id)
-
-    def test_joined_from_single(self):
-        class Company(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "companies"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-            employees = relationship("Person")
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            company_id = Column(Integer, ForeignKey("companies.id"))
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Manager(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-            golf_swing = Column(String(50))
-
-        class Engineer(Person):
-
-            __tablename__ = "engineers"
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            id = Column(Integer, ForeignKey("people.id"), primary_key=True)
-            primary_language = Column(String(50))
-
-        assert Person.__table__.c.golf_swing is not None
-        assert "primary_language" not in Person.__table__.c
-        assert Engineer.__table__.c.primary_language is not None
-        assert Engineer.primary_language is not None
-        assert Manager.golf_swing is not None
-        assert not hasattr(Person, "primary_language")
-        assert not hasattr(Person, "golf_swing")
-        assert not hasattr(Engineer, "golf_swing")
-        assert not hasattr(Manager, "primary_language")
-        Base.metadata.create_all()
-        sess = create_session()
-        e1 = Engineer(name="dilbert", primary_language="java")
-        e2 = Engineer(name="wally", primary_language="c++")
-        m1 = Manager(name="dogbert", golf_swing="fore!")
-        c1 = Company(name="MegaCorp, Inc.", employees=[e1, e2, m1])
-        e3 = Engineer(name="vlad", primary_language="cobol")
-        c2 = Company(name="Elbonia, Inc.", employees=[e3])
-        sess.add(c1)
-        sess.add(c2)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person)
-            .with_polymorphic(Engineer)
-            .filter(Engineer.primary_language == "cobol")
-            .first(),
-            Engineer(name="vlad"),
-        )
-        eq_(
-            sess.query(Company)
-            .filter(
-                Company.employees.of_type(Engineer).any(
-                    Engineer.primary_language == "cobol"
-                )
-            )
-            .first(),
-            c2,
-        )
-        eq_(
-            sess.query(Engineer).filter_by(primary_language="cobol").one(),
-            Engineer(name="vlad", primary_language="cobol"),
-        )
-
-    def test_single_from_joined_colsonsub(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Manager(Person):
-            __tablename__ = "manager"
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-            id = Column(Integer, ForeignKey("people.id"), primary_key=True)
-            golf_swing = Column(String(50))
-
-        class Boss(Manager):
-            boss_name = Column(String(50))
-
-        is_(
-            Boss.__mapper__.column_attrs["boss_name"].columns[0],
-            Manager.__table__.c.boss_name,
-        )
-
-    def test_polymorphic_on_converted_from_inst(self):
-        class A(Base):
-            __tablename__ = "A"
-            id = Column(Integer, primary_key=True)
-            discriminator = Column(String)
-
-            @declared_attr
-            def __mapper_args__(cls):
-                return {
-                    "polymorphic_identity": cls.__name__,
-                    "polymorphic_on": cls.discriminator,
-                }
-
-        class B(A):
-            pass
-
-        is_(B.__mapper__.polymorphic_on, A.__table__.c.discriminator)
-
-    def test_add_deferred(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-
-        Person.name = deferred(Column(String(10)))
-        Base.metadata.create_all()
-        sess = create_session()
-        p = Person(name="ratbert")
-        sess.add(p)
-        sess.flush()
-        sess.expunge_all()
-        eq_(sess.query(Person).all(), [Person(name="ratbert")])
-        sess.expunge_all()
-        person = sess.query(Person).filter(Person.name == "ratbert").one()
-        assert "name" not in person.__dict__
-
-    def test_single_fksonsub(self):
-        """test single inheritance with a foreign key-holding column on
-        a subclass.
-
-        """
-
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language_id = Column(Integer, ForeignKey("languages.id"))
-            primary_language = relationship("Language")
-
-        class Language(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "languages"
-            id = Column(
-                Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column(String(50))
-
-        assert not hasattr(Person, "primary_language_id")
-        Base.metadata.create_all()
-        sess = create_session()
-        java, cpp, cobol = (
-            Language(name="java"),
-            Language(name="cpp"),
-            Language(name="cobol"),
-        )
-        e1 = Engineer(name="dilbert", primary_language=java)
-        e2 = Engineer(name="wally", primary_language=cpp)
-        e3 = Engineer(name="vlad", primary_language=cobol)
-        sess.add_all([e1, e2, e3])
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(Person)
-            .filter(Engineer.primary_language.has(Language.name == "cobol"))
-            .first(),
-            Engineer(name="vlad", primary_language=Language(name="cobol")),
-        )
-        eq_(
-            sess.query(Engineer)
-            .filter(Engineer.primary_language.has(Language.name == "cobol"))
-            .one(),
-            Engineer(name="vlad", primary_language=Language(name="cobol")),
-        )
-        eq_(
-            sess.query(Person)
-            .join(Engineer.primary_language)
-            .order_by(Language.name)
-            .all(),
-            [
-                Engineer(name="vlad", primary_language=Language(name="cobol")),
-                Engineer(name="wally", primary_language=Language(name="cpp")),
-                Engineer(
-                    name="dilbert", primary_language=Language(name="java")
-                ),
-            ],
-        )
-
-    def test_single_three_levels(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-
-        class JuniorEngineer(Engineer):
-
-            __mapper_args__ = {"polymorphic_identity": "junior_engineer"}
-            nerf_gun = Column(String(50))
-
-        class Manager(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "manager"}
-            golf_swing = Column(String(50))
-
-        assert JuniorEngineer.nerf_gun
-        assert JuniorEngineer.primary_language
-        assert JuniorEngineer.name
-        assert Manager.golf_swing
-        assert Engineer.primary_language
-        assert not hasattr(Engineer, "golf_swing")
-        assert not hasattr(Engineer, "nerf_gun")
-        assert not hasattr(Manager, "nerf_gun")
-        assert not hasattr(Manager, "primary_language")
-
-    def test_single_detects_conflict(self):
-        class Person(Base):
-
-            __tablename__ = "people"
-            id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        class Engineer(Person):
-
-            __mapper_args__ = {"polymorphic_identity": "engineer"}
-            primary_language = Column(String(50))
-
-        # test sibling col conflict
-
-        def go():
-            class Manager(Person):
-
-                __mapper_args__ = {"polymorphic_identity": "manager"}
-                golf_swing = Column(String(50))
-                primary_language = Column(String(50))
-
-        assert_raises(sa.exc.ArgumentError, go)
-
-        # test parent col conflict
-
-        def go():
-            class Salesman(Person):
-
-                __mapper_args__ = {"polymorphic_identity": "manager"}
-                name = Column(String(50))
-
-        assert_raises(sa.exc.ArgumentError, go)
-
-    def test_single_no_special_cols(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column("id", Integer, primary_key=True)
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        def go():
-            class Engineer(Person):
-
-                __mapper_args__ = {"polymorphic_identity": "engineer"}
-                primary_language = Column("primary_language", String(50))
-                foo_bar = Column(Integer, primary_key=True)
-
-        assert_raises_message(sa.exc.ArgumentError, "place primary key", go)
-
-    def test_single_no_table_args(self):
-        class Person(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "people"
-            id = Column("id", Integer, primary_key=True)
-            name = Column("name", String(50))
-            discriminator = Column("type", String(50))
-            __mapper_args__ = {"polymorphic_on": discriminator}
-
-        def go():
-            class Engineer(Person):
-
-                __mapper_args__ = {"polymorphic_identity": "engineer"}
-                primary_language = Column("primary_language", String(50))
-
-                # this should be on the Person class, as this is single
-                # table inheritance, which is why we test that this
-                # throws an exception!
-
-                __table_args__ = {"mysql_engine": "InnoDB"}
-
-        assert_raises_message(sa.exc.ArgumentError, "place __table_args__", go)
-
-    @testing.emits_warning("This declarative")
-    def test_dupe_name_in_hierarchy(self):
-        class A(Base):
-            __tablename__ = "a"
-            id = Column(Integer, primary_key=True)
-
-        a_1 = A
-
-        class A(a_1):
-            __tablename__ = "b"
-            id = Column(Integer(), ForeignKey(a_1.id), primary_key=True)
-
-        assert A.__mapper__.inherits is a_1.__mapper__
-
-
-class OverlapColPrecedenceTest(DeclarativeTestBase):
-
-    """test #1892 cases when declarative does column precedence."""
-
-    def _run_test(self, Engineer, e_id, p_id):
-        p_table = Base.metadata.tables["person"]
-        e_table = Base.metadata.tables["engineer"]
-        assert Engineer.id.property.columns[0] is e_table.c[e_id]
-        assert Engineer.id.property.columns[1] is p_table.c[p_id]
-
-    def test_basic(self):
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column(Integer, primary_key=True)
-
-        class Engineer(Person):
-            __tablename__ = "engineer"
-            id = Column(Integer, ForeignKey("person.id"), primary_key=True)
-
-        self._run_test(Engineer, "id", "id")
-
-    def test_alt_name_base(self):
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column("pid", Integer, primary_key=True)
-
-        class Engineer(Person):
-            __tablename__ = "engineer"
-            id = Column(Integer, ForeignKey("person.pid"), primary_key=True)
-
-        self._run_test(Engineer, "id", "pid")
-
-    def test_alt_name_sub(self):
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column(Integer, primary_key=True)
-
-        class Engineer(Person):
-            __tablename__ = "engineer"
-            id = Column(
-                "eid", Integer, ForeignKey("person.id"), primary_key=True
-            )
-
-        self._run_test(Engineer, "eid", "id")
-
-    def test_alt_name_both(self):
-        class Person(Base):
-            __tablename__ = "person"
-            id = Column("pid", Integer, primary_key=True)
-
-        class Engineer(Person):
-            __tablename__ = "engineer"
-            id = Column(
-                "eid", Integer, ForeignKey("person.pid"), primary_key=True
-            )
-
-        self._run_test(Engineer, "eid", "pid")
-
-
 class ConcreteInhTest(
     _RemoveListeners, DeclarativeTestBase, testing.AssertsCompiledSQL
 ):
index 8517acf0ba962bcd78e0ba9186053a5d4acab349..504025d6f81477eb3620703a4597c7e06e598a67 100644 (file)
@@ -1,15 +1,17 @@
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
+from sqlalchemy import MetaData
 from sqlalchemy import String
 from sqlalchemy import testing
-from sqlalchemy.ext import declarative as decl
-from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
+from sqlalchemy.ext.declarative import DeferredReflection
 from sqlalchemy.orm import clear_mappers
 from sqlalchemy.orm import create_session
+from sqlalchemy.orm import decl_api as decl
+from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import exc as orm_exc
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
-from sqlalchemy.testing import assert_raises
+from sqlalchemy.orm.decl_base import _DeferredMapperConfig
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
@@ -22,155 +24,16 @@ class DeclarativeReflectionBase(fixtures.TablesTest):
     __requires__ = ("reflectable_autoincrement",)
 
     def setup(self):
-        global Base
-        Base = decl.declarative_base(testing.db)
+        global Base, registry
+
+        registry = decl.registry(metadata=MetaData(bind=testing.db))
+        Base = registry.generate_base()
 
     def teardown(self):
         super(DeclarativeReflectionBase, self).teardown()
         clear_mappers()
 
 
-class DeclarativeReflectionTest(DeclarativeReflectionBase):
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "users",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("name", String(50)),
-            test_needs_fk=True,
-        )
-        Table(
-            "addresses",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("email", String(50)),
-            Column("user_id", Integer, ForeignKey("users.id")),
-            test_needs_fk=True,
-        )
-        Table(
-            "imhandles",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("user_id", Integer),
-            Column("network", String(50)),
-            Column("handle", String(50)),
-            test_needs_fk=True,
-        )
-
-    def test_basic(self):
-        class User(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "users"
-            __autoload__ = True
-            addresses = relationship("Address", backref="user")
-
-        class Address(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "addresses"
-            __autoload__ = True
-
-        u1 = User(
-            name="u1", addresses=[Address(email="one"), Address(email="two")]
-        )
-        sess = create_session()
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(User).all(),
-            [
-                User(
-                    name="u1",
-                    addresses=[Address(email="one"), Address(email="two")],
-                )
-            ],
-        )
-        a1 = sess.query(Address).filter(Address.email == "two").one()
-        eq_(a1, Address(email="two"))
-        eq_(a1.user, User(name="u1"))
-
-    def test_rekey(self):
-        class User(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "users"
-            __autoload__ = True
-            nom = Column("name", String(50), key="nom")
-            addresses = relationship("Address", backref="user")
-
-        class Address(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "addresses"
-            __autoload__ = True
-
-        u1 = User(
-            nom="u1", addresses=[Address(email="one"), Address(email="two")]
-        )
-        sess = create_session()
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(User).all(),
-            [
-                User(
-                    nom="u1",
-                    addresses=[Address(email="one"), Address(email="two")],
-                )
-            ],
-        )
-        a1 = sess.query(Address).filter(Address.email == "two").one()
-        eq_(a1, Address(email="two"))
-        eq_(a1.user, User(nom="u1"))
-        assert_raises(TypeError, User, name="u3")
-
-    def test_supplied_fk(self):
-        class IMHandle(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "imhandles"
-            __autoload__ = True
-            user_id = Column("user_id", Integer, ForeignKey("users.id"))
-
-        class User(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "users"
-            __autoload__ = True
-            handles = relationship("IMHandle", backref="user")
-
-        u1 = User(
-            name="u1",
-            handles=[
-                IMHandle(network="blabber", handle="foo"),
-                IMHandle(network="lol", handle="zomg"),
-            ],
-        )
-        sess = create_session()
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(User).all(),
-            [
-                User(
-                    name="u1",
-                    handles=[
-                        IMHandle(network="blabber", handle="foo"),
-                        IMHandle(network="lol", handle="zomg"),
-                    ],
-                )
-            ],
-        )
-        a1 = sess.query(IMHandle).filter(IMHandle.handle == "zomg").one()
-        eq_(a1, IMHandle(network="lol", handle="zomg"))
-        eq_(a1.user, User(name="u1"))
-
-
 class DeferredReflectBase(DeclarativeReflectionBase):
     def teardown(self):
         super(DeferredReflectBase, self).teardown()
@@ -198,14 +61,14 @@ class DeferredReflectPKFKTest(DeferredReflectBase):
         )
 
     def test_pk_fk(self):
-        class B(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class B(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "b"
             a = relationship("A")
 
-        class A(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class A(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "a"
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
 
 
 class DeferredReflectionTest(DeferredReflectBase):
@@ -233,8 +96,8 @@ class DeferredReflectionTest(DeferredReflectBase):
 
     def _roundtrip(self):
 
-        User = Base._decl_class_registry["User"]
-        Address = Base._decl_class_registry["Address"]
+        User = Base.registry._class_registry["User"]
+        Address = Base.registry._class_registry["Address"]
 
         u1 = User(
             name="u1", addresses=[Address(email="one"), Address(email="two")]
@@ -257,13 +120,11 @@ class DeferredReflectionTest(DeferredReflectBase):
         eq_(a1.user, User(name="u1"))
 
     def test_exception_prepare_not_called(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
             addresses = relationship("Address", backref="user")
 
-        class Address(
-            decl.DeferredReflection, fixtures.ComparableEntity, Base
-        ):
+        class Address(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "addresses"
 
         assert_raises_message(
@@ -277,23 +138,21 @@ class DeferredReflectionTest(DeferredReflectBase):
         )
 
     def test_basic_deferred(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
             addresses = relationship("Address", backref="user")
 
-        class Address(
-            decl.DeferredReflection, fixtures.ComparableEntity, Base
-        ):
+        class Address(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "addresses"
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_abstract_base(self):
-        class DefBase(decl.DeferredReflection, Base):
+        class DefBase(DeferredReflection, Base):
             __abstract__ = True
 
-        class OtherDefBase(decl.DeferredReflection, Base):
+        class OtherDefBase(DeferredReflection, Base):
             __abstract__ = True
 
         class User(fixtures.ComparableEntity, DefBase):
@@ -310,31 +169,29 @@ class DeferredReflectionTest(DeferredReflectBase):
         self._roundtrip()
 
     def test_redefine_fk_double(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
             addresses = relationship("Address", backref="user")
 
-        class Address(
-            decl.DeferredReflection, fixtures.ComparableEntity, Base
-        ):
+        class Address(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "addresses"
             user_id = Column(Integer, ForeignKey("users.id"))
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_mapper_args_deferred(self):
         """test that __mapper_args__ is not called until *after*
         table reflection"""
 
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
 
-            @decl.declared_attr
+            @declared_attr
             def __mapper_args__(cls):
                 return {"primary_key": cls.__table__.c.id}
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         sess = Session()
         sess.add_all(
             [User(name="G"), User(name="Q"), User(name="A"), User(name="C")]
@@ -347,19 +204,17 @@ class DeferredReflectionTest(DeferredReflectBase):
 
     @testing.requires.predictable_gc
     def test_cls_not_strong_ref(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
 
-        class Address(
-            decl.DeferredReflection, fixtures.ComparableEntity, Base
-        ):
+        class Address(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "addresses"
 
         eq_(len(_DeferredMapperConfig._configs), 2)
         del Address
         gc_collect()
         eq_(len(_DeferredMapperConfig._configs), 1)
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         assert not _DeferredMapperConfig._configs
 
 
@@ -396,8 +251,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
 
     def _roundtrip(self):
 
-        User = Base._decl_class_registry["User"]
-        Item = Base._decl_class_registry["Item"]
+        User = Base.registry._class_registry["User"]
+        Item = Base.registry._class_registry["Item"]
 
         u1 = User(name="u1", items=[Item(name="i1"), Item(name="i2")])
 
@@ -411,36 +266,36 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
         )
 
     def test_string_resolution(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
 
             items = relationship("Item", secondary="user_items")
 
-        class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Item(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "items"
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_table_resolution(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class User(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "users"
 
             items = relationship(
                 "Item", secondary=Table("user_items", Base.metadata)
             )
 
-        class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Item(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "items"
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
 
 class DeferredInhReflectBase(DeferredReflectBase):
     def _roundtrip(self):
-        Foo = Base._decl_class_registry["Foo"]
-        Bar = Base._decl_class_registry["Bar"]
+        Foo = Base.registry._class_registry["Foo"]
+        Bar = Base.registry._class_registry["Bar"]
 
         s = Session(testing.db)
 
@@ -480,7 +335,7 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         )
 
     def test_basic(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -490,11 +345,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         class Bar(Foo):
             __mapper_args__ = {"polymorphic_identity": "bar"}
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_add_subclass_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -505,11 +360,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
             __mapper_args__ = {"polymorphic_identity": "bar"}
             bar_data = Column(String(30))
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_add_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -520,7 +375,7 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         class Bar(Foo):
             __mapper_args__ = {"polymorphic_identity": "bar"}
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
 
@@ -546,7 +401,7 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
         )
 
     def test_basic(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -557,11 +412,11 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
             __tablename__ = "bar"
             __mapper_args__ = {"polymorphic_identity": "bar"}
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_add_subclass_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -573,11 +428,11 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
             __mapper_args__ = {"polymorphic_identity": "bar"}
             bar_data = Column(String(30))
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_add_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -589,11 +444,11 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
             __tablename__ = "bar"
             __mapper_args__ = {"polymorphic_identity": "bar"}
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
 
     def test_add_fk_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+        class Foo(DeferredReflection, fixtures.ComparableEntity, Base):
             __tablename__ = "foo"
             __mapper_args__ = {
                 "polymorphic_on": "type",
@@ -605,5 +460,5 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
             __mapper_args__ = {"polymorphic_identity": "bar"}
             id = Column(Integer, ForeignKey("foo.id"), primary_key=True)
 
-        decl.DeferredReflection.prepare(testing.db)
+        DeferredReflection.prepare(testing.db)
         self._roundtrip()
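
The reflection tests above now derive their Base from an orm-level registry (registry(metadata=...).generate_base()) and resolve mapped classes through Base.registry._class_registry. A minimal sketch of that pattern, assuming only the API visible in the hunks above; the class name below is illustrative:

    # illustrative sketch, not part of the commit: build a declarative Base
    # from a sqlalchemy.orm registry, as the reworked setup() above does
    from sqlalchemy import Column, Integer, MetaData
    from sqlalchemy.orm import registry

    mapper_registry = registry(metadata=MetaData())
    Base = mapper_registry.generate_base()

    class Widget(Base):  # hypothetical mapped class, for illustration only
        __tablename__ = "widgets"
        id = Column(Integer, primary_key=True)

    # mapped classes are reachable via the registry attached to the Base,
    # which is how the updated _roundtrip() helpers look up User/Address
    assert Base.registry._class_registry["Widget"] is Widget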
similarity index 95%
rename from test/ext/declarative/test_basic.py
rename to test/orm/declarative/test_basic.py
index b7b5ec61aa9b3d1b538c0ade5f914da39550c365..9a7c6ef91e88cbcc1e7f76f571473a863b8cfbd9 100644 (file)
@@ -12,11 +12,6 @@ from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy import UniqueConstraint
 from sqlalchemy import util
-from sqlalchemy.ext import declarative as decl
-from sqlalchemy.ext.declarative import DeclarativeMeta
-from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.ext.declarative import synonym_for
-from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import backref
 from sqlalchemy.orm import class_mapper
@@ -26,13 +21,20 @@ from sqlalchemy.orm import column_property
 from sqlalchemy.orm import composite
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
+from sqlalchemy.orm import decl_base
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import deferred
 from sqlalchemy.orm import descriptor_props
 from sqlalchemy.orm import exc as orm_exc
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm import mapper
+from sqlalchemy.orm import registry
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
+from sqlalchemy.orm import synonym_for
+from sqlalchemy.orm.decl_api import DeclarativeMeta
+from sqlalchemy.orm.decl_base import _DeferredMapperConfig
 from sqlalchemy.orm.events import MapperEvents
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
@@ -61,7 +63,7 @@ class DeclarativeTestBase(
 
     def setup(self):
         global Base
-        Base = decl.declarative_base(testing.db)
+        Base = declarative_base(testing.db)
 
     def teardown(self):
         close_all_sessions()
@@ -119,6 +121,57 @@ class DeclarativeTest(DeclarativeTestBase):
         eq_(a1, Address(email="two"))
         eq_(a1.user, User(name="u1"))
 
+    def test_back_populates_setup(self):
+        class User(Base):
+            __tablename__ = "users"
+
+            id = Column("id", Integer, primary_key=True)
+            addresses = relationship("Address", back_populates="user")
+
+        class Address(Base):
+            __tablename__ = "addresses"
+
+            id = Column(Integer, primary_key=True)
+            user_id = Column(
+                "user_id", Integer, ForeignKey("users.id"), key="_user_id"
+            )
+            user = relationship("User", back_populates="addresses")
+
+        configure_mappers()
+
+        assert (
+            Address.__mapper__.attrs.user
+            in User.__mapper__.attrs.addresses._reverse_property
+        )
+        assert (
+            User.__mapper__.attrs.addresses
+            in Address.__mapper__.attrs.user._reverse_property
+        )
+
+    def test_dispose_attrs(self):
+        reg = registry()
+
+        class Foo(object):
+            __tablename__ = "some_table"
+
+            id = Column(Integer, primary_key=True)
+
+        reg.mapped(Foo)
+
+        is_(Foo.__mapper__, class_mapper(Foo))
+        is_(Foo.__table__, class_mapper(Foo).local_table)
+
+        clear_mappers()
+
+        assert not hasattr(Foo, "__mapper__")
+        assert not hasattr(Foo, "__table__")
+
+        from sqlalchemy.orm import clsregistry
+
+        assert clsregistry._key_is_empty(
+            "Foo", reg._class_registry, lambda cls: cls is Foo
+        )
+
     def test_deferred_reflection_default_error(self):
         class MyExt(object):
             @classmethod
@@ -126,7 +179,7 @@ class DeclarativeTest(DeclarativeTestBase):
                 "sample prepare method"
                 to_map = _DeferredMapperConfig.classes_for_base(cls)
                 for thingy in to_map:
-                    thingy.map()
+                    thingy.map({})
 
             @classmethod
             def _sa_decl_prepare(cls):
@@ -138,7 +191,7 @@ class DeclarativeTest(DeclarativeTestBase):
 
         assert_raises_message(
             orm_exc.UnmappedClassError,
-            "Class test.ext.declarative.test_basic.User has a deferred "
+            "Class .*User has a deferred "
             "mapping on it.  It is not yet usable as a mapped class.",
             Session().query,
             User,
@@ -452,7 +505,9 @@ class DeclarativeTest(DeclarativeTestBase):
             )
             name = Column("name", String(50))
 
-        decl.instrument_declarative(User, {}, Base.metadata)
+        reg = registry(metadata=Base.metadata)
+
+        reg.map_declaratively(User)
 
     def test_reserved_identifiers(self):
         def go1():
@@ -483,7 +538,7 @@ class DeclarativeTest(DeclarativeTestBase):
         eq_(str(foo), "(no name)")
         eq_(foo.key, None)
         eq_(foo.name, None)
-        decl.base._undefer_column_name("foo", foo)
+        decl_base._undefer_column_name("foo", foo)
         eq_(str(foo), "foo")
         eq_(foo.key, "foo")
         eq_(foo.name, "foo")
@@ -841,7 +896,7 @@ class DeclarativeTest(DeclarativeTestBase):
         )
 
     def test_string_dependency_resolution_schemas(self):
-        Base = decl.declarative_base()
+        Base = declarative_base()
 
         class User(Base):
 
@@ -880,7 +935,7 @@ class DeclarativeTest(DeclarativeTestBase):
         )
 
     def test_string_dependency_resolution_annotations(self):
-        Base = decl.declarative_base()
+        Base = declarative_base()
 
         class Parent(Base):
             __tablename__ = "parent"
@@ -905,8 +960,8 @@ class DeclarativeTest(DeclarativeTestBase):
 
     def test_shared_class_registry(self):
         reg = {}
-        Base1 = decl.declarative_base(testing.db, class_registry=reg)
-        Base2 = decl.declarative_base(testing.db, class_registry=reg)
+        Base1 = declarative_base(testing.db, class_registry=reg)
+        Base2 = declarative_base(testing.db, class_registry=reg)
 
         class A(Base1):
             __tablename__ = "a"
@@ -1026,7 +1081,7 @@ class DeclarativeTest(DeclarativeTestBase):
             def foobar(self):
                 return "foobar"
 
-        Base = decl.declarative_base(cls=MyBase)
+        Base = declarative_base(cls=MyBase)
         assert hasattr(Base, "metadata")
         assert Base().foobar() == "foobar"
 
@@ -1292,26 +1347,26 @@ class DeclarativeTest(DeclarativeTestBase):
             email = Column("email", String(50))
             user_id = Column("user_id", Integer, ForeignKey("users.id"))
 
-        reg = {}
-        decl.instrument_declarative(User, reg, Base.metadata)
-        decl.instrument_declarative(Address, reg, Base.metadata)
-        Base.metadata.create_all()
+        reg = registry(metadata=Base.metadata)
+        reg.mapped(User)
+        reg.mapped(Address)
+        reg.metadata.create_all()
         u1 = User(
             name="u1", addresses=[Address(email="one"), Address(email="two")]
         )
-        sess = create_session()
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        eq_(
-            sess.query(User).all(),
-            [
-                User(
-                    name="u1",
-                    addresses=[Address(email="one"), Address(email="two")],
-                )
-            ],
-        )
+        with Session(testing.db) as sess:
+            sess.add(u1)
+            sess.commit()
+        with Session(testing.db) as sess:
+            eq_(
+                sess.query(User).all(),
+                [
+                    User(
+                        name="u1",
+                        addresses=[Address(email="one"), Address(email="two")],
+                    )
+                ],
+            )
 
     def test_custom_mapper_attribute(self):
         def mymapper(cls, tbl, **kwargs):
@@ -1319,7 +1374,7 @@ class DeclarativeTest(DeclarativeTestBase):
             m.CHECK = True
             return m
 
-        base = decl.declarative_base()
+        base = declarative_base()
 
         class Foo(base):
             __tablename__ = "foo"
@@ -1334,7 +1389,7 @@ class DeclarativeTest(DeclarativeTestBase):
             m.CHECK = True
             return m
 
-        base = decl.declarative_base(mapper=mymapper)
+        base = declarative_base(mapper=mymapper)
 
         class Foo(base):
             __tablename__ = "foo"
@@ -1343,7 +1398,7 @@ class DeclarativeTest(DeclarativeTestBase):
         eq_(Foo.__mapper__.CHECK, True)
 
     def test_no_change_to_all_descriptors(self):
-        base = decl.declarative_base()
+        base = declarative_base()
 
         class Foo(base):
             __tablename__ = "foo"
@@ -2005,7 +2060,7 @@ class DeclarativeTest(DeclarativeTestBase):
             )
             name = Column("name", String(50))
 
-            @decl.synonym_for("name")
+            @synonym_for("name")
             @property
             def namesyn(self):
                 return self.name
@@ -2065,12 +2120,12 @@ class DeclarativeTest(DeclarativeTestBase):
         class MyBase(object):
             """MyBase Docstring"""
 
-        Base = decl.declarative_base(cls=MyBase)
+        Base = declarative_base(cls=MyBase)
 
         eq_(Base.__doc__, MyBase.__doc__)
 
     def test_delattr_mapped_raises(self):
-        Base = decl.declarative_base()
+        Base = declarative_base()
 
         class Foo(Base):
             __tablename__ = "foo"
@@ -2088,7 +2143,7 @@ class DeclarativeTest(DeclarativeTestBase):
         )
 
     def test_delattr_hybrid_fine(self):
-        Base = decl.declarative_base()
+        Base = declarative_base()
 
         class Foo(Base):
             __tablename__ = "foo"
@@ -2109,7 +2164,7 @@ class DeclarativeTest(DeclarativeTestBase):
         assert not hasattr(Foo, "data_hybrid")
 
     def test_setattr_hybrid_updates_descriptors(self):
-        Base = decl.declarative_base()
+        Base = declarative_base()
 
         class Foo(Base):
             __tablename__ = "foo"
@@ -2165,7 +2220,7 @@ def _produce_test(inline, stringbased):
         @classmethod
         def define_tables(cls, metadata):
             global User, Address
-            Base = decl.declarative_base(metadata=metadata)
+            Base = declarative_base(metadata=metadata)
 
             class User(Base, fixtures.ComparableEntity):
 
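
The test_basic.py hunks above replace instrument_declarative() with registry.mapped() and registry.map_declaratively(). A minimal sketch of the decorator-style spelling, assuming the registry API shown in the diff; the class name is illustrative:

    # illustrative sketch, not part of the commit: decorator-style mapping
    from sqlalchemy import Column, Integer
    from sqlalchemy.orm import registry

    reg = registry()

    @reg.mapped  # equivalent to calling reg.mapped(Thing) after the fact
    class Thing(object):  # hypothetical class, for illustration only
        __tablename__ = "thing"
        id = Column(Integer, primary_key=True)

    # the imperative spelling used where instrument_declarative() was removed
    # would be: registry(metadata=...).map_declaratively(SomeClass)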
similarity index 77%
rename from test/ext/declarative/test_clsregistry.py
rename to test/orm/declarative/test_clsregistry.py
index fbde544e4f65e146eb5b6f1580a53bca0ba7186f..b9d41ee5325b279f7695380e554151a10c962ed5 100644 (file)
@@ -1,18 +1,18 @@
-import weakref
-
 from sqlalchemy import exc
 from sqlalchemy import MetaData
-from sqlalchemy.ext.declarative import clsregistry
+from sqlalchemy.orm import clsregistry
+from sqlalchemy.orm import registry
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
+from sqlalchemy.testing import mock
 from sqlalchemy.testing.util import gc_collect
 
 
 class MockClass(object):
     def __init__(self, base, name):
-        self._decl_class_registry = base
+        self._sa_class_manager = mock.Mock(registry=base)
         tokens = name.split(".")
         self.__module__ = ".".join(tokens[0:-1])
         self.name = self.__name__ = tokens[-1]
@@ -27,10 +27,11 @@ class ClsRegistryTest(fixtures.TestBase):
     __requires__ = ("predictable_gc",)
 
     def test_same_module_same_name(self):
-        base = weakref.WeakValueDictionary()
+
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1)
+        clsregistry.add_class("Foo", f1, base._class_registry)
         gc_collect()
 
         assert_raises_message(
@@ -41,14 +42,15 @@ class ClsRegistryTest(fixtures.TestBase):
             clsregistry.add_class,
             "Foo",
             f2,
+            base._class_registry,
         )
 
     def test_resolve(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -60,13 +62,13 @@ class ClsRegistryTest(fixtures.TestBase):
         is_(name_resolver("foo.alt.Foo")(), f2)
 
     def test_fragment_resolve(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
         f3 = MockClass(base, "bat.alt.Hoho")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
-        clsregistry.add_class("HoHo", f3)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry.add_class("HoHo", f3, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -78,13 +80,13 @@ class ClsRegistryTest(fixtures.TestBase):
         is_(name_resolver("alt.Foo")(), f2)
 
     def test_fragment_ambiguous(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
         f3 = MockClass(base, "bat.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
-        clsregistry.add_class("Foo", f3)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry.add_class("Foo", f3, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -106,11 +108,11 @@ class ClsRegistryTest(fixtures.TestBase):
         )
 
     def test_no_fns_in_name_resolve(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -131,11 +133,11 @@ class ClsRegistryTest(fixtures.TestBase):
         )
 
     def test_resolve_dupe_by_name(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
 
         gc_collect()
 
@@ -159,11 +161,11 @@ class ClsRegistryTest(fixtures.TestBase):
         )
 
     def test_dupe_classes_back_to_one(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
 
         del f2
         gc_collect()
@@ -180,13 +182,13 @@ class ClsRegistryTest(fixtures.TestBase):
         # force this to maintain isolation between tests
         clsregistry._registries.clear()
 
-        base = weakref.WeakValueDictionary()
+        base = registry()
 
         for i in range(3):
             f1 = MockClass(base, "foo.bar.Foo")
             f2 = MockClass(base, "foo.alt.Foo")
-            clsregistry.add_class("Foo", f1)
-            clsregistry.add_class("Foo", f2)
+            clsregistry.add_class("Foo", f1, base._class_registry)
+            clsregistry.add_class("Foo", f2, base._class_registry)
 
             eq_(len(clsregistry._registries), 11)
 
@@ -199,13 +201,13 @@ class ClsRegistryTest(fixtures.TestBase):
     def test_dupe_classes_name_race(self):
         """test the race condition that the class was garbage "
         "collected while being resolved from a dupe class."""
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1)
-        clsregistry.add_class("Foo", f2)
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry.add_class("Foo", f2, base._class_registry)
 
-        dupe_reg = base["Foo"]
+        dupe_reg = base._class_registry["Foo"]
         dupe_reg.contents = [lambda: None]
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
         f_resolver = resolver("Foo")
@@ -228,10 +230,10 @@ class ClsRegistryTest(fixtures.TestBase):
         """test the race condition that a class was gc'ed as we tried
         to look it up by module name."""
 
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1)
-        reg = base["_sa_module_registry"]
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        reg = base._class_registry["_sa_module_registry"]
 
         mod_entry = reg["foo"]["bar"]
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
@@ -253,10 +255,10 @@ class ClsRegistryTest(fixtures.TestBase):
         )
 
     def test_module_reg_no_class(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1)
-        reg = base["_sa_module_registry"]
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        reg = base._class_registry["_sa_module_registry"]
         mod_entry = reg["foo"]["bar"]  # noqa
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
         f_resolver = resolver("foo")
@@ -276,13 +278,13 @@ class ClsRegistryTest(fixtures.TestBase):
         )
 
     def test_module_reg_cleanout_two_sub(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1)
-        reg = base["_sa_module_registry"]
+        clsregistry.add_class("Foo", f1, base._class_registry)
+        reg = base._class_registry["_sa_module_registry"]
 
         f2 = MockClass(base, "foo.alt.Bar")
-        clsregistry.add_class("Bar", f2)
+        clsregistry.add_class("Bar", f2, base._class_registry)
         assert reg["foo"]["bar"]
         del f1
         gc_collect()
@@ -294,10 +296,10 @@ class ClsRegistryTest(fixtures.TestBase):
         assert "foo" not in reg.contents
 
     def test_module_reg_cleanout_sub_to_base(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f3 = MockClass(base, "bat.bar.Hoho")
-        clsregistry.add_class("Hoho", f3)
-        reg = base["_sa_module_registry"]
+        clsregistry.add_class("Hoho", f3, base._class_registry)
+        reg = base._class_registry["_sa_module_registry"]
 
         assert reg["bat"]["bar"]
         del f3
@@ -305,10 +307,10 @@ class ClsRegistryTest(fixtures.TestBase):
         assert "bat" not in reg
 
     def test_module_reg_cleanout_cls_to_base(self):
-        base = weakref.WeakValueDictionary()
+        base = registry()
         f4 = MockClass(base, "single.Blat")
-        clsregistry.add_class("Blat", f4)
-        reg = base["_sa_module_registry"]
+        clsregistry.add_class("Blat", f4, base._class_registry)
+        reg = base._class_registry["_sa_module_registry"]
         assert reg["single"]
         del f4
         gc_collect()
similarity index 95%
rename from test/ext/declarative/test_concurrency.py
rename to test/orm/declarative/test_concurrency.py
index ac1ae0aaff351e0d84700042589989c34adba31d..d731c6afae50e0ddb119441515a5619251451d7e 100644 (file)
@@ -7,9 +7,9 @@ from sqlalchemy import exc
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
 from sqlalchemy import String
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import clear_mappers
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import exc as orm_exc
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
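
The import changes in test_concurrency.py above are representative of the whole commit: the declarative entry points now live under sqlalchemy.orm itself. A one-line sketch, assuming those imports:

    # illustrative sketch, not part of the commit
    from sqlalchemy.orm import declarative_base, declared_attr

    Base = declarative_base()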
diff --git a/test/orm/declarative/test_inheritance.py b/test/orm/declarative/test_inheritance.py
new file mode 100644 (file)
index 0000000..290c83e
--- /dev/null
@@ -0,0 +1,1310 @@
+import sqlalchemy as sa
+from sqlalchemy import ForeignKey
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import testing
+from sqlalchemy.ext import declarative as decl
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.orm import class_mapper
+from sqlalchemy.orm import clear_mappers
+from sqlalchemy.orm import close_all_sessions
+from sqlalchemy.orm import configure_mappers
+from sqlalchemy.orm import create_session
+from sqlalchemy.orm import deferred
+from sqlalchemy.orm import mapper
+from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
+from sqlalchemy.testing import assert_raises
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy.testing import eq_
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import is_
+from sqlalchemy.testing import is_false
+from sqlalchemy.testing import is_true
+from sqlalchemy.testing.schema import Column
+from sqlalchemy.testing.schema import Table
+
+Base = None
+
+
+class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
+    def setup(self):
+        global Base
+        Base = decl.declarative_base(testing.db)
+
+    def teardown(self):
+        close_all_sessions()
+        clear_mappers()
+        Base.metadata.drop_all()
+
+
+class DeclarativeInheritanceTest(DeclarativeTestBase):
+    def test_we_must_copy_mapper_args(self):
+        class Person(Base):
+
+            __tablename__ = "people"
+            id = Column(Integer, primary_key=True)
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {
+                "polymorphic_on": discriminator,
+                "polymorphic_identity": "person",
+            }
+
+        class Engineer(Person):
+
+            primary_language = Column(String(50))
+
+        assert "inherits" not in Person.__mapper_args__
+        assert class_mapper(Engineer).polymorphic_identity is None
+        assert class_mapper(Engineer).polymorphic_on is Person.__table__.c.type
+
+    def test_we_must_only_copy_column_mapper_args(self):
+        class Person(Base):
+
+            __tablename__ = "people"
+            id = Column(Integer, primary_key=True)
+            a = Column(Integer)
+            b = Column(Integer)
+            c = Column(Integer)
+            d = Column(Integer)
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {
+                "polymorphic_on": discriminator,
+                "polymorphic_identity": "person",
+                "version_id_col": "a",
+                "column_prefix": "bar",
+                "include_properties": ["id", "a", "b"],
+            }
+
+        assert class_mapper(Person).version_id_col == "a"
+        assert class_mapper(Person).include_properties == set(["id", "a", "b"])
+
+    def test_custom_join_condition(self):
+        class Foo(Base):
+
+            __tablename__ = "foo"
+            id = Column("id", Integer, primary_key=True)
+
+        class Bar(Foo):
+
+            __tablename__ = "bar"
+            bar_id = Column("id", Integer, primary_key=True)
+            foo_id = Column("foo_id", Integer)
+            __mapper_args__ = {"inherit_condition": foo_id == Foo.id}
+
+        # compile succeeds because inherit_condition is honored
+
+        configure_mappers()
+
+    def test_joined(self):
+        class Company(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "companies"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column("name", String(50))
+            employees = relationship("Person")
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            company_id = Column(
+                "company_id", Integer, ForeignKey("companies.id")
+            )
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+            primary_language = Column("primary_language", String(50))
+
+        class Manager(Person):
+
+            __tablename__ = "managers"
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+            golf_swing = Column("golf_swing", String(50))
+
+        Base.metadata.create_all()
+        sess = create_session()
+        c1 = Company(
+            name="MegaCorp, Inc.",
+            employees=[
+                Engineer(name="dilbert", primary_language="java"),
+                Engineer(name="wally", primary_language="c++"),
+                Manager(name="dogbert", golf_swing="fore!"),
+            ],
+        )
+
+        c2 = Company(
+            name="Elbonia, Inc.",
+            employees=[Engineer(name="vlad", primary_language="cobol")],
+        )
+        sess.add(c1)
+        sess.add(c2)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Company)
+            .filter(
+                Company.employees.of_type(Engineer).any(
+                    Engineer.primary_language == "cobol"
+                )
+            )
+            .first(),
+            c2,
+        )
+
+        # ensure that the Manager mapper was compiled with the Manager id
+        # column as higher priority. this ensures that "Manager.id"
+        # is appropriately treated as the "id" column in the "manager"
+        # table (reversed from 0.6's behavior.)
+
+        eq_(
+            Manager.id.property.columns,
+            [Manager.__table__.c.id, Person.__table__.c.id],
+        )
+
+        # assert that the "id" column is available without a second
+        # load. as of 0.7, the ColumnProperty tests all columns
+        # in its list to see which is present in the row.
+
+        sess.expunge_all()
+
+        def go():
+            assert (
+                sess.query(Manager).filter(Manager.name == "dogbert").one().id
+            )
+
+        self.assert_sql_count(testing.db, go, 1)
+        sess.expunge_all()
+
+        def go():
+            assert (
+                sess.query(Person).filter(Manager.name == "dogbert").one().id
+            )
+
+        self.assert_sql_count(testing.db, go, 1)
+
+    def test_add_subcol_after_the_fact(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+
+        Engineer.primary_language = Column("primary_language", String(50))
+        Base.metadata.create_all()
+        sess = create_session()
+        e1 = Engineer(primary_language="java", name="dilbert")
+        sess.add(e1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person).first(),
+            Engineer(primary_language="java", name="dilbert"),
+        )
+
+    def test_add_parentcol_after_the_fact(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+
+        Person.name = Column("name", String(50))
+        Base.metadata.create_all()
+        sess = create_session()
+        e1 = Engineer(primary_language="java", name="dilbert")
+        sess.add(e1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person).first(),
+            Engineer(primary_language="java", name="dilbert"),
+        )
+
+    def test_add_sub_parentcol_after_the_fact(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+
+        class Admin(Engineer):
+
+            __tablename__ = "admins"
+            __mapper_args__ = {"polymorphic_identity": "admin"}
+            workstation = Column(String(50))
+            id = Column(
+                "id", Integer, ForeignKey("engineers.id"), primary_key=True
+            )
+
+        Person.name = Column("name", String(50))
+        Base.metadata.create_all()
+        sess = create_session()
+        e1 = Admin(primary_language="java", name="dilbert", workstation="foo")
+        sess.add(e1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person).first(),
+            Admin(primary_language="java", name="dilbert", workstation="foo"),
+        )
+
+    def test_subclass_mixin(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column("id", Integer, primary_key=True)
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class MyMixin(object):
+
+            pass
+
+        class Engineer(MyMixin, Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            id = Column(
+                "id", Integer, ForeignKey("people.id"), primary_key=True
+            )
+            primary_language = Column("primary_language", String(50))
+
+        assert class_mapper(Engineer).inherits is class_mapper(Person)
+
+    def test_intermediate_abstract_class_on_classical(self):
+        class Person(object):
+            pass
+
+        person_table = Table(
+            "people",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("kind", String(50)),
+        )
+
+        mapper(
+            Person,
+            person_table,
+            polymorphic_on="kind",
+            polymorphic_identity="person",
+        )
+
+        class SpecialPerson(Person):
+            __abstract__ = True
+
+        class Manager(SpecialPerson, Base):
+            __tablename__ = "managers"
+            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+
+        from sqlalchemy import inspect
+
+        assert inspect(Manager).inherits is inspect(Person)
+
+        eq_(set(class_mapper(Person).class_manager), {"id", "kind"})
+        eq_(set(class_mapper(Manager).class_manager), {"id", "kind"})
+
+    def test_intermediate_unmapped_class_on_classical(self):
+        class Person(object):
+            pass
+
+        person_table = Table(
+            "people",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("kind", String(50)),
+        )
+
+        mapper(
+            Person,
+            person_table,
+            polymorphic_on="kind",
+            polymorphic_identity="person",
+        )
+
+        class SpecialPerson(Person):
+            pass
+
+        class Manager(SpecialPerson, Base):
+            __tablename__ = "managers"
+            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+
+        from sqlalchemy import inspect
+
+        assert inspect(Manager).inherits is inspect(Person)
+
+        eq_(set(class_mapper(Person).class_manager), {"id", "kind"})
+        eq_(set(class_mapper(Manager).class_manager), {"id", "kind"})
+
+    def test_class_w_invalid_multiple_bases(self):
+        class Person(object):
+            pass
+
+        person_table = Table(
+            "people",
+            Base.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("kind", String(50)),
+        )
+
+        mapper(
+            Person,
+            person_table,
+            polymorphic_on="kind",
+            polymorphic_identity="person",
+        )
+
+        class DeclPerson(Base):
+            __tablename__ = "decl_people"
+            id = Column(Integer, primary_key=True)
+            kind = Column(String(50))
+
+        class SpecialPerson(Person):
+            pass
+
+        def go():
+            class Manager(SpecialPerson, DeclPerson):
+                __tablename__ = "managers"
+                id = Column(
+                    Integer, ForeignKey(DeclPerson.id), primary_key=True
+                )
+                __mapper_args__ = {"polymorphic_identity": "manager"}
+
+        assert_raises_message(
+            sa.exc.InvalidRequestError,
+            r"Class .*Manager.* has multiple mapped "
+            r"bases: \[.*Person.*DeclPerson.*\]",
+            go,
+        )
+
+    def test_with_undefined_foreignkey(self):
+        class Parent(Base):
+
+            __tablename__ = "parent"
+            id = Column("id", Integer, primary_key=True)
+            tp = Column("type", String(50))
+            __mapper_args__ = dict(polymorphic_on=tp)
+
+        class Child1(Parent):
+
+            __tablename__ = "child1"
+            id = Column(
+                "id", Integer, ForeignKey("parent.id"), primary_key=True
+            )
+            related_child2 = Column("c2", Integer, ForeignKey("child2.id"))
+            __mapper_args__ = dict(polymorphic_identity="child1")
+
+        # no exception is raised by the ForeignKey to "child2" even
+        # though child2 doesn't exist yet
+
+        class Child2(Parent):
+
+            __tablename__ = "child2"
+            id = Column(
+                "id", Integer, ForeignKey("parent.id"), primary_key=True
+            )
+            related_child1 = Column("c1", Integer)
+            __mapper_args__ = dict(polymorphic_identity="child2")
+
+        sa.orm.configure_mappers()  # no exceptions here
+
+    def test_foreign_keys_with_col(self):
+        """Test that foreign keys that reference a literal 'id' subclass
+        'id' attribute behave intuitively.
+
+        See [ticket:1892].
+
+        """
+
+        class Booking(Base):
+            __tablename__ = "booking"
+            id = Column(Integer, primary_key=True)
+
+        class PlanBooking(Booking):
+            __tablename__ = "plan_booking"
+            id = Column(Integer, ForeignKey(Booking.id), primary_key=True)
+
+        # referencing PlanBooking.id gives us the column
+        # on plan_booking, not booking
+        class FeatureBooking(Booking):
+            __tablename__ = "feature_booking"
+            id = Column(Integer, ForeignKey(Booking.id), primary_key=True)
+            plan_booking_id = Column(Integer, ForeignKey(PlanBooking.id))
+
+            plan_booking = relationship(
+                PlanBooking, backref="feature_bookings"
+            )
+
+        assert FeatureBooking.__table__.c.plan_booking_id.references(
+            PlanBooking.__table__.c.id
+        )
+
+        assert FeatureBooking.__table__.c.id.references(Booking.__table__.c.id)
+
+    def test_single_colsonbase(self):
+        """test single inheritance where all the columns are on the base
+        class."""
+
+        class Company(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "companies"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column("name", String(50))
+            employees = relationship("Person")
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            company_id = Column(
+                "company_id", Integer, ForeignKey("companies.id")
+            )
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            primary_language = Column("primary_language", String(50))
+            golf_swing = Column("golf_swing", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+
+        class Manager(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+
+        Base.metadata.create_all()
+        sess = create_session()
+        c1 = Company(
+            name="MegaCorp, Inc.",
+            employees=[
+                Engineer(name="dilbert", primary_language="java"),
+                Engineer(name="wally", primary_language="c++"),
+                Manager(name="dogbert", golf_swing="fore!"),
+            ],
+        )
+
+        c2 = Company(
+            name="Elbonia, Inc.",
+            employees=[Engineer(name="vlad", primary_language="cobol")],
+        )
+        sess.add(c1)
+        sess.add(c2)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person)
+            .filter(Engineer.primary_language == "cobol")
+            .first(),
+            Engineer(name="vlad"),
+        )
+        eq_(
+            sess.query(Company)
+            .filter(
+                Company.employees.of_type(Engineer).any(
+                    Engineer.primary_language == "cobol"
+                )
+            )
+            .first(),
+            c2,
+        )
+
+    def test_single_colsonsub(self):
+        """test single inheritance where the columns are local to their
+        class.
+
+        this is a newer usage.
+
+        """
+
+        class Company(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "companies"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column("name", String(50))
+            employees = relationship("Person")
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            company_id = Column(Integer, ForeignKey("companies.id"))
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+
+        class Manager(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+            golf_swing = Column(String(50))
+
+        # we have here a situation that is somewhat unique. the Person
+        # class is mapped to the "people" table, but it was mapped when
+        # the table did not include the "primary_language" or
+        # "golf_swing" columns.  declarative will also manipulate the
+        # exclude_properties collection so that sibling classes don't
+        # cross-pollinate.
+
+        assert Person.__table__.c.company_id is not None
+        assert Person.__table__.c.golf_swing is not None
+        assert Person.__table__.c.primary_language is not None
+        assert Engineer.primary_language is not None
+        assert Manager.golf_swing is not None
+        assert not hasattr(Person, "primary_language")
+        assert not hasattr(Person, "golf_swing")
+        assert not hasattr(Engineer, "golf_swing")
+        assert not hasattr(Manager, "primary_language")
+        Base.metadata.create_all()
+        sess = create_session()
+        e1 = Engineer(name="dilbert", primary_language="java")
+        e2 = Engineer(name="wally", primary_language="c++")
+        m1 = Manager(name="dogbert", golf_swing="fore!")
+        c1 = Company(name="MegaCorp, Inc.", employees=[e1, e2, m1])
+        e3 = Engineer(name="vlad", primary_language="cobol")
+        c2 = Company(name="Elbonia, Inc.", employees=[e3])
+        sess.add(c1)
+        sess.add(c2)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person)
+            .filter(Engineer.primary_language == "cobol")
+            .first(),
+            Engineer(name="vlad"),
+        )
+        eq_(
+            sess.query(Company)
+            .filter(
+                Company.employees.of_type(Engineer).any(
+                    Engineer.primary_language == "cobol"
+                )
+            )
+            .first(),
+            c2,
+        )
+        eq_(
+            sess.query(Engineer).filter_by(primary_language="cobol").one(),
+            Engineer(name="vlad", primary_language="cobol"),
+        )
+
+    def test_single_cols_on_sub_base_of_joined(self):
+        """test [ticket:3895]"""
+
+        class Person(Base):
+            __tablename__ = "person"
+
+            id = Column(Integer, primary_key=True)
+            type = Column(String)
+
+            __mapper_args__ = {"polymorphic_on": type}
+
+        class Contractor(Person):
+            contractor_field = Column(String)
+
+            __mapper_args__ = {"polymorphic_identity": "contractor"}
+
+        class Employee(Person):
+            __tablename__ = "employee"
+
+            id = Column(Integer, ForeignKey(Person.id), primary_key=True)
+
+        class Engineer(Employee):
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+
+        configure_mappers()
+
+        is_false(hasattr(Person, "contractor_field"))
+        is_true(hasattr(Contractor, "contractor_field"))
+        is_false(hasattr(Employee, "contractor_field"))
+        is_false(hasattr(Engineer, "contractor_field"))
+
+    def test_single_cols_on_sub_to_joined(self):
+        """test [ticket:3797]"""
+
+        class BaseUser(Base):
+            __tablename__ = "root"
+
+            id = Column(Integer, primary_key=True)
+            row_type = Column(String)
+
+            __mapper_args__ = {
+                "polymorphic_on": row_type,
+                "polymorphic_identity": "baseuser",
+            }
+
+        class User(BaseUser):
+            __tablename__ = "user"
+
+            __mapper_args__ = {"polymorphic_identity": "user"}
+
+            baseuser_id = Column(
+                Integer, ForeignKey("root.id"), primary_key=True
+            )
+
+        class Bat(Base):
+            __tablename__ = "bat"
+            id = Column(Integer, primary_key=True)
+
+        class Thing(Base):
+            __tablename__ = "thing"
+
+            id = Column(Integer, primary_key=True)
+
+            owner_id = Column(Integer, ForeignKey("user.baseuser_id"))
+            owner = relationship("User")
+
+        class SubUser(User):
+            __mapper_args__ = {"polymorphic_identity": "subuser"}
+
+            sub_user_custom_thing = Column(Integer, ForeignKey("bat.id"))
+
+        eq_(
+            User.__table__.foreign_keys,
+            User.baseuser_id.foreign_keys.union(
+                SubUser.sub_user_custom_thing.foreign_keys
+            ),
+        )
+        is_true(
+            Thing.owner.property.primaryjoin.compare(
+                Thing.owner_id == User.baseuser_id
+            )
+        )
+
+    def test_single_constraint_on_sub(self):
+        """test the somewhat unusual case of [ticket:3341]"""
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+
+            __hack_args_one__ = sa.UniqueConstraint(
+                Person.name, primary_language
+            )
+            __hack_args_two__ = sa.CheckConstraint(
+                Person.name != primary_language
+            )
+
+        uq = [
+            c
+            for c in Person.__table__.constraints
+            if isinstance(c, sa.UniqueConstraint)
+        ][0]
+        ck = [
+            c
+            for c in Person.__table__.constraints
+            if isinstance(c, sa.CheckConstraint)
+        ][0]
+        eq_(
+            list(uq.columns),
+            [Person.__table__.c.name, Person.__table__.c.primary_language],
+        )
+        eq_(
+            list(ck.columns),
+            [Person.__table__.c.name, Person.__table__.c.primary_language],
+        )
+
+    @testing.skip_if(
+        lambda: testing.against("oracle"),
+        "Test has an empty insert in it at the moment",
+    )
+    def test_columns_single_inheritance_conflict_resolution(self):
+        """Test that a declared_attr can return the existing column and it will
+        be ignored.  this allows conditional columns to be added.
+
+        See [ticket:2472].
+
+        """
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+
+        class Engineer(Person):
+
+            """single table inheritance"""
+
+            @declared_attr
+            def target_id(cls):
+                return cls.__table__.c.get(
+                    "target_id", Column(Integer, ForeignKey("other.id"))
+                )
+
+            @declared_attr
+            def target(cls):
+                return relationship("Other")
+
+        class Manager(Person):
+
+            """single table inheritance"""
+
+            @declared_attr
+            def target_id(cls):
+                return cls.__table__.c.get(
+                    "target_id", Column(Integer, ForeignKey("other.id"))
+                )
+
+            @declared_attr
+            def target(cls):
+                return relationship("Other")
+
+        class Other(Base):
+            __tablename__ = "other"
+            id = Column(Integer, primary_key=True)
+
+        is_(
+            Engineer.target_id.property.columns[0],
+            Person.__table__.c.target_id,
+        )
+        is_(
+            Manager.target_id.property.columns[0], Person.__table__.c.target_id
+        )
+        # do a brief round trip on this
+        Base.metadata.create_all()
+        session = Session()
+        o1, o2 = Other(), Other()
+        session.add_all(
+            [Engineer(target=o1), Manager(target=o2), Manager(target=o1)]
+        )
+        session.commit()
+        eq_(session.query(Engineer).first().target, o1)
+
+    def test_columns_single_inheritance_conflict_resolution_pk(self):
+        """Test #2472 in terms of a primary key column.  This is
+        #4352.
+
+        """
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+
+            target_id = Column(Integer, primary_key=True)
+
+        class Engineer(Person):
+
+            """single table inheritance"""
+
+            @declared_attr
+            def target_id(cls):
+                return cls.__table__.c.get(
+                    "target_id", Column(Integer, primary_key=True)
+                )
+
+        class Manager(Person):
+
+            """single table inheritance"""
+
+            @declared_attr
+            def target_id(cls):
+                return cls.__table__.c.get(
+                    "target_id", Column(Integer, primary_key=True)
+                )
+
+        is_(
+            Engineer.target_id.property.columns[0],
+            Person.__table__.c.target_id,
+        )
+        is_(
+            Manager.target_id.property.columns[0], Person.__table__.c.target_id
+        )
+
+    def test_columns_single_inheritance_cascading_resolution_pk(self):
+        """An additional test for #4352 in terms of the requested use case.
+
+        """
+
+        class TestBase(Base):
+            __abstract__ = True
+
+            @declared_attr.cascading
+            def id(cls):
+                col_val = None
+                if TestBase not in cls.__bases__:
+                    col_val = cls.__table__.c.get("id")
+                if col_val is None:
+                    col_val = Column(Integer, primary_key=True)
+                return col_val
+
+        class Person(TestBase):
+            """single table base class"""
+
+            __tablename__ = "person"
+
+        class Engineer(Person):
+            """ single table inheritance, no extra cols """
+
+        class Manager(Person):
+            """ single table inheritance, no extra cols """
+
+        is_(Engineer.id.property.columns[0], Person.__table__.c.id)
+        is_(Manager.id.property.columns[0], Person.__table__.c.id)
+
+    def test_joined_from_single(self):
+        class Company(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "companies"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column("name", String(50))
+            employees = relationship("Person")
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            company_id = Column(Integer, ForeignKey("companies.id"))
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Manager(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+            golf_swing = Column(String(50))
+
+        class Engineer(Person):
+
+            __tablename__ = "engineers"
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            id = Column(Integer, ForeignKey("people.id"), primary_key=True)
+            primary_language = Column(String(50))
+
+        assert Person.__table__.c.golf_swing is not None
+        assert "primary_language" not in Person.__table__.c
+        assert Engineer.__table__.c.primary_language is not None
+        assert Engineer.primary_language is not None
+        assert Manager.golf_swing is not None
+        assert not hasattr(Person, "primary_language")
+        assert not hasattr(Person, "golf_swing")
+        assert not hasattr(Engineer, "golf_swing")
+        assert not hasattr(Manager, "primary_language")
+        Base.metadata.create_all()
+        sess = create_session()
+        e1 = Engineer(name="dilbert", primary_language="java")
+        e2 = Engineer(name="wally", primary_language="c++")
+        m1 = Manager(name="dogbert", golf_swing="fore!")
+        c1 = Company(name="MegaCorp, Inc.", employees=[e1, e2, m1])
+        e3 = Engineer(name="vlad", primary_language="cobol")
+        c2 = Company(name="Elbonia, Inc.", employees=[e3])
+        sess.add(c1)
+        sess.add(c2)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person)
+            .with_polymorphic(Engineer)
+            .filter(Engineer.primary_language == "cobol")
+            .first(),
+            Engineer(name="vlad"),
+        )
+        eq_(
+            sess.query(Company)
+            .filter(
+                Company.employees.of_type(Engineer).any(
+                    Engineer.primary_language == "cobol"
+                )
+            )
+            .first(),
+            c2,
+        )
+        eq_(
+            sess.query(Engineer).filter_by(primary_language="cobol").one(),
+            Engineer(name="vlad", primary_language="cobol"),
+        )
+
+    def test_single_from_joined_colsonsub(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Manager(Person):
+            __tablename__ = "manager"
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+            id = Column(Integer, ForeignKey("people.id"), primary_key=True)
+            golf_swing = Column(String(50))
+
+        class Boss(Manager):
+            boss_name = Column(String(50))
+
+        is_(
+            Boss.__mapper__.column_attrs["boss_name"].columns[0],
+            Manager.__table__.c.boss_name,
+        )
+
+    def test_polymorphic_on_converted_from_inst(self):
+        class A(Base):
+            __tablename__ = "A"
+            id = Column(Integer, primary_key=True)
+            discriminator = Column(String)
+
+            @declared_attr
+            def __mapper_args__(cls):
+                return {
+                    "polymorphic_identity": cls.__name__,
+                    "polymorphic_on": cls.discriminator,
+                }
+
+        class B(A):
+            pass
+
+        is_(B.__mapper__.polymorphic_on, A.__table__.c.discriminator)
+
+    def test_add_deferred(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            )
+
+        Person.name = deferred(Column(String(10)))
+        Base.metadata.create_all()
+        sess = create_session()
+        p = Person(name="ratbert")
+        sess.add(p)
+        sess.flush()
+        sess.expunge_all()
+        eq_(sess.query(Person).all(), [Person(name="ratbert")])
+        sess.expunge_all()
+        person = sess.query(Person).filter(Person.name == "ratbert").one()
+        assert "name" not in person.__dict__
+
+    def test_single_fksonsub(self):
+        """test single inheritance with a foreign key-holding column on
+        a subclass.
+
+        """
+
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language_id = Column(Integer, ForeignKey("languages.id"))
+            primary_language = relationship("Language")
+
+        class Language(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "languages"
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+            name = Column(String(50))
+
+        assert not hasattr(Person, "primary_language_id")
+        Base.metadata.create_all()
+        sess = create_session()
+        java, cpp, cobol = (
+            Language(name="java"),
+            Language(name="cpp"),
+            Language(name="cobol"),
+        )
+        e1 = Engineer(name="dilbert", primary_language=java)
+        e2 = Engineer(name="wally", primary_language=cpp)
+        e3 = Engineer(name="vlad", primary_language=cobol)
+        sess.add_all([e1, e2, e3])
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person)
+            .filter(Engineer.primary_language.has(Language.name == "cobol"))
+            .first(),
+            Engineer(name="vlad", primary_language=Language(name="cobol")),
+        )
+        eq_(
+            sess.query(Engineer)
+            .filter(Engineer.primary_language.has(Language.name == "cobol"))
+            .one(),
+            Engineer(name="vlad", primary_language=Language(name="cobol")),
+        )
+        eq_(
+            sess.query(Person)
+            .join(Engineer.primary_language)
+            .order_by(Language.name)
+            .all(),
+            [
+                Engineer(name="vlad", primary_language=Language(name="cobol")),
+                Engineer(name="wally", primary_language=Language(name="cpp")),
+                Engineer(
+                    name="dilbert", primary_language=Language(name="java")
+                ),
+            ],
+        )
+
+    def test_single_three_levels(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+
+        class JuniorEngineer(Engineer):
+
+            __mapper_args__ = {"polymorphic_identity": "junior_engineer"}
+            nerf_gun = Column(String(50))
+
+        class Manager(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "manager"}
+            golf_swing = Column(String(50))
+
+        assert JuniorEngineer.nerf_gun
+        assert JuniorEngineer.primary_language
+        assert JuniorEngineer.name
+        assert Manager.golf_swing
+        assert Engineer.primary_language
+        assert not hasattr(Engineer, "golf_swing")
+        assert not hasattr(Engineer, "nerf_gun")
+        assert not hasattr(Manager, "nerf_gun")
+        assert not hasattr(Manager, "primary_language")
+
+    def test_single_detects_conflict(self):
+        class Person(Base):
+
+            __tablename__ = "people"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        class Engineer(Person):
+
+            __mapper_args__ = {"polymorphic_identity": "engineer"}
+            primary_language = Column(String(50))
+
+        # test sibling col conflict
+
+        def go():
+            class Manager(Person):
+
+                __mapper_args__ = {"polymorphic_identity": "manager"}
+                golf_swing = Column(String(50))
+                primary_language = Column(String(50))
+
+        assert_raises(sa.exc.ArgumentError, go)
+
+        # test parent col conflict
+
+        def go():
+            class Salesman(Person):
+
+                __mapper_args__ = {"polymorphic_identity": "manager"}
+                name = Column(String(50))
+
+        assert_raises(sa.exc.ArgumentError, go)
+
+    def test_single_no_special_cols(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column("id", Integer, primary_key=True)
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        def go():
+            class Engineer(Person):
+
+                __mapper_args__ = {"polymorphic_identity": "engineer"}
+                primary_language = Column("primary_language", String(50))
+                foo_bar = Column(Integer, primary_key=True)
+
+        assert_raises_message(sa.exc.ArgumentError, "place primary key", go)
+
+    def test_single_no_table_args(self):
+        class Person(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "people"
+            id = Column("id", Integer, primary_key=True)
+            name = Column("name", String(50))
+            discriminator = Column("type", String(50))
+            __mapper_args__ = {"polymorphic_on": discriminator}
+
+        def go():
+            class Engineer(Person):
+
+                __mapper_args__ = {"polymorphic_identity": "engineer"}
+                primary_language = Column("primary_language", String(50))
+
+                # this should be on the Person class, as this is single
+                # table inheritance, which is why we test that this
+                # throws an exception!
+
+                __table_args__ = {"mysql_engine": "InnoDB"}
+
+        assert_raises_message(sa.exc.ArgumentError, "place __table_args__", go)
+
+    @testing.emits_warning("This declarative")
+    def test_dupe_name_in_hierarchy(self):
+        class A(Base):
+            __tablename__ = "a"
+            id = Column(Integer, primary_key=True)
+
+        a_1 = A
+
+        class A(a_1):
+            __tablename__ = "b"
+            id = Column(Integer(), ForeignKey(a_1.id), primary_key=True)
+
+        assert A.__mapper__.inherits is a_1.__mapper__
+
+
+class OverlapColPrecedenceTest(DeclarativeTestBase):
+
+    """test #1892 cases when declarative does column precedence."""
+
+    def _run_test(self, Engineer, e_id, p_id):
+        p_table = Base.metadata.tables["person"]
+        e_table = Base.metadata.tables["engineer"]
+        assert Engineer.id.property.columns[0] is e_table.c[e_id]
+        assert Engineer.id.property.columns[1] is p_table.c[p_id]
+
+    def test_basic(self):
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+
+        class Engineer(Person):
+            __tablename__ = "engineer"
+            id = Column(Integer, ForeignKey("person.id"), primary_key=True)
+
+        self._run_test(Engineer, "id", "id")
+
+    def test_alt_name_base(self):
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column("pid", Integer, primary_key=True)
+
+        class Engineer(Person):
+            __tablename__ = "engineer"
+            id = Column(Integer, ForeignKey("person.pid"), primary_key=True)
+
+        self._run_test(Engineer, "id", "pid")
+
+    def test_alt_name_sub(self):
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+
+        class Engineer(Person):
+            __tablename__ = "engineer"
+            id = Column(
+                "eid", Integer, ForeignKey("person.id"), primary_key=True
+            )
+
+        self._run_test(Engineer, "eid", "id")
+
+    def test_alt_name_both(self):
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column("pid", Integer, primary_key=True)
+
+        class Engineer(Person):
+            __tablename__ = "engineer"
+            id = Column(
+                "eid", Integer, ForeignKey("person.pid"), primary_key=True
+            )
+
+        self._run_test(Engineer, "eid", "pid")
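
The OverlapColPrecedenceTest cases above pin down the column ordering behind a shared attribute name in joined-table inheritance. As a rough standalone sketch of the behavior being asserted (class and column names mirror the tests; the ``sqlalchemy.orm.declarative_base`` import location is the one this change introduces):

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Person(Base):
        __tablename__ = "person"
        id = Column("pid", Integer, primary_key=True)

    class Engineer(Person):
        __tablename__ = "engineer"
        id = Column("eid", Integer, ForeignKey("person.pid"), primary_key=True)

    # the merged "id" attribute spans both columns with the subclass column
    # first, i.e. Engineer.id.property.columns is [engineer.c.eid, person.c.pid],
    # which is what _run_test() checks above
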
diff --git a/test/ext/declarative/test_mixin.py b/test/orm/declarative/test_mixin.py
similarity index 93%
rename from test/ext/declarative/test_mixin.py
rename to test/orm/declarative/test_mixin.py
index 75e5912a7fe9dd2e4170819cd605cfbde0afb44f..eed918572895cedd066a9653d91cdd6f755be8bf 100644 (file)
@@ -2,12 +2,10 @@ import sqlalchemy as sa
 from sqlalchemy import ForeignKey
 from sqlalchemy import func
 from sqlalchemy import Integer
+from sqlalchemy import MetaData
 from sqlalchemy import select
 from sqlalchemy import String
 from sqlalchemy import testing
-from sqlalchemy.ext import declarative as decl
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import base as orm_base
 from sqlalchemy.orm import class_mapper
 from sqlalchemy.orm import clear_mappers
@@ -15,8 +13,12 @@ from sqlalchemy.orm import close_all_sessions
 from sqlalchemy.orm import column_property
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import deferred
 from sqlalchemy.orm import events as orm_events
+from sqlalchemy.orm import has_inherited_table
+from sqlalchemy.orm import registry
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import synonym
@@ -32,14 +34,16 @@ from sqlalchemy.testing.schema import Table
 from sqlalchemy.testing.util import gc_collect
 from sqlalchemy.util import classproperty
 
-
 Base = None
+mapper_registry = None
 
 
 class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
     def setup(self):
-        global Base
-        Base = decl.declarative_base(testing.db)
+        global Base, mapper_registry
+
+        mapper_registry = registry(metadata=MetaData(bind=testing.db))
+        Base = mapper_registry.generate_base()
 
     def teardown(self):
         close_all_sessions()
@@ -48,7 +52,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
 
 
 class DeclarativeMixinTest(DeclarativeTestBase):
-    def test_simple(self):
+    def test_simple_wbase(self):
         class MyMixin(object):
 
             id = Column(
@@ -63,7 +67,33 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             __tablename__ = "test"
             name = Column(String(100), nullable=False, index=True)
 
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
+        session = create_session()
+        session.add(MyModel(name="testing"))
+        session.flush()
+        session.expunge_all()
+        obj = session.query(MyModel).one()
+        eq_(obj.id, 1)
+        eq_(obj.name, "testing")
+        eq_(obj.foo(), "bar1")
+
+    def test_simple_wdecorator(self):
+        class MyMixin(object):
+
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+
+            def foo(self):
+                return "bar" + str(self.id)
+
+        @mapper_registry.mapped
+        class MyModel(MyMixin):
+
+            __tablename__ = "test"
+            name = Column(String(100), nullable=False, index=True)
+
+        Base.metadata.create_all(testing.db)
         session = create_session()
         session.add(MyModel(name="testing"))
         session.flush()
@@ -85,7 +115,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
 
         assert MyModel.__table__.c.value.unique
 
-    def test_hierarchical_bases(self):
+    def test_hierarchical_bases_wbase(self):
         class MyMixinParent:
 
             id = Column(
@@ -104,7 +134,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             __tablename__ = "test"
             name = Column(String(100), nullable=False, index=True)
 
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
         session = create_session()
         session.add(MyModel(name="testing", baz="fu"))
         session.flush()
@@ -115,7 +145,38 @@ class DeclarativeMixinTest(DeclarativeTestBase):
         eq_(obj.foo(), "bar1")
         eq_(obj.baz, "fu")
 
-    def test_mixin_overrides(self):
+    def test_hierarchical_bases_wdecorator(self):
+        class MyMixinParent:
+
+            id = Column(
+                Integer, primary_key=True, test_needs_autoincrement=True
+            )
+
+            def foo(self):
+                return "bar" + str(self.id)
+
+        class MyMixin(MyMixinParent):
+
+            baz = Column(String(100), nullable=False, index=True)
+
+        @mapper_registry.mapped
+        class MyModel(MyMixin, object):
+
+            __tablename__ = "test"
+            name = Column(String(100), nullable=False, index=True)
+
+        Base.metadata.create_all(testing.db)
+        session = create_session()
+        session.add(MyModel(name="testing", baz="fu"))
+        session.flush()
+        session.expunge_all()
+        obj = session.query(MyModel).one()
+        eq_(obj.id, 1)
+        eq_(obj.name, "testing")
+        eq_(obj.foo(), "bar1")
+        eq_(obj.baz, "fu")
+
+    def test_mixin_overrides_wbase(self):
         """test a mixin that overrides a column on a superclass."""
 
         class MixinA(object):
@@ -135,6 +196,28 @@ class DeclarativeMixinTest(DeclarativeTestBase):
         eq_(MyModelA.__table__.c.foo.type.__class__, String)
         eq_(MyModelB.__table__.c.foo.type.__class__, Integer)
 
+    def test_mixin_overrides_wdecorator(self):
+        """test a mixin that overrides a column on a superclass."""
+
+        class MixinA(object):
+            foo = Column(String(50))
+
+        class MixinB(MixinA):
+            foo = Column(Integer)
+
+        @mapper_registry.mapped
+        class MyModelA(MixinA):
+            __tablename__ = "testa"
+            id = Column(Integer, primary_key=True)
+
+        @mapper_registry.mapped
+        class MyModelB(MixinB):
+            __tablename__ = "testb"
+            id = Column(Integer, primary_key=True)
+
+        eq_(MyModelA.__table__.c.foo.type.__class__, String)
+        eq_(MyModelB.__table__.c.foo.type.__class__, Integer)
+
     def test_not_allowed(self):
         class MyMixin:
             foo = Column(Integer, ForeignKey("bar.id"))
@@ -350,7 +433,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             Manager.target_id.property.columns[0], Person.__table__.c.target_id
         )
         # do a brief round trip on this
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
         session = Session()
         o1, o2 = Other(), Other()
         session.add_all(
@@ -742,7 +825,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             def id(self):
                 return Column(Integer, primary_key=True)
 
-        Base = decl.declarative_base(cls=Base)
+        Base = declarative_base(cls=Base)
 
         class MyClass(Base):
             pass
@@ -910,7 +993,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
         class NoJoinedTableNameMixin:
             @declared_attr
             def __tablename__(cls):
-                if decl.has_inherited_table(cls):
+                if has_inherited_table(cls):
                     return None
                 return cls.__name__.lower()
 
@@ -938,7 +1021,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             @declared_attr
             def __tablename__(cls):
                 if (
-                    decl.has_inherited_table(cls)
+                    has_inherited_table(cls)
                     and TableNameMixin not in cls.__bases__
                 ):
                     return None
@@ -1279,7 +1362,7 @@ class DeclarativeMixinPropertyTest(
         assert (
             MyModel.prop_hoho.property is not MyOtherModel.prop_hoho.property
         )
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
         sess = create_session()
         m1, m2 = MyModel(prop_hoho="foo"), MyOtherModel(prop_hoho="bar")
         sess.add_all([m1, m2])
@@ -1458,7 +1541,7 @@ class DeclarativeMixinPropertyTest(
                 Integer, primary_key=True, test_needs_autoincrement=True
             )
 
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
         sess = create_session()
         sess.add_all([MyModel(data="d1"), MyModel(data="d2")])
         sess.flush()
@@ -1510,7 +1593,7 @@ class DeclarativeMixinPropertyTest(
                 Integer, primary_key=True, test_needs_autoincrement=True
             )
 
-        Base.metadata.create_all()
+        Base.metadata.create_all(testing.db)
         sess = create_session()
         t1, t2 = Target(), Target()
         f1, f2, b1 = Foo(target=t1), Foo(target=t2), Bar(target=t1)
@@ -1601,7 +1684,14 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
         eq_(counter.mock_calls, [mock.call("A")])
         del A
         gc_collect()
-        assert "A" not in Base._decl_class_registry
+
+        from sqlalchemy.orm.clsregistry import _key_is_empty
+
+        assert _key_is_empty(
+            "A",
+            Base.registry._class_registry,
+            lambda cls: hasattr(cls, "my_other_prop"),
+        )
 
     def test_can_we_access_the_mixin_straight(self):
         class Mixin(object):
@@ -1656,6 +1746,9 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
         eq_(Foo.__tablename__, "foo")
         eq_(Foo.__tablename__, "foo")
 
+        # here we are testing that access of __tablename__ does in fact
+        # call the user-defined function, as we are no longer in the
+        # "declarative_scan" phase.  the class *is* mapped here.
         eq_(
             counter.mock_calls,
             [mock.call(Foo), mock.call(Foo), mock.call(Foo)],
@@ -1774,7 +1867,7 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
         class Mixin(object):
             @declared_attr.cascading
             def my_attr(cls):
-                if decl.has_inherited_table(cls):
+                if has_inherited_table(cls):
                     id_ = Column(ForeignKey("a.my_attr"), primary_key=True)
                     asserted["b"].add(id_)
                 else:
@@ -1916,6 +2009,23 @@ class AbstractTest(DeclarativeTestBase):
 
         eq_(sa.inspect(C).attrs.keys(), ["id", "name", "data", "c_value"])
 
+    def test_implicit_abstract_viadecorator(self):
+        @mapper_registry.mapped
+        class A(object):
+            __tablename__ = "a"
+
+            id = Column(Integer, primary_key=True)
+            name = Column(String)
+
+        class B(A):
+            data = Column(String)
+
+        @mapper_registry.mapped
+        class C(B):
+            c_value = Column(String)
+
+        eq_(sa.inspect(C).attrs.keys(), ["id", "name", "data", "c_value"])
+
     def test_middle_abstract_inherits(self):
         # test for [ticket:3240]
 
diff --git a/test/orm/declarative/test_reflection.py b/test/orm/declarative/test_reflection.py
new file mode 100644 (file)
index 0000000..169f4b8
--- /dev/null
@@ -0,0 +1,205 @@
+from sqlalchemy import ForeignKey
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import String
+from sqlalchemy import testing
+from sqlalchemy.orm import clear_mappers
+from sqlalchemy.orm import create_session
+from sqlalchemy.orm import decl_api as decl
+from sqlalchemy.orm import relationship
+from sqlalchemy.testing import assert_raises
+from sqlalchemy.testing import eq_
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.schema import Column
+from sqlalchemy.testing.schema import Table
+
+
+class DeclarativeReflectionBase(fixtures.TablesTest):
+    __requires__ = ("reflectable_autoincrement",)
+
+    def setup(self):
+        global Base, registry
+
+        registry = decl.registry(metadata=MetaData(bind=testing.db))
+        Base = registry.generate_base()
+
+    def teardown(self):
+        super(DeclarativeReflectionBase, self).teardown()
+        clear_mappers()
+
+
+class DeclarativeReflectionTest(DeclarativeReflectionBase):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            "users",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("name", String(50)),
+            test_needs_fk=True,
+        )
+        Table(
+            "addresses",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("email", String(50)),
+            Column("user_id", Integer, ForeignKey("users.id")),
+            test_needs_fk=True,
+        )
+        Table(
+            "imhandles",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("user_id", Integer),
+            Column("network", String(50)),
+            Column("handle", String(50)),
+            test_needs_fk=True,
+        )
+
+    def test_basic(self):
+        class User(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "users"
+            __autoload__ = True
+            addresses = relationship("Address", backref="user")
+
+        class Address(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "addresses"
+            __autoload__ = True
+
+        u1 = User(
+            name="u1", addresses=[Address(email="one"), Address(email="two")]
+        )
+        sess = create_session()
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(User).all(),
+            [
+                User(
+                    name="u1",
+                    addresses=[Address(email="one"), Address(email="two")],
+                )
+            ],
+        )
+        a1 = sess.query(Address).filter(Address.email == "two").one()
+        eq_(a1, Address(email="two"))
+        eq_(a1.user, User(name="u1"))
+
+    def test_rekey_wbase(self):
+        class User(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "users"
+            __autoload__ = True
+            nom = Column("name", String(50), key="nom")
+            addresses = relationship("Address", backref="user")
+
+        class Address(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "addresses"
+            __autoload__ = True
+
+        u1 = User(
+            nom="u1", addresses=[Address(email="one"), Address(email="two")]
+        )
+        sess = create_session()
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(User).all(),
+            [
+                User(
+                    nom="u1",
+                    addresses=[Address(email="one"), Address(email="two")],
+                )
+            ],
+        )
+        a1 = sess.query(Address).filter(Address.email == "two").one()
+        eq_(a1, Address(email="two"))
+        eq_(a1.user, User(nom="u1"))
+        assert_raises(TypeError, User, name="u3")
+
+    def test_rekey_wdecorator(self):
+        @registry.mapped
+        class User(fixtures.ComparableMixin):
+
+            __tablename__ = "users"
+            __autoload__ = True
+            nom = Column("name", String(50), key="nom")
+            addresses = relationship("Address", backref="user")
+
+        @registry.mapped
+        class Address(fixtures.ComparableMixin):
+
+            __tablename__ = "addresses"
+            __autoload__ = True
+
+        u1 = User(
+            nom="u1", addresses=[Address(email="one"), Address(email="two")]
+        )
+        sess = create_session()
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(User).all(),
+            [
+                User(
+                    nom="u1",
+                    addresses=[Address(email="one"), Address(email="two")],
+                )
+            ],
+        )
+        a1 = sess.query(Address).filter(Address.email == "two").one()
+        eq_(a1, Address(email="two"))
+        eq_(a1.user, User(nom="u1"))
+        assert_raises(TypeError, User, name="u3")
+
+    def test_supplied_fk(self):
+        class IMHandle(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "imhandles"
+            __autoload__ = True
+            user_id = Column("user_id", Integer, ForeignKey("users.id"))
+
+        class User(Base, fixtures.ComparableEntity):
+
+            __tablename__ = "users"
+            __autoload__ = True
+            handles = relationship("IMHandle", backref="user")
+
+        u1 = User(
+            name="u1",
+            handles=[
+                IMHandle(network="blabber", handle="foo"),
+                IMHandle(network="lol", handle="zomg"),
+            ],
+        )
+        sess = create_session()
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(User).all(),
+            [
+                User(
+                    name="u1",
+                    handles=[
+                        IMHandle(network="blabber", handle="foo"),
+                        IMHandle(network="lol", handle="zomg"),
+                    ],
+                )
+            ],
+        )
+        a1 = sess.query(IMHandle).filter(IMHandle.handle == "zomg").one()
+        eq_(a1, IMHandle(network="lol", handle="zomg"))
+        eq_(a1.user, User(name="u1"))
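
The new reflection tests above drive the same declarative registry through both mapping styles. A minimal sketch of the two forms exercised here, using placeholder table and column names rather than the test schema:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import registry

    mapper_registry = registry()
    Base = mapper_registry.generate_base()

    class User(Base):  # base-class style, generated from the registry
        __tablename__ = "user_account"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    @mapper_registry.mapped  # decorator style, no common base class needed
    class Address:
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        email = Column(String(50))

Both classes land in the same class registry, so string-based lookups such as ``relationship("Address")`` resolve across the two styles.
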
index da2ad4cdf815782fe2b61f76c521caf71aa3144e..77564bcdb4dc3b4eee3ad92072287c3dd934dbe1 100644 (file)
@@ -3,7 +3,6 @@ from sqlalchemy import Integer
 from sqlalchemy import String
 from sqlalchemy import util
 from sqlalchemy.orm import create_session
-from sqlalchemy.orm import mapper
 from sqlalchemy.orm import polymorphic_union
 from sqlalchemy.orm import relationship
 from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
@@ -319,7 +318,7 @@ class _PolymorphicFixtureBase(fixtures.MappedTest, AssertsCompiledSQL):
 
     @classmethod
     def setup_mappers(cls):
-        mapper(
+        cls.mapper(
             Company,
             companies,
             properties={
@@ -327,14 +326,14 @@ class _PolymorphicFixtureBase(fixtures.MappedTest, AssertsCompiledSQL):
             },
         )
 
-        mapper(Machine, machines)
+        cls.mapper(Machine, machines)
 
         (
             person_with_polymorphic,
             manager_with_polymorphic,
         ) = cls._get_polymorphics()
 
-        mapper(
+        cls.mapper(
             Person,
             people,
             with_polymorphic=person_with_polymorphic,
@@ -347,7 +346,7 @@ class _PolymorphicFixtureBase(fixtures.MappedTest, AssertsCompiledSQL):
             },
         )
 
-        mapper(
+        cls.mapper(
             Engineer,
             engineers,
             inherits=Person,
@@ -359,7 +358,7 @@ class _PolymorphicFixtureBase(fixtures.MappedTest, AssertsCompiledSQL):
             },
         )
 
-        mapper(
+        cls.mapper(
             Manager,
             managers,
             with_polymorphic=manager_with_polymorphic,
@@ -367,9 +366,9 @@ class _PolymorphicFixtureBase(fixtures.MappedTest, AssertsCompiledSQL):
             polymorphic_identity="manager",
         )
 
-        mapper(Boss, boss, inherits=Manager, polymorphic_identity="boss")
+        cls.mapper(Boss, boss, inherits=Manager, polymorphic_identity="boss")
 
-        mapper(Paperwork, paperwork)
+        cls.mapper(Paperwork, paperwork)
 
 
 class _Polymorphic(_PolymorphicFixtureBase):
index 1ac97b64a91583ca47188b1870785a05fe4b6d83..e0665b23fc2cf0b95ff58f3560e857a70332e131 100644 (file)
@@ -7,6 +7,7 @@ from sqlalchemy import Integer
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy.orm import mapper
+from sqlalchemy.orm import registry as declarative_registry
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.testing import eq_
@@ -225,3 +226,62 @@ class DataclassesTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
                 .one()
             )
             self.check_data_fixture(a)
+
+
+class PlainDeclarativeDataclassesTest(DataclassesTest):
+    __requires__ = ("dataclasses",)
+
+    run_setup_classes = "each"
+    run_setup_mappers = "each"
+
+    @classmethod
+    def setup_classes(cls):
+        accounts = cls.tables.accounts
+        widgets = cls.tables.widgets
+
+        declarative = declarative_registry().mapped
+
+        @declarative
+        @dataclasses.dataclass
+        class Widget:
+            __table__ = widgets
+
+            name: Optional[str] = None
+
+            __mapper_args__ = dict(
+                polymorphic_on=widgets.c.type, polymorphic_identity="normal",
+            )
+
+        @declarative
+        @dataclasses.dataclass
+        class SpecialWidget(Widget):
+
+            magic: bool = False
+
+            __mapper_args__ = dict(polymorphic_identity="special",)
+
+        @declarative
+        @dataclasses.dataclass
+        class Account:
+            __table__ = accounts
+
+            account_id: int
+            widgets: List[Widget] = dataclasses.field(default_factory=list)
+            widget_count: int = dataclasses.field(init=False)
+
+            widgets = relationship("Widget")
+
+            def __post_init__(self):
+                self.widget_count = len(self.widgets)
+
+            def add_widget(self, widget: Widget):
+                self.widgets.append(widget)
+                self.widget_count += 1
+
+        cls.classes.Account = Account
+        cls.classes.Widget = Widget
+        cls.classes.SpecialWidget = SpecialWidget
+
+    @classmethod
+    def setup_mappers(cls):
+        pass
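
The PlainDeclarativeDataclassesTest fixtures above map plain Python dataclasses against pre-existing ``Table`` objects via the registry decorator, so instance construction goes through the generated dataclass ``__init__``. A hypothetical use of those fixtures (values invented; ``Widget`` and ``Account`` as defined in ``setup_classes()`` above):

    # Widget exposes a single init field "name"; Account fills in
    # widget_count via __post_init__ since that field uses init=False
    widgets = [Widget(name="a"), Widget(name="b")]
    account = Account(account_id=42, widgets=widgets)
    assert account.widget_count == 2

    account.add_widget(Widget(name="c"))
    assert account.widget_count == 3
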
index 6de59d2a2f2116e5f488125c8f80248e162252d4..3bd5d97dba06d468d972783286b583486ebb285f 100644 (file)
@@ -4,6 +4,7 @@ from sqlalchemy import util
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import eq_
 from test.orm import _fixtures
@@ -605,3 +606,93 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
 
         # verify everything loaded, with no additional sql needed
         self._assert_fully_loaded(users)
+
+
+class NoLoadTest(_fixtures.FixtureTest):
+    run_inserts = "once"
+    run_deletes = None
+
+    def test_o2m_noload(self):
+
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        m = mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), lazy="noload"
+                )
+            ),
+        )
+        q = create_session().query(m)
+        result = [None]
+
+        def go():
+            x = q.filter(User.id == 7).all()
+            x[0].addresses
+            result[0] = x
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        self.assert_result(
+            result[0], User, {"id": 7, "addresses": (Address, [])}
+        )
+
+    def test_upgrade_o2m_noload_lazyload_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        m = mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), lazy="noload"
+                )
+            ),
+        )
+        q = create_session().query(m).options(sa.orm.lazyload("addresses"))
+        result = [None]
+
+        def go():
+            x = q.filter(User.id == 7).all()
+            x[0].addresses
+            result[0] = x
+
+        self.sql_count_(2, go)
+
+        self.assert_result(
+            result[0], User, {"id": 7, "addresses": (Address, [{"id": 1}])}
+        )
+
+    def test_m2o_noload_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+        mapper(Address, addresses, properties={"user": relationship(User)})
+        mapper(User, users)
+        s = Session()
+        a1 = (
+            s.query(Address)
+            .filter_by(id=1)
+            .options(sa.orm.noload("user"))
+            .first()
+        )
+
+        def go():
+            eq_(a1.user, None)
+
+        self.sql_count_(0, go)
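
The NoLoadTest cases above configure "noload" relationships using the classical mapper() form. A minimal declarative sketch of the same strategy, with assumed table and class names rather than the fixture schema:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import declarative_base, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        # lazy="noload" resolves to an empty collection on access without
        # emitting SQL; a per-query lazyload()/noload() option can override
        # the configured strategy, which is what the tests above assert
        addresses = relationship("Address", lazy="noload")

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey("user.id"))
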
index e3cefe9b760ae711b898a6fd5c5ffedffd041e55..0a452bb1eaef3e3b053be09b0f150a82211d42ea 100644 (file)
@@ -3,9 +3,11 @@ from sqlalchemy import ForeignKey
 from sqlalchemy import func
 from sqlalchemy import Integer
 from sqlalchemy import select
+from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy import util
 from sqlalchemy.orm import aliased
+from sqlalchemy.orm import attributes
 from sqlalchemy.orm import contains_eager
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defaultload
@@ -30,6 +32,7 @@ from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing.schema import Column
+from sqlalchemy.testing.schema import Table
 from test.orm import _fixtures
 from .inheritance._poly_fixtures import _Polymorphic
 from .inheritance._poly_fixtures import Boss
@@ -2132,3 +2135,118 @@ class AutoflushTest(fixtures.DeclarativeMappedTest):
         eq_(a1.b_count, 2)
 
         assert b1 in s
+
+
+class DeferredPopulationTest(fixtures.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            "thing",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("name", String(20)),
+        )
+
+        Table(
+            "human",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("thing_id", Integer, ForeignKey("thing.id")),
+            Column("name", String(20)),
+        )
+
+    @classmethod
+    def setup_mappers(cls):
+        thing, human = cls.tables.thing, cls.tables.human
+
+        class Human(cls.Basic):
+            pass
+
+        class Thing(cls.Basic):
+            pass
+
+        mapper(Human, human, properties={"thing": relationship(Thing)})
+        mapper(Thing, thing, properties={"name": deferred(thing.c.name)})
+
+    @classmethod
+    def insert_data(cls, connection):
+        thing, human = cls.tables.thing, cls.tables.human
+
+        connection.execute(thing.insert(), [{"id": 1, "name": "Chair"}])
+
+        connection.execute(
+            human.insert(), [{"id": 1, "thing_id": 1, "name": "Clark Kent"}]
+        )
+
+    def _test(self, thing):
+        assert "name" in attributes.instance_state(thing).dict
+
+    def test_no_previous_query(self):
+        Thing = self.classes.Thing
+
+        session = create_session()
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_query_twice_with_clear(self):
+        Thing = self.classes.Thing
+
+        session = create_session()
+        result = session.query(Thing).first()  # noqa
+        session.expunge_all()
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_query_twice_no_clear(self):
+        Thing = self.classes.Thing
+
+        session = create_session()
+        result = session.query(Thing).first()  # noqa
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_joinedload_with_clear(self):
+        Thing, Human = self.classes.Thing, self.classes.Human
+
+        session = create_session()
+        human = (  # noqa
+            session.query(Human).options(sa.orm.joinedload("thing")).first()
+        )
+        session.expunge_all()
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_joinedload_no_clear(self):
+        Thing, Human = self.classes.Thing, self.classes.Human
+
+        session = create_session()
+        human = (  # noqa
+            session.query(Human).options(sa.orm.joinedload("thing")).first()
+        )
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_join_with_clear(self):
+        Thing, Human = self.classes.Thing, self.classes.Human
+
+        session = create_session()
+        result = (  # noqa
+            session.query(Human).add_entity(Thing).join("thing").first()
+        )
+        session.expunge_all()
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
+
+    def test_join_no_clear(self):
+        Thing, Human = self.classes.Thing, self.classes.Human
+
+        session = create_session()
+        result = (  # noqa
+            session.query(Human).add_entity(Thing).join("thing").first()
+        )
+        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
+        self._test(thing)
index 36c9a18691b74af4ad772221d41c198fc345a633..00d98d2b5927b288493554537cb23f0c1de4790a 100644 (file)
@@ -5825,3 +5825,412 @@ class EntityViaMultiplePathTestThree(fixtures.DeclarativeMappedTest):
                     assert a.parent is not None
 
         self.assert_sql_count(testing.db, go, 1)
+
+
+class DeepOptionsTest(_fixtures.FixtureTest):
+    @classmethod
+    def setup_mappers(cls):
+        (
+            users,
+            Keyword,
+            items,
+            order_items,
+            Order,
+            Item,
+            User,
+            keywords,
+            item_keywords,
+            orders,
+        ) = (
+            cls.tables.users,
+            cls.classes.Keyword,
+            cls.tables.items,
+            cls.tables.order_items,
+            cls.classes.Order,
+            cls.classes.Item,
+            cls.classes.User,
+            cls.tables.keywords,
+            cls.tables.item_keywords,
+            cls.tables.orders,
+        )
+
+        mapper(Keyword, keywords)
+
+        mapper(
+            Item,
+            items,
+            properties=dict(
+                keywords=relationship(
+                    Keyword, item_keywords, order_by=item_keywords.c.item_id
+                )
+            ),
+        )
+
+        mapper(
+            Order,
+            orders,
+            properties=dict(
+                items=relationship(Item, order_items, order_by=items.c.id)
+            ),
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(orders=relationship(Order, order_by=orders.c.id)),
+        )
+
+    def test_deep_options_1(self):
+        User = self.classes.User
+
+        sess = create_session()
+
+        # joinedload nothing.
+        u = sess.query(User).order_by(User.id).all()
+
+        def go():
+            u[0].orders[1].items[0].keywords[1]
+
+        self.assert_sql_count(testing.db, go, 3)
+
+    def test_deep_options_2(self):
+        """test (joined|subquery)load_all() options"""
+
+        User = self.classes.User
+
+        sess = create_session()
+
+        result = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(
+                sa.orm.joinedload("orders")
+                .joinedload("items")
+                .joinedload("keywords")
+            )
+        ).all()
+
+        def go():
+            result[0].orders[1].items[0].keywords[1]
+
+        self.sql_count_(0, go)
+
+        sess = create_session()
+
+        result = (
+            sess.query(User).options(
+                sa.orm.subqueryload("orders")
+                .subqueryload("items")
+                .subqueryload("keywords")
+            )
+        ).all()
+
+        def go():
+            result[0].orders[1].items[0].keywords[1]
+
+        self.sql_count_(0, go)
+
+    def test_deep_options_3(self):
+        User = self.classes.User
+
+        sess = create_session()
+
+        # same thing, with separate options calls
+        q2 = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(sa.orm.joinedload("orders"))
+            .options(sa.orm.joinedload("orders.items"))
+            .options(sa.orm.joinedload("orders.items.keywords"))
+        )
+        u = q2.all()
+
+        def go():
+            u[0].orders[1].items[0].keywords[1]
+
+        self.sql_count_(0, go)
+
+    def test_deep_options_4(self):
+        Item, User, Order = (
+            self.classes.Item,
+            self.classes.User,
+            self.classes.Order,
+        )
+
+        sess = create_session()
+
+        assert_raises_message(
+            sa.exc.ArgumentError,
+            'Mapped attribute "Order.items" does not apply to any of the '
+            "root entities in this query, e.g. mapped class User->users. "
+            "Please specify the full path from one of the root entities "
+            "to the target attribute.",
+            sess.query(User)
+            .options(sa.orm.joinedload(Order.items))
+            ._compile_context,
+        )
+
+        # joinedload "keywords" on items.  It will lazy load "orders",
+        # then lazy load the "items" on each order, but on "items" it
+        # will eagerly load the "keywords".
+        q3 = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(sa.orm.joinedload("orders.items.keywords"))
+        )
+        u = q3.all()
+
+        def go():
+            u[0].orders[1].items[0].keywords[1]
+
+        self.sql_count_(2, go)
+
+        sess = create_session()
+        q3 = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(
+                sa.orm.joinedload(User.orders, Order.items, Item.keywords)
+            )
+        )
+        u = q3.all()
+
+        def go():
+            u[0].orders[1].items[0].keywords[1]
+
+        self.sql_count_(2, go)
+
+
+class SecondaryOptionsTest(fixtures.MappedTest):
+
+    """test that the contains_eager() option doesn't bleed
+    into a secondary load."""
+
+    run_inserts = "once"
+
+    run_deletes = None
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            "base",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("type", String(50), nullable=False),
+        )
+        Table(
+            "child1",
+            metadata,
+            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
+            Column(
+                "child2id", Integer, ForeignKey("child2.id"), nullable=False
+            ),
+        )
+        Table(
+            "child2",
+            metadata,
+            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
+        )
+        Table(
+            "related",
+            metadata,
+            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
+        )
+
+    @classmethod
+    def setup_mappers(cls):
+        child1, child2, base, related = (
+            cls.tables.child1,
+            cls.tables.child2,
+            cls.tables.base,
+            cls.tables.related,
+        )
+
+        class Base(cls.Comparable):
+            pass
+
+        class Child1(Base):
+            pass
+
+        class Child2(Base):
+            pass
+
+        class Related(cls.Comparable):
+            pass
+
+        mapper(
+            Base,
+            base,
+            polymorphic_on=base.c.type,
+            properties={"related": relationship(Related, uselist=False)},
+        )
+        mapper(
+            Child1,
+            child1,
+            inherits=Base,
+            polymorphic_identity="child1",
+            properties={
+                "child2": relationship(
+                    Child2,
+                    primaryjoin=child1.c.child2id == base.c.id,
+                    foreign_keys=child1.c.child2id,
+                )
+            },
+        )
+        mapper(Child2, child2, inherits=Base, polymorphic_identity="child2")
+        mapper(Related, related)
+
+    @classmethod
+    def insert_data(cls, connection):
+        child1, child2, base, related = (
+            cls.tables.child1,
+            cls.tables.child2,
+            cls.tables.base,
+            cls.tables.related,
+        )
+
+        connection.execute(
+            base.insert(),
+            [
+                {"id": 1, "type": "child1"},
+                {"id": 2, "type": "child1"},
+                {"id": 3, "type": "child1"},
+                {"id": 4, "type": "child2"},
+                {"id": 5, "type": "child2"},
+                {"id": 6, "type": "child2"},
+            ],
+        )
+        connection.execute(child2.insert(), [{"id": 4}, {"id": 5}, {"id": 6}])
+        connection.execute(
+            child1.insert(),
+            [
+                {"id": 1, "child2id": 4},
+                {"id": 2, "child2id": 5},
+                {"id": 3, "child2id": 6},
+            ],
+        )
+        connection.execute(
+            related.insert(),
+            [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}, {"id": 6}],
+        )
+
+    def test_contains_eager(self):
+        Child1, Related = self.classes.Child1, self.classes.Related
+
+        sess = create_session()
+
+        child1s = (
+            sess.query(Child1)
+            .join(Child1.related)
+            .options(sa.orm.contains_eager(Child1.related))
+            .order_by(Child1.id)
+        )
+
+        def go():
+            eq_(
+                child1s.all(),
+                [
+                    Child1(id=1, related=Related(id=1)),
+                    Child1(id=2, related=Related(id=2)),
+                    Child1(id=3, related=Related(id=3)),
+                ],
+            )
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        c1 = child1s[0]
+
+        self.assert_sql_execution(
+            testing.db,
+            lambda: c1.child2,
+            CompiledSQL(
+                "SELECT child2.id AS child2_id, base.id AS base_id, "
+                "base.type AS base_type "
+                "FROM base JOIN child2 ON base.id = child2.id "
+                "WHERE base.id = :param_1",
+                {"param_1": 4},
+            ),
+        )
+
+    def test_joinedload_on_other(self):
+        Child1, Related = self.classes.Child1, self.classes.Related
+
+        sess = create_session()
+
+        child1s = (
+            sess.query(Child1)
+            .join(Child1.related)
+            .options(sa.orm.joinedload(Child1.related))
+            .order_by(Child1.id)
+        )
+
+        def go():
+            eq_(
+                child1s.all(),
+                [
+                    Child1(id=1, related=Related(id=1)),
+                    Child1(id=2, related=Related(id=2)),
+                    Child1(id=3, related=Related(id=3)),
+                ],
+            )
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        c1 = child1s[0]
+
+        self.assert_sql_execution(
+            testing.db,
+            lambda: c1.child2,
+            CompiledSQL(
+                "SELECT child2.id AS child2_id, base.id AS base_id, "
+                "base.type AS base_type "
+                "FROM base JOIN child2 ON base.id = child2.id "
+                "WHERE base.id = :param_1",
+                {"param_1": 4},
+            ),
+        )
+
+    def test_joinedload_on_same(self):
+        Child1, Child2, Related = (
+            self.classes.Child1,
+            self.classes.Child2,
+            self.classes.Related,
+        )
+
+        sess = create_session()
+
+        child1s = (
+            sess.query(Child1)
+            .join(Child1.related)
+            .options(sa.orm.joinedload(Child1.child2, Child2.related))
+            .order_by(Child1.id)
+        )
+
+        def go():
+            eq_(
+                child1s.all(),
+                [
+                    Child1(id=1, related=Related(id=1)),
+                    Child1(id=2, related=Related(id=2)),
+                    Child1(id=3, related=Related(id=3)),
+                ],
+            )
+
+        self.assert_sql_count(testing.db, go, 4)
+
+        c1 = child1s[0]
+
+        # this *does* joinedload
+        self.assert_sql_execution(
+            testing.db,
+            lambda: c1.child2,
+            CompiledSQL(
+                "SELECT child2.id AS child2_id, base.id AS base_id, "
+                "base.type AS base_type, "
+                "related_1.id AS related_1_id FROM base JOIN child2 "
+                "ON base.id = child2.id "
+                "LEFT OUTER JOIN related AS related_1 "
+                "ON base.id = related_1.id WHERE base.id = :param_1",
+                {"param_1": 4},
+            ),
+        )
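
Editorial note: the ``DeepOptionsTest`` cases added above exercise chained loader options (``joinedload()``/``subqueryload()`` continuing across several relationships), and ``SecondaryOptionsTest`` then verifies that ``contains_eager()`` does not carry over into the secondary lazy load. A rough standalone illustration of the chaining pattern follows; the two-level ``User``/``Order``/``Item`` mapping and SQLite URL are simplified stand-ins, not the fixture schema used by these tests:

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base, joinedload, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        orders = relationship("Order")

    class Order(Base):
        __tablename__ = "orders"
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey("users.id"))
        items = relationship("Item")

    class Item(Base):
        __tablename__ = "items"
        id = Column(Integer, primary_key=True)
        order_id = Column(ForeignKey("orders.id"))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = Session(engine)

    # each joinedload() in the chain continues from the previous attribute,
    # so User.orders and Order.items are both loaded in the one statement
    users = (
        session.query(User)
        .options(joinedload(User.orders).joinedload(Order.items))
        .all()
    )
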
index 02bd740410057879f11e26482fdd595906d9dc91..b6a4b41cb381f8e2d0e92c7121be7d518d137901 100644 (file)
@@ -711,6 +711,9 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         class MyClass(object):
             pass
 
+        class MySubClass(MyClass):
+            pass
+
         canary = Mock()
 
         def my_init(self):
@@ -720,6 +723,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         @event.listens_for(mapper, "instrument_class")
         def instrument_class(mp, class_):
             canary.instrument_class(class_)
+
             class_.__init__ = my_init
 
         # instrumentationmanager event
@@ -729,13 +733,21 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
         mapper(MyClass, users)
 
+        mapper(MySubClass, inherits=MyClass)
+
         m1 = MyClass()
         assert attributes.instance_state(m1)
 
+        m2 = MySubClass()
+        assert attributes.instance_state(m2)
+
         eq_(
             [
                 call.instrument_class(MyClass),
                 call.class_instrument(MyClass),
+                call.instrument_class(MySubClass),
+                call.class_instrument(MySubClass),
+                call.init(),
                 call.init(),
             ],
             canary.mock_calls,
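
Editorial note: the hunk above extends ``MapperEventsTest`` so that the ``instrument_class`` and class-instrumentation events are asserted for a mapped subclass as well. For reference, the mapper-level hook used by the test looks roughly like this when used on its own (``Widget`` and the listener body are illustrative; the legacy ``mapper()`` call mirrors the test):

    from sqlalchemy import Column, Integer, MetaData, Table, event
    from sqlalchemy.orm import mapper

    @event.listens_for(mapper, "instrument_class")
    def receive_instrument_class(mp, class_):
        # fires once per mapped class, including subclasses mapped with
        # inherits=..., which is what the added assertions cover
        print("instrumenting", class_.__name__)

    metadata = MetaData()
    widget_table = Table("widget", metadata, Column("id", Integer, primary_key=True))

    class Widget(object):
        pass

    mapper(Widget, widget_table)  # the listener fires for Widget here
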
index c94fe0833d1b092c80c7dd0259bfc40293b8ddb4..fc6caa75d4155519520246b235c88e74f2081241 100644 (file)
@@ -1,5 +1,3 @@
-"""General mapper operations with an emphasis on selecting/loading."""
-
 import logging
 import logging.handlers
 
@@ -25,6 +23,7 @@ from sqlalchemy.orm import deferred
 from sqlalchemy.orm import dynamic_loader
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import reconstructor
+from sqlalchemy.orm import registry
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import synonym
@@ -35,7 +34,8 @@ from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
-from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing import ne_
+from sqlalchemy.testing.fixtures import ComparableMixin
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from test.orm import _fixtures
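
Editorial note: the new ``registry`` import reflects the unified declarative system this commit introduces. A condensed sketch of the two mapping styles it enables, written against the API as it ships in SQLAlchemy 1.4 (names at this intermediate commit may differ slightly; the ``User``/``Address`` classes and tables are illustrative, not the test fixtures):

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.orm import registry

    mapper_registry = registry()

    # decorator-based declarative mapping, no base class required
    @mapper_registry.mapped
    class User(object):
        __tablename__ = "user_account"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # classical-style mapping routed through the same registry, so
    # string relationship() arguments can still resolve against it
    metadata = MetaData()
    address_table = Table(
        "address",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("email", String(50)),
    )

    class Address(object):
        pass

    mapper_registry.map_imperatively(Address, address_table)
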
@@ -44,6 +44,71 @@ from test.orm import _fixtures
 class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     __dialect__ = "default"
 
+    def test_decl_attributes(self):

+        """declarative mapper() now sets up some of the convenience
+        attributes"""
+
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        am = self.mapper(Address, addresses)
+        um = self.mapper(
+            User,
+            users,
+            properties={
+                "addresses": relationship(
+                    "Address",
+                    order_by="Address.id",
+                    primaryjoin="User.id == remote(Address.user_id)",
+                    backref="user",
+                )
+            },
+        )
+
+        assert not hasattr(User, "metadata")
+
+        is_(um, User.__mapper__)
+        is_(am, Address.__mapper__)
+
+        is_(um.local_table, User.__table__)
+        is_(am.local_table, Address.__table__)
+
+        assert um.attrs.addresses.primaryjoin.compare(
+            users.c.id == addresses.c.user_id
+        )
+        assert um.attrs.addresses.order_by[0].compare(Address.id)
+
+        configure_mappers()
+
+        is_(um.attrs.addresses.mapper, am)
+        is_(am.attrs.user.mapper, um)
+
+        sa.orm.clear_mappers()
+
+        assert not hasattr(User, "__mapper__")
+        assert not hasattr(User, "__table__")
+
+    def test_default_constructor_imperative_map(self):
+        class Plain(ComparableMixin):
+            pass
+
+        users = self.tables.users
+        self.mapper(Plain, users)
+
+        eq_(Plain(name="n1"), Plain(name="n1"))
+        ne_(Plain(name="n1"), Plain(name="not1"))
+
+        assert_raises_message(
+            TypeError,
+            "'foobar' is an invalid keyword argument for Plain",
+            Plain,
+            foobar="x",
+        )
+
     def test_prop_shadow(self):
         """A backref name may not shadow an existing property name."""
 
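
Editorial note: ``test_default_constructor_imperative_map`` above checks that the default Declarative constructor is now applied when mapping imperatively through the fixture's ``mapper()``. The constructor's behavior itself is unchanged and is shown here against a plain ``declarative_base()`` class for brevity (``Plain`` and its columns are illustrative):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Plain(Base):
        __tablename__ = "plain"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # keyword arguments are assigned to the mapped attributes
    p = Plain(name="n1")
    assert p.name == "n1"

    # unknown keywords are rejected, as the test above asserts
    try:
        Plain(foobar="x")
    except TypeError as err:
        assert "invalid keyword argument" in str(err)
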
@@ -54,8 +119,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(Address, addresses)
-        mapper(
+        self.mapper(Address, addresses)
+        self.mapper(
             User,
             users,
             properties={
@@ -70,7 +135,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         User, users = self.classes.User, self.tables.users
 
-        mapper(User, users, properties={"foobar": users.c.name})
+        self.mapper(User, users, properties={"foobar": users.c.name})
 
         users.insert().values({User.foobar: "name1"}).execute()
         eq_(
@@ -104,8 +169,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             def y(self):
                 return "something else"
 
-        m = mapper(Foo, users, properties={"addresses": relationship(Address)})
-        mapper(Address, addresses)
+        m = self.mapper(
+            Foo, users, properties={"addresses": relationship(Address)}
+        )
+        self.mapper(Address, addresses)
         a1 = aliased(Foo)
 
         f = Foo()
@@ -141,7 +208,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             def y(self):
                 return "something else"
 
-        m = mapper(Foo, users)
+        m = self.mapper(Foo, users)
         a1 = aliased(Foo)
 
         for arg, key, ret in [
@@ -158,7 +225,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         def boom():
             raise Exception("it broke")
 
-        mapper(User, users, properties={"addresses": relationship(boom)})
+        self.mapper(User, users, properties={"addresses": relationship(boom)})
 
         # test that QueryableAttribute.__str__() doesn't
         # cause a compile.
@@ -177,7 +244,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(Address, addresses, properties={"user": relationship(User)})
+        self.mapper(
+            Address, addresses, properties={"user": relationship(User)}
+        )
 
         try:
             hasattr(Address.user, "property")
@@ -198,7 +267,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_column_prefix(self):
         users, User = self.tables.users, self.classes.User
 
-        mapper(
+        self.mapper(
             User,
             users,
             column_prefix="_",
@@ -235,11 +304,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(User, users)
+        self.mapper(User, users)
         sa.orm.configure_mappers()
         assert sa.orm.mapperlib.Mapper._new_mappers is False
 
-        m = mapper(
+        m = self.mapper(
             Address,
             addresses,
             properties={"user": relationship(User, backref="addresses")},
@@ -254,7 +323,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_configure_on_session(self):
         User, users = self.classes.User, self.tables.users
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         session = create_session()
         session.connection(mapper=m)
 
@@ -263,7 +332,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         addresses, Address = self.tables.addresses, self.classes.Address
 
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
         s = create_session()
         a = (
             s.query(Address)
@@ -309,8 +378,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Bar(object):
             pass
 
-        mapper(Foo, users)
-        mapper(Bar, addresses)
+        self.mapper(Foo, users)
+        self.mapper(Bar, addresses)
         assert_raises(TypeError, Foo, x=5)
         assert_raises(TypeError, Bar, x=5)
 
@@ -328,7 +397,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         m = MetaData()
         foo_t = Table("foo", m, Column("id", String, primary_key=True))
-        m = mapper(Foo, foo_t)
+        m = self.mapper(Foo, foo_t)
 
         class DontCompareMeToString(int):
             if util.py2k:
@@ -358,10 +427,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        m = mapper(
+        m = self.mapper(
             User,
             users,
-            properties={"addresses": relationship(mapper(Address, addresses))},
+            properties={
+                "addresses": relationship(self.mapper(Address, addresses))
+            },
         )
         assert User.addresses.property is m.get_property("addresses")
 
@@ -374,8 +445,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(Address, addresses)
-        mapper(
+        self.mapper(Address, addresses)
+        self.mapper(
             User,
             users,
             properties={
@@ -397,10 +468,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users,
-            properties={"addresses": relationship(mapper(Address, addresses))},
+            properties={
+                "addresses": relationship(self.mapper(Address, addresses))
+            },
         )
         User.addresses.any(Address.email_address == "foo@bar.com")
 
@@ -412,10 +485,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users,
-            properties={"addresses": relationship(mapper(Address, addresses))},
+            properties={
+                "addresses": relationship(self.mapper(Address, addresses))
+            },
         )
         eq_(str(User.id == 3), str(users.c.id == 3))
 
@@ -429,8 +504,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Foo(User):
             pass
 
-        mapper(User, users)
-        mapper(
+        self.mapper(User, users)
+        self.mapper(
             Foo,
             addresses,
             inherits=User,
@@ -448,9 +523,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Foo(User):
             pass
 
-        mapper(User, users)
+        self.mapper(User, users)
         configure_mappers()
-        mapper(
+        self.mapper(
             Foo,
             addresses,
             inherits=User,
@@ -481,13 +556,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             side_effect=register_attribute_impl,
         ) as some_mock:
 
-            mapper(A, users, properties={"bs": relationship(B)})
-            mapper(B, addresses)
+            self.mapper(A, users, properties={"bs": relationship(B)})
+            self.mapper(B, addresses)
 
             configure_mappers()
 
-            mapper(ASub, inherits=A)
-            mapper(ASubSub, inherits=ASub)
+            self.mapper(ASub, inherits=A)
+            self.mapper(ASubSub, inherits=ASub)
 
             configure_mappers()
 
@@ -497,7 +572,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_check_descriptor_as_method(self):
         User, users = self.classes.User, self.tables.users
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
 
         class MyClass(User):
             def foo(self):
@@ -508,7 +583,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_configure_on_get_props_1(self):
         User, users = self.classes.User, self.tables.users
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         assert not m.configured
         assert list(m.iterate_properties)
         assert m.configured
@@ -516,7 +591,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_configure_on_get_props_2(self):
         User, users = self.classes.User, self.tables.users
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         assert not m.configured
         assert m.get_property("name")
         assert m.configured
@@ -529,11 +604,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         assert not m.configured
         configure_mappers()
 
-        mapper(
+        self.mapper(
             Address,
             addresses,
             properties={"user": relationship(User, backref="addresses")},
@@ -583,7 +658,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         Address = self.classes.Address
         User = self.classes.User
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties={
@@ -591,7 +666,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 "addresses": relationship(Address),
             },
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
 
         # attr.info goes down to the original Column object
         # for the dictionary.  The annotated element needs to pass
@@ -626,8 +701,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
             name = property(_get_name, _set_name)
 
-        m = mapper(User, users)
-        mapper(Address, addresses)
+        m = self.mapper(User, users)
+        self.mapper(Address, addresses)
 
         m.add_property("_name", deferred(users.c.name))
         m.add_property("name", synonym("_name"))
@@ -659,10 +734,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         users, User = self.tables.users, self.classes.User
         addresses, Address = self.tables.addresses, self.classes.Address
 
-        m1 = mapper(User, users)
+        m1 = self.mapper(User, users)
         User()
 
-        mapper(
+        self.mapper(
             Address,
             addresses,
             properties={"user": relationship(User, backref="addresses")},
@@ -675,7 +750,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_replace_col_prop_w_syn(self):
         users, User = self.tables.users, self.classes.User
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         m.add_property("_name", users.c.name)
         m.add_property("name", synonym("_name"))
 
@@ -688,7 +763,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         sa.orm.clear_mappers()
 
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         m.add_property("name", synonym("_name", map_column=True))
 
         sess.expunge_all()
@@ -702,10 +777,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         users, User = self.tables.users, self.classes.User
         addresses, Address = self.tables.addresses, self.classes.Address
 
-        m = mapper(
+        m = self.mapper(
             User, users, properties={"addresses": relationship(Address)}
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
 
         assert_raises_message(
             sa.exc.SAWarning,
@@ -724,14 +799,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class SubUser(User):
             pass
 
-        m = mapper(User, users)
-        m2 = mapper(
+        m = self.mapper(User, users)
+        m2 = self.mapper(
             SubUser,
             addresses,
             inherits=User,
             properties={"address_id": addresses.c.id},
         )
-        mapper(Address, addresses, properties={"foo": relationship(m2)})
+        self.mapper(Address, addresses, properties={"foo": relationship(m2)})
         # add property using annotated User.name,
         # needs to be deannotated
         m.add_property("x", column_property(User.name + "name"))
@@ -758,7 +833,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         bringing expressions down to the original mapped columns.
         """
         User, users = self.classes.User, self.tables.users
-        m = mapper(User, users)
+        m = self.mapper(User, users)
         assert User.id.property.columns[0] is users.c.id
         assert User.name.property.columns[0] is users.c.name
         expr = User.name + "name"
@@ -799,11 +874,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             user = property(_get_user, _set_user)
 
         # synonym is created against nonexistent prop
-        mapper(Address, addresses, properties={"user": synonym("_user")})
+        self.mapper(Address, addresses, properties={"user": synonym("_user")})
         sa.orm.configure_mappers()
 
         # later, backref sets up the prop
-        mapper(
+        self.mapper(
             User,
             users,
             properties={"addresses": relationship(Address, backref="_user")},
@@ -832,7 +907,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Node(object):
             pass
 
-        mapper(
+        self.mapper(
             Node,
             t,
             properties={
@@ -858,7 +933,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class MyUser(User):
             pass
 
-        mapper(
+        self.mapper(
             User,
             users,
             polymorphic_on=users.c.name,
@@ -919,16 +994,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Empty(object):
             pass
 
-        mapper(
+        self.mapper(
             Empty, t, properties={"empty_id": t.c.id}, include_properties=[]
         )
-        p_m = mapper(
+        p_m = self.mapper(
             Person,
             t,
             polymorphic_on=t.c.type,
             include_properties=("id", "type", "name"),
         )
-        e_m = mapper(
+        e_m = self.mapper(
             Employee,
             inherits=p_m,
             polymorphic_identity="employee",
@@ -940,31 +1015,31 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             exclude_properties=("vendor_id",),
         )
 
-        mapper(
+        self.mapper(
             Manager,
             inherits=e_m,
             polymorphic_identity="manager",
             include_properties=("id", "type"),
         )
 
-        mapper(
+        self.mapper(
             Vendor,
             inherits=p_m,
             polymorphic_identity="vendor",
             exclude_properties=("boss_id", "employee_number"),
         )
-        mapper(Hoho, t, include_properties=("id", "type", "name"))
-        mapper(
+        self.mapper(Hoho, t, include_properties=("id", "type", "name"))
+        self.mapper(
             Lala,
             t,
             exclude_properties=("vendor_id", "boss_id"),
             column_prefix="p_",
         )
 
-        mapper(HasDef, t, column_prefix="h_")
+        self.mapper(HasDef, t, column_prefix="h_")
 
-        mapper(Fub, t, include_properties=(t.c.id, t.c.type))
-        mapper(
+        self.mapper(Fub, t, include_properties=(t.c.id, t.c.type))
+        self.mapper(
             Frob,
             t,
             column_prefix="f_",
@@ -1034,7 +1109,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         assert_props(Empty, ["empty_id"])
 
-        mapper(
+        self.mapper(
             Foo,
             inherits=Person,
             polymorphic_identity="foo",
@@ -1062,7 +1137,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class A(object):
             pass
 
-        mapper(A, t, include_properties=["id"])
+        self.mapper(A, t, include_properties=["id"])
         s = Session()
         s.add(A())
         s.commit()
@@ -1072,7 +1147,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             def __bool__(self):
                 raise Exception("nope")
 
-        mapper(NoBoolAllowed, self.tables.users)
+        self.mapper(NoBoolAllowed, self.tables.users)
         u1 = NoBoolAllowed()
         u1.name = "some name"
         s = Session(testing.db)
@@ -1088,12 +1163,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         addresses, users = self.tables.addresses, self.tables.users
         Address = self.classes.Address
 
-        mapper(
+        self.mapper(
             NoEqAllowed,
             users,
             properties={"addresses": relationship(Address, backref="user")},
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
 
         u1 = NoEqAllowed()
         u1.name = "some name"
@@ -1138,7 +1213,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         usersaddresses = sa.join(
             users, addresses, users.c.id == addresses.c.user_id
         )
-        mapper(
+        self.mapper(
             User,
             usersaddresses,
             primary_key=[users.c.id],
@@ -1159,7 +1234,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         usersaddresses = sa.join(
             users, addresses, users.c.id == addresses.c.user_id
         )
-        mapper(
+        self.mapper(
             User,
             usersaddresses,
             primary_key=[users.c.id],
@@ -1175,7 +1250,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.Address,
         )
 
-        m = mapper(
+        m = self.mapper(
             Address,
             addresses.join(email_bounces),
             properties={"id": [addresses.c.id, email_bounces.c.id]},
@@ -1201,7 +1276,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users.outerjoin(addresses),
             primary_key=[users.c.id, addresses.c.id],
@@ -1232,7 +1307,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users.outerjoin(addresses),
             allow_partial_pks=False,
@@ -1265,9 +1340,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.tables.keywords,
         )
 
-        m1 = mapper(Item, items, primary_key=[items.c.id])
-        m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
-        m3 = mapper(User, users, primary_key=(users.c.id,))
+        m1 = self.mapper(Item, items, primary_key=[items.c.id])
+        m2 = self.mapper(Keyword, keywords, primary_key=keywords.c.id)
+        m3 = self.mapper(User, users, primary_key=(users.c.id,))
 
         assert m1.primary_key[0] is items.c.id
         assert m2.primary_key[0] is keywords.c.id
@@ -1286,15 +1361,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.Order,
         )
 
-        mapper(Item, items)
+        self.mapper(Item, items)
 
-        mapper(
+        self.mapper(
             Order,
             orders,
             properties=dict(items=relationship(Item, order_items)),
         )
 
-        mapper(User, users, properties=dict(orders=relationship(Order)))
+        self.mapper(User, users, properties=dict(orders=relationship(Order)))
 
         session = create_session()
         result = (
@@ -1327,7 +1402,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             .alias("myselect")
         )
 
-        mapper(User, s)
+        self.mapper(User, s)
         sess = create_session()
         result = sess.query(User).order_by(s.c.id).all()
 
@@ -1340,7 +1415,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         User, users = self.classes.User, self.tables.users
 
-        mapper(User, users)
+        self.mapper(User, users)
 
         session = create_session()
         q = session.query(User)
@@ -1361,8 +1436,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.Item,
         )
 
-        mapper(Keyword, keywords)
-        mapper(
+        self.mapper(Keyword, keywords)
+        self.mapper(
             Item,
             items,
             properties=dict(
@@ -1390,10 +1465,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         )
 
         def go():
-            mapper(
+            self.mapper(
                 User,
                 users,
-                properties=dict(name=relationship(mapper(Address, addresses))),
+                properties=dict(
+                    name=relationship(self.mapper(Address, addresses))
+                ),
             )
 
         assert_raises(sa.exc.ArgumentError, go)
@@ -1408,11 +1485,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users,
             exclude_properties=["name"],
-            properties=dict(name=relationship(mapper(Address, addresses))),
+            properties=dict(
+                name=relationship(self.mapper(Address, addresses))
+            ),
         )
 
         assert bool(User.name)
@@ -1427,11 +1506,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties=dict(
-                name=relationship(mapper(Address, addresses)), foo=users.c.name
+                name=relationship(self.mapper(Address, addresses)),
+                foo=users.c.name,
             ),
         )
 
@@ -1458,12 +1538,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
             uname = extendedproperty(_get_name, _set_name)
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties=dict(
                 addresses=relationship(
-                    mapper(Address, addresses), lazy="select"
+                    self.mapper(Address, addresses), lazy="select"
                 ),
                 uname=synonym("name"),
                 adlist=synonym("addresses"),
@@ -1517,7 +1597,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_synonym_of_synonym(self):
         users, User = (self.tables.users, self.classes.User)
 
-        mapper(User, users, properties={"x": synonym("id"), "y": synonym("x")})
+        self.mapper(
+            User, users, properties={"x": synonym("id"), "y": synonym("x")}
+        )
 
         s = Session()
         u = s.query(User).filter(User.y == 8).one()
@@ -1526,7 +1608,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
     def test_synonym_get_history(self):
         users, User = (self.tables.users, self.classes.User)
 
-        mapper(User, users, properties={"x": synonym("id"), "y": synonym("x")})
+        self.mapper(
+            User, users, properties={"x": synonym("id"), "y": synonym("x")}
+        )
 
         u1 = User()
         eq_(attributes.instance_state(u1).attrs.x.history, (None, None, None))
@@ -1547,7 +1631,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
             x = property(_x)
 
-        m = mapper(
+        m = self.mapper(
             User,
             users,
             properties={"x": synonym("some_attr", descriptor=User.x)},
@@ -1574,12 +1658,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.tables.addresses,
         )
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties={"y": synonym("x"), "addresses": relationship(Address)},
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
         User.x = association_proxy("addresses", "email_address")
 
         assert_raises_message(
@@ -1595,7 +1679,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         users, User = self.tables.users, self.classes.User
 
         def go():
-            mapper(
+            self.mapper(
                 User,
                 users,
                 properties={"not_name": synonym("_name", map_column=True)},
@@ -1633,8 +1717,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
             name = property(_get_name, _set_name)
 
-        mapper(Address, addresses)
-        mapper(
+        self.mapper(Address, addresses)
+        self.mapper(
             User,
             users,
             properties={
@@ -1698,8 +1782,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 super(MyFakeProperty, self).post_instrument_class(mapper)
                 configure_mappers()
 
-        mapper(User, users, properties={"name": MyFakeProperty(users.c.name)})
-        mapper(Address, addresses)
+        self.mapper(
+            User, users, properties={"name": MyFakeProperty(users.c.name)}
+        )
+        self.mapper(Address, addresses)
         configure_mappers()
 
         sa.orm.clear_mappers()
@@ -1709,8 +1795,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 super(MyFakeProperty, self).post_instrument_class(mapper)
                 configure_mappers()
 
-        mapper(User, users, properties={"name": MyFakeProperty(users.c.name)})
-        mapper(Address, addresses)
+        self.mapper(
+            User, users, properties={"name": MyFakeProperty(users.c.name)}
+        )
+        self.mapper(Address, addresses)
         configure_mappers()
 
     def test_reconstructor(self):
@@ -1723,7 +1811,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             def reconstruct(self):
                 recon.append("go")
 
-        mapper(User, users)
+        self.mapper(User, users)
 
         User()
         eq_(recon, [])
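
Editorial note: ``test_reconstructor`` above relies on the ``@reconstructor`` hook (imported at the top of this module); its decorator is outside the hunk's context lines. A minimal standalone use, with an illustrative class, table and engine URL:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base, reconstructor

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

        @reconstructor
        def init_on_load(self):
            # runs when an instance is rebuilt from a database row,
            # where the ORM bypasses __init__
            self.loaded_from_db = True

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add(User(name="ed"))
    session.commit()
    session.expunge_all()

    u = session.query(User).first()
    assert u.loaded_from_db
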
@@ -1753,11 +1841,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 assert isinstance(self, C)
                 recon.append("C")
 
-        mapper(
+        self.mapper(
             A, users, polymorphic_on=users.c.name, polymorphic_identity="jack"
         )
-        mapper(B, inherits=A, polymorphic_identity="ed")
-        mapper(C, inherits=A, polymorphic_identity="chuck")
+        self.mapper(B, inherits=A, polymorphic_identity="ed")
+        self.mapper(C, inherits=A, polymorphic_identity="chuck")
 
         A()
         B()
@@ -1781,7 +1869,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             def __init__(self):
                 recon.append("go")
 
-        mapper(User, users)
+        self.mapper(User, users)
 
         User()
         eq_(recon, ["go"])
@@ -1813,11 +1901,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 assert isinstance(self, C)
                 recon.append("C")
 
-        mapper(
+        self.mapper(
             A, users, polymorphic_on=users.c.name, polymorphic_identity="jack"
         )
-        mapper(B, inherits=A, polymorphic_identity="ed")
-        mapper(C, inherits=A, polymorphic_identity="chuck")
+        self.mapper(B, inherits=A, polymorphic_identity="ed")
+        self.mapper(C, inherits=A, polymorphic_identity="chuck")
 
         A()
         B()
@@ -1844,7 +1932,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class User(Base):
             pass
 
-        mapper(User, users)
+        self.mapper(User, users)
 
         User()
         eq_(recon, [])
@@ -1860,10 +1948,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             self.classes.User,
         )
 
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
         sa.orm.clear_mappers()
 
-        mapper(User, users, properties={"addresses": relationship(Address)})
+        self.mapper(
+            User, users, properties={"addresses": relationship(Address)}
+        )
 
         assert_raises_message(
             sa.orm.exc.UnmappedClassError,
@@ -1893,7 +1983,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         User = self.classes.User
         Address = self.classes.Address
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties={
@@ -1903,7 +1993,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 )
             },
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
         assert_raises_message(
             AttributeError,
             "'Table' object has no attribute 'wrong'",
@@ -1917,7 +2007,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         User = self.classes.User
         Address = self.classes.Address
 
-        mapper(
+        self.mapper(
             User,
             users,
             properties={
@@ -1928,7 +2018,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 )
             },
         )
-        mapper(Address, addresses)
+        self.mapper(Address, addresses)
         assert_raises_message(KeyError, "wrong", class_mapper, Address)
 
     def test_unmapped_subclass_error_postmap(self):
@@ -1940,7 +2030,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Sub(Base):
             pass
 
-        mapper(Base, users)
+        self.mapper(Base, users)
         sa.orm.configure_mappers()
 
         # we can create new instances, set attributes.
@@ -1960,7 +2050,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class Base(object):
             pass
 
-        mapper(Base, users)
+        self.mapper(Base, users)
 
         class Sub(Base):
             pass
@@ -1990,1632 +2080,148 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         class A(NewStyle, OldStyle):
             pass
 
-        mapper(A, users)
+        self.mapper(A, users)
 
         class B(OldStyle, NewStyle):
             pass
 
-        mapper(B, users)
+        self.mapper(B, users)
 
 
-class DocumentTest(fixtures.TestBase):
-    def test_doc_propagate(self):
-        metadata = MetaData()
-        t1 = Table(
-            "t1",
+class RequirementsTest(fixtures.MappedTest):
+
+    """Tests the contract for user classes."""
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            "ht1",
             metadata,
             Column(
-                "col1", Integer, primary_key=True, doc="primary key column"
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
             ),
-            Column("col2", String, doc="data col"),
-            Column("col3", String, doc="data col 2"),
-            Column("col4", String, doc="data col 3"),
-            Column("col5", String),
+            Column("value", String(10)),
         )
-        t2 = Table(
-            "t2",
+        Table(
+            "ht2",
             metadata,
             Column(
-                "col1", Integer, primary_key=True, doc="primary key column"
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
             ),
-            Column("col2", String, doc="data col"),
+            Column("ht1_id", Integer, ForeignKey("ht1.id")),
+            Column("value", String(10)),
+        )
+        Table(
+            "ht3",
+            metadata,
             Column(
-                "col3",
-                Integer,
-                ForeignKey("t1.col1"),
-                doc="foreign key to t1.col1",
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
             ),
+            Column("value", String(10)),
         )
-
-        class Foo(object):
-            pass
-
-        class Bar(object):
-            pass
-
-        mapper(
-            Foo,
-            t1,
-            properties={
-                "bars": relationship(
-                    Bar,
-                    doc="bar relationship",
-                    backref=backref("foo", doc="foo relationship"),
-                ),
-                "foober": column_property(t1.c.col3, doc="alternate data col"),
-                "hoho": synonym("col4", doc="syn of col4"),
-            },
+        Table(
+            "ht4",
+            metadata,
+            Column("ht1_id", Integer, ForeignKey("ht1.id"), primary_key=True),
+            Column("ht3_id", Integer, ForeignKey("ht3.id"), primary_key=True),
+        )
+        Table(
+            "ht5",
+            metadata,
+            Column("ht1_id", Integer, ForeignKey("ht1.id"), primary_key=True),
+        )
+        Table(
+            "ht6",
+            metadata,
+            Column("ht1a_id", Integer, ForeignKey("ht1.id"), primary_key=True),
+            Column("ht1b_id", Integer, ForeignKey("ht1.id"), primary_key=True),
+            Column("value", String(10)),
         )
-        mapper(Bar, t2)
-        configure_mappers()
-        eq_(Foo.col1.__doc__, "primary key column")
-        eq_(Foo.col2.__doc__, "data col")
-        eq_(Foo.col5.__doc__, None)
-        eq_(Foo.foober.__doc__, "alternate data col")
-        eq_(Foo.bars.__doc__, "bar relationship")
-        eq_(Foo.hoho.__doc__, "syn of col4")
-        eq_(Bar.col1.__doc__, "primary key column")
-        eq_(Bar.foo.__doc__, "foo relationship")
 
+    if util.py2k:
 
-class ORMLoggingTest(_fixtures.FixtureTest):
-    def setup(self):
-        self.buf = logging.handlers.BufferingHandler(100)
-        for log in [logging.getLogger("sqlalchemy.orm")]:
-            log.addHandler(self.buf)
+        def test_baseclass(self):
+            ht1 = self.tables.ht1
 
-    def teardown(self):
-        for log in [logging.getLogger("sqlalchemy.orm")]:
-            log.removeHandler(self.buf)
+            class OldStyle:
+                pass
 
-    def _current_messages(self):
-        return [b.getMessage() for b in self.buf.buffer]
+            assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
 
-    def test_mapper_info_aliased(self):
-        User, users = self.classes.User, self.tables.users
-        tb = users.select().alias()
-        mapper(User, tb)
-        s = Session()
-        s.add(User(name="ed"))
-        s.commit()
+            assert_raises(sa.exc.ArgumentError, mapper, 123)
 
-        for msg in self._current_messages():
-            assert msg.startswith("(User|%%(%d anon)s) " % id(tb))
+            class NoWeakrefSupport(str):
+                pass
 
+            # TODO: is weakref support detectable without an instance?
+            # self.assertRaises(
+            #  sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
 
-class OptionsTest(_fixtures.FixtureTest):
-    def test_synonym_options(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
+    class _ValueBase(object):
+        def __init__(self, value="abc", id_=None):
+            self.id = id_
+            self.value = value
 
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses),
-                    lazy="select",
-                    order_by=addresses.c.id,
-                ),
-                adlist=synonym("addresses"),
-            ),
-        )
+        def __bool__(self):
+            return False
 
-        def go():
-            sess = create_session()
-            u = (
-                sess.query(User)
-                .order_by(User.id)
-                .options(sa.orm.joinedload("adlist"))
-                .filter_by(name="jack")
-            ).one()
-            eq_(u.adlist, [self.static.user_address_result[0].addresses[0]])
+        def __hash__(self):
+            return hash(self.value)
+
+        def __eq__(self, other):
+            if isinstance(other, type(self)):
+                return self.value == other.value
+            return False
 
-        self.assert_sql_count(testing.db, go, 1)
+    def test_comparison_overrides(self):
+        """Simple tests to ensure users can supply comparison __methods__.
 
-    def test_eager_options(self):
-        """A lazy relationship can be upgraded to an eager relationship."""
+        The suite-level test --options are better suited to detect
+        problems; they add selected __methods__ across the board on all
+        ORM tests.  This test simply shoves a variety of operations
+        through the ORM to catch basic regressions early in a standard
+        test run.
+        """
 
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
+        ht6, ht5, ht4, ht3, ht2, ht1 = (
+            self.tables.ht6,
+            self.tables.ht5,
+            self.tables.ht4,
+            self.tables.ht3,
+            self.tables.ht2,
+            self.tables.ht1,
         )
 
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), order_by=addresses.c.id
-                )
-            ),
-        )
+        class H1(self._ValueBase):
+            pass
 
-        sess = create_session()
-        result = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(sa.orm.joinedload("addresses"))
-        ).all()
+        class H2(self._ValueBase):
+            pass
 
-        def go():
-            eq_(result, self.static.user_address_result)
+        class H3(self._ValueBase):
+            pass
 
-        self.sql_count_(0, go)
+        class H6(self._ValueBase):
+            pass
 
-    def test_eager_options_with_limit(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
+        self.mapper(
+            H1,
+            ht1,
+            properties={
+                "h2s": relationship(H2, backref="h1"),
+                "h3s": relationship(H3, secondary=ht4, backref="h1s"),
+                "h1s": relationship(H1, secondary=ht5, backref="parent_h1"),
+                "t6a": relationship(
+                    H6, backref="h1a", primaryjoin=ht1.c.id == ht6.c.ht1a_id
+                ),
+                "t6b": relationship(
+                    H6, backref="h1b", primaryjoin=ht1.c.id == ht6.c.ht1b_id
+                ),
+            },
         )
-
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), lazy="select"
-                )
-            ),
-        )
-
-        sess = create_session()
-        u = (
-            sess.query(User)
-            .options(sa.orm.joinedload("addresses"))
-            .filter_by(id=8)
-        ).one()
-
-        def go():
-            eq_(u.id, 8)
-            eq_(len(u.addresses), 3)
-
-        self.sql_count_(0, go)
-
-        sess.expunge_all()
-
-        u = sess.query(User).filter_by(id=8).one()
-        eq_(u.id, 8)
-        eq_(len(u.addresses), 3)
-
-    def test_lazy_options_with_limit(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), lazy="joined"
-                )
-            ),
-        )
-
-        sess = create_session()
-        u = (
-            sess.query(User)
-            .options(sa.orm.lazyload("addresses"))
-            .filter_by(id=8)
-        ).one()
-
-        def go():
-            eq_(u.id, 8)
-            eq_(len(u.addresses), 3)
-
-        self.sql_count_(1, go)
-
-    def test_eager_degrade(self):
-        """An eager relationship automatically degrades to a lazy relationship
-        if eager columns are not available"""
-
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses),
-                    lazy="joined",
-                    order_by=addresses.c.id,
-                )
-            ),
-        )
-
-        sess = create_session()
-        # first test straight eager load, 1 statement
-
-        def go():
-            result = sess.query(User).order_by(User.id).all()
-            eq_(result, self.static.user_address_result)
-
-        self.sql_count_(1, go)
-
-        sess.expunge_all()
-
-        # then select just from users.  run it into instances.
-        # then assert the data, which will launch 3 more lazy loads
-        # (previous users in session fell out of scope and were removed from
-        # session's identity map)
-        r = users.select().order_by(users.c.id).execute()
-
-        ctx = sess.query(User)._compile_context()
-
-        def go():
-            result = list(sess.query(User).instances(r, ctx))
-            eq_(result, self.static.user_address_result)
-
-        self.sql_count_(4, go)
-
-    def test_eager_degrade_deep(self):
-        (
-            users,
-            Keyword,
-            items,
-            order_items,
-            orders,
-            Item,
-            User,
-            Address,
-            keywords,
-            item_keywords,
-            Order,
-            addresses,
-        ) = (
-            self.tables.users,
-            self.classes.Keyword,
-            self.tables.items,
-            self.tables.order_items,
-            self.tables.orders,
-            self.classes.Item,
-            self.classes.User,
-            self.classes.Address,
-            self.tables.keywords,
-            self.tables.item_keywords,
-            self.classes.Order,
-            self.tables.addresses,
-        )
-
-        # test with a deeper set of eager loads.  when we first load the three
-        # users, they will have no addresses or orders.  the number of lazy
-        # loads when traversing the whole thing will be three for the
-        # addresses and three for the orders.
-        mapper(Address, addresses)
-
-        mapper(Keyword, keywords)
-
-        mapper(
-            Item,
-            items,
-            properties=dict(
-                keywords=relationship(
-                    Keyword,
-                    secondary=item_keywords,
-                    lazy="joined",
-                    order_by=item_keywords.c.keyword_id,
-                )
-            ),
-        )
-
-        mapper(
-            Order,
-            orders,
-            properties=dict(
-                items=relationship(
-                    Item,
-                    secondary=order_items,
-                    lazy="joined",
-                    order_by=order_items.c.item_id,
-                )
-            ),
-        )
-
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    Address, lazy="joined", order_by=addresses.c.id
-                ),
-                orders=relationship(
-                    Order, lazy="joined", order_by=orders.c.id
-                ),
-            ),
-        )
-
-        sess = create_session()
-
-        # first test straight eager load, 1 statement
-        def go():
-            result = sess.query(User).order_by(User.id).all()
-            eq_(result, self.static.user_all_result)
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        sess.expunge_all()
-
-        # then select just from users.  run it into instances.
-        # then assert the data, which will launch 6 more lazy loads
-        r = users.select().execute()
-
-        ctx = sess.query(User)._compile_context()
-
-        def go():
-            result = list(sess.query(User).instances(r, ctx))
-            eq_(result, self.static.user_all_result)
-
-        self.assert_sql_count(testing.db, go, 6)
-
-    def test_lazy_options(self):
-        """An eager relationship can be upgraded to a lazy relationship."""
-
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), lazy="joined"
-                )
-            ),
-        )
-
-        sess = create_session()
-        result = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(sa.orm.lazyload("addresses"))
-        ).all()
-
-        def go():
-            eq_(result, self.static.user_address_result)
-
-        self.sql_count_(4, go)
-
-    def test_option_propagate(self):
-        users, items, order_items, Order, Item, User, orders = (
-            self.tables.users,
-            self.tables.items,
-            self.tables.order_items,
-            self.classes.Order,
-            self.classes.Item,
-            self.classes.User,
-            self.tables.orders,
-        )
-
-        mapper(User, users, properties=dict(orders=relationship(Order)))
-        mapper(
-            Order,
-            orders,
-            properties=dict(items=relationship(Item, secondary=order_items)),
-        )
-        mapper(Item, items)
-
-        sess = create_session()
-
-        oalias = aliased(Order)
-        opt1 = sa.orm.joinedload(User.orders, Order.items)
-        opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
-        u1 = (
-            sess.query(User)
-            .join(oalias, User.orders)
-            .options(opt1, opt2)
-            .first()
-        )
-        ustate = attributes.instance_state(u1)
-        assert opt1 in ustate.load_options
-        assert opt2 not in ustate.load_options
-
-
-class DeepOptionsTest(_fixtures.FixtureTest):
-    @classmethod
-    def setup_mappers(cls):
-        (
-            users,
-            Keyword,
-            items,
-            order_items,
-            Order,
-            Item,
-            User,
-            keywords,
-            item_keywords,
-            orders,
-        ) = (
-            cls.tables.users,
-            cls.classes.Keyword,
-            cls.tables.items,
-            cls.tables.order_items,
-            cls.classes.Order,
-            cls.classes.Item,
-            cls.classes.User,
-            cls.tables.keywords,
-            cls.tables.item_keywords,
-            cls.tables.orders,
-        )
-
-        mapper(Keyword, keywords)
-
-        mapper(
-            Item,
-            items,
-            properties=dict(
-                keywords=relationship(
-                    Keyword, item_keywords, order_by=item_keywords.c.item_id
-                )
-            ),
-        )
-
-        mapper(
-            Order,
-            orders,
-            properties=dict(
-                items=relationship(Item, order_items, order_by=items.c.id)
-            ),
-        )
-
-        mapper(
-            User,
-            users,
-            properties=dict(orders=relationship(Order, order_by=orders.c.id)),
-        )
-
-    def test_deep_options_1(self):
-        User = self.classes.User
-
-        sess = create_session()
-
-        # joinedload nothing.
-        u = sess.query(User).order_by(User.id).all()
-
-        def go():
-            u[0].orders[1].items[0].keywords[1]
-
-        self.assert_sql_count(testing.db, go, 3)
-
-    def test_deep_options_2(self):
-        """test (joined|subquery)load_all() options"""
-
-        User = self.classes.User
-
-        sess = create_session()
-
-        result = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(
-                sa.orm.joinedload("orders")
-                .joinedload("items")
-                .joinedload("keywords")
-            )
-        ).all()
-
-        def go():
-            result[0].orders[1].items[0].keywords[1]
-
-        self.sql_count_(0, go)
-
-        sess = create_session()
-
-        result = (
-            sess.query(User).options(
-                sa.orm.subqueryload("orders")
-                .subqueryload("items")
-                .subqueryload("keywords")
-            )
-        ).all()
-
-        def go():
-            result[0].orders[1].items[0].keywords[1]
-
-        self.sql_count_(0, go)
-
-    def test_deep_options_3(self):
-        User = self.classes.User
-
-        sess = create_session()
-
-        # same thing, with separate options calls
-        q2 = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(sa.orm.joinedload("orders"))
-            .options(sa.orm.joinedload("orders.items"))
-            .options(sa.orm.joinedload("orders.items.keywords"))
-        )
-        u = q2.all()
-
-        def go():
-            u[0].orders[1].items[0].keywords[1]
-
-        self.sql_count_(0, go)
-
-    def test_deep_options_4(self):
-        Item, User, Order = (
-            self.classes.Item,
-            self.classes.User,
-            self.classes.Order,
-        )
-
-        sess = create_session()
-
-        assert_raises_message(
-            sa.exc.ArgumentError,
-            'Mapped attribute "Order.items" does not apply to any of the '
-            "root entities in this query, e.g. mapped class User->users. "
-            "Please specify the full path from one of the root entities "
-            "to the target attribute.",
-            sess.query(User)
-            .options(sa.orm.joinedload(Order.items))
-            ._compile_context,
-        )
-
-        # joinedload "keywords" on items.  it will lazy load "orders", then
-        # lazy load the "items" on the order, but on "items" it will eager
-        # load the "keywords"
-        q3 = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(sa.orm.joinedload("orders.items.keywords"))
-        )
-        u = q3.all()
-
-        def go():
-            u[0].orders[1].items[0].keywords[1]
-
-        self.sql_count_(2, go)
-
-        sess = create_session()
-        q3 = (
-            sess.query(User)
-            .order_by(User.id)
-            .options(
-                sa.orm.joinedload(User.orders, Order.items, Item.keywords)
-            )
-        )
-        u = q3.all()
-
-        def go():
-            u[0].orders[1].items[0].keywords[1]
-
-        self.sql_count_(2, go)
-
-
-class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
-    def test_kwarg_accepted(self):
-        users, Address = self.tables.users, self.classes.Address
-
-        class DummyComposite(object):
-            def __init__(self, x, y):
-                pass
-
-        from sqlalchemy.orm.interfaces import PropComparator
-
-        class MyFactory(PropComparator):
-            pass
-
-        for args in (
-            (column_property, users.c.name),
-            (deferred, users.c.name),
-            (synonym, "name"),
-            (composite, DummyComposite, users.c.id, users.c.name),
-            (relationship, Address),
-            (backref, "address"),
-            (dynamic_loader, Address),
-        ):
-            fn = args[0]
-            args = args[1:]
-            fn(comparator_factory=MyFactory, *args)
-
-    def test_column(self):
-        User, users = self.classes.User, self.tables.users
-
-        from sqlalchemy.orm.properties import ColumnProperty
-
-        class MyFactory(ColumnProperty.Comparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                return func.foobar(self.__clause_element__()) == func.foobar(
-                    other
-                )
-
-        mapper(
-            User,
-            users,
-            properties={
-                "name": column_property(
-                    users.c.name, comparator_factory=MyFactory
-                )
-            },
-        )
-        self.assert_compile(
-            User.name == "ed",
-            "foobar(users.name) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-        self.assert_compile(
-            aliased(User).name == "ed",
-            "foobar(users_1.name) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-    def test_synonym(self):
-        users, User = self.tables.users, self.classes.User
-
-        from sqlalchemy.orm.properties import ColumnProperty
-
-        class MyFactory(ColumnProperty.Comparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                return func.foobar(self.__clause_element__()) == func.foobar(
-                    other
-                )
-
-        mapper(
-            User,
-            users,
-            properties={
-                "name": synonym(
-                    "_name", map_column=True, comparator_factory=MyFactory
-                )
-            },
-        )
-        self.assert_compile(
-            User.name == "ed",
-            "foobar(users.name) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-        self.assert_compile(
-            aliased(User).name == "ed",
-            "foobar(users_1.name) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-    def test_relationship(self):
-        users, Address, addresses, User = (
-            self.tables.users,
-            self.classes.Address,
-            self.tables.addresses,
-            self.classes.User,
-        )
-
-        from sqlalchemy.orm.relationships import RelationshipProperty
-
-        # NOTE: this API changed in 0.8, previously __clause_element__()
-        # gave the parent selectable, now it gives the
-        # primaryjoin/secondaryjoin
-        class MyFactory(RelationshipProperty.Comparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                return func.foobar(
-                    self._source_selectable().c.user_id
-                ) == func.foobar(other.id)
-
-        class MyFactory2(RelationshipProperty.Comparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                return func.foobar(
-                    self._source_selectable().c.id
-                ) == func.foobar(other.user_id)
-
-        mapper(User, users)
-        mapper(
-            Address,
-            addresses,
-            properties={
-                "user": relationship(
-                    User,
-                    comparator_factory=MyFactory,
-                    backref=backref(
-                        "addresses", comparator_factory=MyFactory2
-                    ),
-                )
-            },
-        )
-
-        # these are kind of nonsensical tests.
-        self.assert_compile(
-            Address.user == User(id=5),
-            "foobar(addresses.user_id) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-        self.assert_compile(
-            User.addresses == Address(id=5, user_id=7),
-            "foobar(users.id) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-        self.assert_compile(
-            aliased(Address).user == User(id=5),
-            "foobar(addresses_1.user_id) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-        self.assert_compile(
-            aliased(User).addresses == Address(id=5, user_id=7),
-            "foobar(users_1.id) = foobar(:foobar_1)",
-            dialect=default.DefaultDialect(),
-        )
-
-
-class SecondaryOptionsTest(fixtures.MappedTest):
-
-    """test that the contains_eager() option doesn't bleed
-    into a secondary load."""
-
-    run_inserts = "once"
-
-    run_deletes = None
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "base",
-            metadata,
-            Column("id", Integer, primary_key=True),
-            Column("type", String(50), nullable=False),
-        )
-        Table(
-            "child1",
-            metadata,
-            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
-            Column(
-                "child2id", Integer, ForeignKey("child2.id"), nullable=False
-            ),
-        )
-        Table(
-            "child2",
-            metadata,
-            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
-        )
-        Table(
-            "related",
-            metadata,
-            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
-        )
-
-    @classmethod
-    def setup_mappers(cls):
-        child1, child2, base, related = (
-            cls.tables.child1,
-            cls.tables.child2,
-            cls.tables.base,
-            cls.tables.related,
-        )
-
-        class Base(cls.Comparable):
-            pass
-
-        class Child1(Base):
-            pass
-
-        class Child2(Base):
-            pass
-
-        class Related(cls.Comparable):
-            pass
-
-        mapper(
-            Base,
-            base,
-            polymorphic_on=base.c.type,
-            properties={"related": relationship(Related, uselist=False)},
-        )
-        mapper(
-            Child1,
-            child1,
-            inherits=Base,
-            polymorphic_identity="child1",
-            properties={
-                "child2": relationship(
-                    Child2,
-                    primaryjoin=child1.c.child2id == base.c.id,
-                    foreign_keys=child1.c.child2id,
-                )
-            },
-        )
-        mapper(Child2, child2, inherits=Base, polymorphic_identity="child2")
-        mapper(Related, related)
-
-    @classmethod
-    def insert_data(cls, connection):
-        child1, child2, base, related = (
-            cls.tables.child1,
-            cls.tables.child2,
-            cls.tables.base,
-            cls.tables.related,
-        )
-
-        connection.execute(
-            base.insert(),
-            [
-                {"id": 1, "type": "child1"},
-                {"id": 2, "type": "child1"},
-                {"id": 3, "type": "child1"},
-                {"id": 4, "type": "child2"},
-                {"id": 5, "type": "child2"},
-                {"id": 6, "type": "child2"},
-            ],
-        )
-        connection.execute(child2.insert(), [{"id": 4}, {"id": 5}, {"id": 6}])
-        connection.execute(
-            child1.insert(),
-            [
-                {"id": 1, "child2id": 4},
-                {"id": 2, "child2id": 5},
-                {"id": 3, "child2id": 6},
-            ],
-        )
-        connection.execute(
-            related.insert(),
-            [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}, {"id": 6}],
-        )
-
-    def test_contains_eager(self):
-        Child1, Related = self.classes.Child1, self.classes.Related
-
-        sess = create_session()
-
-        child1s = (
-            sess.query(Child1)
-            .join(Child1.related)
-            .options(sa.orm.contains_eager(Child1.related))
-            .order_by(Child1.id)
-        )
-
-        def go():
-            eq_(
-                child1s.all(),
-                [
-                    Child1(id=1, related=Related(id=1)),
-                    Child1(id=2, related=Related(id=2)),
-                    Child1(id=3, related=Related(id=3)),
-                ],
-            )
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        c1 = child1s[0]
-
-        self.assert_sql_execution(
-            testing.db,
-            lambda: c1.child2,
-            CompiledSQL(
-                "SELECT child2.id AS child2_id, base.id AS base_id, "
-                "base.type AS base_type "
-                "FROM base JOIN child2 ON base.id = child2.id "
-                "WHERE base.id = :param_1",
-                {"param_1": 4},
-            ),
-        )
-
-    def test_joinedload_on_other(self):
-        Child1, Related = self.classes.Child1, self.classes.Related
-
-        sess = create_session()
-
-        child1s = (
-            sess.query(Child1)
-            .join(Child1.related)
-            .options(sa.orm.joinedload(Child1.related))
-            .order_by(Child1.id)
-        )
-
-        def go():
-            eq_(
-                child1s.all(),
-                [
-                    Child1(id=1, related=Related(id=1)),
-                    Child1(id=2, related=Related(id=2)),
-                    Child1(id=3, related=Related(id=3)),
-                ],
-            )
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        c1 = child1s[0]
-
-        self.assert_sql_execution(
-            testing.db,
-            lambda: c1.child2,
-            CompiledSQL(
-                "SELECT child2.id AS child2_id, base.id AS base_id, "
-                "base.type AS base_type "
-                "FROM base JOIN child2 ON base.id = child2.id "
-                "WHERE base.id = :param_1",
-                {"param_1": 4},
-            ),
-        )
-
-    def test_joinedload_on_same(self):
-        Child1, Child2, Related = (
-            self.classes.Child1,
-            self.classes.Child2,
-            self.classes.Related,
-        )
-
-        sess = create_session()
-
-        child1s = (
-            sess.query(Child1)
-            .join(Child1.related)
-            .options(sa.orm.joinedload(Child1.child2, Child2.related))
-            .order_by(Child1.id)
-        )
-
-        def go():
-            eq_(
-                child1s.all(),
-                [
-                    Child1(id=1, related=Related(id=1)),
-                    Child1(id=2, related=Related(id=2)),
-                    Child1(id=3, related=Related(id=3)),
-                ],
-            )
-
-        self.assert_sql_count(testing.db, go, 4)
-
-        c1 = child1s[0]
-
-        # this *does* joinedload
-        self.assert_sql_execution(
-            testing.db,
-            lambda: c1.child2,
-            CompiledSQL(
-                "SELECT child2.id AS child2_id, base.id AS base_id, "
-                "base.type AS base_type, "
-                "related_1.id AS related_1_id FROM base JOIN child2 "
-                "ON base.id = child2.id "
-                "LEFT OUTER JOIN related AS related_1 "
-                "ON base.id = related_1.id WHERE base.id = :param_1",
-                {"param_1": 4},
-            ),
-        )
-
-
-class DeferredPopulationTest(fixtures.MappedTest):
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "thing",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("name", String(20)),
-        )
-
-        Table(
-            "human",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("thing_id", Integer, ForeignKey("thing.id")),
-            Column("name", String(20)),
-        )
-
-    @classmethod
-    def setup_mappers(cls):
-        thing, human = cls.tables.thing, cls.tables.human
-
-        class Human(cls.Basic):
-            pass
-
-        class Thing(cls.Basic):
-            pass
-
-        mapper(Human, human, properties={"thing": relationship(Thing)})
-        mapper(Thing, thing, properties={"name": deferred(thing.c.name)})
-
-    @classmethod
-    def insert_data(cls, connection):
-        thing, human = cls.tables.thing, cls.tables.human
-
-        connection.execute(thing.insert(), [{"id": 1, "name": "Chair"}])
-
-        connection.execute(
-            human.insert(), [{"id": 1, "thing_id": 1, "name": "Clark Kent"}]
-        )
-
-    def _test(self, thing):
-        assert "name" in attributes.instance_state(thing).dict
-
-    def test_no_previous_query(self):
-        Thing = self.classes.Thing
-
-        session = create_session()
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_query_twice_with_clear(self):
-        Thing = self.classes.Thing
-
-        session = create_session()
-        result = session.query(Thing).first()  # noqa
-        session.expunge_all()
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_query_twice_no_clear(self):
-        Thing = self.classes.Thing
-
-        session = create_session()
-        result = session.query(Thing).first()  # noqa
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_joinedload_with_clear(self):
-        Thing, Human = self.classes.Thing, self.classes.Human
-
-        session = create_session()
-        human = (  # noqa
-            session.query(Human).options(sa.orm.joinedload("thing")).first()
-        )
-        session.expunge_all()
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_joinedload_no_clear(self):
-        Thing, Human = self.classes.Thing, self.classes.Human
-
-        session = create_session()
-        human = (  # noqa
-            session.query(Human).options(sa.orm.joinedload("thing")).first()
-        )
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_join_with_clear(self):
-        Thing, Human = self.classes.Thing, self.classes.Human
-
-        session = create_session()
-        result = (  # noqa
-            session.query(Human).add_entity(Thing).join("thing").first()
-        )
-        session.expunge_all()
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-    def test_join_no_clear(self):
-        Thing, Human = self.classes.Thing, self.classes.Human
-
-        session = create_session()
-        result = (  # noqa
-            session.query(Human).add_entity(Thing).join("thing").first()
-        )
-        thing = session.query(Thing).options(sa.orm.undefer("name")).first()
-        self._test(thing)
-
-
-class NoLoadTest(_fixtures.FixtureTest):
-    run_inserts = "once"
-    run_deletes = None
-
-    def test_o2m_noload(self):
-
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        m = mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), lazy="noload"
-                )
-            ),
-        )
-        q = create_session().query(m)
-        result = [None]
-
-        def go():
-            x = q.filter(User.id == 7).all()
-            x[0].addresses
-            result[0] = x
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        self.assert_result(
-            result[0], User, {"id": 7, "addresses": (Address, [])}
-        )
-
-    def test_upgrade_o2m_noload_lazyload_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        m = mapper(
-            User,
-            users,
-            properties=dict(
-                addresses=relationship(
-                    mapper(Address, addresses), lazy="noload"
-                )
-            ),
-        )
-        q = create_session().query(m).options(sa.orm.lazyload("addresses"))
-        result = [None]
-
-        def go():
-            x = q.filter(User.id == 7).all()
-            x[0].addresses
-            result[0] = x
-
-        self.sql_count_(2, go)
-
-        self.assert_result(
-            result[0], User, {"id": 7, "addresses": (Address, [{"id": 1}])}
-        )
-
-    def test_m2o_noload_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-        mapper(Address, addresses, properties={"user": relationship(User)})
-        mapper(User, users)
-        s = Session()
-        a1 = (
-            s.query(Address)
-            .filter_by(id=1)
-            .options(sa.orm.noload("user"))
-            .first()
-        )
-
-        def go():
-            eq_(a1.user, None)
-
-        self.sql_count_(0, go)
-
-
-class RaiseLoadTest(_fixtures.FixtureTest):
-    run_inserts = "once"
-    run_deletes = None
-
-    def test_o2m_raiseload_mapper(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties=dict(addresses=relationship(Address, lazy="raise")),
-        )
-        q = create_session().query(User)
-        result = [None]
-
-        def go():
-            x = q.filter(User.id == 7).all()
-            assert_raises_message(
-                sa.exc.InvalidRequestError,
-                "'User.addresses' is not available due to lazy='raise'",
-                lambda: x[0].addresses,
-            )
-            result[0] = x
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        self.assert_result(result[0], User, {"id": 7})
-
-    def test_o2m_raiseload_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(Address, addresses)
-        mapper(User, users, properties=dict(addresses=relationship(Address)))
-        q = create_session().query(User)
-        result = [None]
-
-        def go():
-            x = (
-                q.options(sa.orm.raiseload(User.addresses))
-                .filter(User.id == 7)
-                .all()
-            )
-            assert_raises_message(
-                sa.exc.InvalidRequestError,
-                "'User.addresses' is not available due to lazy='raise'",
-                lambda: x[0].addresses,
-            )
-            result[0] = x
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        self.assert_result(result[0], User, {"id": 7})
-
-    def test_o2m_raiseload_lazyload_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties=dict(addresses=relationship(Address, lazy="raise")),
-        )
-        q = create_session().query(User).options(sa.orm.lazyload("addresses"))
-        result = [None]
-
-        def go():
-            x = q.filter(User.id == 7).all()
-            x[0].addresses
-            result[0] = x
-
-        self.sql_count_(2, go)
-
-        self.assert_result(
-            result[0], User, {"id": 7, "addresses": (Address, [{"id": 1}])}
-        )
-
-    def test_m2o_raiseload_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-        mapper(Address, addresses, properties={"user": relationship(User)})
-        mapper(User, users)
-        s = Session()
-        a1 = (
-            s.query(Address)
-            .filter_by(id=1)
-            .options(sa.orm.raiseload("user"))
-            .first()
-        )
-
-        def go():
-            assert_raises_message(
-                sa.exc.InvalidRequestError,
-                "'Address.user' is not available due to lazy='raise'",
-                lambda: a1.user,
-            )
-
-        self.sql_count_(0, go)
-
-    def test_m2o_raise_on_sql_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-        mapper(Address, addresses, properties={"user": relationship(User)})
-        mapper(User, users)
-        s = Session()
-        a1 = (
-            s.query(Address)
-            .filter_by(id=1)
-            .options(sa.orm.raiseload("user", sql_only=True))
-            .first()
-        )
-
-        def go():
-            assert_raises_message(
-                sa.exc.InvalidRequestError,
-                "'Address.user' is not available due to lazy='raise_on_sql'",
-                lambda: a1.user,
-            )
-
-        self.sql_count_(0, go)
-
-        s.close()
-
-        u1 = s.query(User).first()
-        a1 = (
-            s.query(Address)
-            .filter_by(id=1)
-            .options(sa.orm.raiseload("user", sql_only=True))
-            .first()
-        )
-        assert "user" not in a1.__dict__
-        is_(a1.user, u1)
-
-    def test_m2o_non_use_get_raise_on_sql_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-        mapper(
-            Address,
-            addresses,
-            properties={
-                "user": relationship(
-                    User,
-                    primaryjoin=sa.and_(
-                        addresses.c.user_id == users.c.id,
-                        users.c.name != None,  # noqa
-                    ),
-                )
-            },
-        )
-        mapper(User, users)
-        s = Session()
-        u1 = s.query(User).first()  # noqa
-        a1 = (
-            s.query(Address)
-            .filter_by(id=1)
-            .options(sa.orm.raiseload("user", sql_only=True))
-            .first()
-        )
-
-        def go():
-            assert_raises_message(
-                sa.exc.InvalidRequestError,
-                "'Address.user' is not available due to lazy='raise_on_sql'",
-                lambda: a1.user,
-            )
-
-    def test_raiseload_wildcard_all_classes_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties=dict(addresses=relationship(Address, backref="user")),
-        )
-        q = (
-            create_session()
-            .query(User, Address)
-            .join(Address, User.id == Address.user_id)
-        )
-
-        u1, a1 = q.options(sa.orm.raiseload("*")).filter(User.id == 7).first()
-
-        assert_raises_message(
-            sa.exc.InvalidRequestError,
-            "'User.addresses' is not available due to lazy='raise'",
-            lambda: u1.addresses,
-        )
-
-        assert_raises_message(
-            sa.exc.InvalidRequestError,
-            "'Address.user' is not available due to lazy='raise'",
-            lambda: a1.user,
-        )
-
-        # columns still work
-        eq_(u1.id, 7)
-        eq_(a1.id, 1)
-
-    def test_raiseload_wildcard_specific_class_option(self):
-        Address, addresses, users, User = (
-            self.classes.Address,
-            self.tables.addresses,
-            self.tables.users,
-            self.classes.User,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties=dict(addresses=relationship(Address, backref="user")),
-        )
-        q = (
-            create_session()
-            .query(User, Address)
-            .join(Address, User.id == Address.user_id)
-        )
-
-        u1, a1 = (
-            q.options(sa.orm.Load(Address).raiseload("*"))
-            .filter(User.id == 7)
-            .first()
-        )
-
-        # User doesn't raise
-        def go():
-            eq_(u1.addresses, [a1])
-
-        self.assert_sql_count(testing.db, go, 1)
-
-        # Address does
-        assert_raises_message(
-            sa.exc.InvalidRequestError,
-            "'Address.user' is not available due to lazy='raise'",
-            lambda: a1.user,
-        )
-
-        # columns still work
-        eq_(u1.id, 7)
-        eq_(a1.id, 1)
-
-
-class RequirementsTest(fixtures.MappedTest):
-
-    """Tests the contract for user classes."""
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "ht1",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("value", String(10)),
-        )
-        Table(
-            "ht2",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("ht1_id", Integer, ForeignKey("ht1.id")),
-            Column("value", String(10)),
-        )
-        Table(
-            "ht3",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("value", String(10)),
-        )
-        Table(
-            "ht4",
-            metadata,
-            Column("ht1_id", Integer, ForeignKey("ht1.id"), primary_key=True),
-            Column("ht3_id", Integer, ForeignKey("ht3.id"), primary_key=True),
-        )
-        Table(
-            "ht5",
-            metadata,
-            Column("ht1_id", Integer, ForeignKey("ht1.id"), primary_key=True),
-        )
-        Table(
-            "ht6",
-            metadata,
-            Column("ht1a_id", Integer, ForeignKey("ht1.id"), primary_key=True),
-            Column("ht1b_id", Integer, ForeignKey("ht1.id"), primary_key=True),
-            Column("value", String(10)),
-        )
-
-    if util.py2k:
-
-        def test_baseclass(self):
-            ht1 = self.tables.ht1
-
-            class OldStyle:
-                pass
-
-            assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
-
-            assert_raises(sa.exc.ArgumentError, mapper, 123)
-
-            class NoWeakrefSupport(str):
-                pass
-
-            # TODO: is weakref support detectable without an instance?
-            # self.assertRaises(
-            #  sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
-
-    class _ValueBase(object):
-        def __init__(self, value="abc", id_=None):
-            self.id = id_
-            self.value = value
-
-        def __bool__(self):
-            return False
-
-        def __hash__(self):
-            return hash(self.value)
-
-        def __eq__(self, other):
-            if isinstance(other, type(self)):
-                return self.value == other.value
-            return False
-
-    def test_comparison_overrides(self):
-        """Simple tests to ensure users can supply comparison __methods__.
-
-        The suite-level test --options are better suited to detect
-        problems; they add selected __methods__ across the board on all
-        ORM tests.  This test simply shoves a variety of operations
-        through the ORM to catch basic regressions early in a standard
-        test run.
-        """
-
-        ht6, ht5, ht4, ht3, ht2, ht1 = (
-            self.tables.ht6,
-            self.tables.ht5,
-            self.tables.ht4,
-            self.tables.ht3,
-            self.tables.ht2,
-            self.tables.ht1,
-        )
-
-        class H1(self._ValueBase):
-            pass
-
-        class H2(self._ValueBase):
-            pass
-
-        class H3(self._ValueBase):
-            pass
-
-        class H6(self._ValueBase):
-            pass
-
-        mapper(
-            H1,
-            ht1,
-            properties={
-                "h2s": relationship(H2, backref="h1"),
-                "h3s": relationship(H3, secondary=ht4, backref="h1s"),
-                "h1s": relationship(H1, secondary=ht5, backref="parent_h1"),
-                "t6a": relationship(
-                    H6, backref="h1a", primaryjoin=ht1.c.id == ht6.c.ht1a_id
-                ),
-                "t6b": relationship(
-                    H6, backref="h1b", primaryjoin=ht1.c.id == ht6.c.ht1b_id
-                ),
-            },
-        )
-        mapper(H2, ht2)
-        mapper(H3, ht3)
-        mapper(H6, ht6)
+        self.mapper(H2, ht2)
+        self.mapper(H3, ht3)
+        self.mapper(H6, ht6)
 
         s = create_session()
         s.add_all([H1("abc"), H1("def")])
@@ -3690,8 +2296,10 @@ class RequirementsTest(fixtures.MappedTest):
                 self.value = value
                 self.id = id_
 
-        mapper(H1, ht1, properties={"h2s": relationship(H2, backref="h1")})
-        mapper(H2, ht2)
+        self.mapper(
+            H1, ht1, properties={"h2s": relationship(H2, backref="h1")}
+        )
+        self.mapper(H2, ht2)
         s = Session()
         s.add_all(
             [
@@ -3744,8 +2352,8 @@ class RequirementsTest(fixtures.MappedTest):
                 self.value = "foobar"
                 return self.value
 
-        mapper(H1, ht1)
-        mapper(H2, ht1)
+        self.mapper(H1, ht1)
+        self.mapper(H2, ht1)
 
         h1 = H1()
         h1.value = "Asdf"
@@ -3770,7 +2378,7 @@ class IsUserlandTest(fixtures.MappedTest):
         class Foo(object):
             someprop = value
 
-        m = mapper(Foo, self.tables.foo)
+        m = self.mapper(Foo, self.tables.foo)
         eq_(Foo.someprop, value)
         f1 = Foo()
         if instancelevel is not None:
@@ -3783,7 +2391,7 @@ class IsUserlandTest(fixtures.MappedTest):
         class Foo(object):
             someprop = value
 
-        m = mapper(Foo, self.tables.foo)
+        m = self.mapper(Foo, self.tables.foo)
         is_(Foo.someprop.property.columns[0], self.tables.foo.c.someprop)
         assert self.tables.foo.c.someprop in m._columntoproperty
 
@@ -3855,12 +2463,12 @@ class MagicNamesTest(fixtures.MappedTest):
             self.classes.Map,
         )
 
-        mapper(
+        self.mapper(
             Cartographer,
             cartographers,
             properties=dict(query=cartographers.c.quip),
         )
-        mapper(
+        self.mapper(
             Map,
             maps,
             properties=dict(mapper=relationship(Cartographer, backref="maps")),
@@ -3940,3 +2548,256 @@ class MagicNamesTest(fixtures.MappedTest):
                 maps,
                 properties={reserved: maps.c.state},
             )
+
+
+class DocumentTest(fixtures.TestBase):
+    def setup(self):
+
+        self.mapper = registry().map_imperatively
+
+    def test_doc_propagate(self):
+        metadata = MetaData()
+        t1 = Table(
+            "t1",
+            metadata,
+            Column(
+                "col1", Integer, primary_key=True, doc="primary key column"
+            ),
+            Column("col2", String, doc="data col"),
+            Column("col3", String, doc="data col 2"),
+            Column("col4", String, doc="data col 3"),
+            Column("col5", String),
+        )
+        t2 = Table(
+            "t2",
+            metadata,
+            Column(
+                "col1", Integer, primary_key=True, doc="primary key column"
+            ),
+            Column("col2", String, doc="data col"),
+            Column(
+                "col3",
+                Integer,
+                ForeignKey("t1.col1"),
+                doc="foreign key to t1.col1",
+            ),
+        )
+
+        class Foo(object):
+            pass
+
+        class Bar(object):
+            pass
+
+        self.mapper(
+            Foo,
+            t1,
+            properties={
+                "bars": relationship(
+                    Bar,
+                    doc="bar relationship",
+                    backref=backref("foo", doc="foo relationship"),
+                ),
+                "foober": column_property(t1.c.col3, doc="alternate data col"),
+                "hoho": synonym("col4", doc="syn of col4"),
+            },
+        )
+        self.mapper(Bar, t2)
+        configure_mappers()
+        eq_(Foo.col1.__doc__, "primary key column")
+        eq_(Foo.col2.__doc__, "data col")
+        eq_(Foo.col5.__doc__, None)
+        eq_(Foo.foober.__doc__, "alternate data col")
+        eq_(Foo.bars.__doc__, "bar relationship")
+        eq_(Foo.hoho.__doc__, "syn of col4")
+        eq_(Bar.col1.__doc__, "primary key column")
+        eq_(Bar.foo.__doc__, "foo relationship")
+
+
+class ORMLoggingTest(_fixtures.FixtureTest):
+    def setup(self):
+        self.buf = logging.handlers.BufferingHandler(100)
+        for log in [logging.getLogger("sqlalchemy.orm")]:
+            log.addHandler(self.buf)
+
+        self.mapper = registry().map_imperatively
+
+    def teardown(self):
+        for log in [logging.getLogger("sqlalchemy.orm")]:
+            log.removeHandler(self.buf)
+
+    def _current_messages(self):
+        return [b.getMessage() for b in self.buf.buffer]
+
+    def test_mapper_info_aliased(self):
+        User, users = self.classes.User, self.tables.users
+        tb = users.select().alias()
+        self.mapper(User, tb)
+        s = Session()
+        s.add(User(name="ed"))
+        s.commit()
+
+        for msg in self._current_messages():
+            assert msg.startswith("(User|%%(%d anon)s) " % id(tb))
+
+
+class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+    def test_kwarg_accepted(self):
+        users, Address = self.tables.users, self.classes.Address
+
+        class DummyComposite(object):
+            def __init__(self, x, y):
+                pass
+
+        from sqlalchemy.orm.interfaces import PropComparator
+
+        class MyFactory(PropComparator):
+            pass
+
+        for args in (
+            (column_property, users.c.name),
+            (deferred, users.c.name),
+            (synonym, "name"),
+            (composite, DummyComposite, users.c.id, users.c.name),
+            (relationship, Address),
+            (backref, "address"),
+            (dynamic_loader, Address),
+        ):
+            fn = args[0]
+            args = args[1:]
+            fn(comparator_factory=MyFactory, *args)
+
+    def test_column(self):
+        User, users = self.classes.User, self.tables.users
+
+        from sqlalchemy.orm.properties import ColumnProperty
+
+        class MyFactory(ColumnProperty.Comparator):
+            __hash__ = None
+
+            def __eq__(self, other):
+                return func.foobar(self.__clause_element__()) == func.foobar(
+                    other
+                )
+
+        self.mapper(
+            User,
+            users,
+            properties={
+                "name": column_property(
+                    users.c.name, comparator_factory=MyFactory
+                )
+            },
+        )
+        self.assert_compile(
+            User.name == "ed",
+            "foobar(users.name) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+        self.assert_compile(
+            aliased(User).name == "ed",
+            "foobar(users_1.name) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+
+    def test_synonym(self):
+        users, User = self.tables.users, self.classes.User
+
+        from sqlalchemy.orm.properties import ColumnProperty
+
+        class MyFactory(ColumnProperty.Comparator):
+            __hash__ = None
+
+            def __eq__(self, other):
+                return func.foobar(self.__clause_element__()) == func.foobar(
+                    other
+                )
+
+        self.mapper(
+            User,
+            users,
+            properties={
+                "name": synonym(
+                    "_name", map_column=True, comparator_factory=MyFactory
+                )
+            },
+        )
+        self.assert_compile(
+            User.name == "ed",
+            "foobar(users.name) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+
+        self.assert_compile(
+            aliased(User).name == "ed",
+            "foobar(users_1.name) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+
+    def test_relationship(self):
+        users, Address, addresses, User = (
+            self.tables.users,
+            self.classes.Address,
+            self.tables.addresses,
+            self.classes.User,
+        )
+
+        from sqlalchemy.orm.relationships import RelationshipProperty
+
+        # NOTE: this API changed in 0.8, previously __clause_element__()
+        # gave the parent selectable, now it gives the
+        # primaryjoin/secondaryjoin
+        class MyFactory(RelationshipProperty.Comparator):
+            __hash__ = None
+
+            def __eq__(self, other):
+                return func.foobar(
+                    self._source_selectable().c.user_id
+                ) == func.foobar(other.id)
+
+        class MyFactory2(RelationshipProperty.Comparator):
+            __hash__ = None
+
+            def __eq__(self, other):
+                return func.foobar(
+                    self._source_selectable().c.id
+                ) == func.foobar(other.user_id)
+
+        self.mapper(User, users)
+        self.mapper(
+            Address,
+            addresses,
+            properties={
+                "user": relationship(
+                    User,
+                    comparator_factory=MyFactory,
+                    backref=backref(
+                        "addresses", comparator_factory=MyFactory2
+                    ),
+                )
+            },
+        )
+
+        # these are kind of nonsensical tests.
+        self.assert_compile(
+            Address.user == User(id=5),
+            "foobar(addresses.user_id) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+        self.assert_compile(
+            User.addresses == Address(id=5, user_id=7),
+            "foobar(users.id) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+
+        self.assert_compile(
+            aliased(Address).user == User(id=5),
+            "foobar(addresses_1.user_id) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
+
+        self.assert_compile(
+            aliased(User).addresses == Address(id=5, user_id=7),
+            "foobar(users_1.id) = foobar(:foobar_1)",
+            dialect=default.DefaultDialect(),
+        )
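
Throughout the hunks above, module-level mapper() calls are rewritten as self.mapper(), which the updated fixtures (e.g. DocumentTest.setup and ORMLoggingTest.setup) bind to registry().map_imperatively. A minimal, self-contained sketch of that pattern outside the test suite follows; the table and class names are illustrative and not taken from this diff.

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
from sqlalchemy import create_engine
from sqlalchemy.orm import Session, registry, relationship

metadata = MetaData()

user_table = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
address_table = Table(
    "addresses",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user_id", Integer, ForeignKey("users.id")),
    Column("email", String(50)),
)


class User(object):
    pass


class Address(object):
    pass


# registry() collects imperative ("classical") mappings; properties= works
# the same way it did with the module-level mapper() call
mapper_registry = registry()
mapper_registry.map_imperatively(Address, address_table)
mapper_registry.map_imperatively(
    User,
    user_table,
    properties={"addresses": relationship(Address, backref="user")},
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

session = Session(engine)
u1 = User()
u1.name = "ed"
a1 = Address()
a1.email = "ed@example.com"
u1.addresses.append(a1)
session.add(u1)
session.commit()

In these tests the bound method is stored as self.mapper so that existing call sites keep the familiar mapper(cls, table, ...) calling form unchanged.
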
index cec8865d9cb2fbc39d17fbd31a2e1dcbc0af7b51..cde66d400110917f58ccebdcabf2c7101d5c4f45 100644
@@ -6,6 +6,7 @@ from sqlalchemy import Integer
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy.orm import aliased
+from sqlalchemy.orm import attributes
 from sqlalchemy.orm import class_mapper
 from sqlalchemy.orm import column_property
 from sqlalchemy.orm import create_session
@@ -20,6 +21,7 @@ from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import strategy_options
 from sqlalchemy.orm import subqueryload
+from sqlalchemy.orm import synonym
 from sqlalchemy.orm import util as orm_util
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import fixtures
@@ -1878,3 +1880,351 @@ class SubOptionsTest(PathTest, QueryTest):
             joinedload(User.orders).options,
             Load(Order).joinedload(Order.items),
         )
+
+
+class MapperOptionsTest(_fixtures.FixtureTest):
+    def test_synonym_options(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses),
+                    lazy="select",
+                    order_by=addresses.c.id,
+                ),
+                adlist=synonym("addresses"),
+            ),
+        )
+
+        def go():
+            sess = create_session()
+            u = (
+                sess.query(User)
+                .order_by(User.id)
+                .options(sa.orm.joinedload("adlist"))
+                .filter_by(name="jack")
+            ).one()
+            eq_(u.adlist, [self.static.user_address_result[0].addresses[0]])
+
+        self.assert_sql_count(testing.db, go, 1)
+
+    def test_eager_options(self):
+        """A lazy relationship can be upgraded to an eager relationship."""
+
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), order_by=addresses.c.id
+                )
+            ),
+        )
+
+        sess = create_session()
+        result = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(sa.orm.joinedload("addresses"))
+        ).all()
+
+        def go():
+            eq_(result, self.static.user_address_result)
+
+        self.sql_count_(0, go)
+
+    def test_eager_options_with_limit(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), lazy="select"
+                )
+            ),
+        )
+
+        sess = create_session()
+        u = (
+            sess.query(User)
+            .options(sa.orm.joinedload("addresses"))
+            .filter_by(id=8)
+        ).one()
+
+        def go():
+            eq_(u.id, 8)
+            eq_(len(u.addresses), 3)
+
+        self.sql_count_(0, go)
+
+        sess.expunge_all()
+
+        u = sess.query(User).filter_by(id=8).one()
+        eq_(u.id, 8)
+        eq_(len(u.addresses), 3)
+
+    def test_lazy_options_with_limit(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), lazy="joined"
+                )
+            ),
+        )
+
+        sess = create_session()
+        u = (
+            sess.query(User)
+            .options(sa.orm.lazyload("addresses"))
+            .filter_by(id=8)
+        ).one()
+
+        def go():
+            eq_(u.id, 8)
+            eq_(len(u.addresses), 3)
+
+        self.sql_count_(1, go)
+
+    def test_eager_degrade(self):
+        """An eager relationship automatically degrades to a lazy relationship
+        if eager columns are not available."""
+
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses),
+                    lazy="joined",
+                    order_by=addresses.c.id,
+                )
+            ),
+        )
+
+        sess = create_session()
+        # first test straight eager load, 1 statement
+
+        def go():
+            result = sess.query(User).order_by(User.id).all()
+            eq_(result, self.static.user_address_result)
+
+        self.sql_count_(1, go)
+
+        sess.expunge_all()
+
+        # then select just from users.  run it into instances.
+        # then assert the data, which will launch 3 more lazy loads
+        # (previous users in session fell out of scope and were removed from
+        # session's identity map)
+        r = users.select().order_by(users.c.id).execute()
+
+        ctx = sess.query(User)._compile_context()
+
+        def go():
+            result = list(sess.query(User).instances(r, ctx))
+            eq_(result, self.static.user_address_result)
+
+        self.sql_count_(4, go)
+
+    def test_eager_degrade_deep(self):
+        (
+            users,
+            Keyword,
+            items,
+            order_items,
+            orders,
+            Item,
+            User,
+            Address,
+            keywords,
+            item_keywords,
+            Order,
+            addresses,
+        ) = (
+            self.tables.users,
+            self.classes.Keyword,
+            self.tables.items,
+            self.tables.order_items,
+            self.tables.orders,
+            self.classes.Item,
+            self.classes.User,
+            self.classes.Address,
+            self.tables.keywords,
+            self.tables.item_keywords,
+            self.classes.Order,
+            self.tables.addresses,
+        )
+
+        # test with a deeper set of eager loads.  when we first load the three
+        # users, they will have no addresses or orders.  the number of lazy
+        # loads when traversing the whole thing will be three for the
+        # addresses and three for the orders.
+        mapper(Address, addresses)
+
+        mapper(Keyword, keywords)
+
+        mapper(
+            Item,
+            items,
+            properties=dict(
+                keywords=relationship(
+                    Keyword,
+                    secondary=item_keywords,
+                    lazy="joined",
+                    order_by=item_keywords.c.keyword_id,
+                )
+            ),
+        )
+
+        mapper(
+            Order,
+            orders,
+            properties=dict(
+                items=relationship(
+                    Item,
+                    secondary=order_items,
+                    lazy="joined",
+                    order_by=order_items.c.item_id,
+                )
+            ),
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    Address, lazy="joined", order_by=addresses.c.id
+                ),
+                orders=relationship(
+                    Order, lazy="joined", order_by=orders.c.id
+                ),
+            ),
+        )
+
+        sess = create_session()
+
+        # first test straight eager load, 1 statement
+        def go():
+            result = sess.query(User).order_by(User.id).all()
+            eq_(result, self.static.user_all_result)
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        sess.expunge_all()
+
+        # then select just from users.  run it into instances.
+        # then assert the data, which will launch 6 more lazy loads
+        r = users.select().execute()
+
+        ctx = sess.query(User)._compile_context()
+
+        def go():
+            result = list(sess.query(User).instances(r, ctx))
+            eq_(result, self.static.user_all_result)
+
+        self.assert_sql_count(testing.db, go, 6)
+
+    def test_lazy_options(self):
+        """An eager relationship can be upgraded to a lazy relationship."""
+
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties=dict(
+                addresses=relationship(
+                    mapper(Address, addresses), lazy="joined"
+                )
+            ),
+        )
+
+        sess = create_session()
+        result = (
+            sess.query(User)
+            .order_by(User.id)
+            .options(sa.orm.lazyload("addresses"))
+        ).all()
+
+        def go():
+            eq_(result, self.static.user_address_result)
+
+        self.sql_count_(4, go)
+
+    def test_option_propagate(self):
+        users, items, order_items, Order, Item, User, orders = (
+            self.tables.users,
+            self.tables.items,
+            self.tables.order_items,
+            self.classes.Order,
+            self.classes.Item,
+            self.classes.User,
+            self.tables.orders,
+        )
+
+        mapper(User, users, properties=dict(orders=relationship(Order)))
+        mapper(
+            Order,
+            orders,
+            properties=dict(items=relationship(Item, secondary=order_items)),
+        )
+        mapper(Item, items)
+
+        sess = create_session()
+
+        oalias = aliased(Order)
+        opt1 = sa.orm.joinedload(User.orders, Order.items)
+        opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
+        u1 = (
+            sess.query(User)
+            .join(oalias, User.orders)
+            .options(opt1, opt2)
+            .first()
+        )
+        ustate = attributes.instance_state(u1)
+        assert opt1 in ustate.load_options
+        assert opt2 not in ustate.load_options
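The tests in this hunk still configure their mappings with the classic mapper() call that this commit deprecates. As a point of comparison, here is a minimal sketch, not part of the diff, of the same imperative style expressed against the registry object provided by the new sqlalchemy.orm decl_api module; the table layout, class names, and the map_imperatively() call are illustrative assumptions rather than code from this change.

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
from sqlalchemy.orm import registry, relationship

# hypothetical registry instance standing in for the module-level mapper()
# calls used throughout the tests above
mapper_registry = registry()
metadata = MetaData()

users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(30)),
)
addresses = Table(
    "addresses",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user_id", Integer, ForeignKey("users.id")),
    Column("email_address", String(50)),
)


class User(object):
    pass


class Address(object):
    pass


# imperative (classical) mappings registered on the registry, mirroring
# the mapper(User, users, properties=...) calls in the tests
mapper_registry.map_imperatively(Address, addresses)
mapper_registry.map_imperatively(
    User,
    users,
    properties={"addresses": relationship(Address, lazy="joined")},
)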
index 1a27fd98d14b716762056358ff6d98b82cfa8894..eaa1751f9655f54028a83830d6809bf44bc82b31 100644 (file)
@@ -5668,6 +5668,281 @@ class InactiveHistoryNoRaiseTest(_fixtures.FixtureTest):
         eq_(s.query(User).count(), 1)
 
 
+class RaiseLoadTest(_fixtures.FixtureTest):
+    run_inserts = "once"
+    run_deletes = None
+
+    def test_o2m_raiseload_mapper(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(Address, addresses)
+        mapper(
+            User,
+            users,
+            properties=dict(addresses=relationship(Address, lazy="raise")),
+        )
+        q = create_session().query(User)
+        result = [None]
+
+        def go():
+            x = q.filter(User.id == 7).all()
+            assert_raises_message(
+                sa.exc.InvalidRequestError,
+                "'User.addresses' is not available due to lazy='raise'",
+                lambda: x[0].addresses,
+            )
+            result[0] = x
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        self.assert_result(result[0], User, {"id": 7})
+
+    def test_o2m_raiseload_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(Address, addresses)
+        mapper(User, users, properties=dict(addresses=relationship(Address)))
+        q = create_session().query(User)
+        result = [None]
+
+        def go():
+            x = (
+                q.options(sa.orm.raiseload(User.addresses))
+                .filter(User.id == 7)
+                .all()
+            )
+            assert_raises_message(
+                sa.exc.InvalidRequestError,
+                "'User.addresses' is not available due to lazy='raise'",
+                lambda: x[0].addresses,
+            )
+            result[0] = x
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        self.assert_result(result[0], User, {"id": 7})
+
+    def test_o2m_raiseload_lazyload_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(Address, addresses)
+        mapper(
+            User,
+            users,
+            properties=dict(addresses=relationship(Address, lazy="raise")),
+        )
+        q = create_session().query(User).options(sa.orm.lazyload("addresses"))
+        result = [None]
+
+        def go():
+            x = q.filter(User.id == 7).all()
+            x[0].addresses
+            result[0] = x
+
+        self.sql_count_(2, go)
+
+        self.assert_result(
+            result[0], User, {"id": 7, "addresses": (Address, [{"id": 1}])}
+        )
+
+    def test_m2o_raiseload_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+        mapper(Address, addresses, properties={"user": relationship(User)})
+        mapper(User, users)
+        s = Session()
+        a1 = (
+            s.query(Address)
+            .filter_by(id=1)
+            .options(sa.orm.raiseload("user"))
+            .first()
+        )
+
+        def go():
+            assert_raises_message(
+                sa.exc.InvalidRequestError,
+                "'Address.user' is not available due to lazy='raise'",
+                lambda: a1.user,
+            )
+
+        self.sql_count_(0, go)
+
+    def test_m2o_raise_on_sql_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+        mapper(Address, addresses, properties={"user": relationship(User)})
+        mapper(User, users)
+        s = Session()
+        a1 = (
+            s.query(Address)
+            .filter_by(id=1)
+            .options(sa.orm.raiseload("user", sql_only=True))
+            .first()
+        )
+
+        def go():
+            assert_raises_message(
+                sa.exc.InvalidRequestError,
+                "'Address.user' is not available due to lazy='raise_on_sql'",
+                lambda: a1.user,
+            )
+
+        self.sql_count_(0, go)
+
+        s.close()
+
+        u1 = s.query(User).first()
+        a1 = (
+            s.query(Address)
+            .filter_by(id=1)
+            .options(sa.orm.raiseload("user", sql_only=True))
+            .first()
+        )
+        assert "user" not in a1.__dict__
+        is_(a1.user, u1)
+
+    def test_m2o_non_use_get_raise_on_sql_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+        mapper(
+            Address,
+            addresses,
+            properties={
+                "user": relationship(
+                    User,
+                    primaryjoin=sa.and_(
+                        addresses.c.user_id == users.c.id,
+                        users.c.name != None,  # noqa
+                    ),
+                )
+            },
+        )
+        mapper(User, users)
+        s = Session()
+        u1 = s.query(User).first()  # noqa
+        a1 = (
+            s.query(Address)
+            .filter_by(id=1)
+            .options(sa.orm.raiseload("user", sql_only=True))
+            .first()
+        )
+
+        def go():
+            assert_raises_message(
+                sa.exc.InvalidRequestError,
+                "'Address.user' is not available due to lazy='raise_on_sql'",
+                lambda: a1.user,
+            )
+
+        self.sql_count_(0, go)
+
+    def test_raiseload_wildcard_all_classes_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(Address, addresses)
+        mapper(
+            User,
+            users,
+            properties=dict(addresses=relationship(Address, backref="user")),
+        )
+        q = (
+            create_session()
+            .query(User, Address)
+            .join(Address, User.id == Address.user_id)
+        )
+
+        u1, a1 = q.options(sa.orm.raiseload("*")).filter(User.id == 7).first()
+
+        assert_raises_message(
+            sa.exc.InvalidRequestError,
+            "'User.addresses' is not available due to lazy='raise'",
+            lambda: u1.addresses,
+        )
+
+        assert_raises_message(
+            sa.exc.InvalidRequestError,
+            "'Address.user' is not available due to lazy='raise'",
+            lambda: a1.user,
+        )
+
+        # columns still work
+        eq_(u1.id, 7)
+        eq_(a1.id, 1)
+
+    def test_raiseload_wildcard_specific_class_option(self):
+        Address, addresses, users, User = (
+            self.classes.Address,
+            self.tables.addresses,
+            self.tables.users,
+            self.classes.User,
+        )
+
+        mapper(Address, addresses)
+        mapper(
+            User,
+            users,
+            properties=dict(addresses=relationship(Address, backref="user")),
+        )
+        q = (
+            create_session()
+            .query(User, Address)
+            .join(Address, User.id == Address.user_id)
+        )
+
+        u1, a1 = (
+            q.options(sa.orm.Load(Address).raiseload("*"))
+            .filter(User.id == 7)
+            .first()
+        )
+
+        # User doesn't raise
+        def go():
+            eq_(u1.addresses, [a1])
+
+        self.assert_sql_count(testing.db, go, 1)
+
+        # Address does
+        assert_raises_message(
+            sa.exc.InvalidRequestError,
+            "'Address.user' is not available due to lazy='raise'",
+            lambda: a1.user,
+        )
+
+        # columns still work
+        eq_(u1.id, 7)
+        eq_(a1.id, 1)
+
+
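The RaiseLoadTest cases above exercise the raise loader strategy both as a mapper-level setting (lazy='raise') and as the raiseload() query option, including the sql_only=True form that corresponds to lazy='raise_on_sql'. A self-contained sketch follows, not taken from this commit, that reproduces the two option-based behaviors against an in-memory SQLite database; the declarative classes, sample data, and engine URL are assumptions for illustration.

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, raiseload, relationship

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(30))
    addresses = relationship("Address", backref="user")


class Address(Base):
    __tablename__ = "addresses"
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    email_address = Column(String(50))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

session = Session(engine)
session.add(
    User(id=7, name="jack", addresses=[Address(id=1, email_address="jack@bean.com")])
)
session.commit()
session.expunge_all()

# attribute-level option: touching the unloaded collection would raise
# InvalidRequestError with the lazy='raise' message seen in the tests
u1 = session.query(User).options(raiseload(User.addresses)).first()

# sql_only=True (lazy='raise_on_sql') only blocks loads that would emit
# SQL; a simple many-to-one that can be satisfied from the identity map
# still resolves, as test_m2o_raise_on_sql_option demonstrates
owner = session.query(User).first()
a1 = (
    session.query(Address)
    .options(raiseload(Address.user, sql_only=True))
    .first()
)
assert a1.user is owner  # served from the identity map, no SQL emitted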
 class RelationDeprecationTest(fixtures.MappedTest):
 
     """test usage of the old 'relation' function."""