Add selectin loading
author     Mike Bayer <mike_mp@zzzcomputing.com>  Thu, 23 Mar 2017 21:55:43 +0000 (17:55 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>  Wed, 26 Apr 2017 20:08:57 +0000 (16:08 -0400)
Adding a new kind of relationship loader that is
a cross between the "immediateload" and the "subquery"
eager loader, using an IN criteria to load related items
in bulk immediately after the lead query result is loaded.

Change-Id: If13713fba9b465865aef8fd50b5b6b977fe3ef7d
Fixes: #3944
16 files changed:
doc/build/changelog/changelog_12.rst
doc/build/changelog/migration_12.rst
doc/build/faq/ormconfiguration.rst
doc/build/orm/loading_relationships.rst
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/strategy_options.py
regen_callcounts.tox.ini
test/aaa_profiling/test_orm.py
test/orm/test_selectin_relations.py [new file with mode: 0644]
test/orm/test_subquery_relations.py
test/profiles.txt
test/requirements.py

index 1f49281eddd12df82fc8411fed3cfbd91c969351..b87682b6dca0f14d6b3552622cf24ac3f213e710 100644 (file)
         inner element is negated correctly, when the :func:`.not_` modifier
         is applied to the labeled expression.
 
+    .. change:: 3944
+        :tags: feature, orm
+        :tickets: 3944
+
+        Added a new kind of eager loading called "selectin" loading.  This
+        style of loading is very similar to "subquery" eager loading,
+        except that it uses an IN expression given a list of primary key
+        values from the loaded parent objects, rather than re-stating the
+        original query.   This produces a more efficient query that is
+        "baked" (e.g. the SQL string is cached) and also works in the
+        context of :meth:`.Query.yield_per`.
+
+        .. seealso::
+
+            :ref:`change_3944`
+
     .. change::
         :tags: bug, orm
         :tickets: 3967
index c673eaa2e91c6bccff34f5cfa3a81dba3baf4196..72655321ce21c7c711e9a9a2c327fe7914c8769d 100644 (file)
@@ -70,6 +70,97 @@ very unusual cases, such as a relationship that uses a custom
 
 :ticket:`3954`
 
+.. _change_3944:
+
+New "selectin" eager loading, loads all collections at once using IN
+--------------------------------------------------------------------
+
+A new eager loader called "selectin" loading is added, which in many ways
+is similar to "subquery" loading, however produces a simpler SQL statement
+that is cacheable as well as more efficient.
+
+Given a query as below::
+
+    q = session.query(User).\
+        filter(User.name.like('%ed%')).\
+        options(subqueryload(User.addresses))
+
+The SQL produced would be the query against ``User`` followed by the
+subqueryload for ``User.addresses`` (note the parameters are also listed)::
+
+    SELECT users.id AS users_id, users.name AS users_name
+    FROM users
+    WHERE users.name LIKE ?
+    ('%ed%',)
+
+    SELECT addresses.id AS addresses_id,
+           addresses.user_id AS addresses_user_id,
+           addresses.email_address AS addresses_email_address,
+           anon_1.users_id AS anon_1_users_id
+    FROM (SELECT users.id AS users_id
+    FROM users
+    WHERE users.name LIKE ?) AS anon_1
+    JOIN addresses ON anon_1.users_id = addresses.user_id
+    ORDER BY anon_1.users_id
+    ('%ed%',)
+
+With "selectin" loading, we instead get a SELECT that refers to the
+actual primary key values loaded in the parent query::
+
+    q = session.query(User).\
+        filter(User.name.like('%ed%')).\
+        options(selectinload(User.addresses))
+
+Produces::
+
+    SELECT users.id AS users_id, users.name AS users_name
+    FROM users
+    WHERE users.name LIKE ?
+    ('%ed%',)
+
+    SELECT users_1.id AS users_1_id,
+           addresses.id AS addresses_id,
+           addresses.user_id AS addresses_user_id,
+           addresses.email_address AS addresses_email_address
+    FROM users AS users_1
+    JOIN addresses ON users_1.id = addresses.user_id
+    WHERE users_1.id IN (?, ?)
+    ORDER BY users_1.id
+    (1, 3)
+
+The above SELECT statement includes these advantages:
+
+* It doesn't use a subquery, just an INNER JOIN, meaning it will perform
+  much better on a database like MySQL that doesn't like subqueries.
+
+* Its structure is independent of the original query; in conjunction with the
+  new :ref:`expanding IN parameter system <change_3953>`, we can in most cases
+  use the "baked" query to cache the string SQL, reducing per-query overhead
+  significantly.
+
+* Because the query only fetches for a given list of primary key identifiers,
+  "selectin" loading is potentially compatible with :meth:`.Query.yield_per` to
+  operate on chunks of a SELECT result at a time, provided that the
+  database driver allows for multiple, simultaneous cursors (SQLite, Postgresql;
+  **not** MySQL drivers or SQL Server ODBC drivers).   Neither joined eager
+  loading nor subquery eager loading is compatible with :meth:`.Query.yield_per`.
+
+The disadvantages of selectin eager loading are potentially large SQL
+queries, with large lists of IN parameters.  The list of IN parameters itself
+is chunked into groups of 500, so a result set of more than 500 lead objects
+will emit additional "SELECT IN" queries, one per batch of 500.  Also, support
+for composite primary keys depends on the database's ability to use
+tuples with IN, e.g.
+``(table.column_one, table.column_two) IN ((?, ?), (?, ?), (?, ?))``.
+Currently, Postgresql and MySQL are known to be compatible with this syntax;
+SQLite is not.
+
+.. seealso::
+
+    :ref:`selectin_eager_loading`
+
+:ticket:`3944`
+
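To make the chunking behavior above concrete, here is a minimal, hypothetical
sketch (illustrative names only; this is not the loader's actual implementation)
of how a set of lead primary keys would be split into batches of 500, with one
IN-based SELECT emitted per batch, assuming the ``User`` / ``Address`` mapping
and ``session`` from the examples above::

    CHUNK_SIZE = 500  # mirrors the loader's per-statement limit

    def load_addresses_in_batches(session, user_ids):
        """Emit one "SELECT ... WHERE user_id IN (...)" per 500 parent ids."""
        collections = {}
        while user_ids:
            chunk, user_ids = user_ids[:CHUNK_SIZE], user_ids[CHUNK_SIZE:]
            for address in session.query(Address).filter(
                    Address.user_id.in_(chunk)).order_by(Address.user_id):
                collections.setdefault(address.user_id, []).append(address)
        return collections
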
 .. _change_3229:
 
 Support for bulk updates of hybrids, composites
index 5baa5effe543ce7e9b66d59da97a6f1304763e91..1263906c434a1e1ecef03cbb1e5bcb943860c932 100644 (file)
@@ -328,7 +328,9 @@ The primary key is a good choice for this::
 
 Note that the :func:`.joinedload` eager loader strategy does not suffer from
 the same problem because only one query is ever issued, so the load query
-cannot be different from the main query.
+cannot be different from the main query.  Similarly, the :func:`.selectinload`
+eager loader strategy also does not have this issue, as it links its collection
+loads directly to the primary key values just loaded.
 
 .. seealso::
 
index aaddb87f9b31abddf1af0cf67eae934369b57d6b..f2279a2b49757199fe789acc51f64dd1e68ade4e 100644 (file)
@@ -48,6 +48,12 @@ The primary forms of relationship loading are:
   related table to be loaded to load all members of related collections / scalar
   references at once.  Subquery eager loading is detailed at :ref:`subquery_eager_loading`.
 
+* **select IN loading** - available via ``lazy='selectin'`` or the :func:`.selectinload`
+  option, this form of loading emits a second (or more) SELECT statement which
+  assembles the primary key identifiers of the parent objects into an IN clause,
+  so that all members of related collections / scalar references are loaded at once
+  by primary key.  Select IN loading is detailed at :ref:`selectin_eager_loading`.
+
 * **raise loading** - available via ``lazy='raise'``, ``lazy='raise_sql'``,
   or the :func:`.raiseload` option, this form of loading is triggered at the
   same time a lazy load would normally occur, except it raises an ORM exception
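
As a concrete sketch of the "select IN loading" form above, configured at
mapping time with ``lazy='selectin'`` (illustrative Declarative classes against
an in-memory SQLite database; not taken verbatim from the documentation's own
fixtures)::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        # the collection is loaded by a second, IN-based SELECT whenever
        # User objects are loaded
        addresses = relationship("Address", lazy="selectin")

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey('users.id'))
        email_address = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add(
        User(name="ed", addresses=[Address(email_address="ed@example.com")]))
    session.commit()

    # one SELECT for users, then one "SELECT ... WHERE user_id IN (...)"
    # for the addresses collections
    users = session.query(User).all()
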
@@ -69,7 +75,7 @@ at mapping time to take place in all cases where an object of the mapped
 type is loaded, in the absence of any query-level options that modify it.
 This is configured using the :paramref:`.relationship.lazy` parameter to
 :func:`.relationship`; common values for this parameter
-include ``"select"``, ``"joined"``, and ``"subquery"``.
+include ``"select"``, ``"joined"``, ``"subquery"``, and ``"selectin"``.
 
 For example, to configure a relationship to use joined eager loading when
 the parent object is queried::
@@ -99,7 +105,7 @@ is to set them up on a per-query basis against specific attributes.  Very detail
 control over relationship loading is available using loader options;
 the most common are
 :func:`~sqlalchemy.orm.joinedload`,
-:func:`~sqlalchemy.orm.subqueryload`,
+:func:`~sqlalchemy.orm.subqueryload`, :func:`~sqlalchemy.orm.selectinload`
 and :func:`~sqlalchemy.orm.lazyload`.   The option accepts either
 the string name of an attribute against a parent, or for greater specificity
 can accommodate a class-bound attribute directly::
@@ -348,7 +354,10 @@ in play.
 To "batch" queries with arbitrarily large sets of result data while maintaining
 compatibility with collection-based joined eager loading, emit multiple
 SELECT statements, each referring to a subset of rows using the WHERE
-clause, e.g. windowing.
+clause, e.g. windowing.   Alternatively, consider using "select IN" eager loading
+which is **potentially** compatible with :meth:`.Query.yield_per`, provided
+that the database driver in use supports multiple, simultaneous cursors
+(SQLite, Postgresql drivers, not MySQL drivers or SQL Server ODBC drivers).
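
A hedged sketch of the combination described above, assuming a ``User`` /
``Address`` mapping like the one used throughout this chapter and a DBAPI
driver with simultaneous cursor support (e.g. psycopg2 or pysqlite)::

    from sqlalchemy.orm import selectinload

    # stream User rows in batches of 100; as each batch of users is loaded,
    # one additional "SELECT ... WHERE user_id IN (...)" loads their
    # "addresses" collections
    q = session.query(User).options(
        selectinload(User.addresses)).yield_per(100)

    for user in q:
        print(user.name, len(user.addresses))
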
 
 
 .. _zen_of_eager_loading:
@@ -597,6 +606,13 @@ load the full contents of all collections at once, is therefore incompatible
 with "batched" loading supplied by :meth:`.Query.yield_per`, both for collection
 and scalar relationships.
 
+The newer style of loading provided by :func:`.selectinload` solves these
+limitations of :func:`.subqueryload`.
+
+.. seealso::
+
+    :ref:`selectin_eager_loading`
+
 
 .. _subqueryload_ordering:
 
@@ -629,6 +645,124 @@ that the inner query could return the wrong rows::
 
     :ref:`faq_subqueryload_limit_sort` - detailed example
 
+.. _selectin_eager_loading:
+
+Select IN loading
+-----------------
+
+Select IN loading is similar in operation to subquery eager loading; however,
+the SELECT statement which is emitted has a much simpler structure than
+that of subquery eager loading.  Additionally, select IN loading applies
+itself to subsets of the load result at a time, so unlike joined and subquery
+eager loading, it is compatible with batching of results using
+:meth:`.Query.yield_per`, provided the database driver supports simultaneous
+cursors.
+
+.. versionadded:: 1.2
+
+"Select IN" eager loading is provided using the ``"selectin"`` argument
+to :paramref:`.relationship.lazy` or by using the :func:`.selectinload` loader
+option.   This style of loading emits a SELECT that refers to
+the primary key values of the parent object inside of an IN clause,
+in order to load related associations:
+
+.. sourcecode:: python+sql
+
+    >>> jack = session.query(User).\
+    ... options(selectinload('addresses')).\
+    ... filter(or_(User.name == 'jack', User.name == 'ed')).all()
+    {opensql}SELECT
+        users.id AS users_id,
+        users.name AS users_name,
+        users.fullname AS users_fullname,
+        users.password AS users_password
+    FROM users
+    WHERE users.name = ? OR users.name = ?
+    ('jack', 'ed')
+    SELECT
+        users_1.id AS users_1_id,
+        addresses.id AS addresses_id,
+        addresses.email_address AS addresses_email_address,
+        addresses.user_id AS addresses_user_id
+    FROM users AS users_1
+    JOIN addresses ON users_1.id = addresses.user_id
+    WHERE users_1.id IN (?, ?)
+    ORDER BY users_1.id, addresses.id
+    (5, 7)
+
+Above, the second SELECT refers to ``users_1.id IN (5, 7)``, where the
+"5" and "7" are the primary key values for the previous two ``User``
+objects loaded; after a batch of objects are completely loaded, their primary
+key values are injected into the ``IN`` clause for the second SELECT.
+
+"Select IN" loading is the newest form of eager loading added to SQLAlchemy
+as of the 1.2 series.   Things to know about this kind of loading include:
+
+* The SELECT statement emitted by the "selectin" loader strategy, unlike
+  that of "subquery", does not
+  require a subquery nor does it inherit any of the performance limitations
+  of the original query; the lookup is a simple primary key lookup and should
+  have high performance.
+
+* The special ordering requirements of subqueryload described at
+  :ref:`subqueryload_ordering` also don't apply to selectin loading; selectin
+  loading always links directly to a parent primary key and can't really
+  return the wrong result.
+
+* "selectin" loading, unlike joined or subquery eager loading, always emits
+  its SELECT in terms of the immediate parent objects just loaded, and
+  not the original type of object at the top of the chain.  So if eager loading
+  many levels deep, "selectin" loading still uses exactly one JOIN in the statement.
+  joined and subquery eager loading always refer to multiple JOINs up to
+  the original parent.
+
+* "selectin" loading produces a SELECT statement of a predictable structure,
+  independent of that of the original query.  As such, taking advantage of
+  a new feature of :meth:`.ColumnOperators.in_` that allows it to work
+  with cached queries, the selectin loader makes full use of the
+  :mod:`sqlalchemy.ext.baked` extension to cache generated SQL and greatly
+  cut down on internal function call overhead.
+
+* The strategy will only query for at most 500 parent primary key values at a
+  time, as the primary keys are rendered into a large IN expression in the
+  SQL statement.   Some databases like Oracle have a hard limit on how large
+  an IN expression can be, and overall the size of the SQL string shouldn't
+  be arbitrarily large.   So for large result sets, "selectin" loading
+  will emit a SELECT per 500 parent rows returned.   These SELECT statements
+  emit with minimal Python overhead due to the "baked" queries and also minimal
+  SQL overhead as they query against the primary key directly.
+
+* "selectin" loading is the only eager loading that can work in conjunction with
+  the "batching" feature provided by :meth:`.Query.yield_per`, provided
+  the database driver supports simultaneous cursors.   As it only
+  queries for related items against specific result objects, "selectin" loading
+  allows for eagerly loaded collections against arbitrarily large result sets
+  with a top limit on memory use when used with :meth:`.Query.yield_per`.
+
+  Current database drivers that support simultaneous cursors include those for
+  SQLite (pysqlite) and Postgresql (psycopg2).   The MySQL drivers mysqlclient
+  and pymysql currently
+  **do not** support simultaneous cursors, nor do the ODBC drivers for
+  SQL Server.
+
+* As "selectin" loading relies upon IN, for a mapping with composite primary
+  keys, it must use the "tuple" form of IN, which looks like
+  ``WHERE (table.column_a, table.column_b) IN ((?, ?), (?, ?), (?, ?))``.
+  This syntax is not supported on every database; currently it is known
+  to be only supported by modern Postgresql and MySQL versions.  Therefore
+  **selectin loading is not platform-agnostic for composite primary keys**.
+  There is no special logic in SQLAlchemy to check ahead of time which platforms
+  support this syntax or not; if run against a non-supporting platform (such
+  as SQLite), the database will return an error immediately.   An advantage of
+  SQLAlchemy simply emitting the SQL and letting it fail is that if a database
+  like SQLite does start supporting this syntax, it will work without any changes
+  to SQLAlchemy.
+
+In general, "selectin" loading is probably superior to "subquery" eager loading
+in most ways, save for the syntax requirement with composite primary keys
+and possibly that it may emit many SELECT statements for larger result sets.
+As always, developers should spend time looking at the
+statements and results generated by their applications in development to
+check that things are working efficiently.
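
To illustrate the composite-primary-key caveat in the list above, a small
Core-level sketch of the "tuple IN" construct (the table and columns here are
hypothetical)::

    from sqlalchemy import Column, Integer, MetaData, Table, tuple_

    metadata = MetaData()
    widget = Table(
        'widget', metadata,
        Column('column_a', Integer, primary_key=True),
        Column('column_b', Integer, primary_key=True),
    )

    # renders roughly "(widget.column_a, widget.column_b) IN ((?, ?), (?, ?))";
    # accepted by modern Postgresql and MySQL, rejected by SQLite
    criteria = tuple_(widget.c.column_a, widget.c.column_b).in_(
        [(1, 2), (3, 4)])
    print(criteria)
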
 
 .. _what_kind_of_loading:
 
@@ -666,7 +800,27 @@ references a scalar many-to-one reference.
  * When multiple levels of depth are used with joined or subquery loading, loading collections-within-
    collections will multiply the total number of rows fetched in a cartesian fashion.  Both
    joined and subquery eager loading always join from the original parent class; if loading a collection
-   four levels deep, there will be four JOINs out to the parent.
+   four levels deep, there will be four JOINs out to the parent.  selectin loading
+   on the other hand will always have exactly one JOIN to the immediate
+   parent table.
+
+ * Using selectin loading, the load of 100 objects will also emit two SQL
+   statements, the second of which refers to the 100 primary keys of the
+   objects loaded.   selectin loading will however render at most 500 primary
+   key values into a single SELECT statement; so for a lead collection larger
+   than 500, there will be a SELECT statement emitted for each batch of
+   500 objects selected.
+
+ * Using multiple levels of depth with selectin loading does not incur the
+   "cartesian" issue that joined and subquery eager loading have; the queries
+   for selectin loading have the best performance characteristics and the
+   fewest number of rows.  The only caveat is that there might be more than
+   one SELECT emitted depending on the size of the lead result.
+
+ * selectin loading, unlike joined (when using collections) and subquery eager
+   loading (all kinds of relationships), is potentially compatible with result
+   set batching provided by :meth:`.Query.yield_per` assuming an appropriate
+   database driver, so may be able to allow batching for large result sets.
 
 * Many to One Reference
 
@@ -692,6 +846,12 @@ references a scalar many-to-one reference.
    joined loading, however, except perhaps that subquery loading can use an INNER JOIN in all cases
    whereas joined loading requires that the foreign key is NOT NULL.
 
+ * Selectin loading will also issue a second load for all the child objects (and as
+   stated before, for larger results it will emit a SELECT per 500 rows), so for a load of 100 objects
+   there would be two SQL statements emitted.  The query itself still has to
+   JOIN to the parent table, so again there's not too much advantage to
+   selectin loading for many-to-one vs. joined eager loading save for the
+   use of INNER JOIN in all cases.
 
 Polymorphic Eager Loading
 -------------------------
@@ -707,6 +867,7 @@ Wildcard Loading Strategies
 ---------------------------
 
 Each of :func:`.joinedload`, :func:`.subqueryload`, :func:`.lazyload`,
+:func:`.selectinload`,
 :func:`.noload`, and :func:`.raiseload` can be used to set the default
 style of :func:`.relationship` loading
 for a particular query, affecting all :func:`.relationship` -mapped
@@ -1011,6 +1172,10 @@ Relationship Loader API
 
 .. autofunction:: raiseload
 
+.. autofunction:: selectinload
+
+.. autofunction:: selectinload_all
+
 .. autofunction:: subqueryload
 
 .. autofunction:: subqueryload_all
index 449173548a49ab8b9400e6aae9b6bf3132b92fc5..8d46324e8e29a80be5c5d38fff073a4d37c75527 100644 (file)
@@ -240,6 +240,8 @@ lazyload = strategy_options.lazyload._unbound_fn
 lazyload_all = strategy_options.lazyload_all._unbound_all_fn
 subqueryload = strategy_options.subqueryload._unbound_fn
 subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
+selectinload = strategy_options.selectinload._unbound_fn
+selectinload_all = strategy_options.selectinload_all._unbound_all_fn
 immediateload = strategy_options.immediateload._unbound_fn
 noload = strategy_options.noload._unbound_fn
 raiseload = strategy_options.raiseload._unbound_fn
index f749cdd1db03ed2dbaa3cc35c2a9e74be273cfb1..3733d50e14dc0608488cf781aadf0dd2c75e3bc1 100644 (file)
@@ -18,6 +18,7 @@ from .. import util
 from . import attributes, exc as orm_exc
 from ..sql import util as sql_util
 from . import strategy_options
+from . import path_registry
 
 from .util import _none_set, state_str
 from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
@@ -31,6 +32,7 @@ def instances(query, cursor, context):
     """Return an ORM result as an iterator."""
 
     context.runid = _new_runid()
+    context.post_load_paths = {}
 
     filtered = query._has_mapper_entities
 
@@ -77,6 +79,10 @@ def instances(query, cursor, context):
                 rows = [keyed_tuple([proc(row) for proc in process])
                         for row in fetch]
 
+            for path, post_load in \
+                    context.post_load_paths.items():
+                post_load.invoke(context, path)
+
             if filtered:
                 rows = util.unique_list(rows, filter_fn)
 
@@ -346,6 +352,7 @@ def _instance_processor(
     session_id = context.session.hash_key
     version_check = context.version_check
     runid = context.runid
+    post_load = PostLoad.for_context(context, load_path, only_load_props)
 
     if refresh_state:
         refresh_identity_key = refresh_state.key
@@ -452,6 +459,9 @@ def _instance_processor(
                     else:
                         state._commit_all(dict_, session_identity_map)
 
+            if post_load:
+                post_load.add_state(state, True)
+
         else:
             # partial population routines, for objects that were already
             # in the Session, but a row matches them; apply eager loaders
@@ -475,6 +485,9 @@ def _instance_processor(
 
                     state._commit(dict_, to_load)
 
+            if post_load and context.invoke_all_eagers:
+                post_load.add_state(state, False)
+
         return instance
 
     if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
@@ -632,6 +645,52 @@ def _decorate_polymorphic_switch(
     return polymorphic_instance
 
 
+class PostLoad(object):
+    """Track loaders and states for "post load" operations.
+
+    """
+    __slots__ = 'loaders', 'states', 'load_keys'
+
+    def __init__(self):
+        self.loaders = {}
+        self.states = util.OrderedDict()
+        self.load_keys = None
+
+    def add_state(self, state, overwrite):
+        self.states[state] = overwrite
+
+    def invoke(self, context, path):
+        if not self.states:
+            return
+        path = path_registry.PathRegistry.coerce(path)
+        for key, loader, arg, kw in self.loaders.values():
+            loader(
+                context, path, self.states.items(),
+                self.load_keys, *arg, **kw)
+        self.states.clear()
+
+    @classmethod
+    def for_context(cls, context, path, only_load_props):
+        pl = context.post_load_paths.get(path.path)
+        if pl is not None and only_load_props:
+            pl.load_keys = only_load_props
+        return pl
+
+    @classmethod
+    def path_exists(cls, context, path, key):
+        return path.path in context.post_load_paths and \
+            key in context.post_load_paths[path.path].loaders
+
+    @classmethod
+    def callable_for_path(
+            cls, context, path, attr_key, loader_callable, *arg, **kw):
+        if path.path in context.post_load_paths:
+            pl = context.post_load_paths[path.path]
+        else:
+            pl = context.post_load_paths[path.path] = PostLoad()
+        pl.loaders[attr_key] = (attr_key, loader_callable, arg, kw)
+
+
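A rough, simplified sketch of the idea behind ``PostLoad`` (purely illustrative,
not the actual internals): row processors register one loader callable per path
and accumulate the states produced for that path; once a batch of rows has been
processed, all accumulated states are handed to each loader in a single call::

    class MiniPostLoad(object):
        """Toy stand-in for PostLoad: collect states, then fire loaders."""

        def __init__(self):
            self.loaders = {}   # attribute key -> loader callable
            self.states = []    # parent states gathered while processing rows

        def add_state(self, state):
            self.states.append(state)

        def invoke(self):
            if not self.states:
                return
            for loader in self.loaders.values():
                # e.g. SelectInLoader._load_for_path receives all parent
                # states at once and emits its IN-based SELECT(s)
                loader(self.states)
            self.states = []

    def selectin_style_loader(states):
        print("loading collections for %d parents in bulk" % len(states))

    post_load = MiniPostLoad()
    post_load.loaders['addresses'] = selectin_style_loader
    for state in ['state-1', 'state-2', 'state-3']:
        post_load.add_state(state)
    post_load.invoke()
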
 def load_scalar_attributes(mapper, state, attribute_names):
     """initiate a column-based attribute refresh operation."""
 
index 272ef77fb80b4b5ba5135160de14e5bfc1818e7a..a2f83818c86553e4f840fe761439380d672a7abb 100644 (file)
@@ -753,9 +753,14 @@ class Query(object):
         (e.g. approximately 1000) is used, even with DBAPIs that buffer
         rows (which are most).
 
-        The :meth:`.Query.yield_per` method **is not compatible with most
-        eager loading schemes, including subqueryload and joinedload with
-        collections**.  For this reason, it may be helpful to disable
+        The :meth:`.Query.yield_per` method **is not compatible with
+        subqueryload eager loading or joinedload eager loading when
+        using collections**.  It is potentially compatible with "select in"
+        eager loading, **provided the database driver supports multiple,
+        independent cursors** (pysqlite and psycopg2 are known to work;
+        MySQL and SQL Server ODBC drivers do not).
+
+        Therefore in some cases, it may be helpful to disable
         eager loads, either unconditionally with
         :meth:`.Query.enable_eagerloads`::
 
@@ -4103,7 +4108,8 @@ class QueryContext(object):
         'primary_columns', 'secondary_columns', 'eager_order_by',
         'eager_joins', 'create_eager_joins', 'propagate_options',
         'attributes', 'statement', 'from_clause', 'whereclause',
-        'order_by', 'labels', '_for_update_arg', 'runid', 'partials'
+        'order_by', 'labels', '_for_update_arg', 'runid', 'partials',
+        'post_load_paths'
     )
 
     def __init__(self, query):
index 1005e7eeb40d254c86398c5e402466b7ac10de23..43f53aec522afd51a231fb14d6035b5fb5517f01 100644 (file)
@@ -527,6 +527,13 @@ class RelationshipProperty(StrategizedProperty):
             a subquery of the original statement, for each collection
             requested.
 
+          * ``selectin`` - items should be loaded "eagerly" as the parents
+            are loaded, using one or more additional SQL statements, which
+            issue a JOIN to the immediate parent object, specifying primary
+            key identifiers using an IN clause.
+
+            .. versionadded:: 1.2
+
           * ``noload`` - no loading should occur at any time.  This is to
             support "write-only" attributes, or attributes which are
             populated in some manner specific to the application.
index 10131c80d3e8fca5adc352766627bbd37fb85a1a..ddf5397fd3e213c7823813bf6e5cdd85d21b259a 100644 (file)
@@ -14,10 +14,10 @@ from ..sql import util as sql_util, visitors
 from .. import sql
 from . import (
     attributes, interfaces, exc as orm_exc, loading,
-    unitofwork, util as orm_util
+    unitofwork, util as orm_util, query
 )
 from .state import InstanceState
-from .util import _none_set
+from .util import _none_set, aliased
 from . import properties
 from .interfaces import (
     LoaderStrategy, StrategizedProperty
@@ -1744,6 +1744,178 @@ class JoinedLoader(AbstractRelationshipLoader):
             populators["eager"].append((self.key, load_scalar_from_joined_exec))
 
 
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="selectin")
+class SelectInLoader(AbstractRelationshipLoader):
+    __slots__ = (
+        'join_depth', '_parent_alias', '_in_expr', '_parent_pk_cols',
+        '_zero_idx'
+    )
+
+    _chunksize = 500
+
+    def __init__(self, parent, strategy_key):
+        super(SelectInLoader, self).__init__(parent, strategy_key)
+        self.join_depth = self.parent_property.join_depth
+        self._parent_alias = aliased(self.parent.class_)
+        pa_insp = inspect(self._parent_alias)
+        self._parent_pk_cols = pk_cols = [
+            pa_insp._adapt_element(col) for col in self.parent.primary_key]
+        if len(pk_cols) > 1:
+            self._in_expr = sql.tuple_(*pk_cols)
+            self._zero_idx = False
+        else:
+            self._in_expr = pk_cols[0]
+            self._zero_idx = True
+
+    def init_class_attribute(self, mapper):
+        self.parent_property.\
+            _get_strategy((("lazy", "select"),)).\
+            init_class_attribute(mapper)
+
+    def create_row_processor(
+            self, context, path, loadopt, mapper,
+            result, adapter, populators):
+        if not self.parent.class_manager[self.key].impl.supports_population:
+            raise sa_exc.InvalidRequestError(
+                "'%s' does not support object "
+                "population - eager loading cannot be applied." %
+                self
+            )
+
+        selectin_path = (
+            context.query._current_path or orm_util.PathRegistry.root) + path
+
+        if loading.PostLoad.path_exists(context, selectin_path, self.key):
+            return
+
+        path_w_prop = path[self.parent_property]
+        selectin_path_w_prop = selectin_path[self.parent_property]
+
+        # build up a path indicating the path from the leftmost
+        # entity to the thing we're subquery loading.
+        with_poly_info = path_w_prop.get(
+            context.attributes,
+            "path_with_polymorphic", None)
+
+        if with_poly_info is not None:
+            effective_entity = with_poly_info.entity
+        else:
+            effective_entity = self.mapper
+
+        if not path_w_prop.contains(context.attributes, "loader"):
+            if self.join_depth:
+                if selectin_path_w_prop.length / 2 > self.join_depth:
+                    return
+            elif selectin_path_w_prop.contains_mapper(self.mapper):
+                return
+
+        loading.PostLoad.callable_for_path(
+            context, selectin_path, self.key,
+            self._load_for_path, effective_entity)
+
+    @util.dependencies("sqlalchemy.ext.baked")
+    def _load_for_path(
+            self, baked, context, path, states, load_only, effective_entity):
+
+        if load_only and self.key not in load_only:
+            return
+
+        our_states = [
+            (state.key[1], state, overwrite)
+            for state, overwrite in states
+        ]
+
+        pk_cols = self._parent_pk_cols
+        pa = self._parent_alias
+
+        q = baked.BakedQuery(
+            # TODO: use strategy-local cache
+            self.mapper._compiled_cache,
+            lambda session: session.query(
+                query.Bundle("pk", *pk_cols), effective_entity
+            )
+        )
+
+        q.add_criteria(
+            lambda q: q.select_from(pa).join(
+                getattr(pa,
+                        self.parent_property.key).of_type(effective_entity)).
+            filter(
+                self._in_expr.in_(
+                    sql.bindparam('primary_keys', expanding=True))
+            ).order_by(*pk_cols)
+        )
+
+        orig_query = context.query
+
+        q._add_lazyload_options(
+            orig_query._with_options, path[self.parent_property]
+        )
+
+        if orig_query._populate_existing:
+            q.add_criteria(
+                lambda q: q.populate_existing()
+            )
+
+        if self.parent_property.order_by:
+            def _setup_outermost_orderby(q):
+                # imitate the same method that
+                # subquery eager loading does it, looking for the
+                # adapted "secondary" table
+                eagerjoin = q._from_obj[0]
+                eager_order_by = \
+                    eagerjoin._target_adapter.\
+                    copy_and_process(
+                        util.to_list(
+                            self.parent_property.order_by
+                        )
+                    )
+                return q.order_by(*eager_order_by)
+
+            q.add_criteria(
+                _setup_outermost_orderby
+            )
+
+        uselist = self.uselist
+        _empty_result = () if uselist else None
+
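+        # process the parent states in chunks of at most _chunksize (500)
+        # primary keys, emitting one SELECT per chunk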
+        while our_states:
+            chunk = our_states[0:self._chunksize]
+            our_states = our_states[self._chunksize:]
+
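+            # invoke the baked query using this chunk's primary keys as the
+            # "expanding" IN parameter, collating rows into a dict keyed by
+            # parent primary key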
+            data = {
+                k: [vv[1] for vv in v]
+                for k, v in itertools.groupby(
+                    q(context.session).params(
+                        primary_keys=[
+                            key[0] if self._zero_idx else key
+                            for key, state, overwrite in chunk]
+                    ),
+                    lambda x: x[0]
+                )
+            }
+
+            for key, state, overwrite in chunk:
+                if not overwrite and self.key in state.dict:
+                    continue
+
+                collection = data.get(key, _empty_result)
+
+                if not uselist and collection:
+                    if len(collection) > 1:
+                        util.warn(
+                            "Multiple rows returned with "
+                            "uselist=False for eagerly-loaded "
+                            "attribute '%s' "
+                            % self)
+                    state.get_impl(self.key).set_committed_value(
+                        state, state.dict, collection[0])
+                else:
+                    state.get_impl(self.key).set_committed_value(
+                        state, state.dict, collection)
+
+
 def single_parent_validator(desc, prop):
     def _do_check(state, value, oldvalue, initiator):
         if value is not None and initiator.key == prop.key:
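
The baked query built in ``_load_for_path`` relies on the new "expanding" bind
parameter so that the cached SQL string stays constant while the IN list varies
per chunk.  A minimal Core-level sketch of that pattern (hypothetical table,
not the loader's actual statement)::

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, bindparam, create_engine,
        select)

    metadata = MetaData()
    users = Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String))

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(users.insert(), [
            {'id': 1, 'name': 'ed'},
            {'id': 2, 'name': 'wendy'},
            {'id': 3, 'name': 'jack'}])

        # the statement text stays the same no matter how many values are
        # passed; the list is expanded into bound parameters at execution time
        stmt = select([users]).where(
            users.c.id.in_(bindparam('primary_keys', expanding=True)))

        print(conn.execute(stmt, {'primary_keys': [1, 3]}).fetchall())
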
index 423d3f558ad4e5c2ef9338443374d9bb49cf5422..df13f05db1673fa5f57b3b61bfb2be41e98a7560 100644 (file)
@@ -900,11 +900,13 @@ def subqueryload(loadopt, attr):
         query(User).options(subqueryload(User.orders))
 
         # subquery-load Order.items and then Item.keywords
-        query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords))
+        query(Order).options(
+            subqueryload(Order.items).subqueryload(Item.keywords))
 
         # lazily load Order.items, but when Items are loaded,
         # subquery-load the keywords collection
-        query(Order).options(lazyload(Order.items).subqueryload(Item.keywords))
+        query(Order).options(
+            lazyload(Order.items).subqueryload(Item.keywords))
 
 
     .. seealso::
@@ -927,6 +929,50 @@ def subqueryload_all(*keys):
     return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
 
 
+@loader_option()
+def selectinload(loadopt, attr):
+    """Indicate that the given attribute should be loaded using
+    SELECT IN eager loading.
+
+    This function is part of the :class:`.Load` interface and supports
+    both method-chained and standalone operation.
+
+    examples::
+
+        # selectin-load the "orders" collection on "User"
+        query(User).options(selectinload(User.orders))
+
+        # selectin-load Order.items and then Item.keywords
+        query(Order).options(
+            selectinload(Order.items).selectinload(Item.keywords))
+
+        # lazily load Order.items, but when Items are loaded,
+        # selectin-load the keywords collection
+        query(Order).options(
+            lazyload(Order.items).selectinload(Item.keywords))
+
+    .. versionadded:: 1.2
+
+    .. seealso::
+
+        :ref:`loading_toplevel`
+
+        :ref:`selectin_eager_loading`
+
+    """
+    return loadopt.set_relationship_strategy(attr, {"lazy": "selectin"})
+
+
+@selectinload._add_unbound_fn
+def selectinload(*keys):
+    return _UnboundLoad._from_keys(_UnboundLoad.selectinload, keys, False, {})
+
+
+@selectinload._add_unbound_all_fn
+def selectinload_all(*keys):
+    return _UnboundLoad._from_keys(_UnboundLoad.selectinload, keys, True, {})
+
+
 @loader_option()
 def lazyload(loadopt, attr):
     """Indicate that the given attribute should be loaded using "lazy"
@@ -960,6 +1006,10 @@ def immediateload(loadopt, attr):
     """Indicate that the given attribute should be loaded using
     an immediate load with a per-attribute SELECT statement.
 
+    The :func:`.immediateload` option is superseded in general
+    by the :func:`.selectinload` option, which performs the same task
+    more efficiently by emitting a SELECT for all loaded objects.
+
     This function is part of the :class:`.Load` interface and supports
     both method-chained and standalone operation.
 
@@ -967,6 +1017,8 @@ def immediateload(loadopt, attr):
 
         :ref:`loading_toplevel`
 
+        :ref:`selectin_eager_loading`
+
     """
     loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
     return loader
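
For completeness, a short sketch of the ``selectinload_all`` form registered
above, which accepts a chain of attributes in a single call, mirroring
``subqueryload_all`` (``User`` and ``Order`` are the illustrative classes used
elsewhere in the docs)::

    from sqlalchemy.orm import selectinload, selectinload_all

    # selectin-load User.orders, then Order.items, in one option
    q = session.query(User).options(
        selectinload_all(User.orders, Order.items))

    # equivalent chained form
    q = session.query(User).options(
        selectinload(User.orders).selectinload(Order.items))
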
index 0106de97bcd12e070207a63a2531e53468613465..62403a70c7c9c440a446ef14cd402cf3a45c5acd 100644 (file)
@@ -3,7 +3,7 @@ envlist = py{27,34,35}-sqla_{cext,nocext}-db_{sqlite,postgresql,mysql}
 
 [base]
 basecommand=
-       python -m pytest test/aaa_profiling -k "not memusage" --force-write-profiles
+       python -m pytest test/aaa_profiling -k "not memusage" --force-write-profiles {posargs}
 
 [testenv]
 deps=pytest
index 61fd532e570f8a0185e11bf4859f42c39d855e3d..f97773a4523c62e30c626a57cec512aff0047d20 100644 (file)
@@ -1,6 +1,6 @@
 from sqlalchemy import Integer, String, ForeignKey
 from sqlalchemy.orm import mapper, relationship, \
-    sessionmaker, Session, defer, joinedload, defaultload
+    sessionmaker, Session, defer, joinedload, defaultload, selectinload
 from sqlalchemy import testing
 from sqlalchemy.testing import profiling
 from sqlalchemy.testing import fixtures
@@ -516,6 +516,96 @@ class QueryTest(fixtures.MappedTest):
         go()
 
 
+class SelectInEagerLoadTest(fixtures.MappedTest):
+    """basic test for selectin() loading, which uses a baked query.
+
+    if the baked query starts spoiling due to some bug in cache keys,
+    this callcount blows up.
+
+    """
+
+    @classmethod
+    def define_tables(cls, metadata):
+
+        Table(
+            'a',
+            metadata,
+            Column('id', Integer,
+                   primary_key=True, test_needs_autoincrement=True),
+            Column('x', Integer),
+            Column('y', Integer)
+        )
+        Table(
+            'b',
+            metadata,
+            Column('id', Integer,
+                   primary_key=True, test_needs_autoincrement=True),
+            Column('a_id', ForeignKey('a.id')),
+            Column('x', Integer),
+            Column('y', Integer)
+        )
+        Table(
+            'c',
+            metadata,
+            Column('id', Integer,
+                   primary_key=True, test_needs_autoincrement=True),
+            Column('b_id', ForeignKey('b.id')),
+            Column('x', Integer),
+            Column('y', Integer)
+        )
+
+    @classmethod
+    def setup_classes(cls):
+        class A(cls.Basic):
+            pass
+
+        class B(cls.Basic):
+            pass
+
+        class C(cls.Basic):
+            pass
+
+    @classmethod
+    def setup_mappers(cls):
+        A, B, C = cls.classes('A', 'B', 'C')
+        a, b, c = cls.tables('a', 'b', 'c')
+
+        mapper(A, a, properties={
+            'bs': relationship(B),
+        })
+        mapper(B, b, properties={
+            'cs': relationship(C)
+        })
+        mapper(C, c)
+
+    @classmethod
+    def insert_data(cls):
+        A, B, C = cls.classes('A', 'B', 'C')
+        s = Session()
+        s.add(
+            A(
+                bs=[B(cs=[C()]), B(cs=[C()])]
+            )
+        )
+        s.commit()
+
+    def test_round_trip_results(self):
+        A, B, C = self.classes('A', 'B', 'C')
+
+        sess = Session()
+
+        q = sess.query(A).options(
+            selectinload(A.bs).selectinload(B.cs)
+        )
+
+        @profiling.function_call_count()
+        def go():
+            for i in range(100):
+                obj = q.all()
+                list(obj)
+                sess.close()
+        go()
+
 class JoinedEagerLoadTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
diff --git a/test/orm/test_selectin_relations.py b/test/orm/test_selectin_relations.py
new file mode 100644 (file)
index 0000000..6f10260
--- /dev/null
@@ -0,0 +1,1985 @@
+from sqlalchemy.testing import eq_, is_, is_not_, is_true
+from sqlalchemy import testing
+from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy import Integer, String, ForeignKey, bindparam
+from sqlalchemy.orm import selectinload, selectinload_all, \
+    mapper, relationship, clear_mappers, create_session, \
+    aliased, joinedload, deferred, undefer,\
+    Session, subqueryload
+from sqlalchemy.testing import assert_raises, \
+    assert_raises_message
+from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
+from test.orm import _fixtures
+import sqlalchemy as sa
+
+from sqlalchemy.orm import with_polymorphic
+
+from .inheritance._poly_fixtures import _Polymorphic, Person, Engineer, \
+    Paperwork, Machine, MachineType, Company
+
+
+class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
+    run_inserts = 'once'
+    run_deletes = None
+
+    def test_basic(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(
+                mapper(Address, addresses),
+                order_by=Address.id)
+        })
+        sess = create_session()
+
+        q = sess.query(User).options(selectinload(User.addresses))
+
+        def go():
+            eq_(
+                [User(id=7, addresses=[
+                    Address(id=1, email_address='jack@bean.com')])],
+                q.filter(User.id == 7).all()
+            )
+
+        self.assert_sql_count(testing.db, go, 2)
+
+        def go():
+            eq_(
+                self.static.user_address_result,
+                q.order_by(User.id).all()
+            )
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_from_aliased(self):
+        users, Dingaling, User, dingalings, Address, addresses = (
+            self.tables.users,
+            self.classes.Dingaling,
+            self.classes.User,
+            self.tables.dingalings,
+            self.classes.Address,
+            self.tables.addresses)
+
+        mapper(Dingaling, dingalings)
+        mapper(Address, addresses, properties={
+            'dingalings': relationship(Dingaling, order_by=Dingaling.id)
+        })
+        mapper(User, users, properties={
+            'addresses': relationship(
+                Address,
+                order_by=Address.id)
+        })
+        sess = create_session()
+
+        u = aliased(User)
+
+        q = sess.query(u).options(selectinload(u.addresses))
+
+        def go():
+            eq_(
+                [User(id=7, addresses=[
+                    Address(id=1, email_address='jack@bean.com')])],
+                q.filter(u.id == 7).all()
+            )
+
+        self.assert_sql_count(testing.db, go, 2)
+
+        def go():
+            eq_(
+                self.static.user_address_result,
+                q.order_by(u.id).all()
+            )
+        self.assert_sql_count(testing.db, go, 2)
+
+        q = sess.query(u).\
+            options(selectinload_all(u.addresses, Address.dingalings))
+
+        def go():
+            eq_(
+                [
+                    User(id=8, addresses=[
+                        Address(id=2, email_address='ed@wood.com',
+                                dingalings=[Dingaling()]),
+                        Address(id=3, email_address='ed@bettyboop.com'),
+                        Address(id=4, email_address='ed@lala.com'),
+                    ]),
+                    User(id=9, addresses=[
+                        Address(id=5, dingalings=[Dingaling()])
+                    ]),
+                ],
+                q.filter(u.id.in_([8, 9])).all()
+            )
+        self.assert_sql_count(testing.db, go, 3)
+
+    def test_from_get(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(
+                mapper(Address, addresses),
+                order_by=Address.id)
+        })
+        sess = create_session()
+
+        q = sess.query(User).options(selectinload(User.addresses))
+
+        def go():
+            eq_(
+                User(id=7, addresses=[
+                    Address(id=1, email_address='jack@bean.com')]),
+                q.get(7)
+            )
+
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_from_params(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(
+                mapper(Address, addresses),
+                order_by=Address.id)
+        })
+        sess = create_session()
+
+        q = sess.query(User).options(selectinload(User.addresses))
+
+        def go():
+            eq_(
+                User(id=7, addresses=[
+                    Address(id=1, email_address='jack@bean.com')]),
+                q.filter(User.id == bindparam('foo')).params(foo=7).one()
+            )
+
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_disable_dynamic(self):
+        """test no selectin option on a dynamic."""
+
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(Address, lazy="dynamic")
+        })
+        mapper(Address, addresses)
+        sess = create_session()
+
+        # previously this would not raise, but would emit
+        # the query needlessly and put the result nowhere.
+        assert_raises_message(
+            sa.exc.InvalidRequestError,
+            "User.addresses' does not support object population - eager "
+            "loading cannot be applied.",
+            sess.query(User).options(selectinload(User.addresses)).first,
+        )
+
+    def test_many_to_many_plain(self):
+        keywords, items, item_keywords, Keyword, Item = (
+            self.tables.keywords,
+            self.tables.items,
+            self.tables.item_keywords,
+            self.classes.Keyword,
+            self.classes.Item)
+
+        mapper(Keyword, keywords)
+        mapper(Item, items, properties=dict(
+            keywords=relationship(Keyword, secondary=item_keywords,
+                                  lazy='selectin', order_by=keywords.c.id)))
+
+        q = create_session().query(Item).order_by(Item.id)
+
+        def go():
+            eq_(self.static.item_keyword_result, q.all())
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_many_to_many_with_join(self):
+        keywords, items, item_keywords, Keyword, Item = (
+            self.tables.keywords,
+            self.tables.items,
+            self.tables.item_keywords,
+            self.classes.Keyword,
+            self.classes.Item)
+
+        mapper(Keyword, keywords)
+        mapper(Item, items, properties=dict(
+            keywords=relationship(Keyword, secondary=item_keywords,
+                                  lazy='selectin', order_by=keywords.c.id)))
+
+        q = create_session().query(Item).order_by(Item.id)
+
+        def go():
+            eq_(self.static.item_keyword_result[0:2],
+                q.join('keywords').filter(Keyword.name == 'red').all())
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_many_to_many_with_join_alias(self):
+        keywords, items, item_keywords, Keyword, Item = (
+            self.tables.keywords,
+            self.tables.items,
+            self.tables.item_keywords,
+            self.classes.Keyword,
+            self.classes.Item)
+
+        mapper(Keyword, keywords)
+        mapper(Item, items, properties=dict(
+            keywords=relationship(Keyword, secondary=item_keywords,
+                                  lazy='selectin', order_by=keywords.c.id)))
+
+        q = create_session().query(Item).order_by(Item.id)
+
+        def go():
+            eq_(self.static.item_keyword_result[0:2],
+                (q.join('keywords', aliased=True).
+                 filter(Keyword.name == 'red')).all())
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_orderby(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(mapper(Address, addresses),
+                                      lazy='selectin',
+                                      order_by=addresses.c.email_address),
+        })
+        q = create_session().query(User)
+        eq_([
+            User(id=7, addresses=[
+                Address(id=1)
+            ]),
+            User(id=8, addresses=[
+                Address(id=3, email_address='ed@bettyboop.com'),
+                Address(id=4, email_address='ed@lala.com'),
+                Address(id=2, email_address='ed@wood.com')
+            ]),
+            User(id=9, addresses=[
+                Address(id=5)
+            ]),
+            User(id=10, addresses=[])
+        ], q.order_by(User.id).all())
+
+    def test_orderby_multi(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses': relationship(mapper(Address, addresses),
+                                      lazy='selectin',
+                                      order_by=[
+                addresses.c.email_address,
+                addresses.c.id]),
+        })
+        q = create_session().query(User)
+        eq_([
+            User(id=7, addresses=[
+                Address(id=1)
+            ]),
+            User(id=8, addresses=[
+                Address(id=3, email_address='ed@bettyboop.com'),
+                Address(id=4, email_address='ed@lala.com'),
+                Address(id=2, email_address='ed@wood.com')
+            ]),
+            User(id=9, addresses=[
+                Address(id=5)
+            ]),
+            User(id=10, addresses=[])
+        ], q.order_by(User.id).all())
+
+    def test_orderby_related(self):
+        """A regular mapper select on a single table can
+            order by a relationship to a second table"""
+
+        Address, addresses, users, User = (self.classes.Address,
+                                           self.tables.addresses,
+                                           self.tables.users,
+                                           self.classes.User)
+
+        mapper(Address, addresses)
+        mapper(User, users, properties=dict(
+            addresses=relationship(Address,
+                                   lazy='selectin',
+                                   order_by=addresses.c.id),
+        ))
+
+        q = create_session().query(User)
+        result = q.filter(User.id == Address.user_id).\
+            order_by(Address.email_address).all()
+
+        eq_([
+            User(id=8, addresses=[
+                Address(id=2, email_address='ed@wood.com'),
+                Address(id=3, email_address='ed@bettyboop.com'),
+                Address(id=4, email_address='ed@lala.com'),
+            ]),
+            User(id=9, addresses=[
+                Address(id=5)
+            ]),
+            User(id=7, addresses=[
+                Address(id=1)
+            ]),
+        ], result)
+
+    def test_orderby_desc(self):
+        Address, addresses, users, User = (self.classes.Address,
+                                           self.tables.addresses,
+                                           self.tables.users,
+                                           self.classes.User)
+
+        mapper(Address, addresses)
+        mapper(User, users, properties=dict(
+            addresses=relationship(Address, lazy='selectin',
+                                   order_by=[
+                                       sa.desc(addresses.c.email_address)
+                                   ]),
+        ))
+        sess = create_session()
+        eq_([
+            User(id=7, addresses=[
+                Address(id=1)
+            ]),
+            User(id=8, addresses=[
+                Address(id=2, email_address='ed@wood.com'),
+                Address(id=4, email_address='ed@lala.com'),
+                Address(id=3, email_address='ed@bettyboop.com'),
+            ]),
+            User(id=9, addresses=[
+                Address(id=5)
+            ]),
+            User(id=10, addresses=[])
+        ], sess.query(User).order_by(User.id).all())
+
+    _pathing_runs = [
+        ("lazyload", "lazyload", "lazyload", 15),
+        ("selectinload", "lazyload", "lazyload", 12),
+        ("selectinload", "selectinload", "lazyload", 8),
+        ("joinedload", "selectinload", "lazyload", 7),
+        ("lazyload", "lazyload", "selectinload", 12),
+        ("selectinload", "selectinload", "selectinload", 4),
+        ("selectinload", "selectinload", "joinedload", 3),
+    ]
+
+    def test_options_pathing(self):
+        self._do_options_test(self._pathing_runs)
+
+    def test_mapper_pathing(self):
+        self._do_mapper_test(self._pathing_runs)
+
+    def _do_options_test(self, configs):
+        users, Keyword, orders, items, order_items, Order, Item, User, \
+            keywords, item_keywords = (self.tables.users,
+                                       self.classes.Keyword,
+                                       self.tables.orders,
+                                       self.tables.items,
+                                       self.tables.order_items,
+                                       self.classes.Order,
+                                       self.classes.Item,
+                                       self.classes.User,
+                                       self.tables.keywords,
+                                       self.tables.item_keywords)
+
+        mapper(User, users, properties={
+            'orders': relationship(Order, order_by=orders.c.id),  # o2m, m2o
+        })
+        mapper(Order, orders, properties={
+            'items': relationship(Item,
+                                  secondary=order_items,
+                                  order_by=items.c.id),  # m2m
+        })
+        mapper(Item, items, properties={
+            'keywords': relationship(Keyword,
+                                     secondary=item_keywords,
+                                     order_by=keywords.c.id)  # m2m
+        })
+        mapper(Keyword, keywords)
+
+        callables = {
+            'joinedload': joinedload,
+            'selectinload': selectinload,
+            'subqueryload': subqueryload
+        }
+
+        for o, i, k, count in configs:
+            options = []
+            if o in callables:
+                options.append(callables[o](User.orders))
+            if i in callables:
+                options.append(callables[i](User.orders, Order.items))
+            if k in callables:
+                options.append(callables[k](
+                    User.orders, Order.items, Item.keywords))
+
+            self._do_query_tests(options, count)
+
+    def _do_mapper_test(self, configs):
+        users, Keyword, orders, items, order_items, Order, Item, User, \
+            keywords, item_keywords = (self.tables.users,
+                                       self.classes.Keyword,
+                                       self.tables.orders,
+                                       self.tables.items,
+                                       self.tables.order_items,
+                                       self.classes.Order,
+                                       self.classes.Item,
+                                       self.classes.User,
+                                       self.tables.keywords,
+                                       self.tables.item_keywords)
+
+        opts = {
+            'lazyload': 'select',
+            'joinedload': 'joined',
+            'selectinload': 'selectin',
+        }
+
+        for o, i, k, count in configs:
+            mapper(User, users, properties={
+                'orders': relationship(Order, lazy=opts[o],
+                                       order_by=orders.c.id),
+            })
+            mapper(Order, orders, properties={
+                'items': relationship(Item,
+                                      secondary=order_items, lazy=opts[i],
+                                      order_by=items.c.id),
+            })
+            mapper(Item, items, properties={
+                'keywords': relationship(Keyword,
+                                         lazy=opts[k],
+                                         secondary=item_keywords,
+                                         order_by=keywords.c.id)
+            })
+            mapper(Keyword, keywords)
+
+            try:
+                self._do_query_tests([], count)
+            finally:
+                clear_mappers()
+
+    def _do_query_tests(self, opts, count):
+        Order, User = self.classes.Order, self.classes.User
+
+        sess = create_session()
+
+        def go():
+            eq_(
+                sess.query(User).options(*opts).order_by(User.id).all(),
+                self.static.user_item_keyword_result
+            )
+        self.assert_sql_count(testing.db, go, count)
+
+        eq_(
+            sess.query(User).options(*opts).filter(User.name == 'fred').
+            order_by(User.id).all(),
+            self.static.user_item_keyword_result[2:3]
+        )
+
+        sess = create_session()
+        eq_(
+            sess.query(User).options(*opts).join(User.orders).
+            filter(Order.id == 3).
+            order_by(User.id).all(),
+            self.static.user_item_keyword_result[0:1]
+        )
+
+    def test_cyclical(self):
+        """A circular eager relationship breaks the cycle with a lazy loader"""
+
+        Address, addresses, users, User = (self.classes.Address,
+                                           self.tables.addresses,
+                                           self.tables.users,
+                                           self.classes.User)
+
+        mapper(Address, addresses)
+        mapper(User, users, properties=dict(
+            addresses=relationship(Address, lazy='selectin',
+                                   backref=sa.orm.backref(
+                                       'user', lazy='selectin'),
+                                   order_by=Address.id)
+        ))
+        is_(sa.orm.class_mapper(User).get_property('addresses').lazy,
+            'selectin')
+        is_(sa.orm.class_mapper(Address).get_property('user').lazy, 'selectin')
+
+        sess = create_session()
+        eq_(self.static.user_address_result,
+            sess.query(User).order_by(User.id).all())
+
+    def test_cyclical_explicit_join_depth(self):
+        """A circular eager relationship breaks the cycle with a lazy loader"""
+
+        Address, addresses, users, User = (self.classes.Address,
+                                           self.tables.addresses,
+                                           self.tables.users,
+                                           self.classes.User)
+
+        mapper(Address, addresses)
+        mapper(User, users, properties=dict(
+            addresses=relationship(Address, lazy='selectin', join_depth=1,
+                                   backref=sa.orm.backref(
+                                       'user', lazy='selectin', join_depth=1),
+                                   order_by=Address.id)
+        ))
+        is_(sa.orm.class_mapper(User).get_property('addresses').lazy,
+            'selectin')
+        is_(sa.orm.class_mapper(Address).get_property('user').lazy, 'selectin')
+
+        sess = create_session()
+        eq_(self.static.user_address_result,
+            sess.query(User).order_by(User.id).all())
+
+    def test_double(self):
+        """Eager loading with two relationships simultaneously,
+            from the same table, using aliases."""
+
+        users, orders, User, Address, Order, addresses = (
+            self.tables.users,
+            self.tables.orders,
+            self.classes.User,
+            self.classes.Address,
+            self.classes.Order,
+            self.tables.addresses)
+
+        openorders = sa.alias(orders, 'openorders')
+        closedorders = sa.alias(orders, 'closedorders')
+
+        mapper(Address, addresses)
+        mapper(Order, orders)
+
+        open_mapper = mapper(Order, openorders, non_primary=True)
+        closed_mapper = mapper(Order, closedorders, non_primary=True)
+
+        mapper(User, users, properties=dict(
+            addresses=relationship(Address, lazy='selectin',
+                                   order_by=addresses.c.id),
+            open_orders=relationship(
+                open_mapper,
+                primaryjoin=sa.and_(openorders.c.isopen == 1,
+                                    users.c.id == openorders.c.user_id),
+                lazy='selectin', order_by=openorders.c.id),
+            closed_orders=relationship(
+                closed_mapper,
+                primaryjoin=sa.and_(closedorders.c.isopen == 0,
+                                    users.c.id == closedorders.c.user_id),
+                lazy='selectin', order_by=closedorders.c.id)))
+
+        q = create_session().query(User).order_by(User.id)
+
+        def go():
+            eq_([
+                User(
+                    id=7,
+                    addresses=[Address(id=1)],
+                    open_orders=[Order(id=3)],
+                    closed_orders=[Order(id=1), Order(id=5)]
+                ),
+                User(
+                    id=8,
+                    addresses=[Address(id=2), Address(id=3), Address(id=4)],
+                    open_orders=[],
+                    closed_orders=[]
+                ),
+                User(
+                    id=9,
+                    addresses=[Address(id=5)],
+                    open_orders=[Order(id=4)],
+                    closed_orders=[Order(id=2)]
+                ),
+                User(id=10)
+
+            ], q.all())
+        self.assert_sql_count(testing.db, go, 4)
+
+    def test_double_same_mappers(self):
+        """Eager loading with two relationships simultaneously,
+        from the same table, using aliases."""
+
+        addresses, items, order_items, orders, Item, User, Address, Order, \
+            users = (self.tables.addresses,
+                     self.tables.items,
+                     self.tables.order_items,
+                     self.tables.orders,
+                     self.classes.Item,
+                     self.classes.User,
+                     self.classes.Address,
+                     self.classes.Order,
+                     self.tables.users)
+
+        mapper(Address, addresses)
+        mapper(Order, orders, properties={
+            'items': relationship(Item, secondary=order_items, lazy='selectin',
+                                  order_by=items.c.id)})
+        mapper(Item, items)
+        mapper(User, users, properties=dict(
+            addresses=relationship(
+                Address, lazy='selectin', order_by=addresses.c.id),
+            open_orders=relationship(
+                Order,
+                primaryjoin=sa.and_(orders.c.isopen == 1,
+                                    users.c.id == orders.c.user_id),
+                lazy='selectin', order_by=orders.c.id),
+            closed_orders=relationship(
+                Order,
+                primaryjoin=sa.and_(orders.c.isopen == 0,
+                                    users.c.id == orders.c.user_id),
+                lazy='selectin', order_by=orders.c.id)))
+        q = create_session().query(User).order_by(User.id)
+
+        def go():
+            eq_([
+                User(id=7,
+                     addresses=[
+                         Address(id=1)],
+                     open_orders=[Order(id=3,
+                                        items=[
+                                            Item(id=3),
+                                            Item(id=4),
+                                            Item(id=5)])],
+                     closed_orders=[Order(id=1,
+                                          items=[
+                                              Item(id=1),
+                                              Item(id=2),
+                                              Item(id=3)]),
+                                    Order(id=5,
+                                          items=[
+                                              Item(id=5)])]),
+                User(id=8,
+                     addresses=[
+                         Address(id=2),
+                         Address(id=3),
+                         Address(id=4)],
+                     open_orders=[],
+                     closed_orders=[]),
+                User(id=9,
+                     addresses=[
+                         Address(id=5)],
+                     open_orders=[
+                         Order(id=4,
+                               items=[
+                                   Item(id=1),
+                                   Item(id=5)])],
+                     closed_orders=[
+                         Order(id=2,
+                               items=[
+                                   Item(id=1),
+                                   Item(id=2),
+                                   Item(id=3)])]),
+                User(id=10)
+            ], q.all())
+        self.assert_sql_count(testing.db, go, 6)
+
+    def test_limit(self):
+        """Limit operations combined with lazy-load relationships."""
+
+        users, items, order_items, orders, Item, User, Address, Order, \
+            addresses = (self.tables.users,
+                         self.tables.items,
+                         self.tables.order_items,
+                         self.tables.orders,
+                         self.classes.Item,
+                         self.classes.User,
+                         self.classes.Address,
+                         self.classes.Order,
+                         self.tables.addresses)
+
+        mapper(Item, items)
+        mapper(Order, orders, properties={
+            'items': relationship(Item, secondary=order_items, lazy='selectin',
+                                  order_by=items.c.id)
+        })
+        mapper(User, users, properties={
+            'addresses': relationship(mapper(Address, addresses),
+                                      lazy='selectin',
+                                      order_by=addresses.c.id),
+            'orders': relationship(Order, lazy='select', order_by=orders.c.id)
+        })
+
+        sess = create_session()
+        q = sess.query(User)
+
+        result = q.order_by(User.id).limit(2).offset(1).all()
+        eq_(self.static.user_all_result[1:3], result)
+
+        result = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
+        eq_(list(reversed(self.static.user_all_result[0:2])), result)
+
+    @testing.uses_deprecated("Mapper.order_by")
+    def test_mapper_order_by(self):
+        users, User, Address, addresses = (self.tables.users,
+                                           self.classes.User,
+                                           self.classes.Address,
+                                           self.tables.addresses)
+
+        mapper(Address, addresses)
+        mapper(User, users, properties={
+            'addresses': relationship(Address,
+                                      lazy='selectin',
+                                      order_by=addresses.c.id),
+        }, order_by=users.c.id.desc())
+
+        sess = create_session()
+        q = sess.query(User)
+
+        result = q.limit(2).all()
+        eq_(result, list(reversed(self.static.user_address_result[2:4])))
+
+    def test_one_to_many_scalar(self):
+        Address, addresses, users, User = (self.classes.Address,
+                                           self.tables.addresses,
+                                           self.tables.users,
+                                           self.classes.User)
+
+        mapper(User, users, properties=dict(
+            address=relationship(mapper(Address, addresses),
+                                 lazy='selectin', uselist=False)
+        ))
+        q = create_session().query(User)
+
+        def go():
+            result = q.filter(users.c.id == 7).all()
+            eq_([User(id=7, address=Address(id=1))], result)
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_many_to_one(self):
+        users, Address, addresses, User = (self.tables.users,
+                                           self.classes.Address,
+                                           self.tables.addresses,
+                                           self.classes.User)
+
+        mapper(Address, addresses, properties=dict(
+            user=relationship(mapper(User, users), lazy='selectin')
+        ))
+        sess = create_session()
+        q = sess.query(Address)
+
+        def go():
+            a = q.filter(addresses.c.id == 1).one()
+            is_not_(a.user, None)
+            u1 = sess.query(User).get(7)
+            is_(a.user, u1)
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_double_with_aggregate(self):
+        User, users, orders, Order = (self.classes.User,
+                                      self.tables.users,
+                                      self.tables.orders,
+                                      self.classes.Order)
+
+        max_orders_by_user = sa.select([sa.func.max(orders.c.id)
+                                        .label('order_id')],
+                                       group_by=[orders.c.user_id]) \
+            .alias('max_orders_by_user')
+
+        max_orders = orders.select(
+            orders.c.id == max_orders_by_user.c.order_id).\
+            alias('max_orders')
+
+        mapper(Order, orders)
+        mapper(User, users, properties={
+               'orders': relationship(Order, backref='user', lazy='selectin',
+                                      order_by=orders.c.id),
+               'max_order': relationship(
+                   mapper(Order, max_orders, non_primary=True),
+                   lazy='selectin', uselist=False)
+               })
+
+        q = create_session().query(User)
+
+        def go():
+            eq_([
+                User(id=7, orders=[
+                    Order(id=1),
+                    Order(id=3),
+                    Order(id=5),
+                ],
+                    max_order=Order(id=5)
+                ),
+                User(id=8, orders=[]),
+                User(id=9, orders=[Order(id=2), Order(id=4)],
+                     max_order=Order(id=4)),
+                User(id=10),
+            ], q.order_by(User.id).all())
+        self.assert_sql_count(testing.db, go, 3)
+
+    def test_uselist_false_warning(self):
+        """test that multiple rows received by a
+        uselist=False raises a warning."""
+
+        User, users, orders, Order = (self.classes.User,
+                                      self.tables.users,
+                                      self.tables.orders,
+                                      self.classes.Order)
+
+        mapper(User, users, properties={
+            'order': relationship(Order, uselist=False)
+        })
+        mapper(Order, orders)
+        s = create_session()
+        assert_raises(sa.exc.SAWarning,
+                      s.query(User).options(selectinload(User.order)).all)
+
+
+class LoadOnExistingTest(_fixtures.FixtureTest):
+    """test that loaders from a base Query fully populate."""
+
+    run_inserts = 'once'
+    run_deletes = None
+
+    def _collection_to_scalar_fixture(self):
+        User, Address, Dingaling = self.classes.User, \
+            self.classes.Address, self.classes.Dingaling
+        mapper(User, self.tables.users, properties={
+            'addresses': relationship(Address),
+        })
+        mapper(Address, self.tables.addresses, properties={
+            'dingaling': relationship(Dingaling)
+        })
+        mapper(Dingaling, self.tables.dingalings)
+
+        sess = Session(autoflush=False)
+        return User, Address, Dingaling, sess
+
+    def _collection_to_collection_fixture(self):
+        User, Order, Item = self.classes.User, \
+            self.classes.Order, self.classes.Item
+        mapper(User, self.tables.users, properties={
+            'orders': relationship(Order),
+        })
+        mapper(Order, self.tables.orders, properties={
+            'items': relationship(Item, secondary=self.tables.order_items),
+        })
+        mapper(Item, self.tables.items)
+
+        sess = Session(autoflush=False)
+        return User, Order, Item, sess
+
+    def _eager_config_fixture(self):
+        User, Address = self.classes.User, self.classes.Address
+        mapper(User, self.tables.users, properties={
+            'addresses': relationship(Address, lazy="selectin"),
+        })
+        mapper(Address, self.tables.addresses)
+        sess = Session(autoflush=False)
+        return User, Address, sess
+
+    def _deferred_config_fixture(self):
+        User, Address = self.classes.User, self.classes.Address
+        mapper(User, self.tables.users, properties={
+            'name': deferred(self.tables.users.c.name),
+            'addresses': relationship(Address, lazy="selectin"),
+        })
+        mapper(Address, self.tables.addresses)
+        sess = Session(autoflush=False)
+        return User, Address, sess
+
+    def test_no_query_on_refresh(self):
+        User, Address, sess = self._eager_config_fixture()
+
+        u1 = sess.query(User).get(8)
+        assert 'addresses' in u1.__dict__
+        sess.expire(u1)
+
+        def go():
+            eq_(u1.id, 8)
+        self.assert_sql_count(testing.db, go, 1)
+        assert 'addresses' not in u1.__dict__
+
+    def test_no_query_on_deferred(self):
+        User, Address, sess = self._deferred_config_fixture()
+        u1 = sess.query(User).get(8)
+        assert 'addresses' in u1.__dict__
+        sess.expire(u1, ['addresses'])
+
+        def go():
+            eq_(u1.name, 'ed')
+        self.assert_sql_count(testing.db, go, 1)
+        assert 'addresses' not in u1.__dict__
+
+    def test_populate_existing_propagate(self):
+        User, Address, sess = self._eager_config_fixture()
+        u1 = sess.query(User).get(8)
+        u1.addresses[2].email_address = "foofoo"
+        del u1.addresses[1]
+        u1 = sess.query(User).populate_existing().filter_by(id=8).one()
+        # collection is reverted
+        eq_(len(u1.addresses), 3)
+
+        # attributes on related items reverted
+        eq_(u1.addresses[2].email_address, "ed@lala.com")
+
+    def test_loads_second_level_collection_to_scalar(self):
+        User, Address, Dingaling, sess = self._collection_to_scalar_fixture()
+
+        u1 = sess.query(User).get(8)
+        a1 = Address()
+        u1.addresses.append(a1)
+        a2 = u1.addresses[0]
+        a2.email_address = 'foo'
+        sess.query(User).options(selectinload_all("addresses.dingaling")).\
+            filter_by(id=8).all()
+        assert u1.addresses[-1] is a1
+        for a in u1.addresses:
+            if a is not a1:
+                assert 'dingaling' in a.__dict__
+            else:
+                assert 'dingaling' not in a.__dict__
+            if a is a2:
+                eq_(a2.email_address, 'foo')
+
+    def test_loads_second_level_collection_to_collection(self):
+        User, Order, Item, sess = self._collection_to_collection_fixture()
+
+        u1 = sess.query(User).get(7)
+        u1.orders
+        o1 = Order()
+        u1.orders.append(o1)
+        sess.query(User).options(selectinload_all("orders.items")).\
+            filter_by(id=7).all()
+        for o in u1.orders:
+            if o is not o1:
+                assert 'items' in o.__dict__
+            else:
+                assert 'items' not in o.__dict__
+
+    def test_load_two_levels_collection_to_scalar(self):
+        User, Address, Dingaling, sess = self._collection_to_scalar_fixture()
+
+        u1 = sess.query(User).filter_by(id=8).options(
+            selectinload("addresses")).one()
+        sess.query(User).filter_by(id=8).options(
+            selectinload_all("addresses.dingaling")).first()
+        assert 'dingaling' in u1.addresses[0].__dict__
+
+    def test_load_two_levels_collection_to_collection(self):
+        User, Order, Item, sess = self._collection_to_collection_fixture()
+
+        u1 = sess.query(User).filter_by(id=7).options(
+            selectinload("orders")).one()
+        sess.query(User).filter_by(id=7).options(
+            selectinload_all("orders.items")).first()
+        assert 'items' in u1.orders[0].__dict__
+
+
+class OrderBySecondaryTest(fixtures.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('m2m', metadata,
+              Column('id', Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('aid', Integer, ForeignKey('a.id')),
+              Column('bid', Integer, ForeignKey('b.id')))
+
+        Table('a', metadata,
+              Column('id', Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('data', String(50)))
+        Table('b', metadata,
+              Column('id', Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('data', String(50)))
+
+    @classmethod
+    def fixtures(cls):
+        return dict(
+            a=(('id', 'data'),
+               (1, 'a1'),
+               (2, 'a2')),
+
+            b=(('id', 'data'),
+               (1, 'b1'),
+               (2, 'b2'),
+               (3, 'b3'),
+               (4, 'b4')),
+
+            m2m=(('id', 'aid', 'bid'),
+                 (2, 1, 1),
+                 (4, 2, 4),
+                 (1, 1, 3),
+                 (6, 2, 2),
+                 (3, 1, 2),
+                 (5, 2, 3)))
+
+    def test_ordering(self):
+        a, m2m, b = (self.tables.a,
+                     self.tables.m2m,
+                     self.tables.b)
+
+        class A(fixtures.ComparableEntity):
+            pass
+
+        class B(fixtures.ComparableEntity):
+            pass
+
+        mapper(A, a, properties={
+            'bs': relationship(B, secondary=m2m, lazy='selectin',
+                               order_by=m2m.c.id)
+        })
+        mapper(B, b)
+
+        sess = create_session()
+
+        def go():
+            eq_(sess.query(A).all(), [
+                A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
+                A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
+            ])
+        self.assert_sql_count(testing.db, go, 2)
+
+
+class BaseRelationFromJoinedSubclassTest(_Polymorphic):
+    """Like most tests here, this is adapted from subquery_relations
+    as part of general inheritance testing.
+
+    The subquery test exercised the issue that the subquery load must
+    imitate the original query very closely so that filter criteria, ordering,
+    etc. can be maintained with the original query embedded.  However,
+    for selectin loading, none of that is really needed, so here the secondary
+    queries are all just a simple "people JOIN paperwork".
+
+    """
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('people', metadata,
+              Column('person_id', Integer,
+                     primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('name', String(50)),
+              Column('type', String(30)))
+
+        # to test fully, PK of engineers table must be
+        # named differently from that of people
+        Table('engineers', metadata,
+              Column('engineer_id', Integer,
+                     ForeignKey('people.person_id'),
+                     primary_key=True),
+              Column('primary_language', String(50)))
+
+        Table('paperwork', metadata,
+              Column('paperwork_id', Integer,
+                     primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('description', String(50)),
+              Column('person_id', Integer,
+                     ForeignKey('people.person_id')))
+
+    @classmethod
+    def setup_mappers(cls):
+        people = cls.tables.people
+        engineers = cls.tables.engineers
+        paperwork = cls.tables.paperwork
+
+        mapper(Person, people,
+               polymorphic_on=people.c.type,
+               polymorphic_identity='person',
+               properties={
+                   'paperwork': relationship(
+                       Paperwork, order_by=paperwork.c.paperwork_id)})
+
+        mapper(Engineer, engineers,
+               inherits=Person,
+               polymorphic_identity='engineer')
+
+        mapper(Paperwork, paperwork)
+
+    @classmethod
+    def insert_data(cls):
+
+        e1 = Engineer(primary_language="java")
+        e2 = Engineer(primary_language="c++")
+        e1.paperwork = [Paperwork(description="tps report #1"),
+                        Paperwork(description="tps report #2")]
+        e2.paperwork = [Paperwork(description="tps report #3")]
+        sess = create_session()
+        sess.add_all([e1, e2])
+        sess.flush()
+
+    def test_correct_select_nofrom(self):
+        sess = create_session()
+        # use Person.paperwork here just to give the least
+        # amount of context
+        q = sess.query(Engineer).\
+            filter(Engineer.primary_language == 'java').\
+            options(selectinload(Person.paperwork))
+
+        def go():
+            eq_(q.all()[0].paperwork,
+                [Paperwork(description="tps report #1"),
+                 Paperwork(description="tps report #2")],
+
+                )
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT people.person_id AS people_person_id, "
+                "people.name AS people_name, people.type AS people_type, "
+                "engineers.engineer_id AS engineers_engineer_id, "
+                "engineers.primary_language AS engineers_primary_language "
+                "FROM people JOIN engineers ON "
+                "people.person_id = engineers.engineer_id "
+                "WHERE engineers.primary_language = :primary_language_1",
+                {"primary_language_1": "java"}
+            ),
+            CompiledSQL(
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "paperwork.paperwork_id AS paperwork_paperwork_id, "
+                "paperwork.description AS paperwork_description, "
+                "paperwork.person_id AS paperwork_person_id "
+                "FROM people AS people_1 JOIN paperwork "
+                "ON people_1.person_id = paperwork.person_id "
+                "WHERE people_1.person_id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY people_1.person_id, paperwork.paperwork_id",
+                [{'primary_keys': [1]}]
+            )
+        )
+
+    def test_correct_select_existingfrom(self):
+        sess = create_session()
+        # use Person.paperwork here just to give the least
+        # amount of context
+        q = sess.query(Engineer).\
+            filter(Engineer.primary_language == 'java').\
+            join(Engineer.paperwork).\
+            filter(Paperwork.description == "tps report #2").\
+            options(selectinload(Person.paperwork))
+
+        def go():
+            eq_(q.one().paperwork,
+                [Paperwork(description="tps report #1"),
+                 Paperwork(description="tps report #2")],
+
+                )
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT people.person_id AS people_person_id, "
+                "people.name AS people_name, people.type AS people_type, "
+                "engineers.engineer_id AS engineers_engineer_id, "
+                "engineers.primary_language AS engineers_primary_language "
+                "FROM people JOIN engineers "
+                "ON people.person_id = engineers.engineer_id "
+                "JOIN paperwork ON people.person_id = paperwork.person_id "
+                "WHERE engineers.primary_language = :primary_language_1 "
+                "AND paperwork.description = :description_1",
+                {"primary_language_1": "java",
+                    "description_1": "tps report #2"}
+            ),
+            CompiledSQL(
+
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "paperwork.paperwork_id AS paperwork_paperwork_id, "
+                "paperwork.description AS paperwork_description, "
+                "paperwork.person_id AS paperwork_person_id "
+                "FROM people AS people_1 JOIN paperwork "
+                "ON people_1.person_id = paperwork.person_id "
+                "WHERE people_1.person_id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY people_1.person_id, paperwork.paperwork_id",
+                [{'primary_keys': [1]}]
+            )
+        )
+
+    def test_correct_select_with_polymorphic_no_alias(self):
+        # test #3106
+        sess = create_session()
+
+        wp = with_polymorphic(Person, [Engineer])
+        q = sess.query(wp).\
+            options(selectinload(wp.paperwork)).\
+            order_by(Engineer.primary_language.desc())
+
+        def go():
+            eq_(q.first(),
+                Engineer(
+                    paperwork=[
+                        Paperwork(description="tps report #1"),
+                        Paperwork(description="tps report #2")],
+                    primary_language='java'
+            )
+
+            )
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT people.person_id AS people_person_id, "
+                "people.name AS people_name, people.type AS people_type, "
+                "engineers.engineer_id AS engineers_engineer_id, "
+                "engineers.primary_language AS engineers_primary_language "
+                "FROM people LEFT OUTER JOIN engineers ON people.person_id = "
+                "engineers.engineer_id ORDER BY engineers.primary_language "
+                "DESC LIMIT :param_1"),
+            CompiledSQL(
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "paperwork.paperwork_id AS paperwork_paperwork_id, "
+                "paperwork.description AS paperwork_description, "
+                "paperwork.person_id AS paperwork_person_id "
+                "FROM people AS people_1 "
+                "JOIN paperwork ON people_1.person_id = paperwork.person_id "
+                "WHERE people_1.person_id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY people_1.person_id, paperwork.paperwork_id",
+                [{'primary_keys': [1]}]
+            )
+        )
+
+    def test_correct_select_with_polymorphic_alias(self):
+        # test #3106
+        sess = create_session()
+
+        wp = with_polymorphic(Person, [Engineer], aliased=True)
+        q = sess.query(wp).\
+            options(selectinload(wp.paperwork)).\
+            order_by(wp.Engineer.primary_language.desc())
+
+        def go():
+            eq_(q.first(),
+                Engineer(
+                    paperwork=[
+                        Paperwork(description="tps report #1"),
+                        Paperwork(description="tps report #2")],
+                    primary_language='java'
+            )
+
+            )
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT anon_1.people_person_id AS anon_1_people_person_id, "
+                "anon_1.people_name AS anon_1_people_name, "
+                "anon_1.people_type AS anon_1_people_type, "
+                "anon_1.engineers_engineer_id AS "
+                "anon_1_engineers_engineer_id, "
+                "anon_1.engineers_primary_language "
+                "AS anon_1_engineers_primary_language FROM "
+                "(SELECT people.person_id AS people_person_id, "
+                "people.name AS people_name, people.type AS people_type, "
+                "engineers.engineer_id AS engineers_engineer_id, "
+                "engineers.primary_language AS engineers_primary_language "
+                "FROM people LEFT OUTER JOIN engineers ON people.person_id = "
+                "engineers.engineer_id) AS anon_1 "
+                "ORDER BY anon_1.engineers_primary_language DESC "
+                "LIMIT :param_1"),
+            CompiledSQL(
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "paperwork.paperwork_id AS paperwork_paperwork_id, "
+                "paperwork.description AS paperwork_description, "
+                "paperwork.person_id AS paperwork_person_id "
+                "FROM people AS people_1 JOIN paperwork "
+                "ON people_1.person_id = paperwork.person_id "
+                "WHERE people_1.person_id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY people_1.person_id, paperwork.paperwork_id",
+                [{'primary_keys': [1]}]
+            )
+        )
+
+    def test_correct_select_with_polymorphic_flat_alias(self):
+        # test #3106
+        sess = create_session()
+
+        wp = with_polymorphic(Person, [Engineer], aliased=True, flat=True)
+        q = sess.query(wp).\
+            options(selectinload(wp.paperwork)).\
+            order_by(wp.Engineer.primary_language.desc())
+
+        def go():
+            eq_(q.first(),
+                Engineer(
+                    paperwork=[
+                        Paperwork(description="tps report #1"),
+                        Paperwork(description="tps report #2")],
+                    primary_language='java'
+            )
+
+            )
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "people_1.name AS people_1_name, "
+                "people_1.type AS people_1_type, "
+                "engineers_1.engineer_id AS engineers_1_engineer_id, "
+                "engineers_1.primary_language AS engineers_1_primary_language "
+                "FROM people AS people_1 "
+                "LEFT OUTER JOIN engineers AS engineers_1 "
+                "ON people_1.person_id = engineers_1.engineer_id "
+                "ORDER BY engineers_1.primary_language DESC LIMIT :param_1"),
+            CompiledSQL(
+                "SELECT people_1.person_id AS people_1_person_id, "
+                "paperwork.paperwork_id AS paperwork_paperwork_id, "
+                "paperwork.description AS paperwork_description, "
+                "paperwork.person_id AS paperwork_person_id "
+                "FROM people AS people_1 JOIN paperwork "
+                "ON people_1.person_id = paperwork.person_id "
+                "WHERE people_1.person_id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY people_1.person_id, paperwork.paperwork_id",
+                [{'primary_keys': [1]}]
+
+            )
+        )
+
+
+class ChunkingTest(fixtures.DeclarativeMappedTest):
+    """test IN chunking.
+
+    The length of an IN clause has a limit on at least some databases;
+    on Oracle it's 1000.  In any case, you don't want a SQL statement with
+    500K entries in an IN, so larger results need to be loaded in chunks.
+
+    """
+
+    @classmethod
+    def setup_classes(cls):
+        Base = cls.DeclarativeBasic
+
+        class A(fixtures.ComparableEntity, Base):
+            __tablename__ = 'a'
+            id = Column(Integer, primary_key=True)
+            bs = relationship("B", order_by="B.id")
+
+        class B(fixtures.ComparableEntity, Base):
+            __tablename__ = 'b'
+            id = Column(Integer, primary_key=True)
+            a_id = Column(ForeignKey('a.id'))
+
+    @classmethod
+    def insert_data(cls):
+        A, B = cls.classes('A', 'B')
+
+        session = Session()
+        session.add_all([
+            A(id=i, bs=[B(id=(i * 6) + j) for j in range(1, 6)])
+            for i in range(1, 101)
+        ])
+        session.commit()
+
+    def test_odd_number_chunks(self):
+        A, B = self.classes('A', 'B')
+
+        session = Session()
+
+        def go():
+            with mock.patch(
+                    "sqlalchemy.orm.strategies.SelectInLoader._chunksize", 47):
+                q = session.query(A).options(selectinload(A.bs)).order_by(A.id)
+
+                for a in q:
+                    a.bs
+
+        self.assert_sql_execution(
+            testing.db,
+            go,
+            CompiledSQL(
+                "SELECT a.id AS a_id FROM a ORDER BY a.id",
+                {}
+            ),
+            CompiledSQL(
+                "SELECT a_1.id AS a_1_id, b.id AS b_id, b.a_id AS b_a_id "
+                "FROM a AS a_1 JOIN b ON a_1.id = b.a_id "
+                "WHERE a_1.id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY a_1.id, b.id",
+                {"primary_keys": list(range(1, 48))}
+            ),
+            CompiledSQL(
+                "SELECT a_1.id AS a_1_id, b.id AS b_id, b.a_id AS b_a_id "
+                "FROM a AS a_1 JOIN b ON a_1.id = b.a_id "
+                "WHERE a_1.id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY a_1.id, b.id",
+                {"primary_keys": list(range(48, 95))}
+            ),
+            CompiledSQL(
+                "SELECT a_1.id AS a_1_id, b.id AS b_id, b.a_id AS b_a_id "
+                "FROM a AS a_1 JOIN b ON a_1.id = b.a_id "
+                "WHERE a_1.id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY a_1.id, b.id",
+                {"primary_keys": list(range(95, 101))}
+            )
+        )
+
+    @testing.requires.independent_cursors
+    def test_yield_per(self):
+        # the docs make a lot of guarantees about yield_per
+        # so test that it works
+        A, B = self.classes('A', 'B')
+
+        import random
+
+        session = Session()
+
+        yield_per = random.randint(8, 105)
+        offset = random.randint(0, 19)
+        total_rows = 100 - offset
+        total_expected_statements = 1 + int(total_rows / yield_per) + \
+            (1 if total_rows % yield_per else 0)
+
+        def go():
+            for a in session.query(A).\
+                    yield_per(yield_per).\
+                    offset(offset).\
+                    options(selectinload(A.bs)):
+
+                # this part fails with joined eager loading
+                # (if you enable joined eager w/ yield_per)
+                eq_(
+                    a.bs, [
+                        B(id=(a.id * 6) + j) for j in range(1, 6)
+                    ]
+                )
+
+        # this part fails with subquery eager loading
+        # (if you enable subquery eager w/ yield_per)
+        self.assert_sql_count(testing.db, go, total_expected_statements)
+
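Editor's aside (not part of the committed file): the total_expected_statements
arithmetic in test_yield_per above is "one lead query plus one selectin batch per
yield_per chunk", i.e. 1 + ceil(total_rows / yield_per).  A quick worked check
with hypothetical values drawn from the same ranges:

    import math

    yield_per, offset = 20, 17     # hypothetical draw from randint(8, 105) / randint(0, 19)
    total_rows = 100 - offset      # 83 parent rows iterated
    expected = 1 + int(total_rows / yield_per) + (1 if total_rows % yield_per else 0)
    assert expected == 1 + math.ceil(total_rows / yield_per) == 6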
+
+class SubRelationFromJoinedSubclassMultiLevelTest(_Polymorphic):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('companies', metadata,
+              Column('company_id', Integer,
+                     primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('name', String(50)))
+
+        Table('people', metadata,
+              Column('person_id', Integer,
+                     primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('company_id', ForeignKey('companies.company_id')),
+              Column('name', String(50)),
+              Column('type', String(30)))
+
+        Table('engineers', metadata,
+              Column('engineer_id', ForeignKey('people.person_id'),
+                     primary_key=True),
+              Column('primary_language', String(50)))
+
+        Table('machines', metadata,
+              Column('machine_id',
+                     Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('name', String(50)),
+              Column('engineer_id', ForeignKey('engineers.engineer_id')),
+              Column('machine_type_id',
+                     ForeignKey('machine_type.machine_type_id')))
+
+        Table('machine_type', metadata,
+              Column('machine_type_id',
+                     Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('name', String(50)))
+
+    @classmethod
+    def setup_mappers(cls):
+        companies = cls.tables.companies
+        people = cls.tables.people
+        engineers = cls.tables.engineers
+        machines = cls.tables.machines
+        machine_type = cls.tables.machine_type
+
+        mapper(Company, companies, properties={
+            'employees': relationship(Person, order_by=people.c.person_id)
+        })
+        mapper(Person, people,
+               polymorphic_on=people.c.type,
+               polymorphic_identity='person',
+               with_polymorphic='*')
+
+        mapper(Engineer, engineers,
+               inherits=Person,
+               polymorphic_identity='engineer', properties={
+                   'machines': relationship(Machine,
+                                            order_by=machines.c.machine_id)
+               })
+
+        mapper(Machine, machines, properties={
+            'type': relationship(MachineType)
+        })
+        mapper(MachineType, machine_type)
+
+    @classmethod
+    def insert_data(cls):
+        c1 = cls._fixture()
+        sess = create_session()
+        sess.add(c1)
+        sess.flush()
+
+    @classmethod
+    def _fixture(cls):
+        mt1 = MachineType(name='mt1')
+        mt2 = MachineType(name='mt2')
+        return Company(
+            employees=[
+                Engineer(
+                    name='e1',
+                    machines=[
+                        Machine(name='m1', type=mt1),
+                        Machine(name='m2', type=mt2)
+                    ]
+                ),
+                Engineer(
+                    name='e2',
+                    machines=[
+                        Machine(name='m3', type=mt1),
+                        Machine(name='m4', type=mt1)
+                    ]
+                )
+            ])
+
+    def test_chained_selectin_subclass(self):
+        s = Session()
+        q = s.query(Company).options(
+            selectinload(Company.employees.of_type(Engineer)).
+            selectinload(Engineer.machines).
+            selectinload(Machine.type)
+        )
+
+        def go():
+            eq_(
+                q.all(),
+                [self._fixture()]
+            )
+        self.assert_sql_count(testing.db, go, 4)
+
+
+class SelfReferentialTest(fixtures.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('nodes', metadata,
+              Column('id', Integer, primary_key=True,
+                     test_needs_autoincrement=True),
+              Column('parent_id', Integer, ForeignKey('nodes.id')),
+              Column('data', String(30)))
+
+    def test_basic(self):
+        nodes = self.tables.nodes
+
+        class Node(fixtures.ComparableEntity):
+            def append(self, node):
+                self.children.append(node)
+
+        mapper(Node, nodes, properties={
+            'children': relationship(Node,
+                                     lazy='selectin',
+                                     join_depth=3, order_by=nodes.c.id)
+        })
+        sess = create_session()
+        n1 = Node(data='n1')
+        n1.append(Node(data='n11'))
+        n1.append(Node(data='n12'))
+        n1.append(Node(data='n13'))
+        n1.children[1].append(Node(data='n121'))
+        n1.children[1].append(Node(data='n122'))
+        n1.children[1].append(Node(data='n123'))
+        n2 = Node(data='n2')
+        n2.append(Node(data='n21'))
+        n2.children[0].append(Node(data='n211'))
+        n2.children[0].append(Node(data='n212'))
+
+        sess.add(n1)
+        sess.add(n2)
+        sess.flush()
+        sess.expunge_all()
+
+        def go():
+            d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).\
+                order_by(Node.data).all()
+            eq_([Node(data='n1', children=[
+                Node(data='n11'),
+                Node(data='n12', children=[
+                    Node(data='n121'),
+                    Node(data='n122'),
+                    Node(data='n123')
+                ]),
+                Node(data='n13')
+            ]),
+                Node(data='n2', children=[
+                    Node(data='n21', children=[
+                        Node(data='n211'),
+                        Node(data='n212'),
+                    ])
+                ])
+            ], d)
+        self.assert_sql_count(testing.db, go, 4)
+
+    def test_lazy_fallback_doesnt_affect_eager(self):
+        nodes = self.tables.nodes
+
+        class Node(fixtures.ComparableEntity):
+            def append(self, node):
+                self.children.append(node)
+
+        mapper(Node, nodes, properties={
+            'children': relationship(Node, lazy='selectin', join_depth=1,
+                                     order_by=nodes.c.id)
+        })
+        sess = create_session()
+        n1 = Node(data='n1')
+        n1.append(Node(data='n11'))
+        n1.append(Node(data='n12'))
+        n1.append(Node(data='n13'))
+        n1.children[0].append(Node(data='n111'))
+        n1.children[0].append(Node(data='n112'))
+        n1.children[1].append(Node(data='n121'))
+        n1.children[1].append(Node(data='n122'))
+        n1.children[1].append(Node(data='n123'))
+        sess.add(n1)
+        sess.flush()
+        sess.expunge_all()
+
+        def go():
+            allnodes = sess.query(Node).order_by(Node.data).all()
+
+            n11 = allnodes[1]
+            eq_(n11.data, 'n11')
+            eq_([
+                Node(data='n111'),
+                Node(data='n112'),
+            ], list(n11.children))
+
+            n12 = allnodes[4]
+            eq_(n12.data, 'n12')
+            eq_([
+                Node(data='n121'),
+                Node(data='n122'),
+                Node(data='n123')
+            ], list(n12.children))
+        self.assert_sql_count(testing.db, go, 2)
+
+    def test_with_deferred(self):
+        nodes = self.tables.nodes
+
+        class Node(fixtures.ComparableEntity):
+            def append(self, node):
+                self.children.append(node)
+
+        mapper(Node, nodes, properties={
+            'children': relationship(Node, lazy='selectin', join_depth=3,
+                                     order_by=nodes.c.id),
+            'data': deferred(nodes.c.data)
+        })
+        sess = create_session()
+        n1 = Node(data='n1')
+        n1.append(Node(data='n11'))
+        n1.append(Node(data='n12'))
+        sess.add(n1)
+        sess.flush()
+        sess.expunge_all()
+
+        def go():
+            eq_(
+                Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+                sess.query(Node).order_by(Node.id).first(),
+            )
+        self.assert_sql_count(testing.db, go, 6)
+
+        sess.expunge_all()
+
+        def go():
+            eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+                sess.query(Node).options(undefer('data')).order_by(Node.id)
+                .first())
+        self.assert_sql_count(testing.db, go, 5)
+
+        sess.expunge_all()
+
+        def go():
+            eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+                sess.query(Node).options(undefer('data'),
+                                         undefer('children.data')).first())
+        self.assert_sql_count(testing.db, go, 3)
+
+    def test_options(self):
+        nodes = self.tables.nodes
+
+        class Node(fixtures.ComparableEntity):
+            def append(self, node):
+                self.children.append(node)
+
+        mapper(Node, nodes, properties={
+            'children': relationship(Node, order_by=nodes.c.id)
+        })
+        sess = create_session()
+        n1 = Node(data='n1')
+        n1.append(Node(data='n11'))
+        n1.append(Node(data='n12'))
+        n1.append(Node(data='n13'))
+        n1.children[1].append(Node(data='n121'))
+        n1.children[1].append(Node(data='n122'))
+        n1.children[1].append(Node(data='n123'))
+        sess.add(n1)
+        sess.flush()
+        sess.expunge_all()
+
+        def go():
+            d = sess.query(Node).filter_by(data='n1').order_by(Node.id).\
+                options(selectinload_all('children.children')).first()
+            eq_(Node(data='n1', children=[
+                Node(data='n11'),
+                Node(data='n12', children=[
+                    Node(data='n121'),
+                    Node(data='n122'),
+                    Node(data='n123')
+                ]),
+                Node(data='n13')
+            ]), d)
+        self.assert_sql_count(testing.db, go, 3)
+
+    def test_no_depth(self):
+        """no join depth is set, so no eager loading occurs."""
+
+        nodes = self.tables.nodes
+
+        class Node(fixtures.ComparableEntity):
+            def append(self, node):
+                self.children.append(node)
+
+        mapper(Node, nodes, properties={
+            'children': relationship(Node, lazy='selectin')
+        })
+        sess = create_session()
+        n1 = Node(data='n1')
+        n1.append(Node(data='n11'))
+        n1.append(Node(data='n12'))
+        n1.append(Node(data='n13'))
+        n1.children[1].append(Node(data='n121'))
+        n1.children[1].append(Node(data='n122'))
+        n1.children[1].append(Node(data='n123'))
+        n2 = Node(data='n2')
+        n2.append(Node(data='n21'))
+        sess.add(n1)
+        sess.add(n2)
+        sess.flush()
+        sess.expunge_all()
+
+        def go():
+            d = sess.query(Node).filter(Node.data.in_(
+                ['n1', 'n2'])).order_by(Node.data).all()
+            eq_([
+                Node(data='n1', children=[
+                    Node(data='n11'),
+                    Node(data='n12', children=[
+                        Node(data='n121'),
+                        Node(data='n122'),
+                        Node(data='n123')
+                    ]),
+                    Node(data='n13')
+                ]),
+                Node(data='n2', children=[
+                    Node(data='n21')
+                ])
+            ], d)
+        self.assert_sql_count(testing.db, go, 4)
+
+
+class SelfRefInheritanceAliasedTest(
+        fixtures.DeclarativeMappedTest,
+        testing.AssertsCompiledSQL):
+    __dialect__ = 'default'
+
+    @classmethod
+    def setup_classes(cls):
+        Base = cls.DeclarativeBasic
+
+        class Foo(Base):
+            __tablename__ = "foo"
+            id = Column(Integer, primary_key=True)
+            type = Column(String(50))
+
+            foo_id = Column(Integer, ForeignKey("foo.id"))
+            foo = relationship(
+                lambda: Foo, foreign_keys=foo_id, remote_side=id)
+
+            __mapper_args__ = {
+                "polymorphic_on": type,
+                "polymorphic_identity": "foo",
+            }
+
+        class Bar(Foo):
+            __mapper_args__ = {
+                "polymorphic_identity": "bar",
+            }
+
+    @classmethod
+    def insert_data(cls):
+        Foo, Bar = cls.classes('Foo', 'Bar')
+
+        session = Session()
+        target = Bar(id=1)
+        b1 = Bar(id=2, foo=Foo(id=3, foo=target))
+        session.add(b1)
+        session.commit()
+
+    def test_twolevel_selectin_w_polymorphic(self):
+        Foo, Bar = self.classes('Foo', 'Bar')
+
+        r = with_polymorphic(Foo, "*", aliased=True)
+        attr1 = Foo.foo.of_type(r)
+        attr2 = r.foo
+
+        s = Session()
+        q = s.query(Foo).filter(Foo.id == 2).options(
+            selectinload(attr1).selectinload(attr2),
+        )
+        self.assert_sql_execution(
+            testing.db,
+            q.all,
+            CompiledSQL(
+                "SELECT foo.id AS foo_id_1, foo.type AS foo_type, "
+                "foo.foo_id AS foo_foo_id FROM foo WHERE foo.id = :id_1",
+                [{'id_1': 2}]
+            ),
+            CompiledSQL(
+                "SELECT foo_1.id AS foo_1_id, foo_2.id AS foo_2_id, "
+                "foo_2.type AS foo_2_type, foo_2.foo_id AS foo_2_foo_id "
+                "FROM foo AS foo_1 JOIN foo AS foo_2 "
+                "ON foo_2.id = foo_1.foo_id "
+                "WHERE foo_1.id "
+                "IN ([EXPANDING_primary_keys]) ORDER BY foo_1.id",
+                {'primary_keys': [2]}
+            ),
+            CompiledSQL(
+
+                "SELECT foo_1.id AS foo_1_id, foo_2.id AS foo_2_id, "
+                "foo_2.type AS foo_2_type, foo_2.foo_id AS foo_2_foo_id "
+                "FROM foo AS foo_1 JOIN foo AS foo_2 "
+                "ON foo_2.id = foo_1.foo_id "
+                "WHERE foo_1.id IN ([EXPANDING_primary_keys]) "
+                "ORDER BY foo_1.id",
+                {'primary_keys': [3]}
+            ),
+        )
+
+
+class TestExistingRowPopulation(fixtures.DeclarativeMappedTest):
+    @classmethod
+    def setup_classes(cls):
+        Base = cls.DeclarativeBasic
+
+        class A(Base):
+            __tablename__ = 'a'
+
+            id = Column(Integer, primary_key=True)
+            b_id = Column(ForeignKey('b.id'))
+            a2_id = Column(ForeignKey('a2.id'))
+            a2 = relationship("A2")
+            b = relationship("B")
+
+        class A2(Base):
+            __tablename__ = 'a2'
+
+            id = Column(Integer, primary_key=True)
+            b_id = Column(ForeignKey('b.id'))
+            b = relationship("B")
+
+        class B(Base):
+            __tablename__ = 'b'
+
+            id = Column(Integer, primary_key=True)
+
+            c1_m2o_id = Column(ForeignKey('c1_m2o.id'))
+            c2_m2o_id = Column(ForeignKey('c2_m2o.id'))
+
+            c1_o2m = relationship("C1o2m")
+            c2_o2m = relationship("C2o2m")
+            c1_m2o = relationship("C1m2o")
+            c2_m2o = relationship("C2m2o")
+
+        class C1o2m(Base):
+            __tablename__ = 'c1_o2m'
+
+            id = Column(Integer, primary_key=True)
+            b_id = Column(ForeignKey('b.id'))
+
+        class C2o2m(Base):
+            __tablename__ = 'c2_o2m'
+
+            id = Column(Integer, primary_key=True)
+            b_id = Column(ForeignKey('b.id'))
+
+        class C1m2o(Base):
+            __tablename__ = 'c1_m2o'
+
+            id = Column(Integer, primary_key=True)
+
+        class C2m2o(Base):
+            __tablename__ = 'c2_m2o'
+
+            id = Column(Integer, primary_key=True)
+
+    @classmethod
+    def insert_data(cls):
+        A, A2, B, C1o2m, C2o2m, C1m2o, C2m2o = cls.classes(
+            'A', 'A2', 'B', 'C1o2m', 'C2o2m', 'C1m2o', 'C2m2o'
+        )
+
+        s = Session()
+
+        b = B(
+            c1_o2m=[C1o2m()],
+            c2_o2m=[C2o2m()],
+            c1_m2o=C1m2o(),
+            c2_m2o=C2m2o(),
+        )
+
+        s.add(A(b=b, a2=A2(b=b)))
+        s.commit()
+
+    def test_o2m(self):
+        A, A2, B, C1o2m, C2o2m = self.classes(
+            'A', 'A2', 'B', 'C1o2m', 'C2o2m'
+        )
+
+        s = Session()
+
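+        # legend for the path diagrams below: -J-> joinedload option,
+        # -S-> selectinload option, -L-> lazy "select" (no option given)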
+        # A -J-> B -L-> C1
+        # A -J-> B -S-> C2
+
+        # A -J-> A2 -J-> B -S-> C1
+        # A -J-> A2 -J-> B -L-> C2
+
+        q = s.query(A).options(
+            joinedload(A.b).selectinload(B.c2_o2m),
+            joinedload(A.a2).joinedload(A2.b).selectinload(B.c1_o2m)
+        )
+
+        a1 = q.all()[0]
+
+        is_true('c1_o2m' in a1.b.__dict__)
+        is_true('c2_o2m' in a1.b.__dict__)
+
+    def test_m2o(self):
+        A, A2, B, C1m2o, C2m2o = self.classes(
+            'A', 'A2', 'B', 'C1m2o', 'C2m2o'
+        )
+
+        s = Session()
+
+        # A -J-> B -L-> C1
+        # A -J-> B -S-> C2
+
+        # A -J-> A2 -J-> B -S-> C1
+        # A -J-> A2 -J-> B -L-> C2
+
+        q = s.query(A).options(
+            joinedload(A.b).selectinload(B.c2_m2o),
+            joinedload(A.a2).joinedload(A2.b).selectinload(B.c1_m2o)
+        )
+
+        a1 = q.all()[0]
+        is_true('c1_m2o' in a1.b.__dict__)
+        is_true('c2_m2o' in a1.b.__dict__)
index 5d0aa13287de3bd4902b0c766d30e91d12e98e91..52be4ee4053b2679b21d6ba1c3fc1e82fb3ed81c 100644 (file)
@@ -506,7 +506,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         mapper(User, users, properties=dict(
             addresses=relationship(Address, lazy='subquery',
                                    backref=sa.orm.backref(
-                                       'user', lazy='subquery'),
+                                       'user', lazy='subquery',),
                                    order_by=Address.id)
         ))
         is_(sa.orm.class_mapper(User).get_property('addresses').lazy,
@@ -712,7 +712,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         result = q.order_by(User.id).limit(2).offset(1).all()
         eq_(self.static.user_all_result[1:3], result)
 
-        sess = create_session()
         result = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
         eq_(list(reversed(self.static.user_all_result[0:2])), result)
 
index 8af5f9b1b461a9bbfb204863aba8317f8e1a631c..7938dadc8e469870ee603c85e8aedfdd680278e1 100644 (file)
@@ -392,6 +392,27 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_postgresql_psycopg2_db
 test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_cextensions 6096
 test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 6666
 
+# TEST: test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results
+
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_mysql_mysqldb_dbapiunicode_cextensions 175241
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_mysql_mysqldb_dbapiunicode_nocextensions 179645
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_postgresql_psycopg2_dbapiunicode_cextensions 166213
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 170610
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_cextensions 159214
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 163527
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_mysql_mysqldb_dbapiunicode_cextensions 178735
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_mysql_mysqldb_dbapiunicode_nocextensions 183343
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_postgresql_psycopg2_dbapiunicode_cextensions 171707
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 176315
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_sqlite_pysqlite_dbapiunicode_cextensions 163708
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 168316
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_mysql_mysqldb_dbapiunicode_cextensions 178712
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_mysql_mysqldb_dbapiunicode_nocextensions 183320
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_postgresql_psycopg2_dbapiunicode_cextensions 171586
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_postgresql_psycopg2_dbapiunicode_nocextensions 176285
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_sqlite_pysqlite_dbapiunicode_cextensions 163587
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 168293
+
 # TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots
 
 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_dbapiunicode_cextensions 1138
index 63745a1137f782aa808cdbe232ed336286542252..0f854d269389174c4eb173bd80ea6dad3c6408aa 100644 (file)
@@ -218,7 +218,13 @@ class DefaultRequirements(SuiteRequirements):
         """Target must support simultaneous, independent database cursors
         on a single connection."""
 
-        return skip_if(["mssql+pyodbc", "mssql+mxodbc"], "no driver support")
+        return skip_if(
+            [
+                "mssql+pyodbc",
+                "mssql+mxodbc",
+                "mysql+mysqldb",
+                "mysql+pymysql",
+            ],
+            "no driver support",
+        )
 
     @property
     def independent_connections(self):