Enable multi-level selectin polymorphic loading
author    Mike Bayer <mike_mp@zzzcomputing.com>
          Thu, 13 Jul 2017 22:32:42 +0000 (18:32 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Mon, 7 Aug 2017 19:05:00 +0000 (15:05 -0400)
Change-Id: Icc742bbeecdb7448ce84caccd63e086af16e81c1
Fixes: #4026
doc/build/changelog/unreleased_12/4026.rst [new file with mode: 0644]
doc/build/orm/inheritance_loading.rst
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategy_options.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/testing/assertions.py
test/orm/inheritance/_poly_fixtures.py
test/orm/inheritance/test_poly_loading.py

diff --git a/doc/build/changelog/unreleased_12/4026.rst b/doc/build/changelog/unreleased_12/4026.rst
new file mode 100644 (file)
index 0000000..20cdc4d
--- /dev/null
+++ b/doc/build/changelog/unreleased_12/4026.rst
@@ -0,0 +1,11 @@
+.. change::
+    :tags: bug, orm
+    :tickets: 4026
+
+    Fixed bug in :ref:`change_3948` which prevented "selectin" and
+    "inline" settings in a multi-level class hierarchy from interacting
+    as expected.  A new example is added to the documentation.
+
+    .. seealso::
+
+        :ref:`polymorphic_selectin_and_withpoly`
\ No newline at end of file
diff --git a/doc/build/orm/inheritance_loading.rst b/doc/build/orm/inheritance_loading.rst
index 1d56439c65fa96911d4dbb26bd30d2f5c209c7c0..6f41f43e61a21e60b77e618232fe337e6bbe8cc6 100644 (file)
@@ -403,8 +403,8 @@ using the value ``"selectin"`` on a per-subclass basis::
         type = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            'polymorphic_identity': 'employee',
+            'polymorphic_on': type
         }
 
     class Engineer(Employee):
@@ -414,7 +414,7 @@ using the value ``"selectin"`` on a per-subclass basis::
 
         __mapper_args__ = {
             'polymorphic_load': 'selectin',
-            'polymorphic_identity':'engineer',
+            'polymorphic_identity': 'engineer',
         }
 
     class Manager(Employee):
@@ -424,7 +424,7 @@ using the value ``"selectin"`` on a per-subclass basis::
 
         __mapper_args__ = {
             'polymorphic_load': 'selectin',
-            'polymorphic_identity':'manager',
+            'polymorphic_identity': 'manager',
         }
 
 
@@ -476,6 +476,78 @@ known to work with MySQL and Postgresql.
 .. warning::  The selectin polymorphic loading feature should be considered
    as **experimental** within early releases of the 1.2 series.
 
+.. _polymorphic_selectin_and_withpoly:
+
+Combining selectin and with_polymorphic
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. note:: This feature works as of version 1.2.0b3.
+
+With careful planning, selectin loading can be applied against a hierarchy
+that itself uses "with_polymorphic".  A particular use case is to use
+selectin loading to load a joined-inheritance subtable, which then uses
+"with_polymorphic" to refer to further sub-classes that may be joined- or
+single-table inheritance.  If we added a class ``VicePresident`` that
+extends ``Manager`` using single-table inheritance, we could ensure that
+a load of ``Manager`` also fully loads ``VicePresident`` subtypes at the
+same time::
+
+    # use "Employee" example from the enclosing section
+
+    class Manager(Employee):
+        __tablename__ = 'manager'
+        id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+        manager_name = Column(String(30))
+
+        __mapper_args__ = {
+            'polymorphic_load': 'selectin',
+            'polymorphic_identity': 'manager',
+        }
+
+    class VicePresident(Manager):
+        vp_info = Column(String(30))
+
+        __mapper_args__ = {
+            "polymorphic_load": "inline",
+            "polymorphic_identity": "vp"
+        }
+
+
+Above, we add a ``vp_info`` column to the ``manager`` table, local to the
+``VicePresident`` subclass.  This subclass is linked to the polymorphic
+identity ``"vp"``, which refers to rows that contain this data.  By setting
+the load style to "inline", a load of ``Manager`` objects will also ensure
+that the ``vp_info`` column is queried for in the same SELECT statement.
+A query against ``Employee`` that encounters a ``Manager`` row would emit
+SQL similar to the following:
+
+.. sourcecode:: sql
+
+    SELECT employee.id AS employee_id, employee.name AS employee_name,
+           employee.type AS employee_type
+    FROM employee
+    ()
+
+    SELECT manager.id AS manager_id, employee.id AS employee_id,
+           employee.type AS employee_type,
+           manager.manager_name AS manager_manager_name,
+           manager.vp_info AS manager_vp_info
+    FROM employee JOIN manager ON employee.id = manager.id
+    WHERE employee.id IN (?) ORDER BY employee.id
+    (1,)
+
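+As a brief usage sketch (assuming a ``Session`` is already set up as
+``session`` and the mapping above is in place), iterating the results of a
+query against ``Employee`` shows that the single-table ``vp_info``
+attribute is already present on ``VicePresident`` instances, with no
+additional SQL emitted::
+
+    for employee in session.query(Employee):
+        if isinstance(employee, VicePresident):
+            # vp_info was loaded by the "selectin" query against the
+            # manager table; no further SQL is emitted here
+            print(employee.vp_info)
+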
+Combining "selectin" polymorphic loading with query-time
+:func:`.orm.with_polymorphic` usage is also possible (though this is very
+outer-space stuff!); assuming the above mappings had no ``polymorphic_load``
+set up, we could get the same result as follows::
+
+    from sqlalchemy.orm import with_polymorphic, selectin_polymorphic
+
+    manager_poly = with_polymorphic(Manager, [VicePresident])
+
+    s.query(Employee).options(
+        selectin_polymorphic(Employee, [manager_poly])).all()
+
+
 Referring to specific subtypes on relationships
 -----------------------------------------------
 
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 48c0db8515cb8aeb0092c76813f6dce2f39c4a31..e4aea3994b3e5fe6e8f234e8aa054182fd3c4258 100644 (file)
@@ -360,20 +360,26 @@ def _instance_processor(
         if (
                 key in context.attributes and
                 context.attributes[key].strategy ==
-                (('selectinload_polymorphic', True), ) and
-                mapper in context.attributes[key].local_opts['mappers']
-        ) or mapper.polymorphic_load == 'selectin':
+                (('selectinload_polymorphic', True), )
+        ):
+            selectin_load_via = mapper._should_selectin_load(
+                context.attributes[key].local_opts['entities'],
+                _polymorphic_from)
+        else:
+            selectin_load_via = mapper._should_selectin_load(
+                None, _polymorphic_from)
 
+        if selectin_load_via and selectin_load_via is not _polymorphic_from:
             # only_load_props goes w/ refresh_state only, and in a refresh
             # we are a single row query for the exact entity; polymorphic
             # loading does not apply
             assert only_load_props is None
 
-            callable_ = _load_subclass_via_in(context, path, mapper)
+            callable_ = _load_subclass_via_in(context, path, selectin_load_via)
 
             PostLoad.callable_for_path(
-                context, load_path, mapper,
-                callable_, mapper)
+                context, load_path, selectin_load_via,
+                callable_, selectin_load_via)
 
     post_load = PostLoad.for_context(context, load_path, only_load_props)
 
@@ -523,12 +529,15 @@ def _instance_processor(
     return _instance
 
 
-@util.dependencies("sqlalchemy.ext.baked")
-def _load_subclass_via_in(baked, context, path, mapper):
+def _load_subclass_via_in(context, path, entity):
+    mapper = entity.mapper
 
     zero_idx = len(mapper.base_mapper.primary_key) == 1
 
-    q, enable_opt, disable_opt = mapper._subclass_load_via_in
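+    # for an aliased entity, e.g. a with_polymorphic() construct, the
+    # query must be built against that entity each time; the plain mapper
+    # case uses a memoized query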
+    if entity.is_aliased_class:
+        q, enable_opt, disable_opt = mapper._subclass_load_via_in(entity)
+    else:
+        q, enable_opt, disable_opt = mapper._subclass_load_via_in_mapper
 
     def do_load(context, path, states, load_only, effective_entity):
         orig_query = context.query
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index d102618a230f1a0455dbd863b625b74d45d1beb8..9b9457213358c3982588803820c92dcd9f54e9f9 100644 (file)
@@ -2706,11 +2706,44 @@ class Mapper(InspectionAttr):
             cols.extend(props[key].columns)
         return sql.select(cols, cond, use_labels=True)
 
-    @_memoized_configured_property
+    def _iterate_to_target_viawpoly(self, mapper):
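+        """Iterate from this mapper up towards the given ancestor
+        ``mapper``, yielding each mapper along the way; stop once the
+        target is reached, or once a mapper's "with_polymorphic" setting
+        no longer includes the subclass mapper yielded just before it.
+
+        """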
+        if self.isa(mapper):
+            prev = self
+            for m in self.iterate_to_root():
+                yield m
+
+                if m is not prev and prev not in \
+                        m._with_polymorphic_mappers:
+                    break
+
+                prev = m
+                if m is mapper:
+                    break
+
+    def _should_selectin_load(self, enabled_via_opt, polymorphic_from):
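+        """Return the mapper or aliased entity through which a "selectin"
+        polymorphic load of this class should proceed, or None if no
+        selectin load applies.
+
+        ``enabled_via_opt`` is the collection of entities established by a
+        selectin_polymorphic() loader option, if any; ``polymorphic_from``
+        is the mapper from which the polymorphic load is taking place.
+
+        """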
+        if not enabled_via_opt:
+            # common case, takes place for all polymorphic loads
+            mapper = polymorphic_from
+            for m in self._iterate_to_target_viawpoly(mapper):
+                if m.polymorphic_load == 'selectin':
+                    return m
+        else:
+            # uncommon case, selectin load options were used
+            enabled_via_opt = set(enabled_via_opt)
+            enabled_via_opt_mappers = {e.mapper: e for e in enabled_via_opt}
+            for entity in enabled_via_opt.union([polymorphic_from]):
+                mapper = entity.mapper
+                for m in self._iterate_to_target_viawpoly(mapper):
+                    if m.polymorphic_load == 'selectin' or \
+                            m in enabled_via_opt_mappers:
+                        return enabled_via_opt_mappers.get(m, m)
+
+        return None
+
     @util.dependencies(
         "sqlalchemy.ext.baked",
         "sqlalchemy.orm.strategy_options")
-    def _subclass_load_via_in(self, baked, strategy_options):
+    def _subclass_load_via_in(self, baked, strategy_options, entity):
         """Assemble a BakedQuery that can load the columns local to
         this subclass as a SELECT with IN.
 
@@ -2722,8 +2755,8 @@ class Mapper(InspectionAttr):
         keep_props = set(
             [polymorphic_prop] + self._identity_key_props)
 
-        disable_opt = strategy_options.Load(self)
-        enable_opt = strategy_options.Load(self)
+        disable_opt = strategy_options.Load(entity)
+        enable_opt = strategy_options.Load(entity)
 
         for prop in self.attrs:
             if prop.parent is self or prop in keep_props:
@@ -2747,11 +2780,22 @@ class Mapper(InspectionAttr):
         else:
             in_expr = self.primary_key[0]
 
-        q = baked.BakedQuery(
-            self._compiled_cache,
-            lambda session: session.query(self),
-            (self, )
-        )
+        if entity.is_aliased_class:
+            assert entity.mapper is self
+            q = baked.BakedQuery(
+                self._compiled_cache,
+                lambda session: session.query(entity).
+                select_entity_from(entity.selectable)._adapt_all_clauses(),
+                (self, )
+            )
+            q.spoil()
+        else:
+            q = baked.BakedQuery(
+                self._compiled_cache,
+                lambda session: session.query(self),
+                (self, )
+            )
+
         q += lambda q: q.filter(
             in_expr.in_(
                 sql.bindparam('primary_keys', expanding=True)
@@ -2760,6 +2804,10 @@ class Mapper(InspectionAttr):
 
         return q, enable_opt, disable_opt
 
+    @_memoized_configured_property
+    def _subclass_load_via_in_mapper(self):
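+        """Memoized form of _subclass_load_via_in() built against this
+        mapper directly, for the common non-aliased case."""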
+        return self._subclass_load_via_in(self)
+
     def cascade_iterator(self, type_, state, halt_on=None):
         """Iterate each element and its mapper in an object graph,
         for all relationships that meet the given cascade rule.
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 7c313e635acb87a440c1b5419eb4deb1a10811c7..752f182e5150936a833adc08b5cc7fc53eb878fa 100644 (file)
@@ -1688,6 +1688,7 @@ class Session(_SessionClassMethods):
                     state.key = instance_key
 
                 self.identity_map.replace(state)
+                state._orphaned_outside_of_session = False
 
         statelib.InstanceState._commit_all_states(
             ((state, state.dict) for state in states),
@@ -1762,6 +1763,7 @@ class Session(_SessionClassMethods):
             self.add(instance, _warn=False)
 
     def _save_or_update_state(self, state):
+        state._orphaned_outside_of_session = False
         self._save_or_update_impl(state)
 
         mapper = _state_mapper(state)
@@ -2271,11 +2273,17 @@ class Session(_SessionClassMethods):
             proc = new.union(dirty).difference(deleted)
 
         for state in proc:
-            is_orphan = (
-                _state_mapper(state)._is_orphan(state) and state.has_identity)
-            _reg = flush_context.register_object(state, isdelete=is_orphan)
-            assert _reg, "Failed to add object to the flush context!"
-            processed.add(state)
+            is_orphan = _state_mapper(state)._is_orphan(state)
+
+            is_persistent_orphan = is_orphan and state.has_identity
+
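+            # an object that is currently an orphan, is not yet persistent,
+            # and was de-associated from its parent while outside of a
+            # Session is expunged rather than flushed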
+            if is_orphan and not is_persistent_orphan and \
+                    state._orphaned_outside_of_session:
+                self._expunge_states([state])
+            else:
+                _reg = flush_context.register_object(
+                    state, isdelete=is_persistent_orphan)
+                assert _reg, "Failed to add object to the flush context!"
+                processed.add(state)
 
         # put all remaining deletes into the flush context.
         if objset:
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 1781a41e9ab23eb0d02d8a8bc0be28fc8fe6a436..2e53fe9e34c9b3339a16189492e789f0f59ae7c0 100644 (file)
@@ -61,6 +61,7 @@ class InstanceState(interfaces.InspectionAttr):
     expired = False
     _deleted = False
     _load_pending = False
+    _orphaned_outside_of_session = False
     is_instance = True
 
     callables = ()
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 796f859f810fabbbbe27a98f9d4f6e40edc333e4..c47536a02d121f7f71421fa9536023ae15601530 100644 (file)
@@ -1414,7 +1414,7 @@ def selectin_polymorphic(loadopt, classes):
     """
     loadopt.set_class_strategy(
         {"selectinload_polymorphic": True},
-        opts={"mappers": tuple(sorted((inspect(cls) for cls in classes), key=id))}
+        opts={"entities": tuple(sorted((inspect(cls) for cls in classes), key=id))}
     )
     return loadopt
 
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index ee3e2043b90834cb48fd9b3d5320622695233d30..a3bd53637d1666cc7b6373a647785c1e38a0bed6 100644 (file)
@@ -52,22 +52,24 @@ def track_cascade_events(descriptor, prop):
             return
 
         sess = state.session
-        if sess:
 
-            prop = state.manager.mapper._props[key]
+        prop = state.manager.mapper._props[key]
 
-            if sess._warn_on_events:
-                sess._flush_warning(
-                    "collection remove"
-                    if prop.uselist
-                    else "related attribute delete")
+        if sess and sess._warn_on_events:
+            sess._flush_warning(
+                "collection remove"
+                if prop.uselist
+                else "related attribute delete")
 
-            # expunge pending orphans
-            item_state = attributes.instance_state(item)
-            if prop._cascade.delete_orphan and \
-                item_state in sess._new and \
-                    prop.mapper._is_orphan(item_state):
+        # expunge pending orphans
+        item_state = attributes.instance_state(item)
+
+        if prop._cascade.delete_orphan and \
+                prop.mapper._is_orphan(item_state):
+            if sess and item_state in sess._new:
                 sess.expunge(item)
+            else:
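+                # no Session is present, or the item is not pending within
+                # it; flag the state so that a later flush can expunge it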
+                item_state._orphaned_outside_of_session = True
 
     def set_(state, newvalue, oldvalue, initiator):
         # process "save_update" cascade rules for when an instance
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index c0854ea55c715f2775e8d0eb08eaaa51679d3efb..08d0f0aac71124002f20615a20f2fc1e6f38778e 100644 (file)
@@ -520,13 +520,10 @@ class AssertsExecutionResults(object):
             db, callable_, assertsql.CountStatements(count))
 
     @contextlib.contextmanager
-    def assert_execution(self, *rules):
-        assertsql.asserter.add_rules(rules)
-        try:
+    def assert_execution(self, db, *rules):
+        with self.sql_execution_asserter(db) as asserter:
             yield
-            assertsql.asserter.statement_complete()
-        finally:
-            assertsql.asserter.clear_rules()
+        asserter.assert_(*rules)
 
-    def assert_statement_count(self, count):
-        return self.assert_execution(assertsql.CountStatements(count))
+    def assert_statement_count(self, db, count):
+        return self.assert_execution(db, assertsql.CountStatements(count))
diff --git a/test/orm/inheritance/_poly_fixtures.py b/test/orm/inheritance/_poly_fixtures.py
index 79ff456e47ca8c99c3b5e972f1228bf6644c0fbc..f1f9cd6f36477437e5de0f6d5c32a5a5d496b6f9 100644 (file)
@@ -1,15 +1,10 @@
-from sqlalchemy import Integer, String, ForeignKey, func, desc, and_, or_
-from sqlalchemy.orm import interfaces, relationship, mapper, \
-    clear_mappers, create_session, joinedload, joinedload_all, \
-    subqueryload, subqueryload_all, polymorphic_union, aliased,\
-    class_mapper
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.engine import default
+from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy.orm import relationship, mapper, \
+    create_session, polymorphic_union
 
 from sqlalchemy.testing import AssertsCompiledSQL, fixtures
-from sqlalchemy import testing
 from sqlalchemy.testing.schema import Table, Column
-from sqlalchemy.testing import assert_raises, eq_
+from sqlalchemy.testing import config
 
 
 class Company(fixtures.ComparableEntity):
@@ -370,3 +365,127 @@ class _PolymorphicJoins(_PolymorphicFixtureBase):
         manager_with_polymorphic = ('*', manager_join)
         return person_with_polymorphic,\
             manager_with_polymorphic
+
+
+class GeometryFixtureBase(fixtures.DeclarativeMappedTest):
+    """Provides arbitrary inheritance hierarchies based on a dictionary
+    structure.
+
+    e.g.::
+
+        self._fixture_from_geometry({
+            "a": {
+                "subclasses": {
+                    "b": {"polymorphic_load": "selectin"},
+                    "c": {
+                        "subclasses": {
+                            "d": {
+                                "polymorphic_load": "inline", "single": True
+                            },
+                            "e": {
+                                "polymorphic_load": "inline", "single": True
+                            },
+                        },
+                        "polymorphic_load": "selectin",
+                    }
+                }
+            }
+        })
+
+    would provide the equivalent of::
+
+        class a(Base):
+            __tablename__ = 'a'
+
+            id = Column(Integer, primary_key=True)
+            a_data = Column(String(50))
+            type = Column(String(50))
+            __mapper_args__ = {
+                "polymorphic_on": type,
+                "polymorphic_identity": "a"
+            }
+
+        class b(a):
+            __tablename__ = 'b'
+
+            id = Column(ForeignKey('a.id'), primary_key=True)
+            b_data = Column(String(50))
+
+            __mapper_args__ = {
+                "polymorphic_identity": "b",
+                "polymorphic_load": "selectin"
+            }
+
+            # ...
+
+        class c(a):
+            __tablename__ = 'c'
+
+        class d(c):
+            # ...
+
+        class e(c):
+            # ...
+
+    Declarative is used so that we get extra behaviors of declarative,
+    such as single-inheritance column masking.
+
+    """
+
+    run_create_tables = 'each'
+    run_define_tables = 'each'
+    run_setup_classes = 'each'
+    run_setup_mappers = 'each'
+
+    def _fixture_from_geometry(self, geometry, base=None):
+        if not base:
+            is_base = True
+            base = self.DeclarativeBasic
+        else:
+            is_base = False
+
+        for key, value in geometry.items():
+            if is_base:
+                type_ = Column(String(50))
+                items = {
+                    "__tablename__": key,
+                    "id": Column(Integer, primary_key=True),
+                    "type": type_,
+                    "__mapper_args__": {
+                        "polymorphic_on": type_,
+                        "polymorphic_identity": key
+                    }
+
+                }
+            else:
+                items = {
+                    "__mapper_args__": {
+                        "polymorphic_identity": key
+                    }
+                }
+
+                if not value.get("single", False):
+                    items["__tablename__"] = key
+                    items["id"] = Column(
+                        ForeignKey("%s.id" % base.__tablename__),
+                        primary_key=True)
+
+            items["%s_data" % key] = Column(String(50))
+
+            # add other mapper options to be transferred here as needed.
+            for mapper_opt in ("polymorphic_load", ):
+                if mapper_opt in value:
+                    items["__mapper_args__"][mapper_opt] = value[mapper_opt]
+
+            if is_base:
+                klass = type(key, (fixtures.ComparableEntity, base, ), items)
+            else:
+                klass = type(key, (base, ), items)
+
+            if "subclasses" in value:
+                self._fixture_from_geometry(value["subclasses"], klass)
+
+        if is_base and self.metadata.tables and self.run_create_tables:
+            self.tables.update(self.metadata.tables)
+            self.metadata.create_all(config.db)
+
diff --git a/test/orm/inheritance/test_poly_loading.py b/test/orm/inheritance/test_poly_loading.py
index ab807b45cad843bcac19c1c92ce226a9675f6028..f6046b3b2153c13fd841d02d273c45de56e20112 100644 (file)
@@ -1,12 +1,12 @@
 from sqlalchemy import String, Integer, Column, ForeignKey
 from sqlalchemy.orm import relationship, Session, \
-    selectin_polymorphic, selectinload
+    selectin_polymorphic, selectinload, with_polymorphic
 from sqlalchemy.testing import fixtures
 from sqlalchemy import testing
 from sqlalchemy.testing import eq_
-from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, EachOf
-from ._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
-    Machine, Paperwork, _Polymorphic
+from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, EachOf, Or
+from ._poly_fixtures import Company, Person, Engineer, Manager, \
+    _Polymorphic, GeometryFixtureBase
 
 
 class BaseAndSubFixture(object):
@@ -258,3 +258,213 @@ class FixtureLoadTest(_Polymorphic, testing.AssertsExecutionResults):
         )
         eq_(result, [self.c1, self.c2])
 
+
+class TestGeometries(GeometryFixtureBase):
+
+    def test_threelevel_selectin_to_inline_mapped(self):
+        self._fixture_from_geometry({
+            "a": {
+                "subclasses": {
+                    "b": {"polymorphic_load": "selectin"},
+                    "c": {
+                        "subclasses": {
+                            "d": {
+                                "polymorphic_load": "inline", "single": True
+                            },
+                            "e": {
+                                "polymorphic_load": "inline", "single": True
+                            },
+                        },
+                        "polymorphic_load": "selectin",
+                    }
+                }
+            }
+        })
+
+        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
+        sess = Session()
+        sess.add_all([d(d_data="d1"), e(e_data="e1")])
+        sess.commit()
+
+        q = sess.query(a)
+
+        result = self.assert_sql_execution(
+            testing.db,
+            q.all,
+            CompiledSQL(
+                "SELECT a.type AS a_type, a.id AS a_id, "
+                "a.a_data AS a_a_data FROM a",
+                {}
+            ),
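+            # the two alternatives differ only in the rendered order of the
+            # single-inheritance d_data / e_data columns, which can vary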
+            Or(
+                CompiledSQL(
+                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
+                    "c.c_data AS c_c_data, c.e_data AS c_e_data, "
+                    "c.d_data AS c_d_data "
+                    "FROM a JOIN c ON a.id = c.id "
+                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
+                    [{'primary_keys': [1, 2]}]
+                ),
+                CompiledSQL(
+                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
+                    "c.c_data AS c_c_data, "
+                    "c.d_data AS c_d_data, c.e_data AS c_e_data "
+                    "FROM a JOIN c ON a.id = c.id "
+                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
+                    [{'primary_keys': [1, 2]}]
+                )
+            )
+        )
+        with self.assert_statement_count(testing.db, 0):
+            eq_(
+                result,
+                [d(d_data="d1"), e(e_data="e1")]
+            )
+
+    def test_threelevel_selectin_to_inline_options(self):
+        self._fixture_from_geometry({
+            "a": {
+                "subclasses": {
+                    "b": {},
+                    "c": {
+                        "subclasses": {
+                            "d": {
+                                "single": True
+                            },
+                            "e": {
+                                "single": True
+                            },
+                        },
+                    }
+                }
+            }
+        })
+
+        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
+        sess = Session()
+        sess.add_all([d(d_data="d1"), e(e_data="e1")])
+        sess.commit()
+
+        c_alias = with_polymorphic(c, (d, e))
+        q = sess.query(a).options(
+            selectin_polymorphic(a, [b, c_alias])
+        )
+
+        result = self.assert_sql_execution(
+            testing.db,
+            q.all,
+            CompiledSQL(
+                "SELECT a.type AS a_type, a.id AS a_id, "
+                "a.a_data AS a_a_data FROM a",
+                {}
+            ),
+            Or(
+                CompiledSQL(
+                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
+                    "c.c_data AS c_c_data, c.e_data AS c_e_data, "
+                    "c.d_data AS c_d_data "
+                    "FROM a JOIN c ON a.id = c.id "
+                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
+                    [{'primary_keys': [1, 2]}]
+                ),
+                CompiledSQL(
+                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
+                    "c.c_data AS c_c_data, c.d_data AS c_d_data, "
+                    "c.e_data AS c_e_data "
+                    "FROM a JOIN c ON a.id = c.id "
+                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
+                    [{'primary_keys': [1, 2]}]
+                ),
+            )
+        )
+        with self.assert_statement_count(testing.db, 0):
+            eq_(
+                result,
+                [d(d_data="d1"), e(e_data="e1")]
+            )
+
+    def test_threelevel_selectin_to_inline_awkward_alias_options(self):
+        self._fixture_from_geometry({
+            "a": {
+                "subclasses": {
+                    "b": {},
+                    "c": {
+                        "subclasses": {
+                            "d": {},
+                            "e": {},
+                        },
+                    }
+                }
+            }
+        })
+
+        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
+        sess = Session()
+        sess.add_all([d(d_data="d1"), e(e_data="e1")])
+        sess.commit()
+
+        from sqlalchemy import select
+
+        a_table, c_table, d_table, e_table = self.tables("a", "c", "d", "e")
+
+        poly = select([
+            a_table.c.id, a_table.c.type, c_table, d_table, e_table
+        ]).select_from(
+            a_table.join(c_table).outerjoin(d_table).outerjoin(e_table)
+        ).apply_labels().alias('poly')
+
+        c_alias = with_polymorphic(c, (d, e), poly)
+        q = sess.query(a).options(
+            selectin_polymorphic(a, [b, c_alias])
+        ).order_by(a.id)
+
+        result = self.assert_sql_execution(
+            testing.db,
+            q.all,
+            CompiledSQL(
+                "SELECT a.type AS a_type, a.id AS a_id, "
+                "a.a_data AS a_a_data FROM a ORDER BY a.id",
+                {}
+            ),
+            Or(
+                # here, the test is that the adaptation of "a" takes place
+                CompiledSQL(
+                    "SELECT poly.a_type AS poly_a_type, "
+                    "poly.c_id AS poly_c_id, "
+                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
+                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data, "
+                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data "
+                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
+                    "c.id AS c_id, "
+                    "c.c_data AS c_c_data, d.id AS d_id, "
+                    "d.d_data AS d_d_data, "
+                    "e.id AS e_id, e.e_data AS e_e_data FROM a JOIN c "
+                    "ON a.id = c.id LEFT OUTER JOIN d ON c.id = d.id "
+                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
+                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
+                    "ORDER BY poly.a_id",
+                    [{'primary_keys': [1, 2]}]
+                ),
+                CompiledSQL(
+                    "SELECT poly.a_type AS poly_a_type, "
+                    "poly.c_id AS poly_c_id, "
+                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
+                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data, "
+                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data "
+                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
+                    "c.id AS c_id, c.c_data AS c_c_data, d.id AS d_id, "
+                    "d.d_data AS d_d_data, e.id AS e_id, "
+                    "e.e_data AS e_e_data FROM a JOIN c ON a.id = c.id "
+                    "LEFT OUTER JOIN d ON c.id = d.id "
+                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
+                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
+                    "ORDER BY poly.a_id",
+                    [{'primary_keys': [1, 2]}]
+                )
+            )
+        )
+        with self.assert_statement_count(testing.db, 0):
+            eq_(
+                result,
+                [d(d_data="d1"), e(e_data="e1")]
+            )