git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- move all of orm to use absolute imports
author    Mike Bayer <mike_mp@zzzcomputing.com>
          Sat, 23 Jun 2012 18:45:47 +0000 (14:45 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Sat, 23 Jun 2012 18:45:47 +0000 (14:45 -0400)
- break out key mechanics of loading objects
into a new "orm.loading" module, removing implementation
details from both mapper.py and query.py; this is analogous
to persistence.py
- some other cleanup and old cruft removal
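
The pattern applied across the 29 files below is mechanical: package-absolute imports are rewritten as explicit relative imports. A minimal before/after sketch of the conversion, showing how a module header inside lib/sqlalchemy/orm/ reads in each style (valid only inside the package, shown here purely for orientation):

    # before: package-absolute imports
    from sqlalchemy.orm import exc
    from sqlalchemy.orm.mapper import Mapper
    from sqlalchemy import util as sa_util

    # after: explicit relative imports resolved against the orm package
    from . import exc               # sibling module orm/exc.py
    from .mapper import Mapper      # sibling module orm/mapper.py
    from .. import util as sa_util  # parent package sqlalchemy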

29 files changed:
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/dependency.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/events.py
lib/sqlalchemy/orm/exc.py
lib/sqlalchemy/orm/identity.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/loading.py [new file with mode: 0644]
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/scoping.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/sync.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/orm/util.py
test/orm/test_dynamic.py
test/orm/test_transaction.py
test/orm/test_unitofwork.py

lib/sqlalchemy/orm/__init__.py
index 75e3684c08802dea949af770e8834b76dc2099ec..8080ac387a4112bd178f2049896e32ab01d8b6d4 100644 (file)
@@ -12,14 +12,16 @@ documentation for an overview of how this module is used.
 
 """
 
-from sqlalchemy.orm import exc
-from sqlalchemy.orm.mapper import (
+from . import exc
+from .mapper import (
      Mapper,
      _mapper_registry,
      class_mapper,
-     configure_mappers
+     configure_mappers,
+     reconstructor,
+     validates
      )
-from sqlalchemy.orm.interfaces import (
+from .interfaces import (
      EXT_CONTINUE,
      EXT_STOP,
      InstrumentationManager,
@@ -28,7 +30,7 @@ from sqlalchemy.orm.interfaces import (
      SessionExtension,
      AttributeExtension,
      )
-from sqlalchemy.orm.util import (
+from .util import (
      aliased,
      join,
      object_mapper,
@@ -37,7 +39,7 @@ from sqlalchemy.orm.util import (
      with_parent,
      with_polymorphic,
      )
-from sqlalchemy.orm.properties import (
+from .properties import (
      ColumnProperty,
      ComparableProperty,
      CompositeProperty,
@@ -45,21 +47,25 @@ from sqlalchemy.orm.properties import (
      PropertyLoader,
      SynonymProperty,
      )
-from sqlalchemy.orm.relationships import (
+from .relationships import (
     foreign,
     remote,
     remote_foreign
 )
-from sqlalchemy.orm import mapper as mapperlib
-from sqlalchemy.orm.mapper import reconstructor, validates
-from sqlalchemy.orm import strategies
-from sqlalchemy.orm.query import AliasOption, Query
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.session import object_session, sessionmaker, \
+from .session import (
+    Session, 
+    object_session, 
+    sessionmaker, 
     make_transient
-from sqlalchemy.orm.scoping import ScopedSession
-from sqlalchemy import util as sa_util
+)
+from .scoping import (
+    ScopedSession
+)
+from . import mapper as mapperlib
+from . import strategies
+from .query import AliasOption, Query
+from ..sql import util as sql_util
+from .. import util as sa_util
 
 __all__ = (
     'EXT_CONTINUE',
@@ -1578,8 +1584,8 @@ def contains_eager(*keys, **kwargs):
     """
     alias = kwargs.pop('alias', None)
     if kwargs:
-        raise exceptions.ArgumentError('Invalid kwargs for contains_eag'
-                'er: %r' % kwargs.keys())
+        raise exc.ArgumentError(
+                'Invalid kwargs for contains_eager: %r' % kwargs.keys())
     return strategies.EagerLazyOption(keys, lazy='joined',
             propagate_to_loaders=False, chained=True), \
         strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
lib/sqlalchemy/orm/attributes.py
index e71752ab52b1c1d047558d3469741cd2faf18f31..0dd331354c556ba67beda0a72ec8104169448b09 100644 (file)
@@ -16,11 +16,9 @@ defines a large part of the ORM's interactivity.
 import operator
 from operator import itemgetter
 
-from sqlalchemy import util, event, exc as sa_exc, inspection
-from sqlalchemy.orm import interfaces, collections, events, exc as orm_exc
-
-
-mapperutil = util.importlater("sqlalchemy.orm", "util")
+from .. import util, event, inspection
+from . import interfaces, collections, events, exc as orm_exc
+orm_util = util.importlater("sqlalchemy.orm", "util")
 
 PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
 """Symbol returned by a loader callable or other attribute/history
@@ -415,8 +413,8 @@ class AttributeImpl(object):
                             "but the parent record "
                             "has gone stale, can't be sure this "
                             "is the most recent parent." % 
-                            (mapperutil.state_str(state), 
-                            mapperutil.state_str(parent_state),
+                            (orm_util.state_str(state), 
+                            orm_util.state_str(parent_state),
                             self.key))
 
                     return
@@ -675,8 +673,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
             else:
                 raise ValueError(
                     "Object %s not associated with %s on attribute '%s'" % (
-                    mapperutil.instance_str(check_old),
-                   mapperutil.state_str(state),
+                    orm_util.instance_str(check_old),
+                   orm_util.state_str(state),
                    self.key
                 ))
         value = self.fire_replace_event(state, dict_, value, old, initiator)
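
Several of these modules (attributes.py above, and collections.py, exc.py, and interfaces.py below) rely on util.importlater(...) to break import cycles: it returns a proxy object whose attribute lookups are forwarded to the target module once that module has actually been imported, so the import happens later than the statement that names it. A rough, self-contained sketch of the idea (illustrative only; not SQLAlchemy's implementation, and the exact resolution timing there is an internal detail):

    import importlib

    class ImportLater(object):
        """Proxy that imports "path.name" on first attribute access."""
        def __init__(self, path, name):
            self._path = path
            self._name = name
            self._module = None

        def __getattr__(self, key):
            # only reached for names not already set in __init__
            if self._module is None:
                self._module = importlib.import_module(
                    "%s.%s" % (self._path, self._name))
            return getattr(self._module, key)

    # usage mirroring the diff:
    #   orm_util = util.importlater("sqlalchemy.orm", "util")
    #   orm_util.state_str(state)   # sqlalchemy.orm.util is imported lazily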
lib/sqlalchemy/orm/collections.py
index d51d7bcd213ef090270130d375893a5150889856..68b0003826bb74a09c2f1a90c9afb4c1a5c61c06 100644 (file)
@@ -108,8 +108,11 @@ import operator
 import sys
 import weakref
 
-from sqlalchemy.sql import expression
-from sqlalchemy import schema, util, exc as sa_exc
+from ..sql import expression
+from .. import util, exc as sa_exc
+orm_util = util.importlater("sqlalchemy.orm", "util")
+attributes = util.importlater("sqlalchemy.orm", "attributes")
+
 
 __all__ = ['collection', 'collection_adapter',
            'mapped_collection', 'column_mapped_collection',
@@ -138,8 +141,8 @@ class _PlainColumnGetter(object):
         return self.cols
 
     def __call__(self, value):
-        state = instance_state(value)
-        m = _state_mapper(state)
+        state = attributes.instance_state(value)
+        m = orm_util._state_mapper(state)
 
         key = [
             m._get_state_attr_by_column(state, state.dict, col)
@@ -163,8 +166,8 @@ class _SerializableColumnGetter(object):
     def __reduce__(self):
         return _SerializableColumnGetter, (self.colkeys,)
     def __call__(self, value):
-        state = instance_state(value)
-        m = _state_mapper(state)
+        state = attributes.instance_state(value)
+        m = orm_util._state_mapper(state)
         key = [m._get_state_attr_by_column(
                         state, state.dict, 
                         m.mapped_table.columns[k])
@@ -228,10 +231,6 @@ def column_mapped_collection(mapping_spec):
     after a session flush.
 
     """
-    global _state_mapper, instance_state
-    from sqlalchemy.orm.util import _state_mapper
-    from sqlalchemy.orm.attributes import instance_state
-
     cols = [expression._only_column_elements(q, "mapping_spec")
                 for q in util.to_list(mapping_spec)
             ]
lib/sqlalchemy/orm/dependency.py
index b3789e75861d59c3bf9a7c1f69165bf5ede0f666..1552f6aef145c33594d7e49667260d5d48f08879 100644 (file)
@@ -8,10 +8,10 @@
 
 """
 
-from sqlalchemy import sql, util, exc as sa_exc
-from sqlalchemy.orm import attributes, exc, sync, unitofwork, \
+from .. import sql, util, exc as sa_exc
+from . import attributes, exc, sync, unitofwork, \
                                         util as mapperutil
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
+from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
 
 class DependencyProcessor(object):
     def __init__(self, prop):
@@ -704,7 +704,6 @@ class ManyToOneDP(DependencyProcessor):
                                         self.key, 
                                         self._passive_delete_flag)
                 if history:
-                    ret = True
                     for child in history.deleted:
                         if self.hasparent(child) is False:
                             uowcommit.register_object(child, isdelete=True, 
@@ -932,12 +931,14 @@ class ManyToManyDP(DependencyProcessor):
             ])
 
     def presort_deletes(self, uowcommit, states):
+        # TODO: no tests fail if this whole
+        # thing is removed !!!!
         if not self.passive_deletes:
             # if no passive deletes, load history on 
             # the collection, so that prop_has_changes()
             # returns True
             for state in states:
-                history = uowcommit.get_attribute_history(
+                uowcommit.get_attribute_history(
                                         state, 
                                         self.key, 
                                         self._passive_delete_flag)
lib/sqlalchemy/orm/deprecated_interfaces.py
index de9c5ef75a818db1924ae1e248a4d576a63849af..b66724e281cff6af1bc338849237f809125dad0c 100644 (file)
@@ -4,7 +4,7 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from sqlalchemy import event, util
+from .. import event, util
 from interfaces import EXT_CONTINUE
 
 
lib/sqlalchemy/orm/descriptor_props.py
index ba1109dfb460de6d3fb3f690214ae3b61a54c810..83ccf75d85a5dc58a588f04c158c96950eb026e5 100644 (file)
@@ -10,12 +10,11 @@ as actively in the load/persist ORM loop.
 
 """
 
-from sqlalchemy.orm.interfaces import \
-    MapperProperty, PropComparator, StrategizedProperty
-from sqlalchemy.orm.mapper import _none_set
-from sqlalchemy.orm import attributes, strategies
-from sqlalchemy import util, sql, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression
+from .interfaces import MapperProperty, PropComparator
+from .util import _none_set
+from . import attributes, strategies
+from .. import util, sql, exc as sa_exc, event, schema
+from ..sql import expression
 properties = util.importlater('sqlalchemy.orm', 'properties')
 
 class DescriptorProperty(MapperProperty):
lib/sqlalchemy/orm/dynamic.py
index 18fc76aa9969dd153690ec448edf689b554112b7..c2f4aff02604c97a9b221c7e46819bb86747fbad 100644 (file)
@@ -11,16 +11,13 @@ basic add/delete mutation.
 
 """
 
-from sqlalchemy import log, util
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.sql import operators
-from sqlalchemy.orm import (
-    attributes, object_session, util as mapperutil, strategies, object_mapper
+from .. import log, util
+from ..sql import operators
+from . import (
+    attributes, object_session, util as orm_util, strategies, 
+    object_mapper, exc as orm_exc, collections
     )
-from sqlalchemy.orm.query import Query
-from sqlalchemy.orm.util import has_identity
-from sqlalchemy.orm import attributes, collections
+from .query import Query
 
 class DynaLoader(strategies.AbstractRelationshipLoader):
     def init_class_attribute(self, mapper):
@@ -199,7 +196,7 @@ class AppenderMixin(object):
     query_class = None
 
     def __init__(self, attr, state):
-        Query.__init__(self, attr.target_mapper, None)
+        super(AppenderMixin, self).__init__(attr.target_mapper, None)
         self.instance = instance = state.obj()
         self.attr = attr
 
@@ -219,7 +216,7 @@ class AppenderMixin(object):
         if sess is not None and self.autoflush and sess.autoflush \
             and self.instance in sess:
             sess.flush()
-        if not has_identity(self.instance):
+        if not orm_util.has_identity(self.instance):
             return None
         else:
             return sess
@@ -268,7 +265,7 @@ class AppenderMixin(object):
                     "Parent instance %s is not bound to a Session, and no "
                     "contextual session is established; lazy load operation "
                     "of attribute '%s' cannot proceed" % (
-                        mapperutil.instance_str(instance), self.attr.key))
+                        orm_util.instance_str(instance), self.attr.key))
 
         if self.query_class:
             query = self.query_class(self.attr.target_mapper, session=sess)
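
In dynamic.py, the hard-coded Query.__init__ call inside AppenderMixin becomes a super() call, so the constructor is resolved through the MRO and cooperates with whichever Query subclass the mixin ends up combined with. A toy sketch of that pattern (class bodies and arguments simplified; these are not the real ORM signatures):

    class Query(object):
        def __init__(self, entities, session=None):
            self.entities = entities
            self.session = session

    class AppenderMixin(object):
        def __init__(self, target, state):
            # delegate through the MRO instead of naming Query directly
            super(AppenderMixin, self).__init__(target, None)
            self.state = state

    class AppenderQuery(AppenderMixin, Query):
        pass

    aq = AppenderQuery("SomeMapper", state=object())
    assert aq.entities == "SomeMapper"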
lib/sqlalchemy/orm/evaluator.py
index e727c17bb2d222f7adb07b20e793dd9fc6342cc6..5de514da8fe36316a91c8fba04349eb66da80ca5 100644 (file)
@@ -5,9 +5,7 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import operator
-from sqlalchemy.sql import operators, functions
-from sqlalchemy.sql import expression as sql
-
+from ..sql import operators
 
 class UnevaluatableError(Exception):
     pass
@@ -22,15 +20,16 @@ _straight_ops = set(getattr(operators, op)
 
 
 _notimplemented_ops = set(getattr(operators, op)
-                          for op in ('like_op', 'notlike_op', 'ilike_op',
-                                     'notilike_op', 'between_op', 'in_op',
-                                     'notin_op', 'endswith_op', 'concat_op'))
+                      for op in ('like_op', 'notlike_op', 'ilike_op',
+                                 'notilike_op', 'between_op', 'in_op',
+                                 'notin_op', 'endswith_op', 'concat_op'))
 
 class EvaluatorCompiler(object):
     def process(self, clause):
         meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
         if not meth:
-            raise UnevaluatableError("Cannot evaluate %s" % type(clause).__name__)
+            raise UnevaluatableError(
+                "Cannot evaluate %s" % type(clause).__name__)
         return meth(clause)
 
     def visit_grouping(self, clause):
@@ -71,12 +70,15 @@ class EvaluatorCompiler(object):
                         return False
                 return True
         else:
-            raise UnevaluatableError("Cannot evaluate clauselist with operator %s" % clause.operator)
+            raise UnevaluatableError(
+                "Cannot evaluate clauselist with operator %s" % 
+                clause.operator)
 
         return evaluate
 
     def visit_binary(self, clause):
-        eval_left,eval_right = map(self.process, [clause.left, clause.right])
+        eval_left,eval_right = map(self.process, 
+                                [clause.left, clause.right])
         operator = clause.operator
         if operator is operators.is_:
             def evaluate(obj):
@@ -92,7 +94,9 @@ class EvaluatorCompiler(object):
                     return None
                 return operator(eval_left(obj), eval_right(obj))
         else:
-            raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
+            raise UnevaluatableError(
+                    "Cannot evaluate %s with operator %s" % 
+                    (type(clause).__name__, clause.operator))
         return evaluate
 
     def visit_unary(self, clause):
@@ -104,7 +108,9 @@ class EvaluatorCompiler(object):
                     return None
                 return not value
             return evaluate
-        raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
+        raise UnevaluatableError(
+                    "Cannot evaluate %s with operator %s" % 
+                    (type(clause).__name__, clause.operator))
 
     def visit_bindparam(self, clause):
         val = clause.value
lib/sqlalchemy/orm/events.py
index f39ed778dba79590fe85f17e39ba365a70eecf70..982c4d77f65e55068c2bfcef9fcbcd4fcd8b3c49 100644 (file)
@@ -7,7 +7,7 @@
 """ORM event interfaces.
 
 """
-from sqlalchemy import event, exc, util
+from .. import event, exc, util
 orm = util.importlater("sqlalchemy", "orm")
 import inspect
 
lib/sqlalchemy/orm/exc.py
index 9b3a78c43557a22001128f84ad2a98744b150a80..d42dd42a77db8788bd8b74d475d85259bccc92df 100644 (file)
@@ -5,14 +5,14 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 """SQLAlchemy ORM exceptions."""
-
-import sqlalchemy as sa
-orm_util = sa.util.importlater('sqlalchemy.orm', 'util')
+from .. import exc as sa_exc, util
+orm_util = util.importlater('sqlalchemy.orm', 'util')
+attributes = util.importlater('sqlalchemy.orm', 'attributes')
 
 NO_STATE = (AttributeError, KeyError)
 """Exception types that may be raised by instrumentation implementations."""
 
-class StaleDataError(sa.exc.SQLAlchemyError):
+class StaleDataError(sa_exc.SQLAlchemyError):
     """An operation encountered database state that is unaccounted for.
 
     Conditions which cause this to happen include:
@@ -41,17 +41,17 @@ class StaleDataError(sa.exc.SQLAlchemyError):
 ConcurrentModificationError = StaleDataError
 
 
-class FlushError(sa.exc.SQLAlchemyError):
+class FlushError(sa_exc.SQLAlchemyError):
     """A invalid condition was detected during flush()."""
 
 
-class UnmappedError(sa.exc.InvalidRequestError):
+class UnmappedError(sa_exc.InvalidRequestError):
     """Base for exceptions that involve expected mappings not present."""
 
-class ObjectDereferencedError(sa.exc.SQLAlchemyError):
+class ObjectDereferencedError(sa_exc.SQLAlchemyError):
     """An operation cannot complete due to an object being garbage collected."""
 
-class DetachedInstanceError(sa.exc.SQLAlchemyError):
+class DetachedInstanceError(sa_exc.SQLAlchemyError):
     """An attempt to access unloaded attributes on a 
     mapped instance that is detached."""
 
@@ -61,7 +61,7 @@ class UnmappedInstanceError(UnmappedError):
     def __init__(self, obj, msg=None):
         if not msg:
             try:
-                mapper = sa.orm.class_mapper(type(obj))
+                mapper = orm_util.class_mapper(type(obj))
                 name = _safe_cls_name(type(obj))
                 msg = ("Class %r is mapped, but this instance lacks "
                        "instrumentation.  This occurs when the instance is created "
@@ -88,7 +88,7 @@ class UnmappedClassError(UnmappedError):
     def __reduce__(self):
         return self.__class__, (None, self.args[0])
 
-class ObjectDeletedError(sa.exc.InvalidRequestError):
+class ObjectDeletedError(sa_exc.InvalidRequestError):
     """A refresh operation failed to retrieve the database
     row corresponding to an object's known primary key identity.
     
@@ -112,28 +112,23 @@ class ObjectDeletedError(sa.exc.InvalidRequestError):
             msg = "Instance '%s' has been deleted, or its "\
              "row is otherwise not present." % orm_util.state_str(state)
 
-        sa.exc.InvalidRequestError.__init__(self, msg)
+        sa_exc.InvalidRequestError.__init__(self, msg)
 
     def __reduce__(self):
         return self.__class__, (None, self.args[0])
 
-class UnmappedColumnError(sa.exc.InvalidRequestError):
+class UnmappedColumnError(sa_exc.InvalidRequestError):
     """Mapping operation was requested on an unknown column."""
 
 
-class NoResultFound(sa.exc.InvalidRequestError):
+class NoResultFound(sa_exc.InvalidRequestError):
     """A database result was required but none was found."""
 
 
-class MultipleResultsFound(sa.exc.InvalidRequestError):
+class MultipleResultsFound(sa_exc.InvalidRequestError):
     """A single database result was required but more than one were found."""
 
 
-# Legacy compat until 0.6.
-sa.exc.ConcurrentModificationError = ConcurrentModificationError
-sa.exc.FlushError = FlushError
-sa.exc.UnmappedColumnError
-
 def _safe_cls_name(cls):
     try:
         cls_name = '.'.join((cls.__module__, cls.__name__))
@@ -145,7 +140,7 @@ def _safe_cls_name(cls):
 
 def _default_unmapped(cls):
     try:
-        mappers = sa.orm.attributes.manager_of_class(cls).mappers
+        mappers = attributes.manager_of_class(cls).mappers
     except NO_STATE:
         mappers = {}
     except TypeError:
lib/sqlalchemy/orm/identity.py
index bb5fbb6e8e46f50bdcefc78c3255067b1db9e27b..4ba54b2f0fd3feae621a4e25472ade0bc5701a27 100644 (file)
@@ -5,7 +5,7 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import weakref
-from sqlalchemy.orm import attributes
+from . import attributes
 
 
 class IdentityMap(dict):
lib/sqlalchemy/orm/instrumentation.py
index e9d1ca36abb4ddb103a7022aebc2de7287ade511..c322039391c827c64590465ff455d12f2958b67f 100644 (file)
@@ -21,11 +21,10 @@ An example of full customization is in /examples/custom_attributes.
 """
 
 
-from sqlalchemy.orm import exc, collections, events
-from operator import attrgetter, itemgetter
-from sqlalchemy import event, util, inspection
+from . import exc, collections, events, state, attributes
+from operator import attrgetter
+from .. import event, util
 import weakref
-from sqlalchemy.orm import state, attributes
 
 
 INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'
lib/sqlalchemy/orm/interfaces.py
index 8d185e9f3b94cfd8496a034c75a3fba2c6d3c128..da62ecbda247e86325c1ab51a2c05b8a28ea2b77 100644 (file)
@@ -15,17 +15,15 @@ Other than the deprecated extensions, this module and the
 classes within should be considered mostly private.
 
 """
-
+from __future__ import absolute_import
 from itertools import chain
 
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import util
-from sqlalchemy.sql import operators
-deque = __import__('collections').deque
-
-mapperutil = util.importlater('sqlalchemy.orm', 'util')
+from .. import exc as sa_exc, util
+from ..sql import operators
+from collections import deque
 
-collections = None
+orm_util = util.importlater('sqlalchemy.orm', 'util')
+collections = util.importlater('sqlalchemy.orm', 'collections')
 
 __all__ = (
     'AttributeExtension',
@@ -51,7 +49,7 @@ ONETOMANY = util.symbol('ONETOMANY')
 MANYTOONE = util.symbol('MANYTOONE')
 MANYTOMANY = util.symbol('MANYTOMANY')
 
-from deprecated_interfaces import AttributeExtension, SessionExtension, \
+from .deprecated_interfaces import AttributeExtension, SessionExtension, \
     MapperExtension
 
 
@@ -426,11 +424,11 @@ class PropertyOption(MapperOption):
         self.__dict__ = state
 
     def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
-        if mapperutil._is_aliased_class(mapper):
+        if orm_util._is_aliased_class(mapper):
             searchfor = mapper
             isa = False
         else:
-            searchfor = mapperutil._class_to_mapper(mapper)
+            searchfor = orm_util._class_to_mapper(mapper)
             isa = True
         for ent in query._mapper_entities:
             if ent.corresponds_to(searchfor):
@@ -477,7 +475,7 @@ class PropertyOption(MapperOption):
         Return a list of affected paths.
         
         """
-        path = mapperutil.PathRegistry.root
+        path = orm_util.PathRegistry.root
         entity = None
         paths = []
         no_result = []
@@ -565,13 +563,13 @@ class PropertyOption(MapperOption):
 
             if getattr(token, '_of_type', None):
                 ac = token._of_type
-                ext_info = mapperutil._extended_entity_info(ac)
+                ext_info = orm_util._extended_entity_info(ac)
                 path_element = mapper = ext_info.mapper
                 if not ext_info.is_aliased_class:
-                    ac = mapperutil.with_polymorphic(
+                    ac = orm_util.with_polymorphic(
                                 ext_info.mapper.base_mapper, 
                                 ext_info.mapper, aliased=True)
-                    ext_info = mapperutil._extended_entity_info(ac)
+                    ext_info = orm_util._extended_entity_info(ac)
                 path.set(query, "path_with_polymorphic", ext_info)
             else:
                 path_element = mapper = getattr(prop, 'mapper', None)
@@ -722,9 +720,6 @@ class InstrumentationManager(object):
         delattr(class_, key)
 
     def instrument_collection_class(self, class_, key, collection_class):
-        global collections
-        if collections is None:
-            from sqlalchemy.orm import collections
         return collections.prepare_instrumentation(collection_class)
 
     def get_instance_dict(self, class_, instance):
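
interfaces.py (like mapper.py and the new loading.py below) gains "from __future__ import absolute_import", which is what lets "from collections import deque" safely replace the old __import__('collections').deque workaround: under Python 2's default implicit-relative lookup, a bare import of "collections" inside the orm package would find the sibling orm/collections.py first. A small sketch of the effect:

    from __future__ import absolute_import  # Python 2: bare imports resolve absolutely

    # With the future import in effect this is always the stdlib module, even in
    # a package that ships its own collections.py (as sqlalchemy/orm does); the
    # sibling module is then spelled explicitly, e.g. "from . import collections".
    from collections import deque

    q = deque([1, 2])
    q.appendleft(0)
    assert list(q) == [0, 1, 2]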
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
new file mode 100644 (file)
index 0000000..3234d22
--- /dev/null
+++ b/lib/sqlalchemy/orm/loading.py
@@ -0,0 +1,533 @@
+# orm/loading.py
+# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""private module containing functions used to convert database
+rows into object instances and associated state.
+
+the functions here are called primarily by Query, Mapper, 
+as well as some of the attribute loading strategies.
+
+"""
+from __future__ import absolute_import
+
+from .. import util
+from . import attributes, exc as orm_exc
+from .interfaces import EXT_CONTINUE
+from ..sql import util as sql_util
+from .util import _none_set, state_str
+
+_new_runid = util.counter()
+
+def instances(query, cursor, context):
+    """Return an ORM result as an iterator."""
+    session = query.session
+
+    context.runid = _new_runid()
+
+    filter_fns = [ent.filter_fn
+                for ent in query._entities]
+    filtered = id in filter_fns
+
+    single_entity = filtered and len(query._entities) == 1
+
+    if filtered:
+        if single_entity:
+            filter_fn = id
+        else:
+            def filter_fn(row):
+                return tuple(fn(x) for x, fn in zip(row, filter_fns))
+
+    custom_rows = single_entity and \
+                    query._entities[0].mapper.dispatch.append_result
+
+    (process, labels) = \
+                zip(*[
+                    query_entity.row_processor(query, 
+                            context, custom_rows)
+                    for query_entity in query._entities
+                ])
+
+    while True:
+        context.progress = {}
+        context.partials = {}
+
+        if query._yield_per:
+            fetch = cursor.fetchmany(query._yield_per)
+            if not fetch:
+                break
+        else:
+            fetch = cursor.fetchall()
+
+        if custom_rows:
+            rows = []
+            for row in fetch:
+                process[0](row, rows)
+        elif single_entity:
+            rows = [process[0](row, None) for row in fetch]
+        else:
+            rows = [util.NamedTuple([proc(row, None) for proc in process],
+                                    labels) for row in fetch]
+
+        if filtered:
+            rows = util.unique_list(rows, filter_fn)
+
+        if context.refresh_state and query._only_load_props \
+                    and context.refresh_state in context.progress:
+            context.refresh_state.commit(
+                    context.refresh_state.dict, query._only_load_props)
+            context.progress.pop(context.refresh_state)
+
+        session._finalize_loaded(context.progress)
+
+        for ii, (dict_, attrs) in context.partials.iteritems():
+            ii.commit(dict_, attrs)
+
+        for row in rows:
+            yield row
+
+        if not query._yield_per:
+            break
+
+def merge_result(query, iterator, load=True):
+    """Merge a result into this :class:`.Query` object's Session."""
+
+    from . import query as querylib
+
+    session = query.session
+    if load:
+        # flush current contents if we expect to load data
+        session._autoflush()
+
+    autoflush = session.autoflush
+    try:
+        session.autoflush = False
+        single_entity = len(query._entities) == 1
+        if single_entity:
+            if isinstance(query._entities[0], querylib._MapperEntity):
+                result = [session._merge(
+                        attributes.instance_state(instance), 
+                        attributes.instance_dict(instance), 
+                        load=load, _recursive={})
+                        for instance in iterator]
+            else:
+                result = list(iterator)
+        else:
+            mapped_entities = [i for i, e in enumerate(query._entities) 
+                                    if isinstance(e, querylib._MapperEntity)]
+            result = []
+            for row in iterator:
+                newrow = list(row)
+                for i in mapped_entities:
+                    newrow[i] = session._merge(
+                            attributes.instance_state(newrow[i]), 
+                            attributes.instance_dict(newrow[i]), 
+                            load=load, _recursive={})
+                result.append(util.NamedTuple(newrow, row._labels))
+
+        return iter(result)
+    finally:
+        session.autoflush = autoflush
+
+def get_from_identity(session, key, passive):
+    """Look up the given key in the given session's identity map, 
+    check the object for expired state if found.
+
+    """
+    instance = session.identity_map.get(key)
+    if instance is not None:
+
+        state = attributes.instance_state(instance)
+
+        # expired - ensure it still exists
+        if state.expired:
+            if not passive & attributes.SQL_OK:
+                # TODO: no coverage here
+                return attributes.PASSIVE_NO_RESULT
+            elif not passive & attributes.RELATED_OBJECT_OK:
+                # this mode is used within a flush and the instance's
+                # expired state will be checked soon enough, if necessary
+                return instance
+            try:
+                state(passive)
+            except orm_exc.ObjectDeletedError:
+                session._remove_newly_deleted([state])
+                return None
+        return instance
+    else:
+        return None
+
+def load_on_ident(query, key, 
+                    refresh_state=None, lockmode=None,
+                        only_load_props=None):
+    """Load the given identity key from the database."""
+
+    lockmode = lockmode or query._lockmode
+
+    if key is not None:
+        ident = key[1]
+    else:
+        ident = None
+
+    if refresh_state is None:
+        q = query._clone()
+        q._get_condition()
+    else:
+        q = query._clone()
+
+    if ident is not None:
+        mapper = query._mapper_zero()
+
+        (_get_clause, _get_params) = mapper._get_clause
+
+        # None present in ident - turn those comparisons
+        # into "IS NULL"
+        if None in ident:
+            nones = set([
+                        _get_params[col].key for col, value in
+                         zip(mapper.primary_key, ident) if value is None
+                        ])
+            _get_clause = sql_util.adapt_criterion_to_null(
+                                            _get_clause, nones)
+
+        _get_clause = q._adapt_clause(_get_clause, True, False)
+        q._criterion = _get_clause
+
+        params = dict([
+            (_get_params[primary_key].key, id_val)
+            for id_val, primary_key in zip(ident, mapper.primary_key)
+        ])
+
+        q._params = params
+
+    if lockmode is not None:
+        q._lockmode = lockmode
+    q._get_options(
+        populate_existing=bool(refresh_state),
+        version_check=(lockmode is not None),
+        only_load_props=only_load_props,
+        refresh_state=refresh_state)
+    q._order_by = None
+
+    try:
+        return q.one()
+    except orm_exc.NoResultFound:
+        return None
+
+def instance_processor(mapper, context, path, adapter, 
+                            polymorphic_from=None, 
+                            only_load_props=None, 
+                            refresh_state=None,
+                            polymorphic_discriminator=None):
+
+    """Produce a mapper level row processor callable 
+       which processes rows into mapped instances."""
+
+    # note that this method, most of which exists in a closure
+    # called _instance(), resists being broken out, as 
+    # attempts to do so tend to add significant function
+    # call overhead.  _instance() is the most
+    # performance-critical section in the whole ORM.
+
+    pk_cols = mapper.primary_key
+
+    if polymorphic_from or refresh_state:
+        polymorphic_on = None
+    else:
+        if polymorphic_discriminator is not None:
+            polymorphic_on = polymorphic_discriminator
+        else:
+            polymorphic_on = mapper.polymorphic_on
+        polymorphic_instances = util.PopulateDict(
+                                    _configure_subclass_mapper(
+                                            mapper,
+                                            context, path, adapter)
+                                    )
+
+    version_id_col = mapper.version_id_col
+
+    if adapter:
+        pk_cols = [adapter.columns[c] for c in pk_cols]
+        if polymorphic_on is not None:
+            polymorphic_on = adapter.columns[polymorphic_on]
+        if version_id_col is not None:
+            version_id_col = adapter.columns[version_id_col]
+
+    identity_class = mapper._identity_class
+
+    new_populators = []
+    existing_populators = []
+    eager_populators = []
+    load_path = context.query._current_path + path \
+                if context.query._current_path.path \
+                else path
+
+    def populate_state(state, dict_, row, isnew, only_load_props):
+        if isnew:
+            if context.propagate_options:
+                state.load_options = context.propagate_options
+            if state.load_options:
+                state.load_path = load_path
+
+        if not new_populators:
+            _populators(mapper, context, path, row, adapter,
+                            new_populators,
+                            existing_populators,
+                            eager_populators
+            )
+
+        if isnew:
+            populators = new_populators
+        else:
+            populators = existing_populators
+
+        if only_load_props is None:
+            for key, populator in populators:
+                populator(state, dict_, row)
+        elif only_load_props:
+            for key, populator in populators:
+                if key in only_load_props:
+                    populator(state, dict_, row)
+
+    session_identity_map = context.session.identity_map
+
+    listeners = mapper.dispatch
+
+    translate_row = listeners.translate_row or None
+    create_instance = listeners.create_instance or None
+    populate_instance = listeners.populate_instance or None
+    append_result = listeners.append_result or None
+    populate_existing = context.populate_existing or mapper.always_refresh
+    invoke_all_eagers = context.invoke_all_eagers
+
+    if mapper.allow_partial_pks:
+        is_not_primary_key = _none_set.issuperset
+    else:
+        is_not_primary_key = _none_set.issubset
+
+    def _instance(row, result):
+        if not new_populators and invoke_all_eagers:
+            _populators(mapper, context, path, row, adapter,
+                            new_populators,
+                            existing_populators,
+                            eager_populators
+            )
+
+        if translate_row:
+            for fn in translate_row:
+                ret = fn(mapper, context, row)
+                if ret is not EXT_CONTINUE:
+                    row = ret
+                    break
+
+        if polymorphic_on is not None:
+            discriminator = row[polymorphic_on]
+            if discriminator is not None:
+                _instance = polymorphic_instances[discriminator]
+                if _instance:
+                    return _instance(row, result)
+
+        # determine identity key
+        if refresh_state:
+            identitykey = refresh_state.key
+            if identitykey is None:
+                # super-rare condition; a refresh is being called
+                # on a non-instance-key instance; this is meant to only
+                # occur within a flush()
+                identitykey = mapper._identity_key_from_state(refresh_state)
+        else:
+            identitykey = (
+                            identity_class, 
+                            tuple([row[column] for column in pk_cols])
+                        )
+
+        instance = session_identity_map.get(identitykey)
+        if instance is not None:
+            state = attributes.instance_state(instance)
+            dict_ = attributes.instance_dict(instance)
+
+            isnew = state.runid != context.runid
+            currentload = not isnew
+            loaded_instance = False
+
+            if not currentload and \
+                    version_id_col is not None and \
+                    context.version_check and \
+                    mapper._get_state_attr_by_column(
+                            state, 
+                            dict_, 
+                            mapper.version_id_col) != \
+                                    row[version_id_col]:
+
+                raise orm_exc.StaleDataError(
+                        "Instance '%s' has version id '%s' which "
+                        "does not match database-loaded version id '%s'." 
+                        % (state_str(state), 
+                            mapper._get_state_attr_by_column(
+                                        state, dict_,
+                                        mapper.version_id_col),
+                                row[version_id_col]))
+        elif refresh_state:
+            # out of band refresh_state detected (i.e. its not in the
+            # session.identity_map) honor it anyway.  this can happen 
+            # if a _get() occurs within save_obj(), such as
+            # when eager_defaults is True.
+            state = refresh_state
+            instance = state.obj()
+            dict_ = attributes.instance_dict(instance)
+            isnew = state.runid != context.runid
+            currentload = True
+            loaded_instance = False
+        else:
+            # check for non-NULL values in the primary key columns,
+            # else no entity is returned for the row
+            if is_not_primary_key(identitykey[1]):
+                return None
+
+            isnew = True
+            currentload = True
+            loaded_instance = True
+
+            if create_instance:
+                for fn in create_instance:
+                    instance = fn(mapper, context, 
+                                        row, mapper.class_)
+                    if instance is not EXT_CONTINUE:
+                        manager = attributes.manager_of_class(
+                                                instance.__class__)
+                        # TODO: if manager is None, raise a friendly error
+                        # about returning instances of unmapped types
+                        manager.setup_instance(instance)
+                        break
+                else:
+                    instance = mapper.class_manager.new_instance()
+            else:
+                instance = mapper.class_manager.new_instance()
+
+            dict_ = attributes.instance_dict(instance)
+            state = attributes.instance_state(instance)
+            state.key = identitykey
+
+            # attach instance to session.
+            state.session_id = context.session.hash_key
+            session_identity_map.add(state)
+
+        if currentload or populate_existing:
+            # state is being fully loaded, so populate.
+            # add to the "context.progress" collection.
+            if isnew:
+                state.runid = context.runid
+                context.progress[state] = dict_
+
+            if populate_instance:
+                for fn in populate_instance:
+                    ret = fn(mapper, context, row, state, 
+                        only_load_props=only_load_props, 
+                        instancekey=identitykey, isnew=isnew)
+                    if ret is not EXT_CONTINUE:
+                        break
+                else:
+                    populate_state(state, dict_, row, isnew, only_load_props)
+            else:
+                populate_state(state, dict_, row, isnew, only_load_props)
+
+            if loaded_instance:
+                state.manager.dispatch.load(state, context)
+            elif isnew:
+                state.manager.dispatch.refresh(state, context, only_load_props)
+
+        elif state in context.partials or state.unloaded or eager_populators:
+            # state is having a partial set of its attributes
+            # refreshed.  Populate those attributes,
+            # and add to the "context.partials" collection.
+            if state in context.partials:
+                isnew = False
+                (d_, attrs) = context.partials[state]
+            else:
+                isnew = True
+                attrs = state.unloaded
+                context.partials[state] = (dict_, attrs)
+
+            if populate_instance:
+                for fn in populate_instance:
+                    ret = fn(mapper, context, row, state, 
+                        only_load_props=attrs, 
+                        instancekey=identitykey, isnew=isnew)
+                    if ret is not EXT_CONTINUE:
+                        break
+                else:
+                    populate_state(state, dict_, row, isnew, attrs)
+            else:
+                populate_state(state, dict_, row, isnew, attrs)
+
+            for key, pop in eager_populators:
+                if key not in state.unloaded:
+                    pop(state, dict_, row)
+
+            if isnew:
+                state.manager.dispatch.refresh(state, context, attrs)
+
+
+        if result is not None:
+            if append_result:
+                for fn in append_result:
+                    if fn(mapper, context, row, state, 
+                                result, instancekey=identitykey,
+                                isnew=isnew) is not EXT_CONTINUE:
+                        break
+                else:
+                    result.append(instance)
+            else:
+                result.append(instance)
+
+        return instance
+    return _instance
+
+def _populators(mapper, context, path, row, adapter,
+        new_populators, existing_populators, eager_populators):
+    """Produce a collection of attribute level row processor 
+    callables."""
+
+    delayed_populators = []
+    pops = (new_populators, existing_populators, delayed_populators, 
+                        eager_populators)
+    for prop in mapper._props.itervalues():
+        for i, pop in enumerate(prop.create_row_processor(
+                                    context, path,
+                                    mapper, row, adapter)):
+            if pop is not None:
+                pops[i].append((prop.key, pop))
+
+    if delayed_populators:
+        new_populators.extend(delayed_populators)
+
+def _configure_subclass_mapper(mapper, context, path, adapter):
+    """Produce a mapper level row processor callable factory for mappers
+    inheriting this one."""
+
+    def configure_subclass_mapper(discriminator):
+        try:
+            sub_mapper = mapper.polymorphic_map[discriminator]
+        except KeyError:
+            raise AssertionError(
+                    "No such polymorphic_identity %r is defined" %
+                    discriminator)
+        if sub_mapper is mapper:
+            return None
+
+        # replace the tip of the path info with the subclass mapper 
+        # being used, that way accurate "load_path" info is available 
+        # for options invoked during deferred loads, e.g.
+        # query(Person).options(defer(Engineer.machines, Machine.name)).
+        # for AliasedClass paths, disregard this step (new in 0.8).
+        return instance_processor(
+                            sub_mapper,
+                            context, 
+                            path.parent[sub_mapper] 
+                                if not path.is_aliased_class 
+                                else path, 
+                            adapter,
+                            polymorphic_from=mapper)
+    return configure_subclass_mapper
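
With the loader mechanics extracted into orm/loading.py, mapper.py (next diff) stops calling the private Query._load_on_ident() method and instead passes its Query to the module-level loading.load_on_ident() function. The shape of that shift, reduced to a self-contained toy (names and return values are simplified stand-ins, not the real signatures):

    # before (roughly): the behavior lived on Query itself
    class OldQuery(object):
        def _load_on_ident(self, key, refresh_state=None, only_load_props=None):
            return ("loaded", key)

    # after: orm/loading.py owns the behavior and takes the query as an argument
    def load_on_ident(query, key, refresh_state=None, only_load_props=None):
        return ("loaded", key)

    class NewQuery(object):
        pass

    assert load_on_ident(NewQuery(), ("User", (5,))) == ("loaded", ("User", (5,)))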
lib/sqlalchemy/orm/mapper.py
index 044c52d5c72da47ff5871fc181eba0911a52958b..761e1af569c83c1c33dde2ca62e65e222a6ce468 100644 (file)
@@ -13,24 +13,23 @@ This is a semi-private module; the main configurational API of the ORM is
 available in :class:`~sqlalchemy.orm.`.
 
 """
-
+from __future__ import absolute_import
 import types
 import weakref
 import operator
 from itertools import chain, groupby
-deque = __import__('collections').deque
+from collections import deque
 
-from sqlalchemy import sql, util, log, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
-from sqlalchemy.orm import instrumentation, attributes, sync, \
-                        exc as orm_exc, unitofwork, events
-from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
+from .. import sql, util, log, exc as sa_exc, event, schema
+from ..sql import expression, visitors, operators, util as sql_util
+from . import instrumentation, attributes, sync, \
+                        exc as orm_exc, unitofwork, events, loading
+from .interfaces import MapperProperty, EXT_CONTINUE, \
                                 PropComparator
 
-from sqlalchemy.orm.util import _INSTRUMENTOR, _class_to_mapper, \
+from .util import _INSTRUMENTOR, _class_to_mapper, \
      _state_mapper, class_mapper, instance_str, state_str,\
-     PathRegistry
-
+     PathRegistry, _none_set
 import sys
 sessionlib = util.importlater("sqlalchemy.orm", "session")
 properties = util.importlater("sqlalchemy.orm", "properties")
@@ -46,7 +45,6 @@ __all__ = (
 _mapper_registry = weakref.WeakKeyDictionary()
 _new_mappers = False
 _already_compiling = False
-_none_set = frozenset([None])
 
 _memoized_configured_property = util.group_expirable_memoized_property()
 
@@ -466,7 +464,7 @@ class Mapper(object):
                         # immediate table of the inherited mapper, not its
                         # full table which could pull in other stuff we dont
                         # want (allows test/inheritance.InheritTest4 to pass)
-                        self.inherit_condition = sqlutil.join_condition(
+                        self.inherit_condition = sql_util.join_condition(
                                                     self.inherits.local_table,
                                                     self.local_table)
                     self.mapped_table = sql.join(
@@ -475,7 +473,7 @@ class Mapper(object):
                                                 self.inherit_condition)
 
                     fks = util.to_set(self.inherit_foreign_keys)
-                    self._inherits_equated_pairs = sqlutil.criterion_as_pairs(
+                    self._inherits_equated_pairs = sql_util.criterion_as_pairs(
                                                 self.mapped_table.onclause,
                                                 consider_as_foreign_keys=fks)
             else:
@@ -717,7 +715,7 @@ class Mapper(object):
 
     def _configure_pks(self):
 
-        self.tables = sqlutil.find_tables(self.mapped_table)
+        self.tables = sql_util.find_tables(self.mapped_table)
 
         self._pks_by_table = {}
         self._cols_by_table = {}
@@ -782,12 +780,12 @@ class Mapper(object):
             # determine primary key from argument or mapped_table pks - 
             # reduce to the minimal set of columns
             if self._primary_key_argument:
-                primary_key = sqlutil.reduce_columns(
+                primary_key = sql_util.reduce_columns(
                     [self.mapped_table.corresponding_column(c) for c in
                     self._primary_key_argument],
                     ignore_nonexistent_tables=True)
             else:
-                primary_key = sqlutil.reduce_columns(
+                primary_key = sql_util.reduce_columns(
                                 self._pks_by_table[self.mapped_table],
                                 ignore_nonexistent_tables=True)
 
@@ -1289,7 +1287,7 @@ class Mapper(object):
             mappers = []
 
         if selectable is not None:
-            tables = set(sqlutil.find_tables(selectable,
+            tables = set(sql_util.find_tables(selectable,
                             include_aliases=True))
             mappers = [m for m in mappers if m.local_table in tables]
 
@@ -1698,10 +1696,12 @@ class Mapper(object):
         if self.inherits and not self.concrete:
             statement = self._optimized_get_statement(state, attribute_names)
             if statement is not None:
-                result = session.query(self).from_statement(statement).\
-                                        _load_on_ident(None, 
-                                            only_load_props=attribute_names, 
-                                            refresh_state=state)
+                result = loading.load_on_ident(
+                            session.query(self).from_statement(statement),
+                                None, 
+                                only_load_props=attribute_names, 
+                                refresh_state=state
+                            )
 
         if result is False:
             if has_key:
@@ -1727,10 +1727,11 @@ class Mapper(object):
                             % state_str(state))
                 return
 
-            result = session.query(self)._load_on_ident(
-                                                identity_key, 
-                                                refresh_state=state, 
-                                                only_load_props=attribute_names)
+            result = loading.load_on_ident(
+                        session.query(self),
+                                    identity_key, 
+                                    refresh_state=state, 
+                                    only_load_props=attribute_names)
 
         # if instance is pending, a refresh operation 
         # may not complete (even if PK attributes are assigned)
@@ -1750,7 +1751,7 @@ class Mapper(object):
         props = self._props
 
         tables = set(chain(
-                        *[sqlutil.find_tables(c, check_columns=True) 
+                        *[sql_util.find_tables(c, check_columns=True) 
                         for key in attribute_names
                         for c in props[key].columns]
                     ))
@@ -1866,7 +1867,7 @@ class Mapper(object):
             for t in mapper.tables:
                 table_to_mapper[t] = mapper
 
-        sorted_ = sqlutil.sort_tables(table_to_mapper.iterkeys())
+        sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys())
         ret = util.OrderedDict()
         for t in sorted_:
             ret[t] = table_to_mapper[t]
@@ -1924,320 +1925,6 @@ class Mapper(object):
 
         return result
 
-
-    def _instance_processor(self, context, path, adapter, 
-                                polymorphic_from=None, 
-                                only_load_props=None, refresh_state=None,
-                                polymorphic_discriminator=None):
-
-        """Produce a mapper level row processor callable 
-           which processes rows into mapped instances."""
-
-        # note that this method, most of which exists in a closure
-        # called _instance(), resists being broken out, as 
-        # attempts to do so tend to add significant function
-        # call overhead.  _instance() is the most
-        # performance-critical section in the whole ORM.
-
-        pk_cols = self.primary_key
-
-        if polymorphic_from or refresh_state:
-            polymorphic_on = None
-        else:
-            if polymorphic_discriminator is not None:
-                polymorphic_on = polymorphic_discriminator
-            else:
-                polymorphic_on = self.polymorphic_on
-            polymorphic_instances = util.PopulateDict(
-                                        self._configure_subclass_mapper(
-                                                context, path, adapter)
-                                        )
-
-        version_id_col = self.version_id_col
-
-        if adapter:
-            pk_cols = [adapter.columns[c] for c in pk_cols]
-            if polymorphic_on is not None:
-                polymorphic_on = adapter.columns[polymorphic_on]
-            if version_id_col is not None:
-                version_id_col = adapter.columns[version_id_col]
-
-        identity_class = self._identity_class
-
-        new_populators = []
-        existing_populators = []
-        eager_populators = []
-        load_path = context.query._current_path + path \
-                    if context.query._current_path.path \
-                    else path
-
-        def populate_state(state, dict_, row, isnew, only_load_props):
-            if isnew:
-                if context.propagate_options:
-                    state.load_options = context.propagate_options
-                if state.load_options:
-                    state.load_path = load_path
-
-            if not new_populators:
-                self._populators(context, path, row, adapter,
-                                new_populators,
-                                existing_populators,
-                                eager_populators
-                )
-
-            if isnew:
-                populators = new_populators
-            else:
-                populators = existing_populators
-
-            if only_load_props is None:
-                for key, populator in populators:
-                    populator(state, dict_, row)
-            elif only_load_props:
-                for key, populator in populators:
-                    if key in only_load_props:
-                        populator(state, dict_, row)
-
-        session_identity_map = context.session.identity_map
-
-        listeners = self.dispatch
-
-        translate_row = listeners.translate_row or None
-        create_instance = listeners.create_instance or None
-        populate_instance = listeners.populate_instance or None
-        append_result = listeners.append_result or None
-        populate_existing = context.populate_existing or self.always_refresh
-        invoke_all_eagers = context.invoke_all_eagers
-
-        if self.allow_partial_pks:
-            is_not_primary_key = _none_set.issuperset
-        else:
-            is_not_primary_key = _none_set.issubset
-
-        def _instance(row, result):
-            if not new_populators and invoke_all_eagers:
-                self._populators(context, path, row, adapter,
-                                new_populators,
-                                existing_populators,
-                                eager_populators
-                )
-
-            if translate_row:
-                for fn in translate_row:
-                    ret = fn(self, context, row)
-                    if ret is not EXT_CONTINUE:
-                        row = ret
-                        break
-
-            if polymorphic_on is not None:
-                discriminator = row[polymorphic_on]
-                if discriminator is not None:
-                    _instance = polymorphic_instances[discriminator]
-                    if _instance:
-                        return _instance(row, result)
-
-            # determine identity key
-            if refresh_state:
-                identitykey = refresh_state.key
-                if identitykey is None:
-                    # super-rare condition; a refresh is being called
-                    # on a non-instance-key instance; this is meant to only
-                    # occur within a flush()
-                    identitykey = self._identity_key_from_state(refresh_state)
-            else:
-                identitykey = (
-                                identity_class, 
-                                tuple([row[column] for column in pk_cols])
-                            )
-
-            instance = session_identity_map.get(identitykey)
-            if instance is not None:
-                state = attributes.instance_state(instance)
-                dict_ = attributes.instance_dict(instance)
-
-                isnew = state.runid != context.runid
-                currentload = not isnew
-                loaded_instance = False
-
-                if not currentload and \
-                        version_id_col is not None and \
-                        context.version_check and \
-                        self._get_state_attr_by_column(
-                                state, 
-                                dict_, 
-                                self.version_id_col) != \
-                                        row[version_id_col]:
-
-                    raise orm_exc.StaleDataError(
-                            "Instance '%s' has version id '%s' which "
-                            "does not match database-loaded version id '%s'." 
-                            % (state_str(state), 
-                                self._get_state_attr_by_column(
-                                            state, dict_,
-                                            self.version_id_col),
-                                    row[version_id_col]))
-            elif refresh_state:
-                # out of band refresh_state detected (i.e. its not in the
-                # session.identity_map) honor it anyway.  this can happen 
-                # if a _get() occurs within save_obj(), such as
-                # when eager_defaults is True.
-                state = refresh_state
-                instance = state.obj()
-                dict_ = attributes.instance_dict(instance)
-                isnew = state.runid != context.runid
-                currentload = True
-                loaded_instance = False
-            else:
-                # check for non-NULL values in the primary key columns,
-                # else no entity is returned for the row
-                if is_not_primary_key(identitykey[1]):
-                    return None
-
-                isnew = True
-                currentload = True
-                loaded_instance = True
-
-                if create_instance:
-                    for fn in create_instance:
-                        instance = fn(self, context, 
-                                            row, self.class_)
-                        if instance is not EXT_CONTINUE:
-                            manager = attributes.manager_of_class(
-                                                    instance.__class__)
-                            # TODO: if manager is None, raise a friendly error
-                            # about returning instances of unmapped types
-                            manager.setup_instance(instance)
-                            break
-                    else:
-                        instance = self.class_manager.new_instance()
-                else:
-                    instance = self.class_manager.new_instance()
-
-                dict_ = attributes.instance_dict(instance)
-                state = attributes.instance_state(instance)
-                state.key = identitykey
-
-                # attach instance to session.
-                state.session_id = context.session.hash_key
-                session_identity_map.add(state)
-
-            if currentload or populate_existing:
-                # state is being fully loaded, so populate.
-                # add to the "context.progress" collection.
-                if isnew:
-                    state.runid = context.runid
-                    context.progress[state] = dict_
-
-                if populate_instance:
-                    for fn in populate_instance:
-                        ret = fn(self, context, row, state, 
-                            only_load_props=only_load_props, 
-                            instancekey=identitykey, isnew=isnew)
-                        if ret is not EXT_CONTINUE:
-                            break
-                    else:
-                        populate_state(state, dict_, row, isnew, only_load_props)
-                else:
-                    populate_state(state, dict_, row, isnew, only_load_props)
-
-                if loaded_instance:
-                    state.manager.dispatch.load(state, context)
-                elif isnew:
-                    state.manager.dispatch.refresh(state, context, only_load_props)
-
-            elif state in context.partials or state.unloaded or eager_populators:
-                # state is having a partial set of its attributes
-                # refreshed.  Populate those attributes,
-                # and add to the "context.partials" collection.
-                if state in context.partials:
-                    isnew = False
-                    (d_, attrs) = context.partials[state]
-                else:
-                    isnew = True
-                    attrs = state.unloaded
-                    context.partials[state] = (dict_, attrs)
-
-                if populate_instance:
-                    for fn in populate_instance:
-                        ret = fn(self, context, row, state, 
-                            only_load_props=attrs, 
-                            instancekey=identitykey, isnew=isnew)
-                        if ret is not EXT_CONTINUE:
-                            break
-                    else:
-                        populate_state(state, dict_, row, isnew, attrs)
-                else:
-                    populate_state(state, dict_, row, isnew, attrs)
-
-                for key, pop in eager_populators:
-                    if key not in state.unloaded:
-                        pop(state, dict_, row)
-
-                if isnew:
-                    state.manager.dispatch.refresh(state, context, attrs)
-
-
-            if result is not None:
-                if append_result:
-                    for fn in append_result:
-                        if fn(self, context, row, state, 
-                                    result, instancekey=identitykey,
-                                    isnew=isnew) is not EXT_CONTINUE:
-                            break
-                    else:
-                        result.append(instance)
-                else:
-                    result.append(instance)
-
-            return instance
-        return _instance
-
-    def _populators(self, context, path, row, adapter,
-            new_populators, existing_populators, eager_populators):
-        """Produce a collection of attribute level row processor 
-        callables."""
-
-        delayed_populators = []
-        pops = (new_populators, existing_populators, delayed_populators, 
-                            eager_populators)
-        for prop in self._props.itervalues():
-            for i, pop in enumerate(prop.create_row_processor(
-                                        context, path,
-                                        self, row, adapter)):
-                if pop is not None:
-                    pops[i].append((prop.key, pop))
-
-        if delayed_populators:
-            new_populators.extend(delayed_populators)
-
-    def _configure_subclass_mapper(self, context, path, adapter):
-        """Produce a mapper level row processor callable factory for mappers
-        inheriting this one."""
-
-        def configure_subclass_mapper(discriminator):
-            try:
-                mapper = self.polymorphic_map[discriminator]
-            except KeyError:
-                raise AssertionError(
-                        "No such polymorphic_identity %r is defined" %
-                        discriminator)
-            if mapper is self:
-                return None
-
-            # replace the tip of the path info with the subclass mapper 
-            # being used, that way accurate "load_path" info is available 
-            # for options invoked during deferred loads, e.g.
-            # query(Person).options(defer(Engineer.machines, Machine.name)).
-            # for AliasedClass paths, disregard this step (new in 0.8).
-            return mapper._instance_processor(
-                                context, 
-                                path.parent[mapper] 
-                                    if not path.is_aliased_class 
-                                    else path, 
-                                adapter,
-                                polymorphic_from=self)
-        return configure_subclass_mapper
-
 log.class_logger(Mapper)
 
 def configure_mappers():
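
The long block removed above is the per-row loading machinery: identity-key construction, identity-map lookup, version checking, populator dispatch and polymorphic switching. Later hunks in this diff reach the same machinery through module-level functions on ``loading`` (``loading.instance_processor()``, ``loading.get_from_identity()``, ``loading.load_on_ident()``). A minimal sketch grounded only in those call sites; ``session``, ``User`` and ``ident_key`` are placeholder names, not part of the commit:

    from sqlalchemy.orm import attributes, loading

    def fetch_by_identity(session, User, ident_key):
        # consult the identity map first, without emitting SQL
        obj = loading.get_from_identity(
                    session, ident_key, attributes.PASSIVE_OFF)
        if obj is not None:
            return obj
        # fall back to a SELECT keyed on the identity, the same path
        # Query.get() and Session.refresh() take further down in this diff
        return loading.load_on_ident(session.query(User), ident_key)
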
index 35a12c2754d5740a341ca61cd437f430e8facedd..5c33fd2d83ef7609ead1d7380ab0e6cecd7f2c6d 100644 (file)
@@ -15,14 +15,10 @@ in unitofwork.py.
 
 import operator
 from itertools import groupby
-
-from sqlalchemy import sql, util, exc as sa_exc, schema
-from sqlalchemy.orm import attributes, sync, \
-                        exc as orm_exc,\
-                        evaluator
-
-from sqlalchemy.orm.util import _state_mapper, state_str, _attr_as_key
-from sqlalchemy.sql import expression
+from .. import sql, util, exc as sa_exc, schema
+from . import attributes, sync, exc as orm_exc, evaluator
+from .util import _state_mapper, state_str, _attr_as_key
+from ..sql import expression
 
 def save_obj(base_mapper, states, uowtransaction, single=False):
     """Issue ``INSERT`` and/or ``UPDATE`` statements for a list 
@@ -372,7 +368,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
                     params[col._label] = value
         if hasdata:
             if hasnull:
-                raise sa_exc.FlushError(
+                raise orm_exc.FlushError(
                             "Can't update table "
                             "using NULL for primary "
                             "key value")
@@ -436,7 +432,7 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
                     mapper._get_state_attr_by_column(
                                     state, state_dict, col)
             if value is None:
-                raise sa_exc.FlushError(
+                raise orm_exc.FlushError(
                             "Can't delete from table "
                             "using NULL for primary "
                             "key value")
@@ -695,7 +691,9 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
         # refresh whatever has been expired.
         if base_mapper.eager_defaults and state.unloaded:
             state.key = base_mapper._identity_key_from_state(state)
-            uowtransaction.session.query(base_mapper)._load_on_ident(
+            from . import loading
+            loading.load_on_ident(
+                uowtransaction.session.query(base_mapper),
                 state.key, refresh_state=state,
                 only_load_props=state.unloaded)
 
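
Besides the import rewrite, the hunks above re-point the NULL-primary-key failures from ``sa_exc.FlushError`` to ``orm_exc.FlushError``; the test updates at the bottom of this diff assert the new location. A small sketch of catching it there (``session`` is a placeholder for any Session whose flush hits that condition):

    from sqlalchemy.orm import exc as orm_exc

    def commit_or_rollback(session):
        # an UPDATE or DELETE that would use NULL for a primary key value
        # now surfaces as sqlalchemy.orm.exc.FlushError
        try:
            session.commit()
        except orm_exc.FlushError:
            session.rollback()
            raise
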
index 5634a9c5fe8b8c5a86ad1117774c102a2a8595c1..b17a589fb081e8d1359392dbbc52afe1d38ea1bf 100644 (file)
@@ -11,26 +11,31 @@ mapped attributes.
 
 """
 
-from sqlalchemy import sql, util, log, exc as sa_exc
-from sqlalchemy.sql.util import ClauseAdapter, criterion_as_pairs, \
-    join_condition, _shallow_annotate
-from sqlalchemy.sql import operators, expression, visitors
-from sqlalchemy.orm import attributes, dependency, mapper, \
-    object_mapper, strategies, configure_mappers, relationships
-from sqlalchemy.orm.util import CascadeOptions, _class_to_mapper, \
-    _orm_annotate, _orm_deannotate, _orm_full_deannotate,\
-    _entity_info
-
-from sqlalchemy.orm.interfaces import MANYTOMANY, MANYTOONE, \
-    MapperProperty, ONETOMANY, PropComparator, StrategizedProperty
+from .. import sql, util, log, exc as sa_exc
+from ..sql import operators, expression
+from . import (
+    attributes, dependency, mapper, 
+    strategies, configure_mappers, relationships
+    )
+from .util import (
+    CascadeOptions, \
+        _orm_annotate, _orm_deannotate, _orm_full_deannotate,
+        _entity_info
+    )
+
+from .interfaces import (
+    MANYTOMANY, MANYTOONE, MapperProperty, ONETOMANY, 
+    PropComparator, StrategizedProperty
+    )
 mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
 NoneType = type(None)
 
+from descriptor_props import CompositeProperty, SynonymProperty, \
+            ComparableProperty,ConcreteInheritedProperty
+
 __all__ = ('ColumnProperty', 'CompositeProperty', 'SynonymProperty',
            'ComparableProperty', 'RelationshipProperty', 'RelationProperty')
 
-from descriptor_props import CompositeProperty, SynonymProperty, \
-            ComparableProperty,ConcreteInheritedProperty
 
 class ColumnProperty(StrategizedProperty):
     """Describes an object attribute that corresponds to a table column.
index 156dd6128f12dbf8637c60c8c6eb7e6ad155d9b0..f0ef520f40643e0f0601c676d50581e5ec301455 100644 (file)
@@ -19,24 +19,22 @@ database to return iterable result sets.
 """
 
 from itertools import chain
-from operator import itemgetter
-
-from sqlalchemy import sql, util, log, schema
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.orm import persistence
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.sql import expression, visitors, operators
-from sqlalchemy.orm import (
-    attributes, interfaces, mapper, object_mapper, evaluator,
+
+from . import (
+    attributes, interfaces, object_mapper, persistence, 
+    exc as orm_exc, loading
     )
-from sqlalchemy.orm.util import (
+from .util import (
     AliasedClass, ORMAdapter, _entity_descriptor, _entity_info,
     _extended_entity_info, PathRegistry,
     _is_aliased_class, _is_mapped_class, _orm_columns, _orm_selectable,
-    join as orm_join,with_parent, _attr_as_key, aliased
+    join as orm_join,with_parent, aliased
+    )
+from .. import sql, util, log, exc as sa_exc
+from ..sql import (
+        util as sql_util,
+        expression, visitors
     )
-
 
 __all__ = ['Query', 'QueryContext', 'aliased']
 
@@ -66,8 +64,9 @@ class Query(object):
     criteria and options associated with it.
 
     :class:`.Query` objects are normally initially generated using the 
-    :meth:`~.Session.query` method of :class:`.Session`.  For a full walkthrough 
-    of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`.
+    :meth:`~.Session.query` method of :class:`.Session`.  For a full 
+    walkthrough of :class:`.Query` usage, see the 
+    :ref:`ormtutorial_toplevel`.
 
     """
 
@@ -133,15 +132,19 @@ class Query(object):
                         ext_info.mapper.with_polymorphic:
                         if ext_info.mapper.mapped_table not in \
                                             self._polymorphic_adapters:
-                            self._mapper_loads_polymorphically_with(ext_info.mapper, 
+                            self._mapper_loads_polymorphically_with(
+                                ext_info.mapper, 
                                 sql_util.ColumnAdapter(
-                                            ext_info.selectable, 
-                                            ext_info.mapper._equivalent_columns))
+                                        ext_info.selectable, 
+                                        ext_info.mapper._equivalent_columns
+                                )
+                            )
                         aliased_adapter = None
                     elif ext_info.is_aliased_class:
                         aliased_adapter = sql_util.ColumnAdapter(
-                                            ext_info.selectable, 
-                                            ext_info.mapper._equivalent_columns)
+                                    ext_info.selectable, 
+                                    ext_info.mapper._equivalent_columns
+                                    )
                     else:
                         aliased_adapter = None
 
@@ -206,8 +209,8 @@ class Query(object):
         self._orm_only_adapt = False
 
     def _adapt_clause(self, clause, as_filter, orm_only):
-        """Adapt incoming clauses to transformations which have been applied 
-        within this query."""
+        """Adapt incoming clauses to transformations which 
+        have been applied within this query."""
 
         adapters = []
 
@@ -295,7 +298,8 @@ class Query(object):
         if len(self._entities) > 1:
             raise sa_exc.InvalidRequestError(
                     rationale or 
-                    "This operation requires a Query against a single mapper."
+                    "This operation requires a Query "
+                    "against a single mapper."
                 )
         return self._mapper_zero()
 
@@ -315,7 +319,8 @@ class Query(object):
         if len(self._entities) > 1:
             raise sa_exc.InvalidRequestError(
                     rationale or 
-                    "This operation requires a Query against a single mapper."
+                    "This operation requires a Query "
+                    "against a single mapper."
                 )
         return self._entity_zero()
 
@@ -370,7 +375,8 @@ class Query(object):
                 "Query.%s() being called on a Query which already has LIMIT "
                 "or OFFSET applied. To modify the row-limited results of a "
                 " Query, call from_self() first.  "
-                "Otherwise, call %s() before limit() or offset() are applied."
+                "Otherwise, call %s() before limit() or offset() "
+                "are applied."
                 % (meth, meth)
             )
 
@@ -429,8 +435,8 @@ class Query(object):
         return stmt._annotate({'no_replacement_traverse': True})
 
     def subquery(self, name=None):
-        """return the full SELECT statement represented by this :class:`.Query`, 
-        embedded within an :class:`.Alias`.
+        """return the full SELECT statement represented by 
+        this :class:`.Query`, embedded within an :class:`.Alias`.
 
         Eager JOIN generation within the query is disabled.
 
@@ -449,8 +455,8 @@ class Query(object):
         return self.enable_eagerloads(False).statement.alias(name=name)
 
     def cte(self, name=None, recursive=False):
-        """Return the full SELECT statement represented by this :class:`.Query`
-        represented as a common table expression (CTE).
+        """Return the full SELECT statement represented by this 
+        :class:`.Query` represented as a common table expression (CTE).
 
         .. versionadded:: 0.7.6
 
@@ -459,12 +465,14 @@ class Query(object):
         further details.
 
         Here is the `Postgresql WITH 
-        RECURSIVE example <http://www.postgresql.org/docs/8.4/static/queries-with.html>`_.
-        Note that, in this example, the ``included_parts`` cte and the ``incl_alias`` alias
-        of it are Core selectables, which
-        means the columns are accessed via the ``.c.`` attribute.  The ``parts_alias``
-        object is an :func:`.orm.aliased` instance of the ``Part`` entity, so column-mapped
-        attributes are available directly::
+        RECURSIVE example 
+        <http://www.postgresql.org/docs/8.4/static/queries-with.html>`_.
+        Note that, in this example, the ``included_parts`` cte and the 
+        ``incl_alias`` alias of it are Core selectables, which
+        means the columns are accessed via the ``.c.`` attribute.  The 
+        ``parts_alias`` object is an :func:`.orm.aliased` instance of the 
+        ``Part`` entity, so column-mapped attributes are available 
+        directly::
 
             from sqlalchemy.orm import aliased
 
@@ -475,11 +483,11 @@ class Query(object):
                 quantity = Column(Integer)
 
             included_parts = session.query(
-                                Part.sub_part, 
-                                Part.part, 
-                                Part.quantity).\\
-                                    filter(Part.part=="our part").\\
-                                    cte(name="included_parts", recursive=True)
+                            Part.sub_part, 
+                            Part.part, 
+                            Part.quantity).\\
+                                filter(Part.part=="our part").\\
+                                cte(name="included_parts", recursive=True)
 
             incl_alias = aliased(included_parts, name="pr")
             parts_alias = aliased(Part, name="p")
@@ -493,7 +501,8 @@ class Query(object):
 
             q = session.query(
                     included_parts.c.sub_part,
-                    func.sum(included_parts.c.quantity).label('total_quantity')
+                    func.sum(included_parts.c.quantity).
+                        label('total_quantity')
                 ).\\
                 group_by(included_parts.c.sub_part)
 
@@ -502,10 +511,12 @@ class Query(object):
         :meth:`._SelectBase.cte`
 
         """
-        return self.enable_eagerloads(False).statement.cte(name=name, recursive=recursive)
+        return self.enable_eagerloads(False).\
+            statement.cte(name=name, recursive=recursive)
 
     def label(self, name):
-        """Return the full SELECT statement represented by this :class:`.Query`, converted 
+        """Return the full SELECT statement represented by this 
+        :class:`.Query`, converted 
         to a scalar subquery with a label of the given name.
 
         Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
@@ -756,7 +767,7 @@ class Query(object):
                 not mapper.always_refresh and \
                 self._lockmode is None:
 
-            instance = self._get_from_identity(self.session, key, attributes.PASSIVE_OFF)
+            instance = loading.get_from_identity(self.session, key, attributes.PASSIVE_OFF)
             if instance is not None:
                 # reject calls for id in identity map but class
                 # mismatch.
@@ -764,7 +775,7 @@ class Query(object):
                     return None
                 return instance
 
-        return self._load_on_ident(key)
+        return loading.load_on_ident(self, key)
 
     @_generative()
     def correlate(self, *args):
@@ -2289,77 +2300,12 @@ class Query(object):
             for u in session.query(User).instances(result):
                 print u
         """
-        session = self.session
-
         context = __context
         if context is None:
             context = QueryContext(self)
 
-        context.runid = _new_runid()
-
-        filter_fns = [ent.filter_fn
-                    for ent in self._entities]
-        filtered = id in filter_fns
-
-        single_entity = filtered and len(self._entities) == 1
-
-        if filtered:
-            if single_entity:
-                filter_fn = id
-            else:
-                def filter_fn(row):
-                    return tuple(fn(x) for x, fn in zip(row, filter_fns))
-
-        custom_rows = single_entity and \
-                        self._entities[0].mapper.dispatch.append_result
-
-        (process, labels) = \
-                    zip(*[
-                        query_entity.row_processor(self, context, custom_rows)
-                        for query_entity in self._entities
-                    ])
-
-
-        while True:
-            context.progress = {}
-            context.partials = {}
-
-            if self._yield_per:
-                fetch = cursor.fetchmany(self._yield_per)
-                if not fetch:
-                    break
-            else:
-                fetch = cursor.fetchall()
-
-            if custom_rows:
-                rows = []
-                for row in fetch:
-                    process[0](row, rows)
-            elif single_entity:
-                rows = [process[0](row, None) for row in fetch]
-            else:
-                rows = [util.NamedTuple([proc(row, None) for proc in process],
-                                        labels) for row in fetch]
-
-            if filtered:
-                rows = util.unique_list(rows, filter_fn)
-
-            if context.refresh_state and self._only_load_props \
-                        and context.refresh_state in context.progress:
-                context.refresh_state.commit(
-                        context.refresh_state.dict, self._only_load_props)
-                context.progress.pop(context.refresh_state)
-
-            session._finalize_loaded(context.progress)
+        return loading.instances(self, cursor, context)
 
-            for ii, (dict_, attrs) in context.partials.iteritems():
-                ii.commit(dict_, attrs)
-
-            for row in rows:
-                yield row
-
-            if not self._yield_per:
-                break
 
     def merge_result(self, iterator, load=True):
         """Merge a result into this :class:`.Query` object's Session.
@@ -2384,125 +2330,7 @@ class Query(object):
 
         """
 
-        session = self.session
-        if load:
-            # flush current contents if we expect to load data
-            session._autoflush()
-
-        autoflush = session.autoflush
-        try:
-            session.autoflush = False
-            single_entity = len(self._entities) == 1
-            if single_entity:
-                if isinstance(self._entities[0], _MapperEntity):
-                    result = [session._merge(
-                            attributes.instance_state(instance), 
-                            attributes.instance_dict(instance), 
-                            load=load, _recursive={})
-                            for instance in iterator]
-                else:
-                    result = list(iterator)
-            else:
-                mapped_entities = [i for i, e in enumerate(self._entities) 
-                                        if isinstance(e, _MapperEntity)]
-                result = []
-                for row in iterator:
-                    newrow = list(row)
-                    for i in mapped_entities:
-                        newrow[i] = session._merge(
-                                attributes.instance_state(newrow[i]), 
-                                attributes.instance_dict(newrow[i]), 
-                                load=load, _recursive={})
-                    result.append(util.NamedTuple(newrow, row._labels))
-
-            return iter(result)
-        finally:
-            session.autoflush = autoflush
-
-    @classmethod
-    def _get_from_identity(cls, session, key, passive):
-        """Look up the given key in the given session's identity map, 
-        check the object for expired state if found.
-
-        """
-        instance = session.identity_map.get(key)
-        if instance is not None:
-
-            state = attributes.instance_state(instance)
-
-            # expired - ensure it still exists
-            if state.expired:
-                if not passive & attributes.SQL_OK:
-                    # TODO: no coverage here
-                    return attributes.PASSIVE_NO_RESULT
-                elif not passive & attributes.RELATED_OBJECT_OK:
-                    # this mode is used within a flush and the instance's
-                    # expired state will be checked soon enough, if necessary
-                    return instance
-                try:
-                    state(passive)
-                except orm_exc.ObjectDeletedError:
-                    session._remove_newly_deleted([state])
-                    return None
-            return instance
-        else:
-            return None
-
-    def _load_on_ident(self, key, refresh_state=None, lockmode=None,
-                                        only_load_props=None):
-        """Load the given identity key from the database."""
-
-        lockmode = lockmode or self._lockmode
-
-        if key is not None:
-            ident = key[1]
-        else:
-            ident = None
-
-        if refresh_state is None:
-            q = self._clone()
-            q._get_condition()
-        else:
-            q = self._clone()
-
-        if ident is not None:
-            mapper = self._mapper_zero()
-
-            (_get_clause, _get_params) = mapper._get_clause
-
-            # None present in ident - turn those comparisons
-            # into "IS NULL"
-            if None in ident:
-                nones = set([
-                            _get_params[col].key for col, value in
-                             zip(mapper.primary_key, ident) if value is None
-                            ])
-                _get_clause = sql_util.adapt_criterion_to_null(
-                                                _get_clause, nones)
-
-            _get_clause = q._adapt_clause(_get_clause, True, False)
-            q._criterion = _get_clause
-
-            params = dict([
-                (_get_params[primary_key].key, id_val)
-                for id_val, primary_key in zip(ident, mapper.primary_key)
-            ])
-
-            q._params = params
-
-        if lockmode is not None:
-            q._lockmode = lockmode
-        q._get_options(
-            populate_existing=bool(refresh_state),
-            version_check=(lockmode is not None),
-            only_load_props=only_load_props,
-            refresh_state=refresh_state)
-        q._order_by = None
-
-        try:
-            return q.one()
-        except orm_exc.NoResultFound:
-            return None
+        return loading.merge_result(self, iterator, load)
 
     @property
     def _select_args(self):
@@ -2700,8 +2528,6 @@ class Query(object):
             strategy = rec[0]
             strategy(*rec[1:])
 
-        eager_joins = context.eager_joins.values()
-
         if context.from_clause:
             # "load from explicit FROMs" mode, 
             # i.e. when select_from() or join() is used
@@ -2983,7 +2809,8 @@ class _MapperEntity(_QueryEntity):
                                         self.mapper._equivalent_columns)
 
         if self.primary_entity:
-            _instance = self.mapper._instance_processor(
+            _instance = loading.instance_processor(
+                                self.mapper,
                                 context, 
                                 self.path,
                                 adapter,
@@ -2993,7 +2820,8 @@ class _MapperEntity(_QueryEntity):
                                     self._polymorphic_discriminator
             )
         else:
-            _instance = self.mapper._instance_processor(
+            _instance = loading.instance_processor(
+                                self.mapper,
                                 context, 
                                 self.path,
                                 adapter,
@@ -3232,4 +3060,3 @@ class AliasOption(interfaces.MapperOption):
         query._from_obj_alias = sql_util.ColumnAdapter(alias)
 
 
-_new_runid = util.counter()
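
``Query.get()``, ``Query.instances()`` and ``Query.merge_result()`` keep their public signatures above; only their bodies now delegate to ``loading.get_from_identity()`` / ``loading.load_on_ident()``, ``loading.instances()`` and ``loading.merge_result()``. As a usage reminder for the method whose body is removed wholesale, a hedged sketch in which ``User`` and ``cached_users`` are placeholders:

    def merge_cached(session, User, cached_users):
        # reattach detached (e.g. cache-produced) User instances to this
        # Session; load=False skips reconciling them against the database
        return list(
            session.query(User).merge_result(cached_users, load=False)
        )
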
index dc5adb90de998616a7f890657df657c8b0614990..85fe2f3acc761fe57678dbc3b58c98dd4f0c72b7 100644 (file)
@@ -13,12 +13,14 @@ and `secondaryjoin` aspects of :func:`.relationship`.
 
 """
 
-from sqlalchemy import sql, util, log, exc as sa_exc, schema
-from sqlalchemy.sql.util import ClauseAdapter, criterion_as_pairs, \
-    join_condition, _shallow_annotate, visit_binary_product,\
+from .. import sql, util, exc as sa_exc, schema
+from ..sql.util import (
+    ClauseAdapter, 
+    join_condition, _shallow_annotate, visit_binary_product,
     _deep_deannotate, find_tables
-from sqlalchemy.sql import operators, expression, visitors
-from sqlalchemy.orm.interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
+    )
+from ..sql import operators, expression, visitors
+from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
 
 def remote(expr):
     """Annotate a portion of a primaryjoin expression 
@@ -220,7 +222,7 @@ class JoinCondition(object):
                                 consider_as_foreign_keys=\
                                     self.consider_as_foreign_keys or None
                                 )
-        except sa_exc.NoForeignKeysError, nfke:
+        except sa_exc.NoForeignKeysError:
             if self.secondary is not None:
                 raise sa_exc.NoForeignKeysError("Could not determine join "
                         "condition between parent/child tables on "
@@ -240,7 +242,7 @@ class JoinCondition(object):
                         "with a ForeignKey or ForeignKeyConstraint, or "
                         "specify a 'primaryjoin' expression."
                         % self.prop)
-        except sa_exc.AmbiguousForeignKeysError, afke:
+        except sa_exc.AmbiguousForeignKeysError:
             if self.secondary is not None:
                 raise sa_exc.AmbiguousForeignKeysError(
                         "Could not determine join "
@@ -433,8 +435,6 @@ class JoinCondition(object):
         if self._has_remote_annotations:
             return
 
-        parentcols = util.column_set(self.parent_selectable.c)
-
         if self.secondary is not None:
             self._annotate_remote_secondary()
         elif self._local_remote_pairs or self._remote_side:
@@ -575,8 +575,6 @@ class JoinCondition(object):
         if self._has_annotation(self.primaryjoin, "local"):
             return
 
-        parentcols = util.column_set(self.parent_selectable.c)
-
         if self._local_remote_pairs:
             local_side = util.column_set([l for (l, r) 
                                 in self._local_remote_pairs])
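
Two hunks above also drop the old comma-style exception binding (``except sa_exc.NoForeignKeysError, nfke:``): the bound name was unused, and that syntax is rejected on Python 3. Purely as an illustrative sketch of the modern spelling when the value is actually needed (``determine_join`` is a made-up stand-in, not an API of this module):

    from sqlalchemy.exc import NoForeignKeysError

    def determine_join():
        # hypothetical stand-in that fails the way JoinCondition can
        raise NoForeignKeysError("no foreign keys between parent and child")

    try:
        determine_join()
    except NoForeignKeysError as nfke:   # binding form accepted on py2.6+ and py3
        raise NoForeignKeysError("could not determine join: %s" % nfke)
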
index 3c1cd7f26d8a1fdb8a05dfbd8ae843996442fb19..940ae1db9ffc1ca140dece4cfec995f32748584c 100644 (file)
@@ -4,11 +4,10 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.util import ScopedRegistry, ThreadLocalRegistry, warn
-from sqlalchemy.orm import class_mapper
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.orm.session import Session
+from .. import exc as sa_exc
+from ..util import ScopedRegistry, ThreadLocalRegistry, warn
+from . import class_mapper, exc as orm_exc
+from .session import Session
 
 
 __all__ = ['ScopedSession']
index 13d407aae78aa4c3f1343272c5f9aab9766b20cc..2076f23d52fc22661671d9e27cc5cc9aef385fd9 100644 (file)
@@ -7,22 +7,20 @@
 """Provides the Session class and related utilities."""
 
 import weakref
-from itertools import chain
-from sqlalchemy import util, sql, engine, log, exc as sa_exc
-from sqlalchemy.sql import util as sql_util, expression
-from sqlalchemy.orm import (
-    SessionExtension, attributes, exc, query, unitofwork, util as mapperutil, state
+from .. import util, sql, engine, exc as sa_exc, event
+from ..sql import util as sql_util, expression
+from . import (
+    SessionExtension, attributes, exc, query, util as orm_util,
+    loading, identity
     )
-from sqlalchemy.orm.util import object_mapper as _object_mapper
-from sqlalchemy.orm.util import class_mapper as _class_mapper
-from sqlalchemy.orm.util import (
-    _class_to_mapper, _state_mapper, object_state
+from .util import (
+    object_mapper, class_mapper,
+    _class_to_mapper, _state_mapper, object_state,
+    _none_set
     )
-from sqlalchemy.orm.mapper import Mapper, _none_set
-from sqlalchemy.orm.unitofwork import UOWTransaction
-from sqlalchemy.orm import identity
-from sqlalchemy import event
-from sqlalchemy.orm.events import SessionEvents
+from .unitofwork import UOWTransaction
+from .mapper import Mapper
+from .events import SessionEvents
 
 import sys
 
@@ -190,8 +188,8 @@ class SessionTransaction(object):
 
     def connection(self, bindkey, **kwargs):
         self._assert_is_active()
-        engine = self.session.get_bind(bindkey, **kwargs)
-        return self._connection_for_bind(engine)
+        bind = self.session.get_bind(bindkey, **kwargs)
+        return self._connection_for_bind(bind)
 
     def _begin(self, nested=False):
         self._assert_is_active()
@@ -862,7 +860,7 @@ class Session(object):
 
         """
         if isinstance(mapper, type):
-            mapper = _class_mapper(mapper)
+            mapper = class_mapper(mapper)
 
         self.__binds[mapper.base_mapper] = bind
         for t in mapper._all_tables:
@@ -1053,13 +1051,14 @@ class Session(object):
 
         self._expire_state(state, attribute_names)
 
-        if self.query(_object_mapper(instance))._load_on_ident(
+        if loading.load_on_ident(
+                self.query(object_mapper(instance)),
                 state.key, refresh_state=state,
                 lockmode=lockmode,
                 only_load_props=attribute_names) is None:
             raise sa_exc.InvalidRequestError(
                 "Could not refresh instance '%s'" %
-                mapperutil.instance_str(instance))
+                orm_util.instance_str(instance))
 
     def expire_all(self):
         """Expires all persistent instances within this Session.
@@ -1170,7 +1169,7 @@ class Session(object):
         if state.session_id is not self.hash_key:
             raise sa_exc.InvalidRequestError(
                 "Instance %s is not present in this Session" %
-                mapperutil.state_str(state))
+                orm_util.state_str(state))
 
         cascaded = list(state.manager.mapper.cascade_iterator(
                                     'expunge', state))
@@ -1209,7 +1208,7 @@ class Session(object):
                         "the mapped Column object is configured to expect these "
                         "generated values.  Ensure also that this flush() is "
                         "not occurring at an inappropriate time, such as within "
-                        "a load() event." % mapperutil.state_str(state)
+                        "a load() event." % orm_util.state_str(state)
                     )
 
                 if state.key is None:
@@ -1293,7 +1292,7 @@ class Session(object):
         if state.key is None:
             raise sa_exc.InvalidRequestError(
                 "Instance '%s' is not persisted" %
-                mapperutil.state_str(state))
+                orm_util.state_str(state))
 
         if state in self._deleted:
             return
@@ -1338,7 +1337,7 @@ class Session(object):
             # flush current contents if we expect to load data
             self._autoflush()
 
-        _object_mapper(instance) # verify mapped
+        object_mapper(instance) # verify mapped
         autoflush = self.autoflush
         try:
             self.autoflush = False
@@ -1427,7 +1426,7 @@ class Session(object):
                             "merging to update the most recent version."
                             % (
                                 existing_version,
-                                mapperutil.state_str(merged_state),
+                                orm_util.state_str(merged_state),
                                 merged_version
                             ))
 
@@ -1449,7 +1448,7 @@ class Session(object):
 
     @classmethod
     def identity_key(cls, *args, **kwargs):
-        return mapperutil.identity_key(*args, **kwargs)
+        return orm_util.identity_key(*args, **kwargs)
 
     @classmethod
     def object_session(cls, instance):
@@ -1461,13 +1460,13 @@ class Session(object):
         if not self.identity_map.contains_state(state):
             raise sa_exc.InvalidRequestError(
                 "Instance '%s' is not persistent within this Session" %
-                mapperutil.state_str(state))
+                orm_util.state_str(state))
 
     def _save_impl(self, state):
         if state.key is not None:
             raise sa_exc.InvalidRequestError(
                 "Object '%s' already has an identity - it can't be registered "
-                "as pending" % mapperutil.state_str(state))
+                "as pending" % orm_util.state_str(state))
 
         self._before_attach(state)
         if state not in self._new:
@@ -1483,13 +1482,13 @@ class Session(object):
         if state.key is None:
             raise sa_exc.InvalidRequestError(
                 "Instance '%s' is not persisted" %
-                mapperutil.state_str(state))
+                orm_util.state_str(state))
 
         if state.deleted:
             raise sa_exc.InvalidRequestError(
                 "Instance '%s' has been deleted.  Use the make_transient() "
                 "function to send this object back to the transient state." %
-                mapperutil.state_str(state)
+                orm_util.state_str(state)
             )
         self._before_attach(state)
         self._deleted.pop(state, None)
@@ -1569,14 +1568,14 @@ class Session(object):
             raise sa_exc.InvalidRequestError("Can't attach instance "
                     "%s; another instance with key %s is already "
                     "present in this session."
-                    % (mapperutil.state_str(state), state.key))
+                    % (orm_util.state_str(state), state.key))
 
         if state.session_id and \
                 state.session_id is not self.hash_key and \
                 state.session_id in _sessions:
             raise sa_exc.InvalidRequestError(
                 "Object '%s' is already attached to session '%s' "
-                "(this is '%s')" % (mapperutil.state_str(state),
+                "(this is '%s')" % (orm_util.state_str(state),
                                     state.session_id, self.hash_key))
 
         if state.session_id != self.hash_key:
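
The refresh hunk above swaps the old ``self.query(...)._load_on_ident()`` call for the module-level ``loading.load_on_ident()``; the public ``Session.refresh()`` contract is unchanged. A minimal sketch of that entry point, assuming a persistent ``user`` instance with a mapped ``name`` attribute:

    def refresh_name(session, user):
        # re-read only the 'name' column; internally this expires the
        # attribute and then goes through loading.load_on_ident() with
        # refresh_state/only_load_props, as shown above
        session.refresh(user, attribute_names=['name'])
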
index 720554483b5ee5cea3488a9c520e36c44672df80..a6aea9b2b6c7f35c834bd8acc0e073a317853c21 100644 (file)
@@ -11,15 +11,14 @@ defines a large part of the ORM's interactivity.
 
 """
 
-from sqlalchemy.util import EMPTY_SET
 import weakref
-from sqlalchemy import util
-
-from sqlalchemy.orm import exc as orm_exc, attributes, interfaces,\
-        util as orm_util
-from sqlalchemy.orm.attributes import PASSIVE_NO_RESULT, \
+from .. import util
+from . import exc as orm_exc, attributes,util as orm_util
+from .attributes import (
+    PASSIVE_NO_RESULT, 
     SQL_OK, NEVER_SET, ATTR_WAS_SET, NO_VALUE,\
     PASSIVE_NO_INITIALIZE
+    )
 
 mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
 sessionlib = util.importlater("sqlalchemy.orm", "session")
@@ -31,7 +30,7 @@ class InstanceState(object):
     session_id = None
     key = None
     runid = None
-    load_options = EMPTY_SET
+    load_options = util.EMPTY_SET
     load_path = ()
     insert_order = None
     _strong_obj = None
index 1bf736dd9a0f93d004dcefc37faa0dd8fbbf71b4..2bd60e76d44cff5e1b9e31aec46b6ea08deac0bf 100644 (file)
@@ -7,19 +7,19 @@
 """sqlalchemy.orm.interfaces.LoaderStrategy 
    implementations, and related MapperOptions."""
 
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import sql, util, log, event
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.sql import visitors
-from sqlalchemy.orm import attributes, interfaces, exc as orm_exc
-from sqlalchemy.orm.mapper import _none_set
-from sqlalchemy.orm.interfaces import (
+from .. import exc as sa_exc
+from .. import util, log, event
+from ..sql import util as sql_util, visitors
+from . import (
+        attributes, interfaces, exc as orm_exc, loading, 
+        unitofwork, util as orm_util
+    )
+from .util import _none_set
+from .interfaces import (
     LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
     StrategizedProperty
     )
-from sqlalchemy.orm import session as sessionlib, unitofwork
-from sqlalchemy.orm import util as mapperutil
-from sqlalchemy.orm.query import Query
+from .session import _state_session
 import itertools
 
 def _register_attribute(strategy, mapper, useobject,
@@ -45,7 +45,7 @@ def _register_attribute(strategy, mapper, useobject,
     if prop.key in prop.parent.validators:
         fn, include_removes = prop.parent.validators[prop.key]
         listen_hooks.append(
-            lambda desc, prop: mapperutil._validator_events(desc, 
+            lambda desc, prop: orm_util._validator_events(desc, 
                                 prop.key, fn, include_removes)
             )
 
@@ -228,16 +228,16 @@ class DeferredColumnLoader(LoaderStrategy):
         # narrow the keys down to just those which have no history
         group = [k for k in toload if k in state.unmodified]
 
-        session = sessionlib._state_session(state)
+        session = _state_session(state)
         if session is None:
             raise orm_exc.DetachedInstanceError(
                 "Parent instance %s is not bound to a Session; "
                 "deferred load operation of attribute '%s' cannot proceed" % 
-                (mapperutil.state_str(state), self.key)
+                (orm_util.state_str(state), self.key)
                 )
 
         query = session.query(localparent)
-        if query._load_on_ident(state.key, 
+        if loading.load_on_ident(query, state.key, 
                     only_load_props=group, refresh_state=state) is None:
             raise orm_exc.ObjectDeletedError(state)
 
@@ -405,7 +405,7 @@ class LazyLoader(AbstractRelationshipLoader):
         # use the "committed state" only if we're in a flush
         # for this state.
 
-        sess = sessionlib._state_session(state)
+        sess = _state_session(state)
         if sess is not None and sess._flushing:
             def visit_bindparam(bindparam):
                 if bindparam._identifying_key in bind_to_col:
@@ -473,12 +473,12 @@ class LazyLoader(AbstractRelationshipLoader):
         ):
             return attributes.PASSIVE_NO_RESULT
 
-        session = sessionlib._state_session(state)
+        session = _state_session(state)
         if not session:
             raise orm_exc.DetachedInstanceError(
                 "Parent instance %s is not bound to a Session; "
                 "lazy load operation of attribute '%s' cannot proceed" % 
-                (mapperutil.state_str(state), self.key)
+                (orm_util.state_str(state), self.key)
             )
 
         # if we have a simple primary key load, check the 
@@ -498,7 +498,7 @@ class LazyLoader(AbstractRelationshipLoader):
                 return None
 
             ident_key = self.mapper.identity_key_from_primary_key(ident)
-            instance = Query._get_from_identity(session, ident_key, passive)
+            instance = loading.get_from_identity(session, ident_key, passive)
             if instance is not None:
                 return instance
             elif not passive & attributes.SQL_OK or \
@@ -544,7 +544,7 @@ class LazyLoader(AbstractRelationshipLoader):
             q = q._conditional_options(*state.load_options)
 
         if self.use_get:
-            return q._load_on_ident(ident_key)
+            return loading.load_on_ident(q, ident_key)
 
         if self.parent_property.order_by:
             q = q.order_by(*util.to_list(self.parent_property.order_by))
@@ -678,7 +678,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
             effective_entity = self.mapper
 
         subq_path = context.attributes.get(('subquery_path', None), 
-                                mapperutil.PathRegistry.root)
+                                orm_util.PathRegistry.root)
 
         subq_path = subq_path + path
 
@@ -734,7 +734,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
 
     def _get_leftmost(self, subq_path):
         subq_path = subq_path.path
-        subq_mapper = mapperutil._class_to_mapper(subq_path[0])
+        subq_mapper = orm_util._class_to_mapper(subq_path[0])
 
         # determine attributes of the leftmost mapper
         if self.parent.isa(subq_mapper) and self.key==subq_path[1]:
@@ -776,7 +776,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
         # the original query now becomes a subquery
         # which we'll join onto.
         embed_q = q.with_labels().subquery()
-        left_alias = mapperutil.AliasedClass(leftmost_mapper, embed_q)
+        left_alias = orm_util.AliasedClass(leftmost_mapper, embed_q)
         return left_alias
 
 
@@ -793,7 +793,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
         # which needs to be aliased.
 
         if len(to_join) > 1:
-            ext = mapperutil._extended_entity_info(subq_path[-2])
+            ext = orm_util._extended_entity_info(subq_path[-2])
 
         if len(to_join) < 2:
             # in the case of a one level eager load, this is the
@@ -805,11 +805,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
             # in the vast majority of cases, and [ticket:2014] 
             # illustrates a case where sub_path[-2] is a subclass
             # of self.parent
-            parent_alias = mapperutil.AliasedClass(subq_path[-2])
+            parent_alias = orm_util.AliasedClass(subq_path[-2])
         else:
             # if of_type() were used leading to this relationship, 
             # self.parent is more specific than subq_path[-2]
-            parent_alias = mapperutil.AliasedClass(self.parent)
+            parent_alias = orm_util.AliasedClass(self.parent)
 
         local_cols = self.parent_property.local_columns
 
@@ -1049,8 +1049,8 @@ class JoinedLoader(AbstractRelationshipLoader):
         if with_poly_info:
             to_adapt = with_poly_info.entity
         else:
-            to_adapt = mapperutil.AliasedClass(self.mapper)
-        clauses = mapperutil.ORMAdapter(
+            to_adapt = orm_util.AliasedClass(self.mapper)
+        clauses = orm_util.ORMAdapter(
                     to_adapt, 
                     equivalents=self.mapper._equivalent_columns,
                     adapt_required=True)
@@ -1120,7 +1120,7 @@ class JoinedLoader(AbstractRelationshipLoader):
                                 self.parent_property)
             else:
                 onclause = getattr(
-                                mapperutil.AliasedClass(
+                                orm_util.AliasedClass(
                                         self.parent, 
                                         adapter.selectable
                                 ), 
@@ -1137,7 +1137,7 @@ class JoinedLoader(AbstractRelationshipLoader):
 
         assert clauses.aliased_class is not None
         context.eager_joins[entity_key] = eagerjoin = \
-                                mapperutil.join(
+                                orm_util.join(
                                             towrap, 
                                             clauses.aliased_class, 
                                             onclause, 
@@ -1217,7 +1217,8 @@ class JoinedLoader(AbstractRelationshipLoader):
         if eager_adapter is not False:
             key = self.key
 
-            _instance = self.mapper._instance_processor(
+            _instance = loading.instance_processor(
+                                self.mapper,
                                 context, 
                                 our_path[self.mapper],
                                 eager_adapter)
@@ -1346,7 +1347,7 @@ class LoadEagerFromAliasOption(PropertyOption):
         if alias is not None:
             if not isinstance(alias, basestring):
                 mapper, alias, is_aliased_class = \
-                        mapperutil._entity_info(alias)
+                        orm_util._entity_info(alias)
         self.alias = alias
         self.chained = chained
 
@@ -1372,7 +1373,7 @@ class LoadEagerFromAliasOption(PropertyOption):
         else:
             if paths[-1].contains(query, "path_with_polymorphic"):
                 with_poly_info = paths[-1].get(query, "path_with_polymorphic")
-                adapter = mapperutil.ORMAdapter(
+                adapter = orm_util.ORMAdapter(
                             with_poly_info.entity, 
                             equivalents=prop.mapper._equivalent_columns,
                             adapt_required=True)
@@ -1390,7 +1391,7 @@ def single_parent_validator(desc, prop):
                     "Instance %s is already associated with an instance "
                     "of %s via its %s attribute, and is only allowed a "
                     "single parent." % 
-                    (mapperutil.instance_str(value), state.class_, prop)
+                    (orm_util.instance_str(value), state.class_, prop)
                 )
         return value
 
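
The loader strategies above now obtain the owning session via ``_state_session()`` and raise ``DetachedInstanceError`` when there is none; the new ``test_detached_raise`` test further down exercises the same failure for the dynamic loader. A sketch of that behavior, assuming a mapped ``User`` whose lazy ``addresses`` relationship has not yet been loaded:

    from sqlalchemy.orm import exc as orm_exc

    def addresses_or_none(session, user):
        # once expunged, the instance has no owning Session, so the lazy
        # load of 'addresses' cannot proceed
        session.expunge(user)
        try:
            return user.addresses
        except orm_exc.DetachedInstanceError:
            return None
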
index c1eaab2343521d40878e29ca8bb62bb314ad4d16..6f1e6c166d3752b7f34cc4b7192bcf3b7b66fa8f 100644 (file)
@@ -9,7 +9,7 @@ between instances based on join conditions.
 
 """
 
-from sqlalchemy.orm import exc, util as mapperutil, attributes
+from . import exc, util as orm_util, attributes
 
 def populate(source, source_mapper, dest, dest_mapper, 
                         synchronize_pairs, uowcommit, flag_cascaded_pks):
@@ -48,7 +48,7 @@ def clear(dest, dest_mapper, synchronize_pairs):
             raise AssertionError(
                                 "Dependency rule tried to blank-out primary key "
                                 "column '%s' on instance '%s'" % 
-                                (r, mapperutil.state_str(dest))
+                                (r, orm_util.state_str(dest))
                             )
         try:
             dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None)
index bc3be8b4131fb503c4917c050c684e10290fabaa..caf7f2e8e27d1336f391ee3f7b39c12cce305844 100644 (file)
@@ -12,10 +12,9 @@ organizes them in order of dependency, and executes.
 
 """
 
-from sqlalchemy import util, event
-from sqlalchemy.util import topological
-from sqlalchemy.orm import attributes, interfaces, persistence
-from sqlalchemy.orm import util as mapperutil
+from .. import util, event
+from ..util import topological
+from . import attributes, interfaces, persistence, util as orm_util
 session = util.importlater("sqlalchemy.orm", "session")
 
 def track_cascade_events(descriptor, prop):
@@ -205,7 +204,7 @@ class UOWTransaction(object):
             if not state.deleted and operation is not None:
                 util.warn("Object of type %s not in session, %s operation "
                             "along '%s' will not proceed" % 
-                            (mapperutil.state_class_str(state), operation, prop))
+                            (orm_util.state_class_str(state), operation, prop))
             return False
 
         if state not in self.states:
@@ -537,7 +536,7 @@ class ProcessState(PostSortRec):
         return "%s(%s, %s, delete=%s)" % (
             self.__class__.__name__,
             self.dependency_processor,
-            mapperutil.state_str(self.state),
+            orm_util.state_str(self.state),
             self.delete
         )
 
@@ -561,7 +560,7 @@ class SaveUpdateState(PostSortRec):
     def __repr__(self):
         return "%s(%s)" % (
             self.__class__.__name__,
-            mapperutil.state_str(self.state)
+            orm_util.state_str(self.state)
         )
 
 class DeleteState(PostSortRec):
@@ -584,6 +583,6 @@ class DeleteState(PostSortRec):
     def __repr__(self):
         return "%s(%s)" % (
             self.__class__.__name__,
-            mapperutil.state_str(self.state)
+            orm_util.state_str(self.state)
         )
 
index 0978ab693d61110eb20162ccd61ac3bbb2c21727..de55c8991dbd5bbf19a58549e57216be832c9277 100644 (file)
@@ -5,13 +5,11 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 
-from sqlalchemy import sql, util, event, exc as sa_exc, inspection
-from sqlalchemy.sql import expression, util as sql_util, operators
-from sqlalchemy.orm.interfaces import MapperExtension, EXT_CONTINUE,\
-                                PropComparator, MapperProperty
+from .. import sql, util, event, exc as sa_exc, inspection
+from ..sql import expression, util as sql_util, operators
+from .interfaces import PropComparator, MapperProperty
 from itertools import chain
-from sqlalchemy.orm import attributes, exc
-import operator
+from . import attributes, exc
 import re
 
 mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
@@ -22,6 +20,8 @@ all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
 
 _INSTRUMENTOR = ('mapper', 'instrumentor')
 
+_none_set = frozenset([None])
+
 class CascadeOptions(frozenset):
     """Keeps track of the options sent to relationship().cascade"""
 
@@ -790,7 +790,7 @@ def with_parent(instance, prop):
                         value_is_parent=True)
 
 
-_extended_entity_info_tuple = util.namedtuple("extended_entity_info", [
+extended_entity_info = util.namedtuple("extended_entity_info", [
     "entity",
     "mapper",
     "selectable",
@@ -800,7 +800,7 @@ _extended_entity_info_tuple = util.namedtuple("extended_entity_info", [
 ])
 def _extended_entity_info(entity, compile=True):
     if isinstance(entity, AliasedClass):
-        return _extended_entity_info_tuple(
+        return extended_entity_info(
             entity,
             entity._AliasedClass__mapper, \
                     entity._AliasedClass__alias, \
@@ -816,15 +816,15 @@ def _extended_entity_info(entity, compile=True):
         class_manager = attributes.manager_of_class(entity)
 
         if class_manager is None:
-            return _extended_entity_info_tuple(entity, None, entity, False, [], None)
+            return extended_entity_info(entity, None, entity, False, [], None)
 
         mapper = class_manager.mapper
     else:
-        return _extended_entity_info_tuple(entity, None, entity, False, [], None)
+        return extended_entity_info(entity, None, entity, False, [], None)
 
     if compile and mapperlib.module._new_mappers:
         mapperlib.configure_mappers()
-    return _extended_entity_info_tuple(
+    return extended_entity_info(
         entity, 
         mapper, \
             mapper._with_polymorphic_selectable, \
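
``_none_set`` now lives in this module and is imported by the session code earlier in the diff; it drives the ``allow_partial_pks`` decision visible in the removed ``_instance()`` code near the top. A self-contained illustration of that check:

    _none_set = frozenset([None])

    # with allow_partial_pks (the mapper default), a row yields no entity
    # only when *every* primary key column came back NULL
    assert _none_set.issuperset((None, None))     # all NULL   -> skip row
    assert not _none_set.issuperset((None, 5))    # partial pk -> still loads

    # with allow_partial_pks=False, any NULL primary key column disqualifies
    assert _none_set.issubset((None, 5))          # has a NULL -> skip row
    assert not _none_set.issubset((7, 5))         # no NULLs   -> loads
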
index ce85b20ef6899a16b38d5c6611ad4d5435f1db05..ba0fa422052681d1cbc81bf0a5afa3b4a16277fb 100644 (file)
@@ -4,7 +4,7 @@ from sqlalchemy.orm import dynamic_loader, backref
 from test.lib import testing
 from sqlalchemy import Integer, String, ForeignKey, desc, select, func
 from test.lib.schema import Table, Column
-from sqlalchemy.orm import mapper, relationship, create_session, Query, attributes
+from sqlalchemy.orm import mapper, relationship, create_session, Query, attributes, exc as orm_exc
 from sqlalchemy.orm.dynamic import AppenderMixin
 from test.lib.testing import eq_, AssertsCompiledSQL, assert_raises_message, assert_raises
 from test.lib import fixtures
@@ -55,6 +55,24 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True
         )
 
+    def test_detached_raise(self):
+        users, Address, addresses, User = (self.tables.users,
+                                self.classes.Address,
+                                self.tables.addresses,
+                                self.classes.User)
+
+        mapper(User, users, properties={
+            'addresses':dynamic_loader(mapper(Address, addresses))
+        })
+        sess = create_session()
+        u = sess.query(User).get(8)
+        sess.expunge(u)
+        assert_raises(
+            orm_exc.DetachedInstanceError,
+            u.addresses.filter_by,
+            email_address='e'
+        )
+
     def test_order_by(self):
         users, Address, addresses, User = (self.tables.users,
                                 self.classes.Address,
index 56b2b79da11ba383b0e9e8f9a7a61190d06c87f5..9faf54579ccfe51bc97aec849613e3a78192555f 100644 (file)
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -674,7 +674,7 @@ class RollbackRecoverTest(_LocalFixture):
         u1.name = 'edward'
         a1.email_address = 'foober'
         s.add(u2)
-        assert_raises(sa_exc.FlushError, s.commit)
+        assert_raises(orm_exc.FlushError, s.commit)
         assert_raises(sa_exc.InvalidRequestError, s.commit)
         s.rollback()
         assert u2 not in s
@@ -708,7 +708,7 @@ class RollbackRecoverTest(_LocalFixture):
         a1.email_address = 'foober'
         s.begin_nested()
         s.add(u2)
-        assert_raises(sa_exc.FlushError, s.commit)
+        assert_raises(orm_exc.FlushError, s.commit)
         assert_raises(sa_exc.InvalidRequestError, s.commit)
         s.rollback()
         assert u2 not in s
index baf7754b3e22605bf15c5d073bcf0cbaba05b509..ddbc159d271dcf8884a93350e1965a446c88dfd4 100644 (file)
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -12,7 +12,8 @@ from test.lib import engines, testing, pickleable
 from test.lib.schema import Table
 from test.lib.schema import Column
 from sqlalchemy.orm import mapper, relationship, create_session, \
-    column_property, attributes, Session, reconstructor, object_session
+    column_property, attributes, Session, reconstructor, object_session,\
+    exc as orm_exc
 from test.lib.testing import eq_, ne_
 from test.lib.util import gc_collect
 from test.lib import fixtures
@@ -2466,7 +2467,7 @@ class PartialNullPKTest(fixtures.MappedTest):
         t1 = s.query(T1).first()
         t1.col2 = 5
         assert_raises_message(
-            sa.exc.FlushError,
+            orm_exc.FlushError,
             "Can't update table using NULL for primary key value",
             s.commit
         )
@@ -2479,7 +2480,7 @@ class PartialNullPKTest(fixtures.MappedTest):
         t1 = s.query(T1).first()
         t1.col3 = 'hi'
         assert_raises_message(
-            sa.exc.FlushError,
+            orm_exc.FlushError,
             "Can't update table using NULL for primary key value",
             s.commit
         )
@@ -2492,7 +2493,7 @@ class PartialNullPKTest(fixtures.MappedTest):
         t1 = s.query(T1).first()
         s.delete(t1)
         assert_raises_message(
-            sa.exc.FlushError,
+            orm_exc.FlushError,
             "Can't delete from table using NULL for primary key value",
             s.commit
         )
@@ -2502,7 +2503,7 @@ class PartialNullPKTest(fixtures.MappedTest):
         s = Session()
         s.add(T1(col1=None, col2=None))
         assert_raises_message(
-            sa.exc.FlushError,
+            orm_exc.FlushError,
             r"Instance \<T1 at .+?\> has a NULL "
             "identity key.  If this is an auto-generated value, "
             "check that the database table allows generation ",