]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- major refactoring/inlining to loader.instances(), though not really
authorMike Bayer <mike_mp@zzzcomputing.com>
Fri, 29 Aug 2014 00:06:12 +0000 (20:06 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Fri, 29 Aug 2014 00:06:49 +0000 (20:06 -0400)
any speed improvements :(.   code is in a much better place to be run into
C, however
- The ``proc()`` callable passed to the ``create_row_processor()``
method of custom :class:`.Bundle` classes now accepts only a single
"row" argument.
- Deprecated event hooks removed:  ``populate_instance``,
``create_instance``, ``translate_row``, ``append_result``
- the getter() idea is somewhat restored; see ref #3175

17 files changed:
doc/build/changelog/changelog_10.rst
doc/build/changelog/migration_10.rst
doc/build/orm/mapper_config.rst
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/events.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/langhelpers.py
test/orm/test_bundle.py
test/orm/test_events.py

index 5243b7a4dc9255da17d684c3d2d24e39355c98ef..87f32aff6acab5510ceddd5ee1eab1563b25ea01 100644 (file)
     on compatibility concerns, see :doc:`/changelog/migration_10`.
 
 
+    .. change::
+        :tags: change, orm
+
+        The ``proc()`` callable passed to the ``create_row_processor()``
+        method of custom :class:`.Bundle` classes now accepts only a single
+        "row" argument.
+
+        .. seealso::
+
+            :ref:`bundle_api_change`
+
+    .. change::
+        :tags: change, orm
+
+        Deprecated event hooks removed:  ``populate_instance``,
+        ``create_instance``, ``translate_row``, ``append_result``
+
+        .. seealso::
+
+            :ref:`migration_deprecated_orm_events`
+
     .. change::
         :tags: bug, orm
         :tickets: 3145
index c5b214efe0a631609b794cd4e0e6110a3eb937a4..3dc2871b04e8f63306c831a26fa7016e1e88fd02 100644 (file)
@@ -104,6 +104,59 @@ symbol, and no change to the object's state occurs.
 
 :ticket:`3061`
 
+.. _migration_deprecated_orm_events:
+
+Deprecated ORM Event Hooks Removed
+----------------------------------
+
+The following ORM event hooks, some of which have been deprecated since
+0.5, have been removed:   ``translate_row``, ``populate_instance``,
+``append_result``, ``create_instance``.  The use cases for these hooks
+originated in the very early 0.1 / 0.2 series of SQLAlchemy and have long
+since been unnecessary.  In particular, the hooks were largely unusable
+as the behavioral contracts within these events were strongly linked to
+the surrounding internals, such as how an instance needs to be created
+and initialized as well as how columns are located within an ORM-generated
+row.   The removal of these hooks greatly simplifies the mechanics of ORM
+object loading.
+
+.. _bundle_api_change:
+
+API Change for new Bundle feature when custom row loaders are used
+------------------------------------------------------------------
+
+The new :class:`.Bundle` object of 0.9 has a small change in API,
+when the ``create_row_processor()`` method is overridden on a custom class.
+Previously, the sample code looked like::
+
+    from sqlalchemy.orm import Bundle
+
+    class DictBundle(Bundle):
+        def create_row_processor(self, query, procs, labels):
+            """Override create_row_processor to return values as dictionaries"""
+            def proc(row, result):
+                return dict(
+                            zip(labels, (proc(row, result) for proc in procs))
+                        )
+            return proc
+
+The unused ``result`` member is now removed::
+
+    from sqlalchemy.orm import Bundle
+
+    class DictBundle(Bundle):
+        def create_row_processor(self, query, procs, labels):
+            """Override create_row_processor to return values as dictionaries"""
+            def proc(row):
+                return dict(
+                            zip(labels, (proc(row) for proc in procs))
+                        )
+            return proc
+
+.. seealso::
+
+       :ref:`bundles`
+
 .. _migration_3008:
 
 Right inner join nesting now the default for joinedload with innerjoin=True
index d0679c7212dfde76d3c2717ae2b50dc6597cb807..8de341a0da9fe00e209583efc4f823d70882bc30 100644 (file)
@@ -1166,12 +1166,18 @@ return structure with a straight Python dictionary::
     class DictBundle(Bundle):
         def create_row_processor(self, query, procs, labels):
             """Override create_row_processor to return values as dictionaries"""
-            def proc(row, result):
+            def proc(row):
                 return dict(
-                            zip(labels, (proc(row, result) for proc in procs))
+                            zip(labels, (proc(row) for proc in procs))
                         )
             return proc
 
+.. versionchanged:: 1.0
+
+   The ``proc()`` callable passed to the ``create_row_processor()``
+   method of custom :class:`.Bundle` classes now accepts only a single
+   "row" argument.
+
 A result from the above bundle will return dictionary values::
 
     bn = DictBundle('mybundle', MyClass.data1, MyClass.data2)
index 06a81aa6c57d55278c175f55f4beac8354165dbc..3995942efe04606b1481b6dd06fc41f08e893733 100644 (file)
@@ -110,7 +110,7 @@ class RowProxy(BaseRowProxy):
     __slots__ = ()
 
     def __contains__(self, key):
-        return self._parent._has_key(self._row, key)
+        return self._parent._has_key(key)
 
     def __getstate__(self):
         return {
@@ -155,7 +155,7 @@ class RowProxy(BaseRowProxy):
     def has_key(self, key):
         """Return True if this RowProxy contains the given key."""
 
-        return self._parent._has_key(self._row, key)
+        return self._parent._has_key(key)
 
     def items(self):
         """Return a list of tuples, each tuple containing a key/value pair."""
@@ -331,12 +331,28 @@ class ResultMetaData(object):
             map[key] = result
         return result
 
-    def _has_key(self, row, key):
+    def _has_key(self, key):
         if key in self._keymap:
             return True
         else:
             return self._key_fallback(key, False) is not None
 
+    def _getter(self, key):
+        if key in self._keymap:
+            processor, obj, index = self._keymap[key]
+        else:
+            ret = self._key_fallback(key, False)
+            if ret is None:
+                return None
+            processor, obj, index = ret
+
+        if index is None:
+            raise exc.InvalidRequestError(
+                "Ambiguous column name '%s' in result set! "
+                "try 'use_labels' option on select statement." % key)
+
+        return operator.itemgetter(index)
+
     def __getstate__(self):
         return {
             '_pickled_keymap': dict(
@@ -398,6 +414,12 @@ class ResultProxy(object):
             context.engine._should_log_debug()
         self._init_metadata()
 
+    def _getter(self, key):
+        return self._metadata._getter(key)
+
+    def _has_key(self, key):
+        return self._metadata._has_key(key)
+
     def _init_metadata(self):
         metadata = self._cursor_description()
         if metadata is not None:
index fa693c96801a72433b5a8ca71b0209ab82ea9871..2755823233d371cb30bb4204192a3c1027d9c5d0 100644 (file)
@@ -67,10 +67,6 @@ class MapperExtension(object):
             (
                 'init_instance',
                 'init_failed',
-                'translate_row',
-                'create_instance',
-                'append_result',
-                'populate_instance',
                 'reconstruct_instance',
                 'before_insert',
                 'after_insert',
@@ -156,108 +152,6 @@ class MapperExtension(object):
         """
         return EXT_CONTINUE
 
-    def translate_row(self, mapper, context, row):
-        """Perform pre-processing on the given result row and return a
-        new row instance.
-
-        This is called when the mapper first receives a row, before
-        the object identity or the instance itself has been derived
-        from that row.   The given row may or may not be a
-        ``RowProxy`` object - it will always be a dictionary-like
-        object which contains mapped columns as keys.  The
-        returned object should also be a dictionary-like object
-        which recognizes mapped columns as keys.
-
-        If the ultimate return value is EXT_CONTINUE, the row
-        is not translated.
-
-        """
-        return EXT_CONTINUE
-
-    def create_instance(self, mapper, selectcontext, row, class_):
-        """Receive a row when a new object instance is about to be
-        created from that row.
-
-        The method can choose to create the instance itself, or it can return
-        EXT_CONTINUE to indicate normal object creation should take place.
-
-        mapper
-          The mapper doing the operation
-
-        selectcontext
-          The QueryContext generated from the Query.
-
-        row
-          The result row from the database
-
-        class\_
-          The class we are mapping.
-
-        return value
-          A new object instance, or EXT_CONTINUE
-
-        """
-        return EXT_CONTINUE
-
-    def append_result(self, mapper, selectcontext, row, instance,
-                      result, **flags):
-        """Receive an object instance before that instance is appended
-        to a result list.
-
-        If this method returns EXT_CONTINUE, result appending will proceed
-        normally.  if this method returns any other value or None,
-        result appending will not proceed for this instance, giving
-        this extension an opportunity to do the appending itself, if
-        desired.
-
-        mapper
-          The mapper doing the operation.
-
-        selectcontext
-          The QueryContext generated from the Query.
-
-        row
-          The result row from the database.
-
-        instance
-          The object instance to be appended to the result.
-
-        result
-          List to which results are being appended.
-
-        \**flags
-          extra information about the row, same as criterion in
-          ``create_row_processor()`` method of
-          :class:`~sqlalchemy.orm.interfaces.MapperProperty`
-        """
-
-        return EXT_CONTINUE
-
-    def populate_instance(self, mapper, selectcontext, row,
-                          instance, **flags):
-        """Receive an instance before that instance has
-        its attributes populated.
-
-        This usually corresponds to a newly loaded instance but may
-        also correspond to an already-loaded instance which has
-        unloaded attributes to be populated.  The method may be called
-        many times for a single instance, as multiple result rows are
-        used to populate eagerly loaded collections.
-
-        If this method returns EXT_CONTINUE, instance population will
-        proceed normally.  If any other value or None is returned,
-        instance population will not proceed, giving this extension an
-        opportunity to populate the instance itself, if desired.
-
-        .. deprecated:: 0.5
-            Most usages of this hook are obsolete.  For a
-            generic "object has been newly created from a row" hook, use
-            ``reconstruct_instance()``, or the ``@orm.reconstructor``
-            decorator.
-
-        """
-        return EXT_CONTINUE
-
     def reconstruct_instance(self, mapper, instance):
         """Receive an object instance after it has been created via
         ``__new__``, and after initial attribute population has
index f0f9a6468e84d234b960f218f9a07bb47e9b348f..19ff71f73fdb5fc8921ef608a21713c304050bd1 100644 (file)
@@ -372,9 +372,9 @@ class CompositeProperty(DescriptorProperty):
                 property.key, *expr)
 
         def create_row_processor(self, query, procs, labels):
-            def proc(row, result):
+            def proc(row):
                 return self.property.composite_class(
-                    *[proc(row, result) for proc in procs])
+                    *[proc(row) for proc in procs])
             return proc
 
     class Comparator(PropComparator):
index 8edaa274492107e3a9ac26c80aab616d6beb1555..daf7050408e641d633b2873297f0f06d77449e52 100644 (file)
@@ -652,145 +652,6 @@ class MapperEvents(event.Events):
 
         """
 
-    def translate_row(self, mapper, context, row):
-        """Perform pre-processing on the given result row and return a
-        new row instance.
-
-        .. deprecated:: 0.9 the :meth:`.translate_row` event should
-           be considered as legacy.  The row as delivered in a mapper
-           load operation typically requires that highly technical
-           details be accommodated in order to identity the correct
-           column keys are present in the row, rendering this particular
-           event hook as difficult to use and unreliable.
-
-        This listener is typically registered with ``retval=True``.
-        It is called when the mapper first receives a row, before
-        the object identity or the instance itself has been derived
-        from that row.   The given row may or may not be a
-        :class:`.RowProxy` object - it will always be a dictionary-like
-        object which contains mapped columns as keys.  The
-        returned object should also be a dictionary-like object
-        which recognizes mapped columns as keys.
-
-        :param mapper: the :class:`.Mapper` which is the target
-         of this event.
-        :param context: the :class:`.QueryContext`, which includes
-         a handle to the current :class:`.Query` in progress as well
-         as additional state information.
-        :param row: the result row being handled.  This may be
-         an actual :class:`.RowProxy` or may be a dictionary containing
-         :class:`.Column` objects as keys.
-        :return: When configured with ``retval=True``, the function
-         should return a dictionary-like row object, or ``EXT_CONTINUE``,
-         indicating the original row should be used.
-
-
-        """
-
-    def create_instance(self, mapper, context, row, class_):
-        """Receive a row when a new object instance is about to be
-        created from that row.
-
-        .. deprecated:: 0.9 the :meth:`.create_instance` event should
-           be considered as legacy.  Manipulation of the object construction
-           mechanics during a load should not be necessary.
-
-        The method can choose to create the instance itself, or it can return
-        EXT_CONTINUE to indicate normal object creation should take place.
-        This listener is typically registered with ``retval=True``.
-
-        :param mapper: the :class:`.Mapper` which is the target
-         of this event.
-        :param context: the :class:`.QueryContext`, which includes
-         a handle to the current :class:`.Query` in progress as well
-         as additional state information.
-        :param row: the result row being handled.  This may be
-         an actual :class:`.RowProxy` or may be a dictionary containing
-         :class:`.Column` objects as keys.
-        :param class\_: the mapped class.
-        :return: When configured with ``retval=True``, the return value
-         should be a newly created instance of the mapped class,
-         or ``EXT_CONTINUE`` indicating that default object construction
-         should take place.
-
-        """
-
-    def append_result(self, mapper, context, row, target,
-                      result, **flags):
-        """Receive an object instance before that instance is appended
-        to a result list.
-
-        .. deprecated:: 0.9 the :meth:`.append_result` event should
-           be considered as legacy.  It is a difficult to use method
-           whose original purpose is better suited by custom collection
-           classes.
-
-        This is a rarely used hook which can be used to alter
-        the construction of a result list returned by :class:`.Query`.
-
-        :param mapper: the :class:`.Mapper` which is the target
-         of this event.
-        :param context: the :class:`.QueryContext`, which includes
-         a handle to the current :class:`.Query` in progress as well
-         as additional state information.
-        :param row: the result row being handled.  This may be
-         an actual :class:`.RowProxy` or may be a dictionary containing
-         :class:`.Column` objects as keys.
-        :param target: the mapped instance being populated.  If
-         the event is configured with ``raw=True``, this will
-         instead be the :class:`.InstanceState` state-management
-         object associated with the instance.
-        :param result: a list-like object where results are being
-         appended.
-        :param \**flags: Additional state information about the
-         current handling of the row.
-        :return: If this method is registered with ``retval=True``,
-         a return value of ``EXT_STOP`` will prevent the instance
-         from being appended to the given result list, whereas a
-         return value of ``EXT_CONTINUE`` will result in the default
-         behavior of appending the value to the result list.
-
-        """
-
-    def populate_instance(self, mapper, context, row,
-                          target, **flags):
-        """Receive an instance before that instance has
-        its attributes populated.
-
-        .. deprecated:: 0.9 the :meth:`.populate_instance` event should
-           be considered as legacy.  The mechanics of instance population
-           should not need modification; special "on load" rules can as always
-           be accommodated by the :class:`.InstanceEvents.load` event.
-
-        This usually corresponds to a newly loaded instance but may
-        also correspond to an already-loaded instance which has
-        unloaded attributes to be populated.  The method may be called
-        many times for a single instance, as multiple result rows are
-        used to populate eagerly loaded collections.
-
-        Most usages of this hook are obsolete.  For a
-        generic "object has been newly created from a row" hook, use
-        :meth:`.InstanceEvents.load`.
-
-        :param mapper: the :class:`.Mapper` which is the target
-         of this event.
-        :param context: the :class:`.QueryContext`, which includes
-         a handle to the current :class:`.Query` in progress as well
-         as additional state information.
-        :param row: the result row being handled.  This may be
-         an actual :class:`.RowProxy` or may be a dictionary containing
-         :class:`.Column` objects as keys.
-        :param target: the mapped instance.  If
-         the event is configured with ``raw=True``, this will
-         instead be the :class:`.InstanceState` state-management
-         object associated with the instance.
-        :return: When configured with ``retval=True``, a return
-         value of ``EXT_STOP`` will bypass instance population by
-         the mapper. A value of ``EXT_CONTINUE`` indicates that
-         default instance population should take place.
-
-        """
-
     def before_insert(self, mapper, connection, target):
         """Receive an object instance before an INSERT statement
         is emitted corresponding to that instance.
index eb5b65baad830c7f611f869f17769e99d0ecab10..ad7d2d53da11c0b3ef3cc2e373a128927c938b1a 100644 (file)
@@ -41,6 +41,8 @@ class ClassManager(dict):
     MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
     STATE_ATTR = base.DEFAULT_STATE_ATTR
 
+    _state_setter = staticmethod(util.attrsetter(STATE_ATTR))
+
     deferred_scalar_loader = None
 
     original_init = object.__init__
@@ -288,15 +290,15 @@ class ClassManager(dict):
 
     def new_instance(self, state=None):
         instance = self.class_.__new__(self.class_)
-        setattr(instance, self.STATE_ATTR,
-                self._state_constructor(instance, self)
-                if not state else state)
+        if state is None:
+            state = self._state_constructor(instance, self)
+        self._state_setter(instance, state)
         return instance
 
     def setup_instance(self, instance, state=None):
-        setattr(instance, self.STATE_ATTR,
-                self._state_constructor(instance, self)
-                if not state else state)
+        if state is None:
+            state = self._state_constructor(instance, self)
+        self._state_setter(instance, state)
 
     def teardown_instance(self, instance):
         delattr(instance, self.STATE_ATTR)
@@ -323,7 +325,7 @@ class ClassManager(dict):
                 _new_state_if_none(instance)
         else:
             state = self._state_constructor(instance, self)
-            setattr(instance, self.STATE_ATTR, state)
+            self._state_setter(instance, state)
             return state
 
     def has_state(self, instance):
index 49ec99ce450cd6f790b6ed776e185eb5c773bb97..145c9b6e84fd6cf5ff44df0b7d8fcd06ba229561 100644 (file)
@@ -82,11 +82,11 @@ class MapperProperty(_MappedAttribute, InspectionAttr):
         pass
 
     def create_row_processor(self, context, path,
-                             mapper, row, adapter):
+                             mapper, result, adapter):
         """Return a 3-tuple consisting of three row processing functions.
 
         """
-        return None, None, None
+        return None, None, None, None
 
     def cascade_iterator(self, type_, state, visited_instances=None,
                          halt_on=None):
@@ -443,14 +443,14 @@ class StrategizedProperty(MapperProperty):
             strat = self.strategy
         strat.setup_query(context, entity, path, loader, adapter, **kwargs)
 
-    def create_row_processor(self, context, path, mapper, row, adapter):
+    def create_row_processor(self, context, path, mapper, result, adapter):
         loader = self._get_context_loader(context, path)
         if loader and loader.strategy:
             strat = self._get_strategy(loader.strategy)
         else:
             strat = self.strategy
         return strat.create_row_processor(context, path, loader,
-                                          mapper, row, adapter)
+                                          mapper, result, adapter)
 
     def do_init(self):
         self._strategies = {}
@@ -543,14 +543,14 @@ class LoaderStrategy(object):
         pass
 
     def create_row_processor(self, context, path, loadopt, mapper,
-                             row, adapter):
+                             result, adapter):
         """Return row processing functions which fulfill the contract
         specified by MapperProperty.create_row_processor.
 
         StrategizedProperty delegates its create_row_processor method
         directly to this method. """
 
-        return None, None, None
+        return None, None, None, None
 
     def __str__(self):
         return str(self.parent_property)
index 934967b272519d14a07fcada151e8c23e8d5fdba..87fc89befc2b5896a172a2bf022a387ab05f6464 100644 (file)
@@ -16,7 +16,6 @@ as well as some of the attribute loading strategies.
 
 from .. import util
 from . import attributes, exc as orm_exc, state as statelib
-from .interfaces import EXT_CONTINUE
 from ..sql import util as sql_util
 from .util import _none_set, state_str
 from .. import exc as sa_exc
@@ -44,17 +43,14 @@ def instances(query, cursor, context):
             def filter_fn(row):
                 return tuple(fn(x) for x, fn in zip(row, filter_fns))
 
-    custom_rows = single_entity and \
-        query._entities[0].custom_rows
-
     (process, labels) = \
         list(zip(*[
             query_entity.row_processor(query,
-                                       context, custom_rows)
+                                       context, cursor)
             for query_entity in query._entities
         ]))
 
-    if not custom_rows and not single_entity:
+    if not single_entity:
         keyed_tuple = util.lightweight_named_tuple('result', labels)
 
     while True:
@@ -68,14 +64,11 @@ def instances(query, cursor, context):
         else:
             fetch = cursor.fetchall()
 
-        if custom_rows:
-            rows = []
-            for row in fetch:
-                process[0](row, rows)
-        elif single_entity:
-            rows = [process[0](row, None) for row in fetch]
+        if single_entity:
+            proc = process[0]
+            rows = [proc(row) for row in fetch]
         else:
-            rows = [keyed_tuple([proc(row, None) for proc in process])
+            rows = [keyed_tuple([proc(row) for proc in process])
                     for row in fetch]
 
         if filtered:
@@ -237,7 +230,7 @@ def load_on_ident(query, key,
         return None
 
 
-def instance_processor(mapper, context, path, adapter,
+def instance_processor(mapper, context, result, path, adapter,
                        polymorphic_from=None,
                        only_load_props=None,
                        refresh_state=None,
@@ -260,11 +253,12 @@ def instance_processor(mapper, context, path, adapter,
             polymorphic_on = polymorphic_discriminator
         else:
             polymorphic_on = mapper.polymorphic_on
-        polymorphic_instances = util.PopulateDict(
-            _configure_subclass_mapper(
-                mapper,
-                context, path, adapter)
-        )
+        if polymorphic_on is not None:
+            polymorphic_instances = util.PopulateDict(
+                _configure_subclass_mapper(
+                    mapper,
+                    context, result, path, adapter)
+            )
 
     version_id_col = mapper.version_id_col
 
@@ -277,56 +271,19 @@ def instance_processor(mapper, context, path, adapter,
 
     identity_class = mapper._identity_class
 
-    new_populators = []
-    existing_populators = []
-    eager_populators = []
+    (new_populators, existing_populators,
+        eager_populators) = _populators(
+        mapper, context, path, result, adapter, only_load_props)
 
     load_path = context.query._current_path + path \
         if context.query._current_path.path \
         else path
 
-    def populate_state(state, dict_, row, isnew, only_load_props):
-        if isnew:
-            if context.propagate_options:
-                state.load_options = context.propagate_options
-            if state.load_options:
-                state.load_path = load_path
-
-        if not new_populators:
-            _populators(mapper, context, path, row, adapter,
-                        new_populators,
-                        existing_populators,
-                        eager_populators
-                        )
-
-        if isnew:
-            populators = new_populators
-        else:
-            populators = existing_populators
-
-        if only_load_props is None:
-            for key, populator in populators:
-                populator(state, dict_, row)
-        elif only_load_props:
-            for key, populator in populators:
-                if key in only_load_props:
-                    populator(state, dict_, row)
-
     session_identity_map = context.session.identity_map
 
-    listeners = mapper.dispatch
-
-    # legacy events - I'd very much like to yank these totally
-    translate_row = listeners.translate_row or None
-    create_instance = listeners.create_instance or None
-    populate_instance = listeners.populate_instance or None
-    append_result = listeners.append_result or None
-    ####
-
     populate_existing = context.populate_existing or mapper.always_refresh
-    invoke_all_eagers = context.invoke_all_eagers
-    load_evt = mapper.class_manager.dispatch.load or None
-    refresh_evt = mapper.class_manager.dispatch.refresh or None
+    load_evt = bool(mapper.class_manager.dispatch.load)
+    refresh_evt = bool(mapper.class_manager.dispatch.refresh)
     instance_state = attributes.instance_state
     instance_dict = attributes.instance_dict
 
@@ -335,26 +292,14 @@ def instance_processor(mapper, context, path, adapter,
     else:
         is_not_primary_key = _none_set.intersection
 
-    def _instance(row, result):
-        if not new_populators and invoke_all_eagers:
-            _populators(mapper, context, path, row, adapter,
-                        new_populators,
-                        existing_populators,
-                        eager_populators)
-
-        if translate_row:
-            for fn in translate_row:
-                ret = fn(mapper, context, row)
-                if ret is not EXT_CONTINUE:
-                    row = ret
-                    break
+    def _instance(row):
 
         if polymorphic_on is not None:
             discriminator = row[polymorphic_on]
             if discriminator is not None:
                 _instance = polymorphic_instances[discriminator]
                 if _instance:
-                    return _instance(row, result)
+                    return _instance(row)
 
         # determine identity key
         if refresh_state:
@@ -393,9 +338,9 @@ def instance_processor(mapper, context, path, adapter,
                     "Instance '%s' has version id '%s' which "
                     "does not match database-loaded version id '%s'."
                     % (state_str(state),
-                       mapper._get_state_attr_by_column(
-                        state, dict_,
-                        mapper.version_id_col),
+                        mapper._get_state_attr_by_column(
+                            state, dict_,
+                            mapper.version_id_col),
                        row[version_id_col]))
         elif refresh_state:
             # out of band refresh_state detected (i.e. its not in the
@@ -418,21 +363,7 @@ def instance_processor(mapper, context, path, adapter,
             currentload = True
             loaded_instance = True
 
-            if create_instance:
-                for fn in create_instance:
-                    instance = fn(mapper, context,
-                                  row, mapper.class_)
-                    if instance is not EXT_CONTINUE:
-                        manager = attributes.manager_of_class(
-                            instance.__class__)
-                        # TODO: if manager is None, raise a friendly error
-                        # about returning instances of unmapped types
-                        manager.setup_instance(instance)
-                        break
-                else:
-                    instance = mapper.class_manager.new_instance()
-            else:
-                instance = mapper.class_manager.new_instance()
+            instance = mapper.class_manager.new_instance()
 
             dict_ = instance_dict(instance)
             state = instance_state(instance)
@@ -448,18 +379,15 @@ def instance_processor(mapper, context, path, adapter,
             if isnew:
                 state.runid = context.runid
                 context.progress[state] = dict_
-
-            if populate_instance:
-                for fn in populate_instance:
-                    ret = fn(mapper, context, row, state,
-                             only_load_props=only_load_props,
-                             instancekey=identitykey, isnew=isnew)
-                    if ret is not EXT_CONTINUE:
-                        break
-                else:
-                    populate_state(state, dict_, row, isnew, only_load_props)
+                if context.propagate_options:
+                    state.load_options = context.propagate_options
+                if state.load_options:
+                    state.load_path = load_path
+                for key, populator in new_populators:
+                    populator(state, dict_, row)
             else:
-                populate_state(state, dict_, row, isnew, only_load_props)
+                for key, populator in existing_populators:
+                    populator(state, dict_, row)
 
             if loaded_instance and load_evt:
                 state.manager.dispatch.load(state, context)
@@ -471,72 +399,72 @@ def instance_processor(mapper, context, path, adapter,
             # state is having a partial set of its attributes
             # refreshed.  Populate those attributes,
             # and add to the "context.partials" collection.
+            unloaded = state.unloaded
+
             if state in context.partials:
                 isnew = False
                 (d_, attrs) = context.partials[state]
+                for key, populator in existing_populators:
+                    if key not in attrs:
+                        continue
+                    populator(state, dict_, row)
             else:
                 isnew = True
-                attrs = state.unloaded
+                attrs = unloaded
                 context.partials[state] = (dict_, attrs)
-
-            if populate_instance:
-                for fn in populate_instance:
-                    ret = fn(mapper, context, row, state,
-                             only_load_props=attrs,
-                             instancekey=identitykey, isnew=isnew)
-                    if ret is not EXT_CONTINUE:
-                        break
-                else:
-                    populate_state(state, dict_, row, isnew, attrs)
-            else:
-                populate_state(state, dict_, row, isnew, attrs)
+                if context.propagate_options:
+                    state.load_options = context.propagate_options
+                if state.load_options:
+                    state.load_path = load_path
+                for key, populator in new_populators:
+                    if key not in attrs:
+                        continue
+                    populator(state, dict_, row)
 
             for key, pop in eager_populators:
-                if key not in state.unloaded:
+                if key not in unloaded:
                     pop(state, dict_, row)
 
             if isnew and refresh_evt:
                 state.manager.dispatch.refresh(state, context, attrs)
 
-        if result is not None:
-            if append_result:
-                for fn in append_result:
-                    if fn(mapper, context, row, state,
-                          result, instancekey=identitykey,
-                          isnew=isnew) is not EXT_CONTINUE:
-                        break
-                else:
-                    result.append(instance)
-            else:
-                result.append(instance)
-
         return instance
     return _instance
 
 
-def _populators(mapper, context, path, row, adapter,
-                new_populators, existing_populators, eager_populators):
+def _populators(mapper, context, path, result, adapter, only_load_props):
     """Produce a collection of attribute level row processor
     callables."""
 
+    new_populators = []
+    existing_populators = []
     delayed_populators = []
-    pops = (new_populators, existing_populators, delayed_populators,
-            eager_populators)
-
-    for prop in mapper._props.values():
-
-        for i, pop in enumerate(prop.create_row_processor(
-                context,
-                path,
-                mapper, row, adapter)):
-            if pop is not None:
-                pops[i].append((prop.key, pop))
+    eager_populators = []
+    invoke_eagers = context.invoke_all_eagers
+
+    props = mapper._props.values()
+    if only_load_props is not None:
+        props = (p for p in props if p.key in only_load_props)
+
+    for prop in props:
+        np, ep, dp, gp = prop.create_row_processor(
+            context, path, mapper, result, adapter)
+        if np:
+            new_populators.append((prop.key, np))
+        if ep:
+            existing_populators.append((prop.key, ep))
+        if dp:
+            delayed_populators.append((prop.key, dp))
+        if invoke_eagers and gp:
+            eager_populators.append((prop.key, gp))
 
     if delayed_populators:
-        new_populators.extend(delayed_populators)
+        new_populators += delayed_populators
+
+    return new_populators, existing_populators, eager_populators
 
 
-def _configure_subclass_mapper(mapper, context, path, adapter):
+def _configure_subclass_mapper(mapper, context, result, path, adapter):
     """Produce a mapper level row processor callable factory for mappers
     inheriting this one."""
 
@@ -553,6 +481,7 @@ def _configure_subclass_mapper(mapper, context, path, adapter):
         return instance_processor(
             sub_mapper,
             context,
+            result,
             path,
             adapter,
             polymorphic_from=mapper)
index aab28ee0c5702f87ebc7defd94fa5ecf1cf1153d..539867f2e8eb0d74f539a8c6487d2b3fd2fc3ea5 100644 (file)
@@ -2286,6 +2286,16 @@ class Mapper(InspectionAttr):
     def primary_base_mapper(self):
         return self.class_manager.mapper.base_mapper
 
+    def _result_has_identity_key(self, result, adapter=None):
+        pk_cols = self.primary_key
+        if adapter:
+            pk_cols = [adapter.columns[c] for c in pk_cols]
+        for col in pk_cols:
+            if not result._has_key(col):
+                return False
+        # every primary-key column was present in the result
+        return True
+
     def identity_key_from_row(self, row, adapter=None):
         """Return an identity-map key for use in storing/retrieving an
         item from the identity map.
index 15e0aa88102f20302d309041665c2ecb05b21d83..b0564671947bc1aa7f9299bbe859a07734a91b93 100644 (file)
@@ -3003,7 +3003,6 @@ class _MapperEntity(_QueryEntity):
         else:
             self._label_name = self.mapper.class_.__name__
         self.path = self.entity_zero._path_registry
-        self.custom_rows = bool(self.mapper.dispatch.append_result)
 
     def set_with_polymorphic(self, query, cls_or_mappers,
                              selectable, polymorphic_on):
@@ -3082,7 +3081,7 @@ class _MapperEntity(_QueryEntity):
 
         return ret
 
-    def row_processor(self, query, context, custom_rows):
+    def row_processor(self, query, context, result):
         adapter = self._get_entity_clauses(query, context)
 
         if context.adapter and adapter:
@@ -3102,6 +3101,7 @@ class _MapperEntity(_QueryEntity):
             _instance = loading.instance_processor(
                 self.mapper,
                 context,
+                result,
                 self.path,
                 adapter,
                 only_load_props=query._only_load_props,
@@ -3112,6 +3112,7 @@ class _MapperEntity(_QueryEntity):
             _instance = loading.instance_processor(
                 self.mapper,
                 context,
+                result,
                 self.path,
                 adapter,
                 polymorphic_discriminator=self._polymorphic_discriminator
@@ -3277,8 +3278,8 @@ class Bundle(object):
         """
         keyed_tuple = util.lightweight_named_tuple('result', labels)
 
-        def proc(row, result):
-            return keyed_tuple([proc(row, None) for proc in procs])
+        def proc(row):
+            return keyed_tuple([proc(row) for proc in procs])
         return proc
 
 
@@ -3303,7 +3304,6 @@ class _BundleEntity(_QueryEntity):
 
         self.supports_single_entity = self.bundle.single_entity
 
-    custom_rows = False
 
     @property
     def entity_zero(self):
@@ -3345,9 +3345,9 @@ class _BundleEntity(_QueryEntity):
         for ent in self._entities:
             ent.setup_context(query, context)
 
-    def row_processor(self, query, context, custom_rows):
+    def row_processor(self, query, context, result):
         procs, labels = zip(
-            *[ent.row_processor(query, context, custom_rows)
+            *[ent.row_processor(query, context, result)
               for ent in self._entities]
         )
 
@@ -3437,7 +3437,6 @@ class _ColumnEntity(_QueryEntity):
             self.entity_zero = None
 
     supports_single_entity = False
-    custom_rows = False
 
     @property
     def entity_zero_or_selectable(self):
@@ -3474,17 +3473,15 @@ class _ColumnEntity(_QueryEntity):
     def _resolve_expr_against_query_aliases(self, query, expr, context):
         return query._adapt_clause(expr, False, True)
 
-    def row_processor(self, query, context, custom_rows):
+    def row_processor(self, query, context, result):
         column = self._resolve_expr_against_query_aliases(
             query, self.column, context)
 
         if context.adapter:
             column = context.adapter.columns[column]
 
-        def proc(row, result):
-            return row[column]
-
-        return proc, self._label_name
+        getter = result._getter(column)
+        return getter, self._label_name
 
     def setup_context(self, query, context):
         column = self._resolve_expr_against_query_aliases(
index 2d8a81f0a5894954e393eac6d72ae2bc936f949b..29cb67583d885c43c719e5ff6d701f681598f286 100644 (file)
@@ -119,8 +119,8 @@ class UninstrumentedColumnLoader(LoaderStrategy):
 
     def create_row_processor(
             self, context, path, loadopt,
-            mapper, row, adapter):
-        return None, None, None
+            mapper, result, adapter):
+        return None, None, None, None
 
 
 @log.class_logger
@@ -157,21 +157,22 @@ class ColumnLoader(LoaderStrategy):
 
     def create_row_processor(
             self, context, path,
-            loadopt, mapper, row, adapter):
+            loadopt, mapper, result, adapter):
         key = self.key
         # look through list of columns represented here
         # to see which, if any, is present in the row.
         for col in self.columns:
             if adapter:
                 col = adapter.columns[col]
-            if col is not None and col in row:
+            getter = result._getter(col)
+            if getter:
                 def fetch_col(state, dict_, row):
-                    dict_[key] = row[col]
-                return fetch_col, None, None
+                    dict_[key] = getter(row)
+                return fetch_col, None, None, None
         else:
             def expire_for_non_present_col(state, dict_, row):
                 state._expire_attribute_pre_commit(dict_, key)
-            return expire_for_non_present_col, None, None
+            return expire_for_non_present_col, None, None, None
 
 
 @log.class_logger
@@ -189,28 +190,31 @@ class DeferredColumnLoader(LoaderStrategy):
 
     def create_row_processor(
             self, context, path, loadopt,
-            mapper, row, adapter):
+            mapper, result, adapter):
         col = self.columns[0]
         if adapter:
             col = adapter.columns[col]
 
         key = self.key
-        if col in row:
+
+        # TODO: put a result-level contains here
+        getter = result._getter(col)
+        if getter:
             return self.parent_property._get_strategy_by_cls(ColumnLoader).\
                 create_row_processor(
-                    context, path, loadopt, mapper, row, adapter)
+                    context, path, loadopt, mapper, result, adapter)
 
         elif not self.is_class_level:
             set_deferred_for_local_state = InstanceState._row_processor(
                 mapper.class_manager,
                 LoadDeferredColumns(key), key)
-            return set_deferred_for_local_state, None, None
+            return set_deferred_for_local_state, None, None, None
         else:
             def reset_col_for_deferred(state, dict_, row):
                 # reset state on the key so that deferred callables
                 # fire off on next access.
                 state._reset(dict_, key)
-            return reset_col_for_deferred, None, None
+            return reset_col_for_deferred, None, None, None
 
     def init_class_attribute(self, mapper):
         self.is_class_level = True
@@ -333,10 +337,10 @@ class NoLoader(AbstractRelationshipLoader):
 
     def create_row_processor(
             self, context, path, loadopt, mapper,
-            row, adapter):
+            result, adapter):
         def invoke_no_load(state, dict_, row):
             state._initialize(self.key)
-        return invoke_no_load, None, None
+        return invoke_no_load, None, None, None
 
 
 @log.class_logger
@@ -618,7 +622,7 @@ class LazyLoader(AbstractRelationshipLoader):
 
     def create_row_processor(
             self, context, path, loadopt,
-            mapper, row, adapter):
+            mapper, result, adapter):
         key = self.key
         if not self.is_class_level:
             # we are not the primary manager for this attribute
@@ -633,7 +637,7 @@ class LazyLoader(AbstractRelationshipLoader):
                 mapper.class_manager,
                 LoadLazyAttribute(key), key)
 
-            return set_lazy_callable, None, None
+            return set_lazy_callable, None, None, None
         elif context.populate_existing or mapper.always_refresh:
             def reset_for_lazy_callable(state, dict_, row):
                 # we are the primary manager for this attribute on
@@ -646,10 +650,9 @@ class LazyLoader(AbstractRelationshipLoader):
                 # any existing state.
                 state._reset(dict_, key)
 
-            return reset_for_lazy_callable, None, None
+            return reset_for_lazy_callable, None, None, None
         else:
-            return None, None, None
-
+            return None, None, None, None
 
 
 class LoadLazyAttribute(object):
@@ -682,11 +685,11 @@ class ImmediateLoader(AbstractRelationshipLoader):
 
     def create_row_processor(
             self, context, path, loadopt,
-            mapper, row, adapter):
+            mapper, result, adapter):
         def load_immediate(state, dict_, row):
             state.get_impl(self.key).get(state, dict_)
 
-        return None, None, load_immediate
+        return None, None, load_immediate, None
 
 
 @log.class_logger
@@ -997,7 +1000,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
 
     def create_row_processor(
             self, context, path, loadopt,
-            mapper, row, adapter):
+            mapper, result, adapter):
         if not self.parent.class_manager[self.key].impl.supports_population:
             raise sa_exc.InvalidRequestError(
                 "'%s' does not support object "
@@ -1009,7 +1012,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
         subq = path.get(context.attributes, 'subquery')
 
         if subq is None:
-            return None, None, None
+            return None, None, None, None
 
         local_cols = self.parent_property.local_columns
 
@@ -1381,7 +1384,7 @@ class JoinedLoader(AbstractRelationshipLoader):
                     )
                 )
 
-    def _create_eager_adapter(self, context, row, adapter, path, loadopt):
+    def _create_eager_adapter(self, context, result, adapter, path, loadopt):
         user_defined_adapter = self._init_user_defined_eager_proc(
             loadopt, context) if loadopt else False
 
@@ -1399,17 +1402,16 @@ class JoinedLoader(AbstractRelationshipLoader):
             if decorator is None:
                 return False
 
-        try:
-            self.mapper.identity_key_from_row(row, decorator)
+        if self.mapper._result_has_identity_key(result, decorator):
             return decorator
-        except KeyError:
+        else:
             # no identity key - don't return a row
             # processor, will cause a degrade to lazy
             return False
 
     def create_row_processor(
             self, context, path, loadopt, mapper,
-            row, adapter):
+            result, adapter):
         if not self.parent.class_manager[self.key].impl.supports_population:
             raise sa_exc.InvalidRequestError(
                 "'%s' does not support object "
@@ -1421,7 +1423,7 @@ class JoinedLoader(AbstractRelationshipLoader):
 
         eager_adapter = self._create_eager_adapter(
             context,
-            row,
+            result,
             adapter, our_path, loadopt)
 
         if eager_adapter is not False:
@@ -1430,6 +1432,7 @@ class JoinedLoader(AbstractRelationshipLoader):
             _instance = loading.instance_processor(
                 self.mapper,
                 context,
+                result,
                 our_path[self.mapper],
                 eager_adapter)
 
@@ -1441,7 +1444,7 @@ class JoinedLoader(AbstractRelationshipLoader):
             return self.parent_property._get_strategy_by_cls(LazyLoader).\
                 create_row_processor(
                     context, path, loadopt,
-                    mapper, row, adapter)
+                    mapper, result, adapter)
 
     def _create_collection_loader(self, context, key, _instance):
         def load_collection_from_joined_new_row(state, dict_, row):
@@ -1450,7 +1453,9 @@ class JoinedLoader(AbstractRelationshipLoader):
             result_list = util.UniqueAppender(collection,
                                               'append_without_event')
             context.attributes[(state, key)] = result_list
-            _instance(row, result_list)
+            inst = _instance(row)
+            if inst is not None:
+                result_list.append(inst)
 
         def load_collection_from_joined_existing_row(state, dict_, row):
             if (state, key) in context.attributes:
@@ -1466,10 +1471,12 @@ class JoinedLoader(AbstractRelationshipLoader):
                     collection,
                     'append_without_event')
                 context.attributes[(state, key)] = result_list
-            _instance(row, result_list)
+            inst = _instance(row)
+            if inst is not None:
+                result_list.append(inst)
 
         def load_collection_from_joined_exec(state, dict_, row):
-            _instance(row, None)
+            _instance(row)
 
         return load_collection_from_joined_new_row, \
             load_collection_from_joined_existing_row, \
@@ -1479,12 +1486,12 @@ class JoinedLoader(AbstractRelationshipLoader):
         def load_scalar_from_joined_new_row(state, dict_, row):
             # set a scalar object instance directly on the parent
             # object, bypassing InstrumentedAttribute event handlers.
-            dict_[key] = _instance(row, None)
+            dict_[key] = _instance(row)
 
         def load_scalar_from_joined_existing_row(state, dict_, row):
             # call _instance on the row, even though the object has
             # been created, so that we further descend into properties
-            existing = _instance(row, None)
+            existing = _instance(row)
             if existing is not None \
                 and key in dict_ \
                     and existing is not dict_[key]:
@@ -1494,7 +1501,7 @@ class JoinedLoader(AbstractRelationshipLoader):
                     % self)
 
         def load_scalar_from_joined_exec(state, dict_, row):
-            _instance(row, None)
+            _instance(row)
 
         return load_scalar_from_joined_new_row, \
             load_scalar_from_joined_existing_row, \
index d882c26565db3092bf1614aaeeb4b00a44360693..b92beac96578865e54a82b46a63af2340e5d806a 100644 (file)
@@ -34,7 +34,7 @@ from .langhelpers import iterate_attributes, class_hierarchy, \
     classproperty, set_creation_order, warn_exception, warn, NoneType,\
     constructor_copy, methods_equivalent, chop_traceback, asint,\
     generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\
-    get_callable_argspec, only_once
+    get_callable_argspec, only_once, attrsetter
 
 from .deprecations import warn_deprecated, warn_pending_deprecation, \
     deprecated, pending_deprecation, inject_docstring_text
index 828e8f1f322b0ebdeb05b958a5772d19fc2504ec..f0dd7a08e507dfe856d9f8bebde76ecc0730c724 100644 (file)
@@ -1249,3 +1249,11 @@ def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
     return tb[start:end + 1]
 
 NoneType = type(None)
+
+def attrsetter(attrname):
+    code = \
+        "def set(obj, value):\n"\
+        "    obj.%s = value" % attrname
+    env = locals().copy()
+    exec(code, env)
+    return env['set']
index 29b8e93822ecad9e953940e2153c64164b339851..a08221e2630c403874dac438fc69746360ad2e88 100644 (file)
@@ -79,10 +79,10 @@ class BundleTest(fixtures.MappedTest, AssertsCompiledSQL):
 
         class MyBundle(Bundle):
             def create_row_processor(self, query, procs, labels):
-                def proc(row, result):
+                def proc(row):
                     return dict(
-                                zip(labels, (proc(row, result) for proc in procs))
-                            )
+                        zip(labels, (proc(row) for proc in procs))
+                    )
                 return proc
 
         b1 = MyBundle('b1', Data.d1, Data.d2)
index f7667b9f1d35be54a794f000857c69c76ebb2fe8..068d73b07cc966ae61feebd1b35567e4dff2c80f 100644 (file)
@@ -85,10 +85,6 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         for meth in [
             'init',
             'init_failure',
-            'translate_row',
-            'create_instance',
-            'append_result',
-            'populate_instance',
             'load',
             'refresh',
             'expire',
@@ -131,10 +127,9 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         sess.flush()
         eq_(canary,
             ['init', 'before_insert',
-             'after_insert', 'expire', 'translate_row',
-             'populate_instance', 'refresh',
-             'append_result', 'translate_row', 'create_instance',
-             'populate_instance', 'load', 'append_result',
+             'after_insert', 'expire',
+             'refresh',
+             'load',
              'before_update', 'after_update', 'before_delete',
              'after_delete'])
 
@@ -240,16 +235,13 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         sess.delete(am)
         sess.flush()
         eq_(canary1, ['init', 'before_insert', 'after_insert',
-            'translate_row', 'populate_instance','refresh',
-            'append_result', 'translate_row', 'create_instance'
-            , 'populate_instance', 'load', 'append_result',
+            'refresh', 'load',
             'before_update', 'after_update', 'before_delete',
             'after_delete'])
         eq_(canary2, [])
         eq_(canary3, ['init', 'before_insert', 'after_insert',
-            'translate_row', 'populate_instance','refresh',
-            'append_result', 'translate_row', 'create_instance'
-            , 'populate_instance', 'load', 'append_result',
+            'refresh',
+            'load',
             'before_update', 'after_update', 'before_delete',
             'after_delete'])
 
@@ -282,16 +274,12 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         sess.delete(am)
         sess.flush()
         eq_(canary1, ['init', 'before_insert', 'after_insert',
-            'translate_row', 'populate_instance','refresh',
-            'append_result', 'translate_row', 'create_instance'
-            , 'populate_instance', 'load', 'append_result',
+            'refresh', 'load',
             'before_update', 'after_update', 'before_delete',
             'after_delete'])
         eq_(canary2, [])
         eq_(canary3, ['init', 'before_insert', 'after_insert',
-            'translate_row', 'populate_instance','refresh',
-            'append_result', 'translate_row', 'create_instance'
-            , 'populate_instance', 'load', 'append_result',
+            'refresh', 'load',
             'before_update', 'after_update', 'before_delete',
             'after_delete'])
 
@@ -375,25 +363,6 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         eq_(m1.mock_calls, [call()])
         eq_(m2.mock_calls, [call()])
 
-    def test_retval(self):
-        User, users = self.classes.User, self.tables.users
-
-        def create_instance(mapper, context, row, class_):
-            u = User.__new__(User)
-            u.foo = True
-            return u
-
-        mapper(User, users)
-        event.listen(User, 'create_instance', create_instance, retval=True)
-        sess = create_session()
-        u1 = User()
-        u1.name = 'ed'
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        u = sess.query(User).first()
-        assert u.foo
-
     def test_instrument_event(self):
         Address, addresses, users, User = (self.classes.Address,
                                 self.tables.addresses,
@@ -1515,26 +1484,10 @@ class MapperExtensionTest(_fixtures.FixtureTest):
                 methods.append('init_failed')
                 return sa.orm.EXT_CONTINUE
 
-            def translate_row(self, mapper, context, row):
-                methods.append('translate_row')
-                return sa.orm.EXT_CONTINUE
-
-            def create_instance(self, mapper, selectcontext, row, class_):
-                methods.append('create_instance')
-                return sa.orm.EXT_CONTINUE
-
             def reconstruct_instance(self, mapper, instance):
                 methods.append('reconstruct_instance')
                 return sa.orm.EXT_CONTINUE
 
-            def append_result(self, mapper, selectcontext, row, instance, result, **flags):
-                methods.append('append_result')
-                return sa.orm.EXT_CONTINUE
-
-            def populate_instance(self, mapper, selectcontext, row, instance, **flags):
-                methods.append('populate_instance')
-                return sa.orm.EXT_CONTINUE
-
             def before_insert(self, mapper, connection, instance):
                 methods.append('before_insert')
                 return sa.orm.EXT_CONTINUE
@@ -1582,9 +1535,8 @@ class MapperExtensionTest(_fixtures.FixtureTest):
         sess.flush()
         eq_(methods,
             ['instrument_class', 'init_instance', 'before_insert',
-             'after_insert', 'translate_row', 'populate_instance',
-             'append_result', 'translate_row', 'create_instance',
-             'populate_instance', 'reconstruct_instance', 'append_result',
+             'after_insert',
+             'reconstruct_instance',
              'before_update', 'after_update', 'before_delete', 'after_delete'])
 
     def test_inheritance(self):
@@ -1614,10 +1566,9 @@ class MapperExtensionTest(_fixtures.FixtureTest):
         sess.flush()
         eq_(methods,
             ['instrument_class', 'instrument_class', 'init_instance',
-             'before_insert', 'after_insert', 'translate_row',
-             'populate_instance', 'append_result', 'translate_row',
-             'create_instance', 'populate_instance', 'reconstruct_instance',
-             'append_result', 'before_update', 'after_update', 'before_delete',
+             'before_insert', 'after_insert',
+             'reconstruct_instance',
+             'before_update', 'after_update', 'before_delete',
              'after_delete'])
 
     def test_before_after_only_collection(self):
@@ -1691,27 +1642,11 @@ class MapperExtensionTest(_fixtures.FixtureTest):
         sess.flush()
         eq_(methods,
             ['instrument_class', 'instrument_class', 'init_instance',
-             'before_insert', 'after_insert', 'translate_row',
-             'populate_instance', 'append_result', 'translate_row',
-             'create_instance', 'populate_instance', 'reconstruct_instance',
-             'append_result', 'before_update', 'after_update', 'before_delete',
+             'before_insert', 'after_insert',
+             'reconstruct_instance',
+             'before_update', 'after_update', 'before_delete',
              'after_delete'])
 
-    def test_create_instance(self):
-        User, users = self.classes.User, self.tables.users
-
-        class CreateUserExt(sa.orm.MapperExtension):
-            def create_instance(self, mapper, selectcontext, row, class_):
-                return User.__new__(User)
-
-        mapper(User, users, extension=CreateUserExt())
-        sess = create_session()
-        u1 = User()
-        u1.name = 'ed'
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        assert sess.query(User).first()
 
     def test_unnecessary_methods_not_evented(self):
         users = self.tables.users