on compatibility concerns, see :doc:`/changelog/migration_10`.
+ .. change::
+ :tags: change, orm
+
+ The ``proc()`` callable passed to the ``create_row_processor()``
+ method of custom :class:`.Bundle` classes now accepts only a single
+ "row" argument.
+
+ .. seealso::
+
+ :ref:`bundle_api_change`
+
+ .. change::
+ :tags: change, orm
+
+ Deprecated event hooks removed: ``populate_instance``,
+ ``create_instance``, ``translate_row``, ``append_result``
+
+ .. seealso::
+
+ :ref:`migration_deprecated_orm_events`
+
.. change::
:tags: bug, orm
:tickets: 3145
:ticket:`3061`
+.. _migration_deprecated_orm_events:
+
+Deprecated ORM Event Hooks Removed
+----------------------------------
+
+The following ORM event hooks, some of which have been deprecated since
+0.5, have been removed: ``translate_row``, ``populate_instance``,
+``append_result``, ``create_instance``. The use cases for these hooks
+originated in the very early 0.1 / 0.2 series of SQLAlchemy and have long
+since been unnecessary. In particular, the hooks were largely unusable
+as the behavioral contracts within these events were strongly linked to
+the surrounding internals, such as how an instance needs to be created
+and initialized as well as how columns are located within an ORM-generated
+row. The removal of these hooks greatly simplifies the mechanics of ORM
+object loading.
+
+.. _bundle_api_change:
+
+API Change for new Bundle feature when custom row loaders are used
+------------------------------------------------------------------
+
+The new :class:`.Bundle` object of 0.9 has a small change in API,
+when the ``create_row_processor()`` method is overridden on a custom class.
+Previously, the sample code looked like::
+
+ from sqlalchemy.orm import Bundle
+
+ class DictBundle(Bundle):
+ def create_row_processor(self, query, procs, labels):
+ """Override create_row_processor to return values as dictionaries"""
+ def proc(row, result):
+ return dict(
+ zip(labels, (proc(row, result) for proc in procs))
+ )
+ return proc
+
+The unused ``result`` member is now removed::
+
+ from sqlalchemy.orm import Bundle
+
+ class DictBundle(Bundle):
+ def create_row_processor(self, query, procs, labels):
+ """Override create_row_processor to return values as dictionaries"""
+ def proc(row):
+ return dict(
+ zip(labels, (proc(row) for proc in procs))
+ )
+ return proc
+
+.. seealso::
+
+ :ref:`bundles`
+
.. _migration_3008:
Right inner join nesting now the default for joinedload with innerjoin=True
class DictBundle(Bundle):
def create_row_processor(self, query, procs, labels):
"""Override create_row_processor to return values as dictionaries"""
- def proc(row, result):
+ def proc(row):
return dict(
- zip(labels, (proc(row, result) for proc in procs))
+ zip(labels, (proc(row) for proc in procs))
)
return proc
+.. versionchanged:: 1.0
+
+ The ``proc()`` callable passed to the ``create_row_processor()``
+ method of custom :class:`.Bundle` classes now accepts only a single
+ "row" argument.
+
A result from the above bundle will return dictionary values::
bn = DictBundle('mybundle', MyClass.data1, MyClass.data2)
__slots__ = ()
def __contains__(self, key):
- return self._parent._has_key(self._row, key)
+ return self._parent._has_key(key)
def __getstate__(self):
return {
def has_key(self, key):
"""Return True if this RowProxy contains the given key."""
- return self._parent._has_key(self._row, key)
+ return self._parent._has_key(key)
def items(self):
"""Return a list of tuples, each tuple containing a key/value pair."""
map[key] = result
return result
- def _has_key(self, row, key):
+ def _has_key(self, key):
if key in self._keymap:
return True
else:
return self._key_fallback(key, False) is not None
+ def _getter(self, key):
+ if key in self._keymap:
+ processor, obj, index = self._keymap[key]
+ else:
+ ret = self._key_fallback(key, False)
+ if ret is None:
+ return None
+ processor, obj, index = ret
+
+ if index is None:
+ raise exc.InvalidRequestError(
+ "Ambiguous column name '%s' in result set! "
+ "try 'use_labels' option on select statement." % key)
+
+ return operator.itemgetter(index)
+
def __getstate__(self):
return {
'_pickled_keymap': dict(
context.engine._should_log_debug()
self._init_metadata()
+ def _getter(self, key):
+ return self._metadata._getter(key)
+
+ def _has_key(self, key):
+ return self._metadata._has_key(key)
+
def _init_metadata(self):
metadata = self._cursor_description()
if metadata is not None:
(
'init_instance',
'init_failed',
- 'translate_row',
- 'create_instance',
- 'append_result',
- 'populate_instance',
'reconstruct_instance',
'before_insert',
'after_insert',
"""
return EXT_CONTINUE
- def translate_row(self, mapper, context, row):
- """Perform pre-processing on the given result row and return a
- new row instance.
-
- This is called when the mapper first receives a row, before
- the object identity or the instance itself has been derived
- from that row. The given row may or may not be a
- ``RowProxy`` object - it will always be a dictionary-like
- object which contains mapped columns as keys. The
- returned object should also be a dictionary-like object
- which recognizes mapped columns as keys.
-
- If the ultimate return value is EXT_CONTINUE, the row
- is not translated.
-
- """
- return EXT_CONTINUE
-
- def create_instance(self, mapper, selectcontext, row, class_):
- """Receive a row when a new object instance is about to be
- created from that row.
-
- The method can choose to create the instance itself, or it can return
- EXT_CONTINUE to indicate normal object creation should take place.
-
- mapper
- The mapper doing the operation
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database
-
- class\_
- The class we are mapping.
-
- return value
- A new object instance, or EXT_CONTINUE
-
- """
- return EXT_CONTINUE
-
- def append_result(self, mapper, selectcontext, row, instance,
- result, **flags):
- """Receive an object instance before that instance is appended
- to a result list.
-
- If this method returns EXT_CONTINUE, result appending will proceed
- normally. if this method returns any other value or None,
- result appending will not proceed for this instance, giving
- this extension an opportunity to do the appending itself, if
- desired.
-
- mapper
- The mapper doing the operation.
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database.
-
- instance
- The object instance to be appended to the result.
-
- result
- List to which results are being appended.
-
- \**flags
- extra information about the row, same as criterion in
- ``create_row_processor()`` method of
- :class:`~sqlalchemy.orm.interfaces.MapperProperty`
- """
-
- return EXT_CONTINUE
-
- def populate_instance(self, mapper, selectcontext, row,
- instance, **flags):
- """Receive an instance before that instance has
- its attributes populated.
-
- This usually corresponds to a newly loaded instance but may
- also correspond to an already-loaded instance which has
- unloaded attributes to be populated. The method may be called
- many times for a single instance, as multiple result rows are
- used to populate eagerly loaded collections.
-
- If this method returns EXT_CONTINUE, instance population will
- proceed normally. If any other value or None is returned,
- instance population will not proceed, giving this extension an
- opportunity to populate the instance itself, if desired.
-
- .. deprecated:: 0.5
- Most usages of this hook are obsolete. For a
- generic "object has been newly created from a row" hook, use
- ``reconstruct_instance()``, or the ``@orm.reconstructor``
- decorator.
-
- """
- return EXT_CONTINUE
-
def reconstruct_instance(self, mapper, instance):
"""Receive an object instance after it has been created via
``__new__``, and after initial attribute population has
property.key, *expr)
def create_row_processor(self, query, procs, labels):
- def proc(row, result):
+ def proc(row):
return self.property.composite_class(
- *[proc(row, result) for proc in procs])
+ *[proc(row) for proc in procs])
return proc
class Comparator(PropComparator):
"""
- def translate_row(self, mapper, context, row):
- """Perform pre-processing on the given result row and return a
- new row instance.
-
- .. deprecated:: 0.9 the :meth:`.translate_row` event should
- be considered as legacy. The row as delivered in a mapper
- load operation typically requires that highly technical
- details be accommodated in order to identity the correct
- column keys are present in the row, rendering this particular
- event hook as difficult to use and unreliable.
-
- This listener is typically registered with ``retval=True``.
- It is called when the mapper first receives a row, before
- the object identity or the instance itself has been derived
- from that row. The given row may or may not be a
- :class:`.RowProxy` object - it will always be a dictionary-like
- object which contains mapped columns as keys. The
- returned object should also be a dictionary-like object
- which recognizes mapped columns as keys.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :return: When configured with ``retval=True``, the function
- should return a dictionary-like row object, or ``EXT_CONTINUE``,
- indicating the original row should be used.
-
-
- """
-
- def create_instance(self, mapper, context, row, class_):
- """Receive a row when a new object instance is about to be
- created from that row.
-
- .. deprecated:: 0.9 the :meth:`.create_instance` event should
- be considered as legacy. Manipulation of the object construction
- mechanics during a load should not be necessary.
-
- The method can choose to create the instance itself, or it can return
- EXT_CONTINUE to indicate normal object creation should take place.
- This listener is typically registered with ``retval=True``.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param class\_: the mapped class.
- :return: When configured with ``retval=True``, the return value
- should be a newly created instance of the mapped class,
- or ``EXT_CONTINUE`` indicating that default object construction
- should take place.
-
- """
-
- def append_result(self, mapper, context, row, target,
- result, **flags):
- """Receive an object instance before that instance is appended
- to a result list.
-
- .. deprecated:: 0.9 the :meth:`.append_result` event should
- be considered as legacy. It is a difficult to use method
- whose original purpose is better suited by custom collection
- classes.
-
- This is a rarely used hook which can be used to alter
- the construction of a result list returned by :class:`.Query`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param target: the mapped instance being populated. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param result: a list-like object where results are being
- appended.
- :param \**flags: Additional state information about the
- current handling of the row.
- :return: If this method is registered with ``retval=True``,
- a return value of ``EXT_STOP`` will prevent the instance
- from being appended to the given result list, whereas a
- return value of ``EXT_CONTINUE`` will result in the default
- behavior of appending the value to the result list.
-
- """
-
- def populate_instance(self, mapper, context, row,
- target, **flags):
- """Receive an instance before that instance has
- its attributes populated.
-
- .. deprecated:: 0.9 the :meth:`.populate_instance` event should
- be considered as legacy. The mechanics of instance population
- should not need modification; special "on load" rules can as always
- be accommodated by the :class:`.InstanceEvents.load` event.
-
- This usually corresponds to a newly loaded instance but may
- also correspond to an already-loaded instance which has
- unloaded attributes to be populated. The method may be called
- many times for a single instance, as multiple result rows are
- used to populate eagerly loaded collections.
-
- Most usages of this hook are obsolete. For a
- generic "object has been newly created from a row" hook, use
- :meth:`.InstanceEvents.load`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: When configured with ``retval=True``, a return
- value of ``EXT_STOP`` will bypass instance population by
- the mapper. A value of ``EXT_CONTINUE`` indicates that
- default instance population should take place.
-
- """
-
def before_insert(self, mapper, connection, target):
"""Receive an object instance before an INSERT statement
is emitted corresponding to that instance.
MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
STATE_ATTR = base.DEFAULT_STATE_ATTR
+ _state_setter = staticmethod(util.attrsetter(STATE_ATTR))
+
deferred_scalar_loader = None
original_init = object.__init__
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
- setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ if state is None:
+ state = self._state_constructor(instance, self)
+ self._state_setter(instance, state)
return instance
def setup_instance(self, instance, state=None):
- setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ if state is None:
+ state = self._state_constructor(instance, self)
+ self._state_setter(instance, state)
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
_new_state_if_none(instance)
else:
state = self._state_constructor(instance, self)
- setattr(instance, self.STATE_ATTR, state)
+ self._state_setter(instance, state)
return state
def has_state(self, instance):
pass
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ mapper, result, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
- return None, None, None
+ return None, None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
halt_on=None):
strat = self.strategy
strat.setup_query(context, entity, path, loader, adapter, **kwargs)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, mapper, result, adapter):
loader = self._get_context_loader(context, path)
if loader and loader.strategy:
strat = self._get_strategy(loader.strategy)
else:
strat = self.strategy
return strat.create_row_processor(context, path, loader,
- mapper, row, adapter)
+ mapper, result, adapter)
def do_init(self):
self._strategies = {}
pass
def create_row_processor(self, context, path, loadopt, mapper,
- row, adapter):
+ result, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
StrategizedProperty delegates its create_row_processor method
directly to this method. """
- return None, None, None
+ return None, None, None, None
def __str__(self):
return str(self.parent_property)
from .. import util
from . import attributes, exc as orm_exc, state as statelib
-from .interfaces import EXT_CONTINUE
from ..sql import util as sql_util
from .util import _none_set, state_str
from .. import exc as sa_exc
def filter_fn(row):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
- custom_rows = single_entity and \
- query._entities[0].custom_rows
-
(process, labels) = \
list(zip(*[
query_entity.row_processor(query,
- context, custom_rows)
+ context, cursor)
for query_entity in query._entities
]))
- if not custom_rows and not single_entity:
+ if not single_entity:
keyed_tuple = util.lightweight_named_tuple('result', labels)
while True:
else:
fetch = cursor.fetchall()
- if custom_rows:
- rows = []
- for row in fetch:
- process[0](row, rows)
- elif single_entity:
- rows = [process[0](row, None) for row in fetch]
+ if single_entity:
+ proc = process[0]
+ rows = [proc(row) for row in fetch]
else:
- rows = [keyed_tuple([proc(row, None) for proc in process])
+ rows = [keyed_tuple([proc(row) for proc in process])
for row in fetch]
if filtered:
return None
-def instance_processor(mapper, context, path, adapter,
+def instance_processor(mapper, context, result, path, adapter,
polymorphic_from=None,
only_load_props=None,
refresh_state=None,
polymorphic_on = polymorphic_discriminator
else:
polymorphic_on = mapper.polymorphic_on
- polymorphic_instances = util.PopulateDict(
- _configure_subclass_mapper(
- mapper,
- context, path, adapter)
- )
+ if polymorphic_on is not None:
+ polymorphic_instances = util.PopulateDict(
+ _configure_subclass_mapper(
+ mapper,
+ context, result, path, adapter)
+ )
version_id_col = mapper.version_id_col
identity_class = mapper._identity_class
- new_populators = []
- existing_populators = []
- eager_populators = []
+ (new_populators, existing_populators,
+ eager_populators) = _populators(
+ mapper, context, path, result, adapter, only_load_props)
load_path = context.query._current_path + path \
if context.query._current_path.path \
else path
- def populate_state(state, dict_, row, isnew, only_load_props):
- if isnew:
- if context.propagate_options:
- state.load_options = context.propagate_options
- if state.load_options:
- state.load_path = load_path
-
- if not new_populators:
- _populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators
- )
-
- if isnew:
- populators = new_populators
- else:
- populators = existing_populators
-
- if only_load_props is None:
- for key, populator in populators:
- populator(state, dict_, row)
- elif only_load_props:
- for key, populator in populators:
- if key in only_load_props:
- populator(state, dict_, row)
-
session_identity_map = context.session.identity_map
- listeners = mapper.dispatch
-
- # legacy events - I'd very much like to yank these totally
- translate_row = listeners.translate_row or None
- create_instance = listeners.create_instance or None
- populate_instance = listeners.populate_instance or None
- append_result = listeners.append_result or None
- ####
-
populate_existing = context.populate_existing or mapper.always_refresh
- invoke_all_eagers = context.invoke_all_eagers
- load_evt = mapper.class_manager.dispatch.load or None
- refresh_evt = mapper.class_manager.dispatch.refresh or None
+ load_evt = bool(mapper.class_manager.dispatch.load)
+ refresh_evt = bool(mapper.class_manager.dispatch.refresh)
instance_state = attributes.instance_state
instance_dict = attributes.instance_dict
else:
is_not_primary_key = _none_set.intersection
- def _instance(row, result):
- if not new_populators and invoke_all_eagers:
- _populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators)
-
- if translate_row:
- for fn in translate_row:
- ret = fn(mapper, context, row)
- if ret is not EXT_CONTINUE:
- row = ret
- break
+ def _instance(row):
if polymorphic_on is not None:
discriminator = row[polymorphic_on]
if discriminator is not None:
_instance = polymorphic_instances[discriminator]
if _instance:
- return _instance(row, result)
+ return _instance(row)
# determine identity key
if refresh_state:
"Instance '%s' has version id '%s' which "
"does not match database-loaded version id '%s'."
% (state_str(state),
- mapper._get_state_attr_by_column(
- state, dict_,
- mapper.version_id_col),
+ mapper._get_state_attr_by_column(
+ state, dict_,
+ mapper.version_id_col),
row[version_id_col]))
elif refresh_state:
# out of band refresh_state detected (i.e. its not in the
currentload = True
loaded_instance = True
- if create_instance:
- for fn in create_instance:
- instance = fn(mapper, context,
- row, mapper.class_)
- if instance is not EXT_CONTINUE:
- manager = attributes.manager_of_class(
- instance.__class__)
- # TODO: if manager is None, raise a friendly error
- # about returning instances of unmapped types
- manager.setup_instance(instance)
- break
- else:
- instance = mapper.class_manager.new_instance()
- else:
- instance = mapper.class_manager.new_instance()
+ instance = mapper.class_manager.new_instance()
dict_ = instance_dict(instance)
state = instance_state(instance)
if isnew:
state.runid = context.runid
context.progress[state] = dict_
-
- if populate_instance:
- for fn in populate_instance:
- ret = fn(mapper, context, row, state,
- only_load_props=only_load_props,
- instancekey=identitykey, isnew=isnew)
- if ret is not EXT_CONTINUE:
- break
- else:
- populate_state(state, dict_, row, isnew, only_load_props)
+ if context.propagate_options:
+ state.load_options = context.propagate_options
+ if state.load_options:
+ state.load_path = load_path
+ for key, populator in new_populators:
+ populator(state, dict_, row)
else:
- populate_state(state, dict_, row, isnew, only_load_props)
+ for key, populator in existing_populators:
+ populator(state, dict_, row)
if loaded_instance and load_evt:
state.manager.dispatch.load(state, context)
# state is having a partial set of its attributes
# refreshed. Populate those attributes,
# and add to the "context.partials" collection.
+ unloaded = state.unloaded
+
if state in context.partials:
isnew = False
(d_, attrs) = context.partials[state]
+ for key, populator in existing_populators:
+ if key not in attrs:
+ continue
+ populator(state, dict_, row)
else:
isnew = True
- attrs = state.unloaded
+ attrs = unloaded
context.partials[state] = (dict_, attrs)
-
- if populate_instance:
- for fn in populate_instance:
- ret = fn(mapper, context, row, state,
- only_load_props=attrs,
- instancekey=identitykey, isnew=isnew)
- if ret is not EXT_CONTINUE:
- break
- else:
- populate_state(state, dict_, row, isnew, attrs)
- else:
- populate_state(state, dict_, row, isnew, attrs)
+ if context.propagate_options:
+ state.load_options = context.propagate_options
+ if state.load_options:
+ state.load_path = load_path
+ for key, populator in new_populators:
+ if key not in attrs:
+ continue
+ populator(state, dict_, row)
for key, pop in eager_populators:
- if key not in state.unloaded:
+ if key not in unloaded:
pop(state, dict_, row)
if isnew and refresh_evt:
state.manager.dispatch.refresh(state, context, attrs)
- if result is not None:
- if append_result:
- for fn in append_result:
- if fn(mapper, context, row, state,
- result, instancekey=identitykey,
- isnew=isnew) is not EXT_CONTINUE:
- break
- else:
- result.append(instance)
- else:
- result.append(instance)
-
return instance
return _instance
-def _populators(mapper, context, path, row, adapter,
- new_populators, existing_populators, eager_populators):
+def _populators(mapper, context, path, result, adapter, only_load_props):
"""Produce a collection of attribute level row processor
callables."""
+ new_populators = []
+ existing_populators = []
delayed_populators = []
- pops = (new_populators, existing_populators, delayed_populators,
- eager_populators)
-
- for prop in mapper._props.values():
-
- for i, pop in enumerate(prop.create_row_processor(
- context,
- path,
- mapper, row, adapter)):
- if pop is not None:
- pops[i].append((prop.key, pop))
+ eager_populators = []
+ invoke_eagers = context.invoke_all_eagers
+
+ props = mapper._props.values()
+ if only_load_props is not None:
+ props = (p for p in props if p.key in only_load_props)
+
+ for prop in props:
+ np, ep, dp, gp = prop.create_row_processor(
+ context, path, mapper, result, adapter)
+ if np:
+ new_populators.append((prop.key, np))
+ if ep:
+ existing_populators.append((prop.key, ep))
+ if dp:
+ delayed_populators.append((prop.key, dp))
+ if invoke_eagers and gp:
+ eager_populators.append((prop.key, gp))
if delayed_populators:
- new_populators.extend(delayed_populators)
+ new_populators += delayed_populators
+
+ return new_populators, existing_populators, eager_populators
-def _configure_subclass_mapper(mapper, context, path, adapter):
+def _configure_subclass_mapper(mapper, context, result, path, adapter):
"""Produce a mapper level row processor callable factory for mappers
inheriting this one."""
return instance_processor(
sub_mapper,
context,
+ result,
path,
adapter,
polymorphic_from=mapper)
def primary_base_mapper(self):
return self.class_manager.mapper.base_mapper
+ def _result_has_identity_key(self, result, adapter=None):
+ pk_cols = self.primary_key
+ if adapter:
+ pk_cols = [adapter.columns[c] for c in pk_cols]
+ for col in pk_cols:
+ if not result._has_key(col):
+ return False
+ else:
+ return True
+
def identity_key_from_row(self, row, adapter=None):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
else:
self._label_name = self.mapper.class_.__name__
self.path = self.entity_zero._path_registry
- self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
selectable, polymorphic_on):
return ret
- def row_processor(self, query, context, custom_rows):
+ def row_processor(self, query, context, result):
adapter = self._get_entity_clauses(query, context)
if context.adapter and adapter:
_instance = loading.instance_processor(
self.mapper,
context,
+ result,
self.path,
adapter,
only_load_props=query._only_load_props,
_instance = loading.instance_processor(
self.mapper,
context,
+ result,
self.path,
adapter,
polymorphic_discriminator=self._polymorphic_discriminator
"""
keyed_tuple = util.lightweight_named_tuple('result', labels)
- def proc(row, result):
- return keyed_tuple([proc(row, None) for proc in procs])
+ def proc(row):
+ return keyed_tuple([proc(row) for proc in procs])
return proc
self.supports_single_entity = self.bundle.single_entity
- custom_rows = False
@property
def entity_zero(self):
for ent in self._entities:
ent.setup_context(query, context)
- def row_processor(self, query, context, custom_rows):
+ def row_processor(self, query, context, result):
procs, labels = zip(
- *[ent.row_processor(query, context, custom_rows)
+ *[ent.row_processor(query, context, result)
for ent in self._entities]
)
self.entity_zero = None
supports_single_entity = False
- custom_rows = False
@property
def entity_zero_or_selectable(self):
def _resolve_expr_against_query_aliases(self, query, expr, context):
return query._adapt_clause(expr, False, True)
- def row_processor(self, query, context, custom_rows):
+ def row_processor(self, query, context, result):
column = self._resolve_expr_against_query_aliases(
query, self.column, context)
if context.adapter:
column = context.adapter.columns[column]
- def proc(row, result):
- return row[column]
-
- return proc, self._label_name
+ getter = result._getter(column)
+ return getter, self._label_name
def setup_context(self, query, context):
column = self._resolve_expr_against_query_aliases(
def create_row_processor(
self, context, path, loadopt,
- mapper, row, adapter):
- return None, None, None
+ mapper, result, adapter):
+ return None, None, None, None
@log.class_logger
def create_row_processor(
self, context, path,
- loadopt, mapper, row, adapter):
+ loadopt, mapper, result, adapter):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
for col in self.columns:
if adapter:
col = adapter.columns[col]
- if col is not None and col in row:
+ getter = result._getter(col)
+ if getter:
def fetch_col(state, dict_, row):
- dict_[key] = row[col]
- return fetch_col, None, None
+ dict_[key] = getter(row)
+ return fetch_col, None, None, None
else:
def expire_for_non_present_col(state, dict_, row):
state._expire_attribute_pre_commit(dict_, key)
- return expire_for_non_present_col, None, None
+ return expire_for_non_present_col, None, None, None
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, row, adapter):
+ mapper, result, adapter):
col = self.columns[0]
if adapter:
col = adapter.columns[col]
key = self.key
- if col in row:
+
+ # TODO: put a result-level contains here
+ getter = result._getter(col)
+ if getter:
return self.parent_property._get_strategy_by_cls(ColumnLoader).\
create_row_processor(
- context, path, loadopt, mapper, row, adapter)
+ context, path, loadopt, mapper, result, adapter)
elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
mapper.class_manager,
LoadDeferredColumns(key), key)
- return set_deferred_for_local_state, None, None
+ return set_deferred_for_local_state, None, None, None
else:
def reset_col_for_deferred(state, dict_, row):
# reset state on the key so that deferred callables
# fire off on next access.
state._reset(dict_, key)
- return reset_col_for_deferred, None, None
+ return reset_col_for_deferred, None, None, None
def init_class_attribute(self, mapper):
self.is_class_level = True
def create_row_processor(
self, context, path, loadopt, mapper,
- row, adapter):
+ result, adapter):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
- return invoke_no_load, None, None
+ return invoke_no_load, None, None, None
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, row, adapter):
+ mapper, result, adapter):
key = self.key
if not self.is_class_level:
# we are not the primary manager for this attribute
mapper.class_manager,
LoadLazyAttribute(key), key)
- return set_lazy_callable, None, None
+ return set_lazy_callable, None, None, None
elif context.populate_existing or mapper.always_refresh:
def reset_for_lazy_callable(state, dict_, row):
# we are the primary manager for this attribute on
# any existing state.
state._reset(dict_, key)
- return reset_for_lazy_callable, None, None
+ return reset_for_lazy_callable, None, None, None
else:
- return None, None, None
-
+ return None, None, None, None
class LoadLazyAttribute(object):
def create_row_processor(
self, context, path, loadopt,
- mapper, row, adapter):
+ mapper, result, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
- return None, None, load_immediate
+ return None, None, load_immediate, None
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, row, adapter):
+ mapper, result, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
subq = path.get(context.attributes, 'subquery')
if subq is None:
- return None, None, None
+ return None, None, None, None
local_cols = self.parent_property.local_columns
)
)
- def _create_eager_adapter(self, context, row, adapter, path, loadopt):
+ def _create_eager_adapter(self, context, result, adapter, path, loadopt):
user_defined_adapter = self._init_user_defined_eager_proc(
loadopt, context) if loadopt else False
if decorator is None:
return False
- try:
- self.mapper.identity_key_from_row(row, decorator)
+ if self.mapper._result_has_identity_key(result, decorator):
return decorator
- except KeyError:
+ else:
# no identity key - don't return a row
# processor, will cause a degrade to lazy
return False
def create_row_processor(
self, context, path, loadopt, mapper,
- row, adapter):
+ result, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
eager_adapter = self._create_eager_adapter(
context,
- row,
+ result,
adapter, our_path, loadopt)
if eager_adapter is not False:
_instance = loading.instance_processor(
self.mapper,
context,
+ result,
our_path[self.mapper],
eager_adapter)
return self.parent_property._get_strategy_by_cls(LazyLoader).\
create_row_processor(
context, path, loadopt,
- mapper, row, adapter)
+ mapper, result, adapter)
def _create_collection_loader(self, context, key, _instance):
def load_collection_from_joined_new_row(state, dict_, row):
result_list = util.UniqueAppender(collection,
'append_without_event')
context.attributes[(state, key)] = result_list
- _instance(row, result_list)
+ inst = _instance(row)
+ if inst is not None:
+ result_list.append(inst)
def load_collection_from_joined_existing_row(state, dict_, row):
if (state, key) in context.attributes:
collection,
'append_without_event')
context.attributes[(state, key)] = result_list
- _instance(row, result_list)
+ inst = _instance(row)
+ if inst is not None:
+ result_list.append(inst)
def load_collection_from_joined_exec(state, dict_, row):
- _instance(row, None)
+ _instance(row)
return load_collection_from_joined_new_row, \
load_collection_from_joined_existing_row, \
def load_scalar_from_joined_new_row(state, dict_, row):
# set a scalar object instance directly on the parent
# object, bypassing InstrumentedAttribute event handlers.
- dict_[key] = _instance(row, None)
+ dict_[key] = _instance(row)
def load_scalar_from_joined_existing_row(state, dict_, row):
# call _instance on the row, even though the object has
# been created, so that we further descend into properties
- existing = _instance(row, None)
+ existing = _instance(row)
if existing is not None \
and key in dict_ \
and existing is not dict_[key]:
% self)
def load_scalar_from_joined_exec(state, dict_, row):
- _instance(row, None)
+ _instance(row)
return load_scalar_from_joined_new_row, \
load_scalar_from_joined_existing_row, \
classproperty, set_creation_order, warn_exception, warn, NoneType,\
constructor_copy, methods_equivalent, chop_traceback, asint,\
generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\
- get_callable_argspec, only_once
+ get_callable_argspec, only_once, attrsetter
from .deprecations import warn_deprecated, warn_pending_deprecation, \
deprecated, pending_deprecation, inject_docstring_text
return tb[start:end + 1]
NoneType = type(None)
+
+def attrsetter(attrname):
+ code = \
+ "def set(obj, value):"\
+ " obj.%s = value" % attrname
+ env = locals().copy()
+ exec(code, env)
+ return env['set']
class MyBundle(Bundle):
def create_row_processor(self, query, procs, labels):
- def proc(row, result):
+ def proc(row):
return dict(
- zip(labels, (proc(row, result) for proc in procs))
- )
+ zip(labels, (proc(row) for proc in procs))
+ )
return proc
b1 = MyBundle('b1', Data.d1, Data.d2)
for meth in [
'init',
'init_failure',
- 'translate_row',
- 'create_instance',
- 'append_result',
- 'populate_instance',
'load',
'refresh',
'expire',
sess.flush()
eq_(canary,
['init', 'before_insert',
- 'after_insert', 'expire', 'translate_row',
- 'populate_instance', 'refresh',
- 'append_result', 'translate_row', 'create_instance',
- 'populate_instance', 'load', 'append_result',
+ 'after_insert', 'expire',
+ 'refresh',
+ 'load',
'before_update', 'after_update', 'before_delete',
'after_delete'])
sess.delete(am)
sess.flush()
eq_(canary1, ['init', 'before_insert', 'after_insert',
- 'translate_row', 'populate_instance','refresh',
- 'append_result', 'translate_row', 'create_instance'
- , 'populate_instance', 'load', 'append_result',
+ 'refresh', 'load',
'before_update', 'after_update', 'before_delete',
'after_delete'])
eq_(canary2, [])
eq_(canary3, ['init', 'before_insert', 'after_insert',
- 'translate_row', 'populate_instance','refresh',
- 'append_result', 'translate_row', 'create_instance'
- , 'populate_instance', 'load', 'append_result',
+ 'refresh',
+ 'load',
'before_update', 'after_update', 'before_delete',
'after_delete'])
sess.delete(am)
sess.flush()
eq_(canary1, ['init', 'before_insert', 'after_insert',
- 'translate_row', 'populate_instance','refresh',
- 'append_result', 'translate_row', 'create_instance'
- , 'populate_instance', 'load', 'append_result',
+ 'refresh', 'load',
'before_update', 'after_update', 'before_delete',
'after_delete'])
eq_(canary2, [])
eq_(canary3, ['init', 'before_insert', 'after_insert',
- 'translate_row', 'populate_instance','refresh',
- 'append_result', 'translate_row', 'create_instance'
- , 'populate_instance', 'load', 'append_result',
+ 'refresh', 'load',
'before_update', 'after_update', 'before_delete',
'after_delete'])
eq_(m1.mock_calls, [call()])
eq_(m2.mock_calls, [call()])
- def test_retval(self):
- User, users = self.classes.User, self.tables.users
-
- def create_instance(mapper, context, row, class_):
- u = User.__new__(User)
- u.foo = True
- return u
-
- mapper(User, users)
- event.listen(User, 'create_instance', create_instance, retval=True)
- sess = create_session()
- u1 = User()
- u1.name = 'ed'
- sess.add(u1)
- sess.flush()
- sess.expunge_all()
- u = sess.query(User).first()
- assert u.foo
-
def test_instrument_event(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
methods.append('init_failed')
return sa.orm.EXT_CONTINUE
- def translate_row(self, mapper, context, row):
- methods.append('translate_row')
- return sa.orm.EXT_CONTINUE
-
- def create_instance(self, mapper, selectcontext, row, class_):
- methods.append('create_instance')
- return sa.orm.EXT_CONTINUE
-
def reconstruct_instance(self, mapper, instance):
methods.append('reconstruct_instance')
return sa.orm.EXT_CONTINUE
- def append_result(self, mapper, selectcontext, row, instance, result, **flags):
- methods.append('append_result')
- return sa.orm.EXT_CONTINUE
-
- def populate_instance(self, mapper, selectcontext, row, instance, **flags):
- methods.append('populate_instance')
- return sa.orm.EXT_CONTINUE
-
def before_insert(self, mapper, connection, instance):
methods.append('before_insert')
return sa.orm.EXT_CONTINUE
sess.flush()
eq_(methods,
['instrument_class', 'init_instance', 'before_insert',
- 'after_insert', 'translate_row', 'populate_instance',
- 'append_result', 'translate_row', 'create_instance',
- 'populate_instance', 'reconstruct_instance', 'append_result',
+ 'after_insert',
+ 'reconstruct_instance',
'before_update', 'after_update', 'before_delete', 'after_delete'])
def test_inheritance(self):
sess.flush()
eq_(methods,
['instrument_class', 'instrument_class', 'init_instance',
- 'before_insert', 'after_insert', 'translate_row',
- 'populate_instance', 'append_result', 'translate_row',
- 'create_instance', 'populate_instance', 'reconstruct_instance',
- 'append_result', 'before_update', 'after_update', 'before_delete',
+ 'before_insert', 'after_insert',
+ 'reconstruct_instance',
+ 'before_update', 'after_update', 'before_delete',
'after_delete'])
def test_before_after_only_collection(self):
sess.flush()
eq_(methods,
['instrument_class', 'instrument_class', 'init_instance',
- 'before_insert', 'after_insert', 'translate_row',
- 'populate_instance', 'append_result', 'translate_row',
- 'create_instance', 'populate_instance', 'reconstruct_instance',
- 'append_result', 'before_update', 'after_update', 'before_delete',
+ 'before_insert', 'after_insert',
+ 'reconstruct_instance',
+ 'before_update', 'after_update', 'before_delete',
'after_delete'])
- def test_create_instance(self):
- User, users = self.classes.User, self.tables.users
-
- class CreateUserExt(sa.orm.MapperExtension):
- def create_instance(self, mapper, selectcontext, row, class_):
- return User.__new__(User)
-
- mapper(User, users, extension=CreateUserExt())
- sess = create_session()
- u1 = User()
- u1.name = 'ed'
- sess.add(u1)
- sess.flush()
- sess.expunge_all()
- assert sess.query(User).first()
def test_unnecessary_methods_not_evented(self):
users = self.tables.users