as well as some of the attribute loading strategies.
"""
-
+from __future__ import absolute_import
from .. import util
from . import attributes, exc as orm_exc
from ..sql import util as sql_util
from .util import _none_set, state_str
from .. import exc as sa_exc
+import collections
_new_runid = util.counter()
except orm_exc.NoResultFound:
return None
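+# Row-processor callables are grouped into four buckets: "new" populators run
+# against newly constructed instances, "existing" against instances already
+# present in the identity map, "eager" populators are post-load eager loaders
+# gated on context.invoke_all_eagers, and "delayed" populators are appended
+# onto "new" last so they fire after all other populators.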
+_populator_struct = collections.namedtuple(
+    '_populator_struct', ['new', 'existing', 'eager', 'delayed'])
+
def instance_processor(mapper, context, result, path, adapter,
polymorphic_from=None,
identity_class = mapper._identity_class
+    populators = _populator_struct(new=[], existing=[], eager=[], delayed=[])
+
+ props = mapper._props.values()
+ if only_load_props is not None:
+ props = (p for p in props if p.key in only_load_props)
+
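+    # each strategy appends (key, callable) pairs onto the populators
+    # buckets here, rather than returning a fixed-size tuple of processors.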
+ for prop in props:
+ prop.create_row_processor(
+ context, path, mapper, result, adapter, populators)
+
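+    # delayed populators (e.g. the ImmediateLoader callable) must fire only
+    # after every other new-instance populator has run.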
+ if populators.delayed:
+ populators.new.extend(populators.delayed)
+
(new_populators, existing_populators,
- eager_populators) = _populators(
- mapper, context, path, result, adapter, only_load_props)
+ eager_populators) = (
+ populators.new, populators.existing, populators.eager)
load_path = context.query._current_path + path \
if context.query._current_path.path \
return _instance
-def _populators(mapper, context, path, result, adapter, only_load_props):
- """Produce a collection of attribute level row processor
- callables."""
-
- new_populators = []
- existing_populators = []
- delayed_populators = []
- eager_populators = []
- invoke_eagers = context.invoke_all_eagers
-
- props = mapper._props.values()
- if only_load_props is not None:
- props = (p for p in props if p.key in only_load_props)
-
- for prop in props:
- np, ep, dp, gp = prop.create_row_processor(
- context, path, mapper, result, adapter)
- if np:
- new_populators.append((prop.key, np))
- if ep:
- existing_populators.append((prop.key, ep))
- if dp:
- delayed_populators.append((prop.key, dp))
- if invoke_eagers and gp:
- eager_populators.append((prop.key, gp))
-
- if delayed_populators:
- new_populators += delayed_populators
-
- return new_populators, existing_populators, eager_populators
-
-
def _configure_subclass_mapper(mapper, context, result, path, adapter):
"""Produce a mapper level row processor callable factory for mappers
inheriting this one."""
def create_row_processor(
self, context, path, loadopt,
- mapper, result, adapter):
- return None, None, None, None
+ mapper, result, adapter, populators):
+ pass
@log.class_logger
def create_row_processor(
self, context, path,
- loadopt, mapper, result, adapter):
+ loadopt, mapper, result, adapter, populators):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
if getter:
def fetch_col(state, dict_, row):
dict_[key] = getter(row)
- return fetch_col, None, None, None
+ populators.new.append((self.key, fetch_col))
+ break
else:
def expire_for_non_present_col(state, dict_, row):
state._expire_attribute_pre_commit(dict_, key)
- return expire_for_non_present_col, None, None, None
+ populators.new.append((self.key, expire_for_non_present_col))
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, result, adapter):
+ mapper, result, adapter, populators):
col = self.columns[0]
if adapter:
col = adapter.columns[col]
# TODO: put a result-level contains here
getter = result._getter(col)
if getter:
- return self.parent_property._get_strategy_by_cls(ColumnLoader).\
+ self.parent_property._get_strategy_by_cls(ColumnLoader).\
create_row_processor(
- context, path, loadopt, mapper, result, adapter)
+ context, path, loadopt, mapper, result,
+ adapter, populators)
elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
mapper.class_manager,
LoadDeferredColumns(key), key)
- return set_deferred_for_local_state, None, None, None
+ populators.new.append((self.key, set_deferred_for_local_state))
else:
def reset_col_for_deferred(state, dict_, row):
# reset state on the key so that deferred callables
# fire off on next access.
state._reset(dict_, key)
- return reset_col_for_deferred, None, None, None
+ populators.new.append((self.key, reset_col_for_deferred))
def init_class_attribute(self, mapper):
self.is_class_level = True
def create_row_processor(
self, context, path, loadopt, mapper,
- result, adapter):
+ result, adapter, populators):
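+        # "noload": initialize the attribute to an empty value or collection
+        # without performing any load.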
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
- return invoke_no_load, None, None, None
+ populators.new.append((self.key, invoke_no_load))
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, result, adapter):
+ mapper, result, adapter, populators):
key = self.key
if not self.is_class_level:
# we are not the primary manager for this attribute
mapper.class_manager,
LoadLazyAttribute(key), key)
- return set_lazy_callable, None, None, None
+ populators.new.append((self.key, set_lazy_callable))
elif context.populate_existing or mapper.always_refresh:
def reset_for_lazy_callable(state, dict_, row):
# we are the primary manager for this attribute on
# any existing state.
state._reset(dict_, key)
- return reset_for_lazy_callable, None, None, None
- else:
- return None, None, None, None
+ populators.new.append((self.key, reset_for_lazy_callable))
class LoadLazyAttribute(object):
def create_row_processor(
self, context, path, loadopt,
- mapper, result, adapter):
+ mapper, result, adapter, populators):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
- return None, None, load_immediate, None
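+        # routed through the "delayed" bucket so the immediate load fires
+        # only after all other populators for the row have run.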
+ populators.delayed.append((self.key, load_immediate))
@log.class_logger
def create_row_processor(
self, context, path, loadopt,
- mapper, result, adapter):
+ mapper, result, adapter, populators):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
subq = path.get(context.attributes, 'subquery')
if subq is None:
- return None, None, None, None
+ return
local_cols = self.parent_property.local_columns
local_cols = [adapter.columns[c] for c in local_cols]
if self.uselist:
- return self._create_collection_loader(collections, local_cols)
+ self._create_collection_loader(
+ context, collections, local_cols, populators)
else:
- return self._create_scalar_loader(collections, local_cols)
+ self._create_scalar_loader(
+ context, collections, local_cols, populators)
- def _create_collection_loader(self, collections, local_cols):
+ def _create_collection_loader(
+ self, context, collections, local_cols, populators):
def load_collection_from_subq(state, dict_, row):
collection = collections.get(
tuple([row[col] for col in local_cols]),
state.get_impl(self.key).\
set_committed_value(state, dict_, collection)
- return load_collection_from_subq, None, None, collections.loader
+ populators.new.append((self.key, load_collection_from_subq))
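+        # collections.loader triggers the subquery load; register it as an
+        # eager populator only when the context allows eager loaders to run.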
+ if context.invoke_all_eagers:
+ populators.eager.append((self.key, collections.loader))
- def _create_scalar_loader(self, collections, local_cols):
+ def _create_scalar_loader(
+ self, context, collections, local_cols, populators):
def load_scalar_from_subq(state, dict_, row):
collection = collections.get(
tuple([row[col] for col in local_cols]),
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return load_scalar_from_subq, None, None, collections.loader
+ populators.new.append((self.key, load_scalar_from_subq))
+ if context.invoke_all_eagers:
+ populators.eager.append((self.key, collections.loader))
@log.class_logger
def create_row_processor(
self, context, path, loadopt, mapper,
- result, adapter):
+ result, adapter, populators):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
eager_adapter)
if not self.uselist:
- return self._create_scalar_loader(context, key, _instance)
+ self._create_scalar_loader(context, key, _instance, populators)
else:
- return self._create_collection_loader(context, key, _instance)
+ self._create_collection_loader(
+ context, key, _instance, populators)
else:
- return self.parent_property._get_strategy_by_cls(LazyLoader).\
+ self.parent_property._get_strategy_by_cls(LazyLoader).\
create_row_processor(
context, path, loadopt,
- mapper, result, adapter)
+ mapper, result, adapter, populators)
- def _create_collection_loader(self, context, key, _instance):
+ def _create_collection_loader(self, context, key, _instance, populators):
def load_collection_from_joined_new_row(state, dict_, row):
collection = attributes.init_state_collection(
state, dict_, key)
def load_collection_from_joined_exec(state, dict_, row):
_instance(row)
- return load_collection_from_joined_new_row, \
- load_collection_from_joined_existing_row, \
- None, load_collection_from_joined_exec
+ populators.new.append((self.key, load_collection_from_joined_new_row))
+ populators.existing.append(
+ (self.key, load_collection_from_joined_existing_row))
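+        # load_collection_from_joined_exec simply invokes the nested
+        # _instance processor for the joined row; like the other eager
+        # populators it is skipped when eager loads are suppressed.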
+ if context.invoke_all_eagers:
+ populators.eager.append(
+ (self.key, load_collection_from_joined_exec))
- def _create_scalar_loader(self, context, key, _instance):
+ def _create_scalar_loader(self, context, key, _instance, populators):
def load_scalar_from_joined_new_row(state, dict_, row):
# set a scalar object instance directly on the parent
# object, bypassing InstrumentedAttribute event handlers.
def load_scalar_from_joined_exec(state, dict_, row):
_instance(row)
- return load_scalar_from_joined_new_row, \
- load_scalar_from_joined_existing_row, \
- None, load_scalar_from_joined_exec
+ populators.new.append((self.key, load_scalar_from_joined_new_row))
+ populators.existing.append(
+ (self.key, load_scalar_from_joined_existing_row))
+ if context.invoke_all_eagers:
+ populators.eager.append((self.key, load_scalar_from_joined_exec))
def single_parent_validator(desc, prop):