# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""sqlalchemy.orm.interfaces.LoaderStrategy
+"""sqlalchemy.orm.interfaces.LoaderStrategy
implementations, and related MapperOptions."""
-from .. import exc as sa_exc
+from .. import exc as sa_exc, inspect
from .. import util, log, event
from ..sql import util as sql_util, visitors
from . import (
- attributes, interfaces, exc as orm_exc, loading,
+ attributes, interfaces, exc as orm_exc, loading,
unitofwork, util as orm_util
)
from .util import _none_set
import itertools
def _register_attribute(strategy, mapper, useobject,
- compare_function=None,
+ compare_function=None,
typecallable=None,
uselist=False,
- callable_=None,
- proxy_property=None,
+ callable_=None,
+ proxy_property=None,
active_history=False,
- impl_class=None,
+ impl_class=None,
**kw
):
if prop.key in prop.parent.validators:
fn, include_removes = prop.parent.validators[prop.key]
listen_hooks.append(
- lambda desc, prop: orm_util._validator_events(desc,
+ lambda desc, prop: orm_util._validator_events(desc,
prop.key, fn, include_removes)
)
backref = kw.pop('backref', None)
if backref:
listen_hooks.append(
- lambda desc, prop: attributes.backref_listeners(desc,
- backref,
+ lambda desc, prop: attributes.backref_listeners(desc,
+ backref,
uselist)
)
if prop is m._props.get(prop.key):
desc = attributes.register_attribute_impl(
- m.class_,
- prop.key,
+ m.class_,
+ prop.key,
parent_token=prop,
- uselist=uselist,
- compare_function=compare_function,
+ uselist=uselist,
+ compare_function=compare_function,
useobject=useobject,
- extension=attribute_ext,
- trackparent=useobject and (prop.single_parent
- or prop.direction is interfaces.ONETOMANY),
+ extension=attribute_ext,
+ trackparent=useobject and (prop.single_parent
+ or prop.direction is interfaces.ONETOMANY),
typecallable=typecallable,
- callable_=callable_,
+ callable_=callable_,
active_history=active_history,
impl_class=impl_class,
doc=prop.doc,
super(UninstrumentedColumnLoader, self).__init__(parent)
self.columns = self.parent_property.columns
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
- def setup_query(self, context, entity, path,
+ def setup_query(self, context, entity, path,
adapter, column_collection, **kwargs):
for c in self.columns:
if adapter:
active_history = active_history
)
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path,
mapper, row, adapter):
key = self.key
# look through list of columns represented here
expire_missing=False
)
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, adapter,
only_load_props=None, **kwargs):
if (
- self.group is not None and
+ self.group is not None and
context.attributes.get(('undefer', self.group), False)
) or (only_load_props and self.key in only_load_props):
self.parent_property._get_strategy(ColumnLoader).\
if self.group:
toload = [
- p.key for p in
- localparent.iterate_properties
- if isinstance(p, StrategizedProperty) and
- isinstance(p.strategy, DeferredColumnLoader) and
+ p.key for p in
+ localparent.iterate_properties
+ if isinstance(p, StrategizedProperty) and
+ isinstance(p.strategy, DeferredColumnLoader) and
p.group==self.group
]
else:
if session is None:
raise orm_exc.DetachedInstanceError(
"Parent instance %s is not bound to a Session; "
- "deferred load operation of attribute '%s' cannot proceed" %
+ "deferred load operation of attribute '%s' cannot proceed" %
(orm_util.state_str(state), self.key)
)
query = session.query(localparent)
- if loading.load_on_ident(query, state.key,
+ if loading.load_on_ident(query, state.key,
only_load_props=group, refresh_state=state) is None:
raise orm_exc.ObjectDeletedError(state)
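# Editorial note (not part of this patch): the deferred-column logic above
# corresponds to user-level deferred() / undefer() usage, roughly (assuming
# a declarative Base and the usual Column imports):
#
#   from sqlalchemy.orm import deferred, undefer
#
#   class Book(Base):
#       __tablename__ = 'book'
#       id = Column(Integer, primary_key=True)
#       text = deferred(Column(Text))
#
#   session.query(Book).options(undefer('text')).all()
#
# Otherwise the column is loaded on first attribute access, via the
# load_on_ident() call shown above.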
class NoLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=None".
-
+
"""
def init_class_attribute(self, mapper):
self.is_class_level = True
_register_attribute(self, mapper,
- useobject=True,
+ useobject=True,
uselist=self.parent_property.uselist,
typecallable = self.parent_property.collection_class,
)
class LazyLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=True", that is loads when first accessed.
-
+
"""
def __init__(self, parent):
#from sqlalchemy.orm import query
self.use_get = not self.uselist and \
self.mapper._get_clause[0].compare(
- self._lazywhere,
- use_proxies=True,
+ self._lazywhere,
+ use_proxies=True,
equivalents=self.mapper._equivalent_columns
)
def init_class_attribute(self, mapper):
self.is_class_level = True
- # MANYTOONE currently only needs the
+ # MANYTOONE currently only needs the
# "old" value for delete-orphan
- # cascades. the required _SingleParentValidator
+ # cascades. the required _SingleParentValidator
# will enable active_history
- # in that case. otherwise we don't need the
+ # in that case. otherwise we don't need the
# "old" value during backref operations.
- _register_attribute(self,
+ _register_attribute(self,
mapper,
useobject=True,
callable_=self._load_for_state,
not self.use_get,
)
- def lazy_clause(self, state, reverse_direction=False,
- alias_secondary=False,
+ def lazy_clause(self, state, reverse_direction=False,
+ alias_secondary=False,
adapt_source=None):
if state is None:
return self._lazy_none_clause(
- reverse_direction,
+ reverse_direction,
adapt_source=adapt_source)
if not reverse_direction:
if bindparam._identifying_key in bind_to_col:
bindparam.callable = \
lambda: mapper._get_committed_state_attr_by_column(
- state, dict_,
+ state, dict_,
bind_to_col[bindparam._identifying_key])
else:
def visit_bindparam(bindparam):
if bindparam._identifying_key in bind_to_col:
bindparam.callable = \
lambda: mapper._get_state_attr_by_column(
- state, dict_,
+ state, dict_,
bind_to_col[bindparam._identifying_key])
if not session:
raise orm_exc.DetachedInstanceError(
"Parent instance %s is not bound to a Session; "
- "lazy load operation of attribute '%s' cannot proceed" %
+ "lazy load operation of attribute '%s' cannot proceed" %
(orm_util.state_str(state), self.key)
)
- # if we have a simple primary key load, check the
+ # if we have a simple primary key load, check the
# identity map without generating a Query at all
if self.use_get:
ident = self._get_ident_for_use_get(
q = q.order_by(*util.to_list(self.parent_property.order_by))
for rev in self.parent_property._reverse_property:
- # reverse props that are MANYTOONE are loading *this*
+ # reverse props that are MANYTOONE are loading *this*
# object from get(), so don't need to eager out to those.
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
if l > 1:
util.warn(
"Multiple rows returned with "
- "uselist=False for lazily-loaded attribute '%s' "
+ "uselist=False for lazily-loaded attribute '%s' "
% self.parent_property)
return result[0]
return None
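# Editorial note (not part of this patch): when self.use_get is True (a
# simple many-to-one referencing the target's primary key), _load_for_state
# consults the Session's identity map first, so e.g. address.user can be
# resolved without emitting SQL if that User is already present.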
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path,
mapper, row, adapter):
key = self.key
if not self.is_class_level:
def set_lazy_callable(state, dict_, row):
- # we are not the primary manager for this attribute
+ # we are not the primary manager for this attribute
# on this class - set up a
- # per-instance lazyloader, which will override the
+ # per-instance lazyloader, which will override the
# class-level behavior.
- # this currently only happens when using a
+ # this currently only happens when using a
# "lazyload" option on a "no load"
- # attribute - "eager" attributes always have a
+ # attribute - "eager" attributes always have a
# class-level lazyloader installed.
state.set_callable(dict_, key, LoadLazyAttribute(state, key))
return set_lazy_callable, None, None
else:
def reset_for_lazy_callable(state, dict_, row):
- # we are the primary manager for this attribute on
+ # we are the primary manager for this attribute on
# this class - reset its
- # per-instance attribute state, so that the class-level
+ # per-instance attribute state, so that the class-level
# lazy loader is
# executed when next referenced on this instance.
# this is needed in
- # populate_existing() types of scenarios to reset
+ # populate_existing() types of scenarios to reset
# any existing state.
state.reset(dict_, key)
_get_strategy(LazyLoader).\
init_class_attribute(mapper)
- def setup_query(self, context, entity,
+ def setup_query(self, context, entity,
path, adapter, column_collection=None,
parentmapper=None, **kwargs):
pass
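# Editorial note (not part of this patch): the strategy above appears to be
# the "immediate" loader (lazy='immediate'), which invokes the attribute's
# lazy load right away as each parent row is processed, rather than waiting
# for first access.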
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path,
mapper, row, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
_get_strategy(LazyLoader).\
init_class_attribute(mapper)
- def setup_query(self, context, entity,
- path, adapter,
+ def setup_query(self, context, entity,
+ path, adapter,
column_collection=None,
parentmapper=None, **kwargs):
else:
effective_entity = self.mapper
- subq_path = context.attributes.get(('subquery_path', None),
+ subq_path = context.attributes.get(('subquery_path', None),
orm_util.PathRegistry.root)
subq_path = subq_path + path
- # if not via query option, check for
+ # if not via query option, check for
# a cycle
if not path.contains(context, "loaderstrategy"):
if self.join_depth:
self._get_leftmost(subq_path)
orig_query = context.attributes.get(
- ("orig_query", SubqueryLoader),
+ ("orig_query", SubqueryLoader),
context.query)
# generate a new Query from the original, then
leftmost_attr
)
- # generate another Query that will join the
+ # generate another Query that will join the
# left alias to the target relationships.
# basically doing a longhand
# "from_self()". (from_self() itself not quite industrial
q = q.order_by(*local_attr)
q = q.add_columns(*local_attr)
- q = self._apply_joins(q, to_join, left_alias,
+ q = self._apply_joins(q, to_join, left_alias,
parent_alias, effective_entity)
q = self._setup_options(q, subq_path, orig_query, effective_entity)
q = self._setup_outermost_orderby(q)
- # add new query to attributes to be picked up
+ # add new query to attributes to be picked up
# by create_row_processor
path.set(context, "subquery", q)
# to look only for significant columns
q = orig_query._clone().correlate(None)
- # TODO: why does polymporphic etc. require hardcoding
+ # TODO: why does polymorphic etc. require hardcoding
# into _adapt_col_list ? Does query.add_columns(...) work
# with polymorphic loading ?
q._set_entities(q._adapt_col_list(leftmost_attr))
# figure out what's being joined. a.k.a. the fun part
to_join = [
- (subq_path[i], subq_path[i+1])
+ (subq_path[i], subq_path[i+1])
for i in xrange(0, len(subq_path), 2)
]
# which needs to be aliased.
if len(to_join) > 1:
- ext = orm_util._extended_entity_info(subq_path[-2])
+ info = inspect(subq_path[-2])
if len(to_join) < 2:
# in the case of a one level eager load, this is the
# leftmost "left_alias".
parent_alias = left_alias
- elif ext.mapper.isa(self.parent):
+ elif info.mapper.isa(self.parent):
# In the case of multiple levels, retrieve
- # it from subq_path[-2]. This is the same as self.parent
- # in the vast majority of cases, and [ticket:2014]
+ # it from subq_path[-2]. This is the same as self.parent
+ # in the vast majority of cases, and [ticket:2014]
# illustrates a case where subq_path[-2] is a subclass
# of self.parent
parent_alias = orm_util.AliasedClass(subq_path[-2])
else:
- # if of_type() were used leading to this relationship,
+ # if of_type() were used leading to this relationship,
# self.parent is more specific than subq_path[-2]
parent_alias = orm_util.AliasedClass(self.parent)
]
return to_join, local_attr, parent_alias
- def _apply_joins(self, q, to_join, left_alias, parent_alias,
+ def _apply_joins(self, q, to_join, left_alias, parent_alias,
effective_entity):
for i, (mapper, key) in enumerate(to_join):
# we need to use query.join() as opposed to
- # orm.join() here because of the
- # rich behavior it brings when dealing with
+ # orm.join() here because of the
+ # rich behavior it brings when dealing with
# "with_polymorphic" mappers. "aliased"
- # and "from_joinpoint" take care of most of
+ # and "from_joinpoint" take care of most of
# the chaining and aliasing for us.
first = i == 0
# these will fire relative to subq_path.
q = q._with_current_path(subq_path)
q = q._conditional_options(*orig_query._with_options)
- if orig_query._populate_existing:
+ if orig_query._populate_existing:
q._populate_existing = orig_query._populate_existing
return q
def _setup_outermost_orderby(self, q):
if self.parent_property.order_by:
# if there's an ORDER BY, alias it the same
- # way joinedloader does, but we have to pull out
+ # way joinedloader does, but we have to pull out
# the "eagerjoin" from the query.
# this really only picks up the "secondary" table
# right now.
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
- "population - eager loading cannot be applied." %
+ "population - eager loading cannot be applied." %
self)
path = path[self.key]
collections = path.get(context, "collections")
if collections is None:
collections = dict(
- (k, [v[0] for v in v])
+ (k, [v[0] for v in v])
for k, v in itertools.groupby(
- subq,
+ subq,
lambda x:x[1:]
))
path.set(context, 'collections', collections)
def _create_collection_loader(self, collections, local_cols):
def load_collection_from_subq(state, dict_, row):
collection = collections.get(
- tuple([row[col] for col in local_cols]),
+ tuple([row[col] for col in local_cols]),
()
)
state.get_impl(self.key).\
def _create_scalar_loader(self, collections, local_cols):
def load_scalar_from_subq(state, dict_, row):
collection = collections.get(
- tuple([row[col] for col in local_cols]),
+ tuple([row[col] for col in local_cols]),
(None,)
)
if len(collection) > 1:
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
-
+
"""
def __init__(self, parent):
super(JoinedLoader, self).__init__(parent)
with_polymorphic = None
- user_defined_adapter = path.get(context,
- "user_defined_eager_row_processor",
+ user_defined_adapter = path.get(context,
+ "user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
user_defined_adapter
)
else:
- # if not via query option, check for
+ # if not via query option, check for
# a cycle
if not path.contains(context, "loaderstrategy"):
if self.join_depth:
)
with_poly_info = path.get(
- context,
+ context,
"path_with_polymorphic",
None
)
for value in self.mapper._iterate_polymorphic_properties(
mappers=with_polymorphic):
value.setup(
- context,
- entity,
- path,
- clauses,
- parentmapper=self.mapper,
+ context,
+ entity,
+ path,
+ clauses,
+ parentmapper=self.mapper,
column_collection=add_to_collection,
allow_innerjoin=allow_innerjoin)
- def _get_user_defined_adapter(self, context, entity,
+ def _get_user_defined_adapter(self, context, entity,
path, adapter, user_defined_adapter):
adapter = entity._get_entity_clauses(context.query, context)
if adapter and user_defined_adapter:
user_defined_adapter = user_defined_adapter.wrap(adapter)
- path.set(context, "user_defined_eager_row_processor",
+ path.set(context, "user_defined_eager_row_processor",
user_defined_adapter)
elif adapter:
user_defined_adapter = adapter
- path.set(context, "user_defined_eager_row_processor",
+ path.set(context, "user_defined_eager_row_processor",
user_defined_adapter)
add_to_collection = context.primary_columns
return user_defined_adapter, adapter, add_to_collection
- def _generate_row_adapter(self,
+ def _generate_row_adapter(self,
context, entity, path, adapter,
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
- context,
- "path_with_polymorphic",
+ context,
+ "path_with_polymorphic",
None
)
if with_poly_info:
else:
to_adapt = orm_util.AliasedClass(self.mapper)
clauses = orm_util.ORMAdapter(
- to_adapt,
+ to_adapt,
equivalents=self.mapper._equivalent_columns,
adapt_required=True)
assert clauses.aliased_class is not None
context.multi_row_eager_loaders = True
innerjoin = allow_innerjoin and path.get(context,
- "eager_join_type",
+ "eager_join_type",
self.parent_property.innerjoin)
if not innerjoin:
# if this is an outer join, all eager joins from
allow_innerjoin = False
context.create_eager_joins.append(
- (self._create_eager_join, context,
- entity, path, adapter,
+ (self._create_eager_join, context,
+ entity, path, adapter,
parentmapper, clauses, innerjoin)
)
path.set(context, "eager_row_processor", clauses)
return clauses, adapter, add_to_collection, allow_innerjoin
- def _create_eager_join(self, context, entity,
- path, adapter, parentmapper,
+ def _create_eager_join(self, context, entity,
+ path, adapter, parentmapper,
clauses, innerjoin):
if parentmapper is None:
localparent = parentmapper
# whether or not the Query will wrap the selectable in a subquery,
- # and then attach eager load joins to that (i.e., in the case of
+ # and then attach eager load joins to that (i.e., in the case of
# LIMIT/OFFSET etc.)
should_nest_selectable = context.multi_row_eager_loaders and \
context.query._should_nest_selectable
if clause is not None:
# join to an existing FROM clause on the query.
# key it to its list index in the eager_joins dict.
- # Query._compile_context will adapt as needed and
+ # Query._compile_context will adapt as needed and
# append to the FROM clause of the select().
entity_key, default_towrap = index, clause
else:
onclause = getattr(
orm_util.AliasedClass(
- self.parent,
+ self.parent,
adapter.selectable
- ),
+ ),
self.key, self.parent_property
)
if onclause is self.parent_property:
- # TODO: this is a temporary hack to
+ # TODO: this is a temporary hack to
# account for polymorphic eager loads where
# the eagerload is referencing via of_type().
join_to_left = True
assert clauses.aliased_class is not None
context.eager_joins[entity_key] = eagerjoin = \
orm_util.join(
- towrap,
- clauses.aliased_class,
- onclause,
- join_to_left=join_to_left,
+ towrap,
+ clauses.aliased_class,
+ onclause,
+ join_to_left=join_to_left,
isouter=not innerjoin
)
if self.parent_property.secondary is None and \
not parentmapper:
# for parentclause that is the non-eager end of the join,
- # ensure all the parent cols in the primaryjoin are actually
+ # ensure all the parent cols in the primaryjoin are actually
# in the
- # columns clause (i.e. are not deferred), so that aliasing applied
+ # columns clause (i.e. are not deferred), so that aliasing applied
# by the Query propagates those columns outward.
- # This has the effect
+ # This has the effect
# of "undefering" those columns.
for col in sql_util.find_columns(
self.parent_property.primaryjoin):
def _create_eager_adapter(self, context, row, adapter, path):
- user_defined_adapter = path.get(context,
- "user_defined_eager_row_processor",
+ user_defined_adapter = path.get(context,
+ "user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
decorator = user_defined_adapter
- # user defined eagerloads are part of the "primary"
+ # user defined eagerloads are part of the "primary"
# portion of the load.
# the adapters applied to the Query should be honored.
if context.adapter and decorator:
self.mapper.identity_key_from_row(row, decorator)
return decorator
except KeyError:
- # no identity key - dont return a row
+ # no identity key - don't return a row
# processor, will cause a degrade to lazy
return False
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
- "population - eager loading cannot be applied." %
+ "population - eager loading cannot be applied." %
self)
our_path = path[self.key]
eager_adapter = self._create_eager_adapter(
- context,
- row,
+ context,
+ row,
adapter, our_path)
if eager_adapter is not False:
_instance = loading.instance_processor(
self.mapper,
- context,
+ context,
our_path[self.mapper],
eager_adapter)
super(LoadEagerFromAliasOption, self).__init__(key)
if alias is not None:
if not isinstance(alias, basestring):
- mapper, alias, is_aliased_class = \
- orm_util._entity_info(alias)
+ info = inspect(alias)
+ alias = info.selectable
self.alias = alias
self.chained = chained
(root_mapper, propname) = path.path[-2:]
prop = root_mapper._props[propname]
adapter = query._polymorphic_adapters.get(prop.mapper, None)
- path.setdefault(query,
- "user_defined_eager_row_processor",
+ path.setdefault(query,
+ "user_defined_eager_row_processor",
adapter)
root_mapper, propname = paths[-1].path[-2:]
if self.alias is not None:
if isinstance(self.alias, basestring):
self.alias = prop.target.alias(self.alias)
- paths[-1].set(query, "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
+ paths[-1].set(query, "user_defined_eager_row_processor",
+ sql_util.ColumnAdapter(self.alias,
equivalents=prop.mapper._equivalent_columns)
)
else:
if paths[-1].contains(query, "path_with_polymorphic"):
with_poly_info = paths[-1].get(query, "path_with_polymorphic")
adapter = orm_util.ORMAdapter(
- with_poly_info.entity,
+ with_poly_info.entity,
equivalents=prop.mapper._equivalent_columns,
adapt_required=True)
else:
adapter = query._polymorphic_adapters.get(prop.mapper, None)
- paths[-1].set(query, "user_defined_eager_row_processor",
+ paths[-1].set(query, "user_defined_eager_row_processor",
adapter)
def single_parent_validator(desc, prop):
def _do_check(state, value, oldvalue, initiator):
if value is not None and initiator.key == prop.key:
hasparent = initiator.hasparent(attributes.instance_state(value))
- if hasparent and oldvalue is not value:
+ if hasparent and oldvalue is not value:
raise sa_exc.InvalidRequestError(
"Instance %s is already associated with an instance "
"of %s via its %s attribute, and is only allowed a "
- "single parent." %
+ "single parent." %
(orm_util.instance_str(value), state.class_, prop)
)
return value
def set_(state, value, oldvalue, initiator):
return _do_check(state, value, oldvalue, initiator)
- event.listen(desc, 'append', append, raw=True, retval=True,
+ event.listen(desc, 'append', append, raw=True, retval=True,
active_history=True)
- event.listen(desc, 'set', set_, raw=True, retval=True,
+ event.listen(desc, 'set', set_, raw=True, retval=True,
active_history=True)
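# Editorial note (not part of this patch): single_parent_validator is
# installed for relationships configured with single_parent=True (commonly
# alongside cascade='all, delete-orphan'); associating a child that already
# has a parent with a second parent raises the InvalidRequestError shown
# above.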