# interfaces.py
-# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer
+# mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
return EXT_CONTINUE
- def append_result(self, mapper, selectcontext, row, instance, result, **flags):
+ def append_result(self, mapper, selectcontext, row, instance,
+ result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
\**flags
extra information about the row, same as criterion in
- ``create_row_processor()`` method of :class:`~sqlalchemy.orm.interfaces.MapperProperty`
+ ``create_row_processor()`` method of
+ :class:`~sqlalchemy.orm.interfaces.MapperProperty`
"""
return EXT_CONTINUE
- def populate_instance(self, mapper, selectcontext, row, instance, **flags):
+ def populate_instance(self, mapper, selectcontext, row,
+ instance, **flags):
"""Receive an instance before that instance has
its attributes populated.
Note that this method is called for all instances that are marked as
"dirty", even those which have no net changes to their column-based
- attributes. An object is marked as dirty when any of its column-based
+ attributes. An object is marked as dirty when any of its column-based
attributes have a "set attribute" operation called or when any of its
- collections are modified. If, at update time, no column-based attributes
- have any net changes, no UPDATE statement will be issued. This means
- that an instance being sent to before_update is *not* a guarantee that
- an UPDATE statement will be issued (although you can affect the outcome
- here).
-
- To detect if the column-based attributes on the object have net changes,
- and will therefore generate an UPDATE statement, use
- ``object_session(instance).is_modified(instance, include_collections=False)``.
+ collections are modified. If, at update time, no column-based
+ attributes have any net changes, no UPDATE statement will be issued.
+ This means that an instance being sent to before_update is *not* a
+ guarantee that an UPDATE statement will be issued (although you can
+ affect the outcome here).
+
+ To detect if the column-based attributes on the object have net
+ changes, and will therefore generate an UPDATE statement, use
+ ``object_session(instance).is_modified(instance,
+ include_collections=False)``.
Column-based attributes can be modified within this method
which will result in the new value being updated. However
return EXT_CONTINUE
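# Illustrative sketch, not part of the diff: the is_modified() check
# described in the docstring above, used from MapperExtension.before_update().
# "Document" and the updated_at assignment are hypothetical; the extension
# would be attached via mapper(Document, documents_table, extension=...).
import datetime
from sqlalchemy.orm import object_session
from sqlalchemy.orm.interfaces import MapperExtension, EXT_CONTINUE

class TimestampExtension(MapperExtension):
    def before_update(self, mapper, connection, instance):
        # only column-based net changes will actually emit an UPDATE
        if object_session(instance).is_modified(
                instance, include_collections=False):
            instance.updated_at = datetime.datetime.now()
        return EXT_CONTINUE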
class SessionExtension(object):
- """An extension hook object for Sessions. Subclasses may be installed into a Session
- (or sessionmaker) using the ``extension`` keyword argument.
- """
+
+ """An extension hook object for Sessions. Subclasses may be
+ installed into a Session (or sessionmaker) using the ``extension``
+ keyword argument. """
def before_commit(self, session):
"""Execute right before commit is called.
-
- Note that this may not be per-flush if a longer running transaction is ongoing."""
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
def after_commit(self, session):
"""Execute after a commit has occured.
-
- Note that this may not be per-flush if a longer running transaction is ongoing."""
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
def after_rollback(self, session):
"""Execute after a rollback has occured.
- Note that this may not be per-flush if a longer running transaction is ongoing."""
-
- def before_flush(self, session, flush_context, instances):
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
+ def before_flush( self, session, flush_context, instances):
"""Execute before flush process has started.
-
- `instances` is an optional list of objects which were passed to the ``flush()``
- method.
- """
+
+ `instances` is an optional list of objects which were passed to
+ the ``flush()`` method. """
def after_flush(self, session, flush_context):
- """Execute after flush has completed, but before commit has been called.
-
- Note that the session's state is still in pre-flush, i.e. 'new', 'dirty',
- and 'deleted' lists still show pre-flush state as well as the history
- settings on instance attributes."""
+ """Execute after flush has completed, but before commit has been
+ called.
+
+ Note that the session's state is still in pre-flush, i.e. 'new',
+ 'dirty', and 'deleted' lists still show pre-flush state as well
+ as the history settings on instance attributes."""
def after_flush_postexec(self, session, flush_context):
- """Execute after flush has completed, and after the post-exec state occurs.
-
- This will be when the 'new', 'dirty', and 'deleted' lists are in their final
- state. An actual commit() may or may not have occured, depending on whether or not
- the flush started its own transaction or participated in a larger transaction.
- """
+ """Execute after flush has completed, and after the post-exec
+ state occurs.
+
+ This will be when the 'new', 'dirty', and 'deleted' lists are in
+ their final state. An actual commit() may or may not have
+ occured, depending on whether or not the flush started its own
+ transaction or participated in a larger transaction. """
- def after_begin(self, session, transaction, connection):
+ def after_begin( self, session, transaction, connection):
"""Execute after a transaction is begun on a connection
-
- `transaction` is the SessionTransaction. This method is called after an
- engine level transaction is begun on a connection.
- """
+
+ `transaction` is the SessionTransaction. This method is called
+ after an engine level transaction is begun on a connection. """
def after_attach(self, session, instance):
"""Execute after an instance is attached to a session.
- This is called after an add, delete or merge.
- """
-
- def after_bulk_update(self, session, query, query_context, result):
+
+ This is called after an add, delete or merge. """
+ def after_bulk_update( self, session, query, query_context, result):
"""Execute after a bulk update operation to the session.
-
+
This is called after a session.query(...).update()
-
- `query` is the query object that this update operation was called on.
- `query_context` was the query context object.
+
+ `query` is the query object that this update operation was
+ called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
"""
- def after_bulk_delete(self, session, query, query_context, result):
+ def after_bulk_delete( self, session, query, query_context, result):
"""Execute after a bulk delete operation to the session.
-
+
This is called after a session.query(...).delete()
-
- `query` is the query object that this delete operation was called on.
- `query_context` was the query context object.
+
+ `query` is the query object that this delete operation was
+ called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
"""
Callables are of the following form::
def new_execute(state, dict_, row, isnew):
- # process incoming instance state and given row. the instance is
+ # process incoming instance state and given row.
+ # the instance is
# "new" and was just created upon receipt of this row.
"isnew" indicates if the instance was newly created as a
result of reading this row
def existing_execute(state, dict_, row):
- # process incoming instance state and given row. the instance is
+ # process incoming instance state and given row. the
+ # instance is
# "existing" and was created based on a previous row.
return (new_execute, existing_execute)
raise NotImplementedError()
- def cascade_iterator(self, type_, state, visited_instances=None, halt_on=None):
+ def cascade_iterator(self, type_, state, visited_instances=None,
+ halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
@property
def class_attribute(self):
- """Return the class-bound descriptor corresponding to this MapperProperty."""
-
- return getattr(self.parent.class_, self.key)
-
- def do_init(self):
- """Perform subclass-specific initialization post-mapper-creation steps.
+ """Return the class-bound descriptor corresponding to this
+ MapperProperty."""
- This is a *template* method called by the
- ``MapperProperty`` object's init() method.
+ return getattr(self.parent.class_, self.key)
+ def do_init(self):
+ """Perform subclass-specific initialization post-mapper-creation
+ steps.
+
+ This is a template method called by the ``MapperProperty``
+ object's init() method.
+
"""
+
pass
def post_instrument_class(self, mapper):
raise NotImplementedError("%r" % self)
def adapted(self, adapter):
- """Return a copy of this PropComparator which will use the given adaption function
- on the local side of generated expressions.
-
+ """Return a copy of this PropComparator which will use the given
+ adaption function on the local side of generated expressions.
+
"""
+
return self.__class__(self.prop, self.mapper, adapter)
@staticmethod
def of_type(self, class_):
"""Redefine this object in terms of a polymorphic subclass.
- Returns a new PropComparator from which further criterion can be evaluated.
+ Returns a new PropComparator from which further criterion can be
+ evaluated.
e.g.::
return self.operate(PropComparator.of_type_op, class_)
def any(self, criterion=None, **kwargs):
- """Return true if this collection contains any member that meets the given criterion.
+ """Return true if this collection contains any member that meets the
+ given criterion.
criterion
an optional ClauseElement formulated against the member class' table
return self.operate(PropComparator.any_op, criterion, **kwargs)
def has(self, criterion=None, **kwargs):
- """Return true if this element references a member which meets the given criterion.
+ """Return true if this element references a member which meets the
+ given criterion.
criterion
an optional ClauseElement formulated against the member class' table
"""
def _get_context_strategy(self, context, path):
- cls = context.attributes.get(("loaderstrategy", _reduce_path(path)), None)
+ cls = context.attributes.get(('loaderstrategy',
+ _reduce_path(path)), None)
if cls:
try:
return self.__all_strategies[cls]
pass
def process_query_conditionally(self, query):
- """same as process_query(), except that this option may not apply
- to the given query.
-
+ """same as process_query(), except that this option may not
+ apply to the given query.
+
Used when secondary loaders resend existing options to a new
Query."""
+
self.process_query(query)
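# Illustrative sketch, not part of the diff: the MapperOption contract as
# described above - process_query() receives the Query, and
# process_query_conditionally() defaults to calling it.  The attribute key
# ('example_option',) is hypothetical.
from sqlalchemy.orm.interfaces import MapperOption

class ExampleOption(MapperOption):
    # carry the option along to lazy loaders as well
    propagate_to_loaders = True

    def __init__(self, value):
        self.value = value

    def process_query(self, query):
        # record state on the Query, similarly to how ExtensionOption
        # and the strategy options below populate query._attributes
        query._attributes[('example_option',)] = self.value

# usage:  session.query(SomeClass).options(ExampleOption('x'))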
class ExtensionOption(MapperOption):
- """a MapperOption that applies a MapperExtension to a query operation."""
-
+
+ """a MapperOption that applies a MapperExtension to a query
+ operation."""
+
def __init__(self, ext):
self.ext = ext
class PropertyOption(MapperOption):
"""A MapperOption that is applied to a property off the mapper or
- one of its child mappers, identified by a dot-separated key.
- """
+ one of its child mappers, identified by a dot-separated key. """
def __init__(self, key, mapper=None):
self.key = key
state['key'] = tuple(ret)
self.__dict__ = state
- def _find_entity(self, query, mapper, raiseerr):
- from sqlalchemy.orm.util import _class_to_mapper, _is_aliased_class
-
+ def _find_entity( self, query, mapper, raiseerr):
+ from sqlalchemy.orm.util import _class_to_mapper, \
+ _is_aliased_class
if _is_aliased_class(mapper):
searchfor = mapper
isa = False
else:
searchfor = _class_to_mapper(mapper)
isa = True
-
for ent in query._mapper_entities:
- if searchfor is ent.path_entity or (
- isa and
- searchfor.common_parent(ent.path_entity)):
+ if searchfor is ent.path_entity or isa \
+ and searchfor.common_parent(ent.path_entity):
return ent
else:
if raiseerr:
- raise sa_exc.ArgumentError(
- "Can't find entity %s in Query. Current list: %r"
- % (searchfor, [
- str(m.path_entity) for m in query._entities
- ]))
+ raise sa_exc.ArgumentError("Can't find entity %s in "
+ "Query. Current list: %r" % (searchfor,
+ [str(m.path_entity) for m in query._entities]))
else:
return None
entity = None
l = []
mappers = []
-
- # _current_path implies we're in a secondary load
- # with an existing path
+
+ # _current_path implies we're in a secondary load with an
+ # existing path
+
current_path = list(query._current_path)
-
tokens = []
for key in util.to_list(self.key):
if isinstance(key, basestring):
tokens += key.split('.')
else:
tokens += [key]
-
for token in tokens:
if isinstance(token, basestring):
if not entity:
if current_path[1] == token:
current_path = current_path[2:]
continue
-
entity = query._entity_zero()
path_element = entity.path_entity
mapper = entity.mapper
mappers.append(mapper)
- prop = mapper.get_property(
- token,
- resolve_synonyms=True,
- raiseerr=raiseerr)
+ prop = mapper.get_property(token,
+ resolve_synonyms=True, raiseerr=raiseerr)
key = token
elif isinstance(token, PropComparator):
prop = token.property
if not entity:
if current_path:
- if current_path[0:2] == [token.parententity, prop.key]:
+ if current_path[0:2] == [token.parententity,
+ prop.key]:
current_path = current_path[2:]
continue
-
- entity = self._find_entity(
- query,
- token.parententity,
- raiseerr)
+ entity = self._find_entity(query,
+ token.parententity, raiseerr)
if not entity:
return [], []
path_element = entity.path_entity
mappers.append(prop.parent)
key = prop.key
else:
- raise sa_exc.ArgumentError("mapper option expects string key "
- "or list of attributes")
-
+ raise sa_exc.ArgumentError('mapper option expects '
+ 'string key or list of attributes')
if prop is None:
return [], []
-
path = build_path(path_element, prop.key, path)
l.append(path)
if getattr(token, '_of_type', None):
path_element = mapper = token._of_type
else:
path_element = mapper = getattr(prop, 'mapper', None)
-
if path_element:
path_element = path_element
-
-
- # if current_path tokens remain, then
- # we didn't have an exact path match.
+
if current_path:
return [], []
-
return l, mappers
class AttributeExtension(object):
"""An event handler for individual attribute change events.
-
+
AttributeExtension is assembled within the descriptors associated
with a mapped class.
-
+
"""
active_history = True
is_chained = False
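# Illustrative sketch, not part of the diff: an AttributeExtension subclass.
# In this API the set()/append() hooks return the value to be applied; the
# non-blank rule and the column_property() attachment shown in the comment
# are hypothetical.
from sqlalchemy.orm.interfaces import AttributeExtension

class NonBlankExtension(AttributeExtension):
    # request the "old" value be loaded so set() receives it
    active_history = True

    def set(self, state, value, oldvalue, initiator):
        if not value:
            raise ValueError('attribute must not be blank')
        return value

# attached via the ``extension`` keyword, e.g.:
#   column_property(users_table.c.name, extension=NonBlankExtension())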
def process_query_property(self, query, paths, mappers):
- # _get_context_strategy may receive the path in terms of
- # a base mapper - e.g. options(eagerload_all(Company.employees, Engineer.machines))
- # in the polymorphic tests leads to "(Person, 'machines')" in
- # the path due to the mechanics of how the eager strategy builds
- # up the path
+
+ # _get_context_strategy may receive the path in terms of a base
+ # mapper - e.g. options(eagerload_all(Company.employees,
+ # Engineer.machines)) in the polymorphic tests leads to
+ # "(Person, 'machines')" in the path due to the mechanics of how
+ # the eager strategy builds up the path
+
if self.is_chained:
for path in paths:
- query._attributes[("loaderstrategy", _reduce_path(path))] = \
- self.get_strategy_class()
+ query._attributes[('loaderstrategy',
+ _reduce_path(path))] = \
+ self.get_strategy_class()
else:
- query._attributes[("loaderstrategy", _reduce_path(paths[-1]))] = \
- self.get_strategy_class()
+ query._attributes[('loaderstrategy',
+ _reduce_path(paths[-1]))] = \
+ self.get_strategy_class()
def get_strategy_class(self):
raise NotImplementedError()
def setup_query(self, context, entity, path, adapter, **kwargs):
pass
- def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- """Return row processing functions which fulfill the contract specified
- by MapperProperty.create_row_processor.
-
- StrategizedProperty delegates its create_row_processor method directly
- to this method.
- """
+ def create_row_processor(self, selectcontext, path, mapper,
+ row, adapter):
+ """Return row processing functions which fulfill the contract
+ specified by MapperProperty.create_row_processor.
+
+ StrategizedProperty delegates its create_row_processor method
+ directly to this method. """
raise NotImplementedError()
# properties.py
-# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer
+# mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""MapperProperty implementations.
-This is a private module which defines the behavior of invidual ORM-mapped
-attributes.
+This is a private module which defines the behavior of invidual ORM-
+mapped attributes.
"""
from sqlalchemy import sql, util, log
import sqlalchemy.exceptions as sa_exc
-from sqlalchemy.sql.util import (
- ClauseAdapter, criterion_as_pairs, join_condition
- )
+from sqlalchemy.sql.util import ClauseAdapter, criterion_as_pairs, \
+ join_condition
from sqlalchemy.sql import operators, expression
-from sqlalchemy.orm import (
- attributes, dependency, mapper, object_mapper, strategies,
- )
-from sqlalchemy.orm.util import (
- CascadeOptions, _class_to_mapper, _orm_annotate, _orm_deannotate
- )
-from sqlalchemy.orm.interfaces import (
- MANYTOMANY, MANYTOONE, MapperProperty, ONETOMANY, PropComparator,
- StrategizedProperty,
- )
+from sqlalchemy.orm import attributes, dependency, mapper, \
+ object_mapper, strategies
+from sqlalchemy.orm.util import CascadeOptions, _class_to_mapper, \
+ _orm_annotate, _orm_deannotate
+from sqlalchemy.orm.interfaces import MANYTOMANY, MANYTOONE, \
+ MapperProperty, ONETOMANY, PropComparator, StrategizedProperty
NoneType = type(None)
__all__ = ('ColumnProperty', 'CompositeProperty', 'SynonymProperty',
doc=self.doc
)
- def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive):
+ def merge(self, session, source_state, source_dict, dest_state,
+ dest_dict, load, _recursive):
pass
log.class_logger(SynonymProperty)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
return (None, None)
- def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive):
+ def merge(self, session, source_state, source_dict,
+ dest_state, dest_dict, load, _recursive):
pass
self._of_type = _class_to_mapper(of_type)
def adapted(self, adapter):
- """Return a copy of this PropComparator which will use the given adaption function
- on the local side of generated expressions.
-
+ """Return a copy of this PropComparator which will use the
+ given adaption function on the local side of generated
+ expressions.
+
"""
- return self.__class__(self.property, self.mapper, getattr(self, '_of_type', None), adapter)
+
+ return self.__class__(self.property, self.mapper,
+ getattr(self, '_of_type', None),
+ adapter)
@property
def parententity(self):
cls, adapter=self.adapter)
def in_(self, other):
- raise NotImplementedError("in_() not yet supported for relationships. For a "
- "simple many-to-one, use in_() against the set of foreign key values.")
+ raise NotImplementedError('in_() not yet supported for '
+ 'relationships. For a simple many-to-one, use '
+ 'in_() against the set of foreign key values.')
__hash__ = None
if self.property.direction in [ONETOMANY, MANYTOMANY]:
return ~self._criterion_exists()
else:
- return _orm_annotate(
- self.property._optimized_compare(
- None,
- adapt_source=self.adapter)
- )
+ return _orm_annotate(self.property._optimized_compare(
+ None, adapt_source=self.adapter))
elif self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "Can't compare a collection to an object or "
- "collection; use contains() to test for membership.")
+ raise sa_exc.InvalidRequestError("Can't compare a colle"
+ "ction to an object or collection; use "
+ "contains() to test for membership.")
else:
- return _orm_annotate(
- self.property._optimized_compare(
- other,
- adapt_source=self.adapter)
- )
+ return _orm_annotate(self.property._optimized_compare(other,
+ adapt_source=self.adapter))
def _criterion_exists(self, criterion=None, **kwargs):
if getattr(self, '_of_type', None):
source_selectable = None
pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(
- dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
+ self.property._create_joins(dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
for k in kwargs:
crit = self.property.mapper.class_manager[k] == kwargs[k]
raise sa_exc.InvalidRequestError(
"'contains' not implemented for scalar "
"attributes. Use ==")
- clause = self.property._optimized_compare(other, adapt_source=self.adapter)
+ clause = self.property._optimized_compare(other,
+ adapt_source=self.adapter)
if self.property.secondaryjoin is not None:
- clause.negation_clause = self.__negated_contains_or_equals(other)
+ clause.negation_clause = \
+ self.__negated_contains_or_equals(other)
return clause
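# Illustrative sketch, not part of the diff: contains() on a collection,
# versus plain == on a scalar attribute (per the error raised above).
# Reuses the hypothetical Parent/Child mapping from the any()/has() sketch.
alice = Parent(name='alice')
bob = Child(name='bob')
alice.children.append(bob)
session.add(alice)
session.flush()

# collection-holding relationship: use contains()
session.query(Parent).filter(Parent.children.contains(bob)).all()

# scalar many-to-one attribute: compare directly with ==
session.query(Child).filter(Child.parent == alice).all()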
def state_bindparam(state, col):
o = state.obj() # strong ref
- return lambda: self.property.mapper._get_committed_attr_by_column(o, col)
+ return lambda : \
+ self.property.mapper._get_committed_attr_by_column(o,
+ col)
def adapt(col):
if self.adapter:
def __ne__(self, other):
if isinstance(other, (NoneType, expression._Null)):
if self.property.direction == MANYTOONE:
- return sql.or_(*[x!=None for x in self.property._foreign_keys])
+ return sql.or_(*[x != None for x in
+ self.property._foreign_keys])
else:
return self._criterion_exists()
elif self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "Can't compare a collection to an object or "
- "collection; use contains() to test for membership.")
+ raise sa_exc.InvalidRequestError("Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
else:
return self.__negated_contains_or_equals(other)
adapt_source=None, alias_secondary=True):
if value is not None:
value = attributes.instance_state(value)
- return self._get_strategy(strategies.LazyLoader).\
- lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary, adapt_source=adapt_source)
+ return self._get_strategy(strategies.LazyLoader).lazy_clause(value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
current_state = attributes.instance_state(current)
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict, load=load, _recursive=_recursive)
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
if obj is not None:
dest_list.append(obj)
if not load:
- coll = attributes.init_state_collection(dest_state, dest_dict, self.key)
+ coll = attributes.init_state_collection(dest_state,
+ dest_dict, self.key)
for c in dest_list:
coll.append_without_event(c)
else:
- dest_state.get_impl(self.key)._set_iterable(dest_state, dest_dict, dest_list)
+ dest_state.get_impl(self.key)._set_iterable(dest_state,
+ dest_dict, dest_list)
else:
current = source_dict[self.key]
if current is not None:
current_state = attributes.instance_state(current)
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict, load=load, _recursive=_recursive)
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
else:
obj = None
-
if not load:
dest_dict[self.key] = obj
else:
- dest_state.get_impl(self.key).set(dest_state, dest_dict, obj, None)
+ dest_state.get_impl(self.key).set(dest_state,
+ dest_dict, obj, None)
def cascade_iterator(self, type_, state, visited_instances, halt_on=None):
if not type_ in self.cascade:
passive = attributes.PASSIVE_OFF
if type_ == 'save-update':
- instances = attributes.get_state_history(state, self.key, passive=passive).sum()
+ instances = attributes.get_state_history(state, self.key,
+ passive=passive).sum()
else:
- instances = state.value_as_iterable(self.key, passive=passive)
-
- skip_pending = type_ == 'refresh-expire' and 'delete-orphan' not in self.cascade
+ instances = state.value_as_iterable(self.key,
+ passive=passive)
+ skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
+ not in self.cascade
if instances:
for c in instances:
other._reverse_property.add(self)
if not other._get_target().common_parent(self.parent):
- raise sa_exc.ArgumentError("reverse_property %r on relationship %s references "
- "relationship %s, which does not reference mapper %s" % (key, self, other, self.parent))
-
- if self.direction in (ONETOMANY, MANYTOONE) and self.direction == other.direction:
- raise sa_exc.ArgumentError("%s and back-reference %s are both of the same direction %r."
- " Did you mean to set remote_side on the many-to-one side ?" % (other, self, self.direction))
+ raise sa_exc.ArgumentError('reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' % (key, self, other,
+ self.parent))
+ if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
+ == other.direction:
+ raise sa_exc.ArgumentError('%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side ?'
+ % (other, self, self.direction))
def do_init(self):
self._get_target()
def _get_target(self):
if not hasattr(self, 'mapper'):
if isinstance(self.argument, type):
- self.mapper = mapper.class_mapper(self.argument, compile=False)
+ self.mapper = mapper.class_mapper(self.argument,
+ compile=False)
elif isinstance(self.argument, mapper.Mapper):
self.mapper = self.argument
elif util.callable(self.argument):
- # accept a callable to suit various deferred-configurational schemes
- self.mapper = mapper.class_mapper(self.argument(), compile=False)
+
+ # accept a callable to suit various deferred-
+ # configurational schemes
+
+ self.mapper = mapper.class_mapper(self.argument(),
+ compile=False)
else:
- raise sa_exc.ArgumentError("relationship '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument)))
+ raise sa_exc.ArgumentError("relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(self.argument)))
assert isinstance(self.mapper, mapper.Mapper), self.mapper
return self.mapper
def _process_dependent_arguments(self):
- # accept callables for other attributes which may require deferred initialization
- for attr in ('order_by', 'primaryjoin', 'secondaryjoin', 'secondary', '_foreign_keys', 'remote_side'):
+ # accept callables for other attributes which may require
+ # deferred initialization
+
+ for attr in (
+ 'order_by',
+ 'primaryjoin',
+ 'secondaryjoin',
+ 'secondary',
+ '_foreign_keys',
+ 'remote_side',
+ ):
if util.callable(getattr(self, attr)):
setattr(self, attr, getattr(self, attr)())
# in the case that InstrumentedAttributes were used to construct
- # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so these
- # interact with Query in the same way as the original Table-bound Column objects
- for attr in ('primaryjoin', 'secondaryjoin'):
+ # primaryjoin or secondaryjoin, remove the "_orm_adapt"
+ # annotation so these interact with Query in the same way as the
+ # original Table-bound Column objects
+
+ for attr in 'primaryjoin', 'secondaryjoin':
val = getattr(self, attr)
if val is not None:
util.assert_arg_type(val, sql.ColumnElement, attr)
setattr(self, attr, _orm_deannotate(val))
-
if self.order_by is not False and self.order_by is not None:
- self.order_by = [expression._literal_as_column(x) for x in util.to_list(self.order_by)]
-
- self._foreign_keys = util.column_set(expression._literal_as_column(x) for x in util.to_column_set(self._foreign_keys))
- self.remote_side = util.column_set(expression._literal_as_column(x) for x in util.to_column_set(self.remote_side))
-
+ self.order_by = [expression._literal_as_column(x) for x in
+ util.to_list(self.order_by)]
+ self._foreign_keys = \
+ util.column_set(expression._literal_as_column(x) for x in
+ util.to_column_set(self._foreign_keys))
+ self.remote_side = \
+ util.column_set(expression._literal_as_column(x) for x in
+ util.to_column_set(self.remote_side))
if not self.parent.concrete:
for inheriting in self.parent.iterate_to_root():
- if inheriting is not self.parent and inheriting._get_property(self.key, raiseerr=False):
- util.warn(
- ("Warning: relationship '%s' on mapper '%s' supercedes "
- "the same relationship on inherited mapper '%s'; this "
- "can cause dependency issues during flush") %
- (self.key, self.parent, inheriting))
+ if inheriting is not self.parent \
+ and inheriting._get_property(self.key,
+ raiseerr=False):
+ util.warn("Warning: relationship '%s' on mapper "
+ "'%s' supercedes the same relationship "
+ "on inherited mapper '%s'; this can "
+ "cause dependency issues during flush"
+ % (self.key, self.parent, inheriting))
# TODO: remove 'self.table'
- self.target = self.table = self.mapper.mapped_table
+ self.target = self.table = self.mapper.mapped_table
if self.cascade.delete_orphan:
if self.parent.class_ is self.mapper.class_:
- raise sa_exc.ArgumentError("In relationship '%s', can't establish 'delete-orphan' cascade "
- "rule on a self-referential relationship. "
- "You probably want cascade='all', which includes delete cascading but not orphan detection." %(str(self)))
- self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
+ raise sa_exc.ArgumentError("In relationship '%s', "
+ "can't establish 'delete-orphan' cascade rule "
+ "on a self-referential relationship. You "
+ "probably want cascade='all', which includes "
+ "delete cascading but not orphan detection."
+ % str(self))
+ self.mapper.primary_mapper().delete_orphans.append((self.key,
+ self.parent.class_))
def _determine_joins(self):
if self.secondaryjoin is not None and self.secondary is None:
- raise sa_exc.ArgumentError("Property '" + self.key + "' specified with secondary join condition but no secondary argument")
- # if join conditions were not specified, figure them out based on foreign keys
+ raise sa_exc.ArgumentError("Property '" + self.key
+ + "' specified with secondary join condition but "
+ "no secondary argument")
+
+ # if join conditions were not specified, figure them out based
+ # on foreign keys
def _search_for_join(mapper, table):
- # find a join between the given mapper's mapped table and the given table.
- # will try the mapper's local table first for more specificity, then if not
- # found will try the more general mapped table, which in the case of inheritance
- # is a join.
+
+ # find a join between the given mapper's mapped table and
+ # the given table. will try the mapper's local table first
+ # for more specificity, then if not found will try the more
+ # general mapped table, which in the case of inheritance is
+ # a join.
+
try:
return join_condition(mapper.local_table, table)
except sa_exc.ArgumentError, e:
try:
if self.secondary is not None:
if self.secondaryjoin is None:
- self.secondaryjoin = _search_for_join(self.mapper, self.secondary)
+ self.secondaryjoin = _search_for_join(self.mapper,
+ self.secondary)
if self.primaryjoin is None:
- self.primaryjoin = _search_for_join(self.parent, self.secondary)
+ self.primaryjoin = _search_for_join(self.parent,
+ self.secondary)
else:
if self.primaryjoin is None:
- self.primaryjoin = _search_for_join(self.parent, self.target)
+ self.primaryjoin = _search_for_join(self.parent,
+ self.target)
except sa_exc.ArgumentError, e:
- raise sa_exc.ArgumentError("Could not determine join condition between "
- "parent/child tables on relationship %s. "
- "Specify a 'primaryjoin' expression. If this is a "
- "many-to-many relationship, 'secondaryjoin' is needed as well." % (self))
+ raise sa_exc.ArgumentError("Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s. Specify a 'primaryjoin' "
+ "expression. If this is a many-to-many "
+ "relationship, 'secondaryjoin' is needed as well."
+ % self)
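# Illustrative sketch, not part of the diff: supplying primaryjoin and
# foreign_keys explicitly when no ForeignKey metadata exists, which is the
# situation the error above describes.  Table, column and class names are
# hypothetical.
from sqlalchemy import MetaData, Table, Column, Integer, String
from sqlalchemy.orm import mapper, relationship

metadata = MetaData()
users = Table('user', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String(50)))
addresses = Table('address', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('user_ref', Integer),      # note: no ForeignKey()
                  Column('email', String(50)))

class User(object):
    pass

class Address(object):
    pass

mapper(Address, addresses)
mapper(User, users, properties={
    'addresses': relationship(Address,
            primaryjoin=users.c.id == addresses.c.user_ref,
            foreign_keys=[addresses.c.user_ref]),
})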
def _col_is_part_of_mappings(self, column):
if self.secondary is None:
self.secondary.c.contains_column(column) is not None
def _determine_synchronize_pairs(self):
-
if self.local_remote_pairs:
if not self._foreign_keys:
- raise sa_exc.ArgumentError("foreign_keys argument is required with _local_remote_pairs argument")
-
+ raise sa_exc.ArgumentError('foreign_keys argument is '
+ 'required with _local_remote_pairs argument')
self.synchronize_pairs = []
-
for l, r in self.local_remote_pairs:
if r in self._foreign_keys:
self.synchronize_pairs.append((l, r))
elif l in self._foreign_keys:
self.synchronize_pairs.append((r, l))
else:
- eq_pairs = criterion_as_pairs(
- self.primaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
- any_operator=self.viewonly
- )
- eq_pairs = [
- (l, r) for l, r in eq_pairs if
- (self._col_is_part_of_mappings(l) and
- self._col_is_part_of_mappings(r))
- or self.viewonly and r in self._foreign_keys
- ]
-
+ eq_pairs = criterion_as_pairs(self.primaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=self.viewonly)
+ eq_pairs = [(l, r) for (l, r) in eq_pairs
+ if self._col_is_part_of_mappings(l)
+ and self._col_is_part_of_mappings(r)
+ or self.viewonly and r in self._foreign_keys]
if not eq_pairs:
- if not self.viewonly and criterion_as_pairs(
- self.primaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
- any_operator=True):
- raise sa_exc.ArgumentError("Could not locate any equated, locally "
- "mapped column pairs for primaryjoin condition '%s' on relationship %s. "
- "For more relaxed rules on join conditions, the relationship may be "
- "marked as viewonly=True." % (self.primaryjoin, self)
- )
+ if not self.viewonly \
+ and criterion_as_pairs(self.primaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True):
+ raise sa_exc.ArgumentError("Could not locate any "
+ "equated, locally mapped column pairs for "
+ "primaryjoin condition '%s' on "
+ "relationship %s. For more relaxed rules "
+ "on join conditions, the relationship may "
+ "be marked as viewonly=True."
+ % (self.primaryjoin, self))
else:
if self._foreign_keys:
- raise sa_exc.ArgumentError("Could not determine relationship direction for "
- "primaryjoin condition '%s', on relationship %s. "
- "Do the columns in 'foreign_keys' represent only the 'foreign' columns "
- "in this join condition ?" % (self.primaryjoin, self))
+ raise sa_exc.ArgumentError("Could not determine"
+ " relationship direction for "
+ "primaryjoin condition '%s', on "
+ "relationship %s. Do the columns in "
+ "'foreign_keys' represent only the "
+ "'foreign' columns in this join "
+ "condition ?" % (self.primaryjoin,
+ self))
else:
- raise sa_exc.ArgumentError("Could not determine relationship direction for "
- "primaryjoin condition '%s', on relationship %s. "
- "Specify the 'foreign_keys' argument to indicate which columns "
- "on the relationship are foreign." % (self.primaryjoin, self))
-
+ raise sa_exc.ArgumentError("Could not determine"
+ " relationship direction for "
+ "primaryjoin condition '%s', on "
+ "relationship %s. Specify the "
+ "'foreign_keys' argument to indicate "
+ "which columns on the relationship are "
+ "foreign." % (self.primaryjoin, self))
self.synchronize_pairs = eq_pairs
-
if self.secondaryjoin is not None:
- sq_pairs = criterion_as_pairs(
- self.secondaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
- any_operator=self.viewonly)
-
- sq_pairs = [
- (l, r)
- for l, r in sq_pairs
- if (self._col_is_part_of_mappings(l) and
- self._col_is_part_of_mappings(r)) or
- r in self._foreign_keys
- ]
-
+ sq_pairs = criterion_as_pairs(self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=self.viewonly)
+ sq_pairs = [(l, r) for (l, r) in sq_pairs
+ if self._col_is_part_of_mappings(l)
+ and self._col_is_part_of_mappings(r) or r
+ in self._foreign_keys]
if not sq_pairs:
- if not self.viewonly and criterion_as_pairs(
- self.secondaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
- any_operator=True):
- raise sa_exc.ArgumentError("Could not locate any equated, locally mapped "
- "column pairs for secondaryjoin condition '%s' on relationship %s. "
- "For more relaxed rules on join conditions, the "
- "relationship may be marked as viewonly=True." % (self.secondaryjoin, self)
- )
+ if not self.viewonly \
+ and criterion_as_pairs(self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True):
+ raise sa_exc.ArgumentError("Could not locate any "
+ "equated, locally mapped column pairs for "
+ "secondaryjoin condition '%s' on "
+ "relationship %s. For more relaxed rules "
+ "on join conditions, the relationship may "
+ "be marked as viewonly=True."
+ % (self.secondaryjoin, self))
else:
- raise sa_exc.ArgumentError("Could not determine relationship direction "
- "for secondaryjoin condition '%s', on relationship %s. "
- "Specify the foreign_keys argument to indicate which "
- "columns on the relationship are foreign." % (self.secondaryjoin, self))
-
+ raise sa_exc.ArgumentError("Could not determine "
+ "relationship direction for secondaryjoin "
+ "condition '%s', on relationship %s. "
+ "Specify the foreign_keys argument to "
+ "indicate which columns on the "
+ "relationship are foreign."
+ % (self.secondaryjoin, self))
self.secondary_synchronize_pairs = sq_pairs
else:
self.secondary_synchronize_pairs = None
-
- self._foreign_keys = util.column_set(r for l, r in self.synchronize_pairs)
+ self._foreign_keys = util.column_set(r for (l, r) in
+ self.synchronize_pairs)
if self.secondary_synchronize_pairs:
- self._foreign_keys.update(r for l, r in self.secondary_synchronize_pairs)
+ self._foreign_keys.update(r for (l, r) in
+ self.secondary_synchronize_pairs)
def _determine_direction(self):
if self.secondaryjoin is not None:
self.direction = MANYTOMANY
-
elif self._refers_to_parent_table():
- # self referential defaults to ONETOMANY unless the "remote" side is present
- # and does not reference any foreign key columns
+
+ # self referential defaults to ONETOMANY unless the "remote"
+ # side is present and does not reference any foreign key
+ # columns
if self.local_remote_pairs:
- remote = [r for l, r in self.local_remote_pairs]
+ remote = [r for (l, r) in self.local_remote_pairs]
elif self.remote_side:
remote = self.remote_side
else:
remote = None
-
- if not remote or self._foreign_keys.\
- difference(l for l, r in self.synchronize_pairs).\
- intersection(remote):
+ if not remote or self._foreign_keys.difference(l for (l,
+ r) in self.synchronize_pairs).intersection(remote):
self.direction = ONETOMANY
else:
self.direction = MANYTOONE
-
else:
- foreign_keys = [f for c, f in self.synchronize_pairs]
-
+ foreign_keys = [f for (c, f) in self.synchronize_pairs]
parentcols = util.column_set(self.parent.mapped_table.c)
targetcols = util.column_set(self.mapper.mapped_table.c)
# fk collection which suggests ONETOMANY.
+
onetomany_fk = targetcols.intersection(foreign_keys)
# fk collection which suggests MANYTOONE.
+
manytoone_fk = parentcols.intersection(foreign_keys)
-
if not onetomany_fk and not manytoone_fk:
- raise sa_exc.ArgumentError(
- "Can't determine relationship direction for relationship '%s' "
- "- foreign key columns are present in neither the "
- "parent nor the child's mapped tables" % self )
-
- elif onetomany_fk and manytoone_fk:
- # fks on both sides. do the same
- # test only based on the local side.
- referents = [c for c, f in self.synchronize_pairs]
+ raise sa_exc.ArgumentError("Can't determine relationshi"
+ "p direction for relationship '%s' - foreign "
+ "key columns are present in neither the parent "
+ "nor the child's mapped tables" % self)
+ elif onetomany_fk and manytoone_fk:
+
+ # fks on both sides. do the same test only based on the
+ # local side.
+
+ referents = [c for (c, f) in self.synchronize_pairs]
onetomany_local = parentcols.intersection(referents)
manytoone_local = targetcols.intersection(referents)
-
if onetomany_local and not manytoone_local:
self.direction = ONETOMANY
elif manytoone_local and not onetomany_local:
self.direction = MANYTOONE
elif manytoone_fk:
self.direction = MANYTOONE
-
if not self.direction:
- raise sa_exc.ArgumentError(
- "Can't determine relationship direction for relationship '%s' "
- "- foreign key columns are present in both the parent and "
- "the child's mapped tables. Specify 'foreign_keys' "
- "argument." % self)
-
- if self.cascade.delete_orphan and not self.single_parent and \
- (self.direction is MANYTOMANY or self.direction is MANYTOONE):
- util.warn("On %s, delete-orphan cascade is not supported on a "
- "many-to-many or many-to-one relationship when single_parent is not set. "
- " Set single_parent=True on the relationship()." % self)
+ raise sa_exc.ArgumentError("Can't determine relationshi"
+ "p direction for relationship '%s' - foreign "
+ "key columns are present in both the parent "
+ "and the child's mapped tables. Specify "
+ "'foreign_keys' argument." % self)
+ if self.cascade.delete_orphan and not self.single_parent \
+ and (self.direction is MANYTOMANY or self.direction
+ is MANYTOONE):
+ util.warn('On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
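# Illustrative sketch, not part of the diff: the single_parent=True setting
# that the warning above asks for, on a hypothetical many-to-one that also
# carries delete-orphan cascade (typically placed inside a mapper's
# properties dict or on a declarative class).
from sqlalchemy.orm import relationship

preference = relationship('Preference',
                          cascade='all, delete-orphan',
                          single_parent=True)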
def _determine_local_remote_pairs(self):
if not self.local_remote_pairs:
if self.remote_side:
if self.direction is MANYTOONE:
- self.local_remote_pairs = [
- (r, l) for l, r in
- criterion_as_pairs(self.primaryjoin, consider_as_referenced_keys=self.remote_side, any_operator=True)
- ]
+ self.local_remote_pairs = [(r, l) for (l, r) in
+ criterion_as_pairs(self.primaryjoin,
+ consider_as_referenced_keys=self.remote_side,
+ any_operator=True)]
else:
- self.local_remote_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=self.remote_side, any_operator=True)
-
+ self.local_remote_pairs = \
+ criterion_as_pairs(self.primaryjoin,
+ consider_as_foreign_keys=self.remote_side,
+ any_operator=True)
if not self.local_remote_pairs:
- raise sa_exc.ArgumentError("Relationship %s could not determine any local/remote column pairs from remote side argument %r" % (self, self.remote_side))
-
+ raise sa_exc.ArgumentError('Relationship %s could '
+ 'not determine any local/remote column '
+ 'pairs from remote side argument %r'
+ % (self, self.remote_side))
else:
if self.viewonly:
eq_pairs = self.synchronize_pairs
if self.secondaryjoin is not None:
eq_pairs += self.secondary_synchronize_pairs
else:
- eq_pairs = criterion_as_pairs(
- self.primaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
- any_operator=True)
+ eq_pairs = criterion_as_pairs(self.primaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True)
if self.secondaryjoin is not None:
- eq_pairs += criterion_as_pairs(
- self.secondaryjoin,
- consider_as_foreign_keys=self._foreign_keys,
+ eq_pairs += \
+ criterion_as_pairs(self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
any_operator=True)
-
- eq_pairs = [
- (l, r) for l, r in eq_pairs
- if self._col_is_part_of_mappings(l) and
- self._col_is_part_of_mappings(r)
- ]
-
+ eq_pairs = [(l, r) for (l, r) in eq_pairs
+ if self._col_is_part_of_mappings(l)
+ and self._col_is_part_of_mappings(r)]
if self.direction is MANYTOONE:
- self.local_remote_pairs = [(r, l) for l, r in eq_pairs]
+ self.local_remote_pairs = [(r, l) for (l, r) in
+ eq_pairs]
else:
self.local_remote_pairs = eq_pairs
elif self.remote_side:
- raise sa_exc.ArgumentError("remote_side argument is redundant "
- "against more detailed _local_remote_side argument.")
-
+ raise sa_exc.ArgumentError('remote_side argument is '
+ 'redundant against more detailed '
+ '_local_remote_side argument.')
for l, r in self.local_remote_pairs:
-
- if self.direction is ONETOMANY and not self._col_is_part_of_mappings(l):
- raise sa_exc.ArgumentError("Local column '%s' is not part of mapping %s. "
- "Specify remote_side argument to indicate which column "
- "lazy join condition should compare against." % (l, self.parent))
-
- elif self.direction is MANYTOONE and not self._col_is_part_of_mappings(r):
- raise sa_exc.ArgumentError("Remote column '%s' is not part of mapping %s. "
- "Specify remote_side argument to indicate which column lazy "
- "join condition should bind." % (r, self.mapper))
-
- self.local_side, self.remote_side = [
- util.ordered_column_set(x) for x in
- zip(*list(self.local_remote_pairs))]
+ if self.direction is ONETOMANY \
+ and not self._col_is_part_of_mappings(l):
+ raise sa_exc.ArgumentError("Local column '%s' is not "
+ "part of mapping %s. Specify remote_side "
+ "argument to indicate which column lazy join "
+ "condition should compare against." % (l,
+ self.parent))
+ elif self.direction is MANYTOONE \
+ and not self._col_is_part_of_mappings(r):
+ raise sa_exc.ArgumentError("Remote column '%s' is not "
+ "part of mapping %s. Specify remote_side "
+ "argument to indicate which column lazy join "
+ "condition should bind." % (r, self.mapper))
+ self.local_side, self.remote_side = [util.ordered_column_set(x)
+ for x in zip(*list(self.local_remote_pairs))]
def _assert_is_primary(self):
- if not self.is_primary() and \
- not mapper.class_mapper(self.parent.class_, compile=False).\
- _get_property(self.key, raiseerr=False):
-
- raise sa_exc.ArgumentError("Attempting to assign a new relationship '%s' to "
- "a non-primary mapper on class '%s'. New relationships can only be "
- "added to the primary mapper, i.e. the very first "
- "mapper created for class '%s' " %
- (self.key, self.parent.class_.__name__, self.parent.class_.__name__))
+ if not self.is_primary() \
+ and not mapper.class_mapper(self.parent.class_,
+ compile=False)._get_property(self.key, raiseerr=False):
+ raise sa_exc.ArgumentError("Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " % (self.key,
+ self.parent.class_.__name__,
+ self.parent.class_.__name__))
def _generate_backref(self):
if not self.is_primary():
return
-
if self.backref is not None and not self.back_populates:
if isinstance(self.backref, basestring):
backref_key, kwargs = self.backref, {}
else:
backref_key, kwargs = self.backref
-
mapper = self.mapper.primary_mapper()
- if mapper._get_property(backref_key, raiseerr=False) is not None:
- raise sa_exc.ArgumentError("Error creating backref '%s' on relationship '%s': "
- "property of that name exists on mapper '%s'" % (backref_key, self, mapper))
-
+ if mapper._get_property(backref_key, raiseerr=False) \
+ is not None:
+ raise sa_exc.ArgumentError("Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" % (backref_key,
+ self, mapper))
if self.secondary is not None:
pj = kwargs.pop('primaryjoin', self.secondaryjoin)
sj = kwargs.pop('secondaryjoin', self.primaryjoin)
if sj:
raise sa_exc.InvalidRequestError(
"Can't assign 'secondaryjoin' on a backref against "
- "a non-secondary relationship.")
-
- foreign_keys = kwargs.pop('foreign_keys', self._foreign_keys)
-
+ "a non-secondary relationship."
+ )
+ foreign_keys = kwargs.pop('foreign_keys',
+ self._foreign_keys)
parent = self.parent.primary_mapper()
kwargs.setdefault('viewonly', self.viewonly)
kwargs.setdefault('post_update', self.post_update)
kwargs.setdefault('passive_updates', self.passive_updates)
-
self.back_populates = backref_key
relationship = RelationshipProperty(
- parent,
- self.secondary,
- pj,
- sj,
- foreign_keys=foreign_keys,
- back_populates=self.key,
- **kwargs)
-
+ parent,
+ self.secondary,
+ pj,
+ sj,
+ foreign_keys=foreign_keys,
+ back_populates=self.key,
+ **kwargs
+ )
mapper._configure_property(backref_key, relationship)
-
-
if self.back_populates:
- self.extension = list(util.to_list(self.extension, default=[]))
- self.extension.append(attributes.GenericBackrefExtension(self.back_populates))
+ self.extension = list(util.to_list(self.extension,
+ default=[]))
+ self.extension.append(
+ attributes.GenericBackrefExtension(self.back_populates))
self._add_reverse_property(self.back_populates)
-
def _post_init(self):
- self.logger.info("%s setup primary join %s", self, self.primaryjoin)
- self.logger.info("%s setup secondary join %s", self, self.secondaryjoin)
- self.logger.info("%s synchronize pairs [%s]", self,
- ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
- self.logger.info("%s secondary synchronize pairs [%s]", self,
- ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
- self.logger.info("%s local/remote pairs [%s]", self,
- ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
- self.logger.info("%s relationship direction %s", self, self.direction)
-
+ self.logger.info('%s setup primary join %s', self,
+ self.primaryjoin)
+ self.logger.info('%s setup secondary join %s', self,
+ self.secondaryjoin)
+ self.logger.info('%s synchronize pairs [%s]', self,
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.synchronize_pairs))
+ self.logger.info('%s secondary synchronize pairs [%s]', self,
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.secondary_synchronize_pairs or []))
+ self.logger.info('%s local/remote pairs [%s]', self,
+ ','.join('(%s / %s)' % (l, r) for (l, r) in
+ self.local_remote_pairs))
+ self.logger.info('%s relationship direction %s', self,
+ self.direction)
if self.uselist is None:
self.uselist = self.direction is not MANYTOONE
-
if not self.viewonly:
self._dependency_processor = \
- dependency.DependencyProcessor.from_relationship(self)
+ dependency.DependencyProcessor.from_relationship(self)
@util.memoized_property
def _use_get(self):
- """memoize the 'use_get' attribute of this RelationshipLoader's lazyloader."""
-
+ """memoize the 'use_get' attribute of this RelationshipLoader's
+ lazyloader."""
+
strategy = self._get_strategy(strategies.LazyLoader)
return strategy.use_get
aliased = aliased or (source_selectable is not None)
- primaryjoin, secondaryjoin, secondary = self.primaryjoin, self.secondaryjoin, self.secondary
+ primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
+ self.secondaryjoin, self.secondary
# adjust the join condition for single table inheritance,
# in the case that the join is to a subclass
else:
primaryjoin = primaryjoin & single_crit
-
if aliased:
if secondary is not None:
secondary = secondary.alias()
primary_aliasizer = ClauseAdapter(secondary)
if dest_selectable is not None:
secondary_aliasizer = \
- ClauseAdapter(dest_selectable,
- equivalents=self.mapper._equivalent_columns).\
- chain(primary_aliasizer)
+ ClauseAdapter(dest_selectable,
+ equivalents=self.mapper._equivalent_columns).\
+ chain(primary_aliasizer)
else:
secondary_aliasizer = primary_aliasizer
-
if source_selectable is not None:
- primary_aliasizer = ClauseAdapter(secondary).chain(ClauseAdapter(source_selectable, equivalents=self.parent._equivalent_columns))
-
- secondaryjoin = secondary_aliasizer.traverse(secondaryjoin)
+ primary_aliasizer = \
+ ClauseAdapter(secondary).\
+ chain(ClauseAdapter(source_selectable,
+ equivalents=self.parent._equivalent_columns))
+ secondaryjoin = \
+ secondary_aliasizer.traverse(secondaryjoin)
else:
if dest_selectable is not None:
- primary_aliasizer = ClauseAdapter(dest_selectable, exclude=self.local_side, equivalents=self.mapper._equivalent_columns)
+ primary_aliasizer = ClauseAdapter(dest_selectable,
+ exclude=self.local_side,
+ equivalents=self.mapper._equivalent_columns)
if source_selectable is not None:
- primary_aliasizer.chain(ClauseAdapter(source_selectable, exclude=self.remote_side, equivalents=self.parent._equivalent_columns))
+ primary_aliasizer.chain(
+ ClauseAdapter(source_selectable,
+ exclude=self.remote_side,
+ equivalents=self.parent._equivalent_columns))
elif source_selectable is not None:
- primary_aliasizer = ClauseAdapter(source_selectable, exclude=self.remote_side, equivalents=self.parent._equivalent_columns)
-
+ primary_aliasizer = \
+ ClauseAdapter(source_selectable,
+ exclude=self.remote_side,
+ equivalents=self.parent._equivalent_columns)
secondary_aliasizer = None
-
primaryjoin = primary_aliasizer.traverse(primaryjoin)
target_adapter = secondary_aliasizer or primary_aliasizer
target_adapter.include = target_adapter.exclude = None
else:
target_adapter = None
-
if source_selectable is None:
source_selectable = self.parent.local_table
-
if dest_selectable is None:
dest_selectable = self.mapper.local_table
-
- return (primaryjoin, secondaryjoin,
- source_selectable,
- dest_selectable, secondary, target_adapter)
+ return (
+ primaryjoin,
+ secondaryjoin,
+ source_selectable,
+ dest_selectable,
+ secondary,
+ target_adapter,
+ )
PropertyLoader = RelationProperty = RelationshipProperty