git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
move mapper-level UOW functionality straight into unitofwork also. there's
no need for this to be per-mapper.   can't move dependency init
off of property though as this init needs to happen up front, added a test
to prove it.

author     Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 14 Jul 2012 16:52:21 +0000 (12:52 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 14 Jul 2012 16:52:21 +0000 (12:52 -0400)

lib/sqlalchemy/orm/dependency.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/unitofwork.py
test/aaa_profiling/test_zoomark_orm.py
test/orm/test_naturalpks.py
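
Most of the hunks below are trailing-whitespace cleanup in dependency.py and
interfaces.py; the substantive change is in mapper.py, where the per-mapper
flush-action methods are deleted and, per the commit message, relocated into
unitofwork.py (whose hunks are not part of this excerpt).  The flush machinery
visible throughout the diff builds a graph of action nodes (SaveUpdateAll,
DeleteAll, ProcessAll and their per-state variants) and records ordering
constraints as (before, after) pairs in uow.dependencies; the unit of work then
runs the actions in an order consistent with those pairs.  The sketch below
illustrates that idea with a toy dependency set -- the node names mirror the
local variables used in OneToManyDP.per_property_dependencies(), but the pairs
are only analogous and the sort itself is an illustration, not SQLAlchemy's
actual unitofwork code.

    # Toy illustration: order flush "actions" from (before, after) dependency
    # pairs, in the spirit of uow.dependencies seen in the diff.  Plain strings
    # stand in for the real action objects.
    from collections import defaultdict, deque

    def topological_order(nodes, dependencies):
        """Return nodes so that for every (before, after) pair,
        before precedes after."""
        successors = defaultdict(set)
        in_degree = dict.fromkeys(nodes, 0)
        for before, after in dependencies:
            if after not in successors[before]:
                successors[before].add(after)
                in_degree[after] += 1
        queue = deque(n for n in nodes if in_degree[n] == 0)
        ordered = []
        while queue:
            node = queue.popleft()
            ordered.append(node)
            for succ in successors[node]:
                in_degree[succ] -= 1
                if in_degree[succ] == 0:
                    queue.append(succ)
        if len(ordered) != len(nodes):
            # the real unit of work handles this case by breaking the cycle
            # into per-state actions (see per_state_flush_actions below)
            raise ValueError("cycle detected")
        return ordered

    # pairs analogous to (not copied verbatim from) the one-to-many case:
    # save parents, run the dependency processor, then flush children
    nodes = ["parent_saves", "after_save", "child_saves",
             "child_deletes", "before_delete", "parent_deletes"]
    dependencies = [
        ("parent_saves", "after_save"),
        ("after_save", "child_saves"),
        ("after_save", "child_deletes"),
        ("child_saves", "parent_deletes"),
        ("child_deletes", "parent_deletes"),
        ("before_delete", "child_saves"),
        ("before_delete", "child_deletes"),
    ]
    print(topological_order(nodes, dependencies))

Expressing ordering as pairs rather than a fixed sequence is what lets the unit
of work detect cycles in the aggregated graph and fall back to per-state
actions, as the per_state_flush_actions docstring in the diff notes.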

lib/sqlalchemy/orm/dependency.py
index 1552f6aef145c33594d7e49667260d5d48f08879..881a7bb62545f333b85cb21734ec97720ce8bafb 100644 (file)
@@ -32,7 +32,7 @@ class DependencyProcessor(object):
         if self.passive_updates:
             self._passive_update_flag = attributes.PASSIVE_NO_INITIALIZE
         else:
-            self._passive_update_flag= attributes.PASSIVE_OFF
+            self._passive_update_flag = attributes.PASSIVE_OFF
 
         self.key = prop.key
         if not self.prop.synchronize_pairs:
@@ -48,7 +48,7 @@ class DependencyProcessor(object):
 
     def hasparent(self, state):
         """return True if the given object instance has a parent,
-        according to the ``InstrumentedAttribute`` handled by this 
+        according to the ``InstrumentedAttribute`` handled by this
         ``DependencyProcessor``.
 
         """
@@ -69,29 +69,29 @@ class DependencyProcessor(object):
         before_delete = unitofwork.ProcessAll(uow, self, True, True)
 
         parent_saves = unitofwork.SaveUpdateAll(
-                                        uow, 
+                                        uow,
                                         self.parent.primary_base_mapper
                                         )
         child_saves = unitofwork.SaveUpdateAll(
-                                        uow, 
+                                        uow,
                                         self.mapper.primary_base_mapper
                                         )
 
         parent_deletes = unitofwork.DeleteAll(
-                                        uow, 
+                                        uow,
                                         self.parent.primary_base_mapper
                                         )
         child_deletes = unitofwork.DeleteAll(
-                                        uow, 
+                                        uow,
                                         self.mapper.primary_base_mapper
                                         )
 
-        self.per_property_dependencies(uow, 
-                                        parent_saves, 
-                                        child_saves, 
-                                        parent_deletes, 
-                                        child_deletes, 
-                                        after_save, 
+        self.per_property_dependencies(uow,
+                                        parent_saves,
+                                        child_saves,
+                                        parent_deletes,
+                                        child_deletes,
+                                        after_save,
                                         before_delete
                                         )
 
@@ -99,7 +99,7 @@ class DependencyProcessor(object):
     def per_state_flush_actions(self, uow, states, isdelete):
         """establish actions and dependencies related to a flush.
 
-        These actions will operate on all relevant states 
+        These actions will operate on all relevant states
         individually.    This occurs only if there are cycles
         in the 'aggregated' version of events.
 
@@ -141,14 +141,14 @@ class DependencyProcessor(object):
         # check if the "parent" side is part of the cycle
         if not isdelete:
             parent_saves = unitofwork.SaveUpdateAll(
-                                                uow, 
+                                                uow,
                                                 self.parent.base_mapper)
             parent_deletes = before_delete = None
             if parent_saves in uow.cycles:
                 parent_in_cycles = True
         else:
             parent_deletes = unitofwork.DeleteAll(
-                                                uow, 
+                                                uow,
                                                 self.parent.base_mapper)
             parent_saves = after_save = None
             if parent_deletes in uow.cycles:
@@ -165,19 +165,19 @@ class DependencyProcessor(object):
                 continue
 
             if isdelete:
-                before_delete = unitofwork.ProcessState(uow, 
+                before_delete = unitofwork.ProcessState(uow,
                                                     self, True, state)
                 if parent_in_cycles:
                     parent_deletes = unitofwork.DeleteState(
-                                                uow, 
-                                                state, 
+                                                uow,
+                                                state,
                                                 parent_base_mapper)
             else:
                 after_save = unitofwork.ProcessState(uow, self, False, state)
                 if parent_in_cycles:
                     parent_saves = unitofwork.SaveUpdateState(
-                                                uow, 
-                                                state, 
+                                                uow,
+                                                state,
                                                 parent_base_mapper)
 
             if child_in_cycles:
@@ -190,24 +190,24 @@ class DependencyProcessor(object):
                         if deleted:
                             child_action = (
                                             unitofwork.DeleteState(
-                                                        uow, child_state, 
-                                                        child_base_mapper), 
+                                                        uow, child_state,
+                                                        child_base_mapper),
                                             True)
                         else:
                             child_action = (
                                             unitofwork.SaveUpdateState(
-                                                        uow, child_state, 
-                                                        child_base_mapper), 
+                                                        uow, child_state,
+                                                        child_base_mapper),
                                             False)
                     child_actions.append(child_action)
 
             # establish dependencies between our possibly per-state
             # parent action and our possibly per-state child action.
             for child_action, childisdelete in child_actions:
-                self.per_state_dependencies(uow, parent_saves, 
-                                                parent_deletes, 
-                                                child_action, 
-                                                after_save, before_delete, 
+                self.per_state_dependencies(uow, parent_saves,
+                                                parent_deletes,
+                                                child_action,
+                                                after_save, before_delete,
                                                 isdelete, childisdelete)
 
 
@@ -232,12 +232,12 @@ class DependencyProcessor(object):
             passive = attributes.PASSIVE_OFF
 
         for s in states:
-            # TODO: add a high speed method 
+            # TODO: add a high speed method
             # to InstanceState which returns:  attribute
             # has a non-None value, or had one
             history = uowcommit.get_attribute_history(
-                                            s, 
-                                            self.key, 
+                                            s,
+                                            self.key,
                                             passive)
             if history and not history.empty():
                 return True
@@ -248,7 +248,7 @@ class DependencyProcessor(object):
 
     def _verify_canload(self, state):
         if state is not None and \
-            not self.mapper._canload(state, 
+            not self.mapper._canload(state,
                             allow_subtypes=not self.enable_typechecks):
             if self.mapper._canload(state, allow_subtypes=True):
                 raise exc.FlushError('Attempting to flush an item of type '
@@ -287,11 +287,11 @@ class DependencyProcessor(object):
             return None
 
         process_key = tuple(sorted(
-                        [self.key] + 
+                        [self.key] +
                         [p.key for p in self.prop._reverse_property]
                     ))
         return uow.memo(
-                            ('reverse_key', process_key), 
+                            ('reverse_key', process_key),
                             set
                         )
 
@@ -299,7 +299,7 @@ class DependencyProcessor(object):
         for x in related:
             if x is not None:
                 uowcommit.issue_post_update(
-                        state, 
+                        state,
                         [r for l, r in self.prop.synchronize_pairs]
                 )
                 break
@@ -312,21 +312,21 @@ class DependencyProcessor(object):
 
 class OneToManyDP(DependencyProcessor):
 
-    def per_property_dependencies(self, uow, parent_saves, 
-                                                child_saves, 
-                                                parent_deletes, 
-                                                child_deletes, 
-                                                after_save, 
+    def per_property_dependencies(self, uow, parent_saves,
+                                                child_saves,
+                                                parent_deletes,
+                                                child_deletes,
+                                                after_save,
                                                 before_delete,
                                                 ):
         if self.post_update:
             child_post_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.mapper.primary_base_mapper, 
+                                            uow,
+                                            self.mapper.primary_base_mapper,
                                             False)
             child_pre_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.mapper.primary_base_mapper, 
+                                            uow,
+                                            self.mapper.primary_base_mapper,
                                             True)
 
             uow.dependencies.update([
@@ -352,22 +352,22 @@ class OneToManyDP(DependencyProcessor):
                 (before_delete, child_deletes),
             ])
 
-    def per_state_dependencies(self, uow, 
-                                    save_parent, 
-                                    delete_parent, 
-                                    child_action, 
-                                    after_save, before_delete, 
+    def per_state_dependencies(self, uow,
+                                    save_parent,
+                                    delete_parent,
+                                    child_action,
+                                    after_save, before_delete,
                                     isdelete, childisdelete):
 
         if self.post_update:
 
             child_post_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.mapper.primary_base_mapper, 
+                                            uow,
+                                            self.mapper.primary_base_mapper,
                                             False)
             child_pre_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.mapper.primary_base_mapper, 
+                                            uow,
+                                            self.mapper.primary_base_mapper,
                                             True)
 
             # TODO: this whole block is not covered
@@ -393,7 +393,7 @@ class OneToManyDP(DependencyProcessor):
                 else:
                     uow.dependencies.update([
                         (before_delete, child_pre_updates),
-                        (child_pre_updates, delete_parent), 
+                        (child_pre_updates, delete_parent),
                     ])
         elif not isdelete:
             uow.dependencies.update([
@@ -408,16 +408,16 @@ class OneToManyDP(DependencyProcessor):
             ])
 
     def presort_deletes(self, uowcommit, states):
-        # head object is being deleted, and we manage its list of 
-        # child objects the child objects have to have their 
+        # head object is being deleted, and we manage its list of
+        # child objects the child objects have to have their
         # foreign key to the parent set to NULL
         should_null_fks = not self.cascade.delete and \
                             not self.passive_deletes == 'all'
 
         for state in states:
             history = uowcommit.get_attribute_history(
-                                            state, 
-                                            self.key, 
+                                            state,
+                                            self.key,
                                             self._passive_delete_flag)
             if history:
                 for child in history.deleted:
@@ -430,7 +430,7 @@ class OneToManyDP(DependencyProcessor):
                 if should_null_fks:
                     for child in history.unchanged:
                         if child is not None:
-                            uowcommit.register_object(child, 
+                            uowcommit.register_object(child,
                                     operation="delete", prop=self.prop)
 
 
@@ -447,25 +447,25 @@ class OneToManyDP(DependencyProcessor):
                 passive = attributes.PASSIVE_OFF
 
             history = uowcommit.get_attribute_history(
-                                            state, 
-                                            self.key, 
+                                            state,
+                                            self.key,
                                             passive)
             if history:
                 for child in history.added:
                     if child is not None:
-                        uowcommit.register_object(child, cancel_delete=True, 
-                                                    operation="add", 
+                        uowcommit.register_object(child, cancel_delete=True,
+                                                    operation="add",
                                                     prop=self.prop)
 
                 children_added.update(history.added)
 
                 for child in history.deleted:
                     if not self.cascade.delete_orphan:
-                        uowcommit.register_object(child, isdelete=False, 
-                                                    operation='delete', 
+                        uowcommit.register_object(child, isdelete=False,
+                                                    operation='delete',
                                                     prop=self.prop)
                     elif self.hasparent(child) is False:
-                        uowcommit.register_object(child, isdelete=True, 
+                        uowcommit.register_object(child, isdelete=True,
                                             operation="delete", prop=self.prop)
                         for c, m, st_, dct_ in self.mapper.cascade_iterator(
                                                     'delete', child):
@@ -478,16 +478,16 @@ class OneToManyDP(DependencyProcessor):
                     for child in history.unchanged:
                         if child is not None:
                             uowcommit.register_object(
-                                        child, 
-                                        False, 
+                                        child,
+                                        False,
                                         self.passive_updates,
                                         operation="pk change",
                                         prop=self.prop)
 
     def process_deletes(self, uowcommit, states):
-        # head object is being deleted, and we manage its list of 
-        # child objects the child objects have to have their foreign 
-        # key to the parent set to NULL this phase can be called 
+        # head object is being deleted, and we manage its list of
+        # child objects the child objects have to have their foreign
+        # key to the parent set to NULL this phase can be called
         # safely for any cascade but is unnecessary if delete cascade
         # is on.
 
@@ -496,17 +496,17 @@ class OneToManyDP(DependencyProcessor):
 
             for state in states:
                 history = uowcommit.get_attribute_history(
-                                            state, 
-                                            self.key, 
+                                            state,
+                                            self.key,
                                             self._passive_delete_flag)
                 if history:
                     for child in history.deleted:
                         if child is not None and \
                             self.hasparent(child) is False:
                             self._synchronize(
-                                            state, 
-                                            child, 
-                                            None, True, 
+                                            state,
+                                            child,
+                                            None, True,
                                             uowcommit, False)
                             if self.post_update and child:
                                 self._post_update(child, uowcommit, [state])
@@ -516,18 +516,18 @@ class OneToManyDP(DependencyProcessor):
                                             difference(children_added):
                             if child is not None:
                                 self._synchronize(
-                                            state, 
-                                            child, 
-                                            None, True, 
+                                            state,
+                                            child,
+                                            None, True,
                                             uowcommit, False)
                                 if self.post_update and child:
-                                    self._post_update(child, 
-                                                        uowcommit, 
+                                    self._post_update(child,
+                                                        uowcommit,
                                                         [state])
 
                     # technically, we can even remove each child from the
-                    # collection here too.  but this would be a somewhat 
-                    # inconsistent behavior since it wouldn't happen 
+                    # collection here too.  but this would be a somewhat
+                    # inconsistent behavior since it wouldn't happen
                     #if the old parent wasn't deleted but child was moved.
 
     def process_saves(self, uowcommit, states):
@@ -538,7 +538,7 @@ class OneToManyDP(DependencyProcessor):
                                       attributes.PASSIVE_NO_INITIALIZE)
             if history:
                 for child in history.added:
-                    self._synchronize(state, child, None, 
+                    self._synchronize(state, child, None,
                                         False, uowcommit, False)
                     if child is not None and self.post_update:
                         self._post_update(child, uowcommit, [state])
@@ -546,15 +546,15 @@ class OneToManyDP(DependencyProcessor):
                 for child in history.deleted:
                     if not self.cascade.delete_orphan and \
                         not self.hasparent(child):
-                        self._synchronize(state, child, None, True, 
+                        self._synchronize(state, child, None, True,
                                                     uowcommit, False)
 
                 if self._pks_changed(uowcommit, state):
                     for child in history.unchanged:
-                        self._synchronize(state, child, None, 
+                        self._synchronize(state, child, None,
                                                 False, uowcommit, True)
 
-    def _synchronize(self, state, child, 
+    def _synchronize(self, state, child,
                             associationrow, clearkeys, uowcommit,
                             pks_changed):
         source = state
@@ -566,15 +566,15 @@ class OneToManyDP(DependencyProcessor):
         if clearkeys:
             sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
         else:
-            sync.populate(source, self.parent, dest, self.mapper, 
+            sync.populate(source, self.parent, dest, self.mapper,
                                     self.prop.synchronize_pairs, uowcommit,
                                     self.passive_updates and pks_changed)
 
     def _pks_changed(self, uowcommit, state):
         return sync.source_modified(
-                            uowcommit, 
-                            state, 
-                            self.parent, 
+                            uowcommit,
+                            state,
+                            self.parent,
                             self.prop.synchronize_pairs)
 
 class ManyToOneDP(DependencyProcessor):
@@ -582,22 +582,22 @@ class ManyToOneDP(DependencyProcessor):
         DependencyProcessor.__init__(self, prop)
         self.mapper._dependency_processors.append(DetectKeySwitch(prop))
 
-    def per_property_dependencies(self, uow, 
-                                        parent_saves, 
-                                        child_saves, 
-                                        parent_deletes, 
-                                        child_deletes, 
-                                        after_save, 
+    def per_property_dependencies(self, uow,
+                                        parent_saves,
+                                        child_saves,
+                                        parent_deletes,
+                                        child_deletes,
+                                        after_save,
                                         before_delete):
 
         if self.post_update:
             parent_post_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.parent.primary_base_mapper, 
+                                            uow,
+                                            self.parent.primary_base_mapper,
                                             False)
             parent_pre_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.parent.primary_base_mapper, 
+                                            uow,
+                                            self.parent.primary_base_mapper,
                                             True)
 
             uow.dependencies.update([
@@ -618,19 +618,19 @@ class ManyToOneDP(DependencyProcessor):
                 (parent_deletes, child_deletes)
             ])
 
-    def per_state_dependencies(self, uow, 
-                                    save_parent, 
-                                    delete_parent, 
-                                    child_action, 
-                                    after_save, before_delete, 
+    def per_state_dependencies(self, uow,
+                                    save_parent,
+                                    delete_parent,
+                                    child_action,
+                                    after_save, before_delete,
                                     isdelete, childisdelete):
 
         if self.post_update:
 
             if not isdelete:
                 parent_post_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.parent.primary_base_mapper, 
+                                            uow,
+                                            self.parent.primary_base_mapper,
                                             False)
                 if childisdelete:
                     uow.dependencies.update([
@@ -646,8 +646,8 @@ class ManyToOneDP(DependencyProcessor):
                     ])
             else:
                 parent_pre_updates = unitofwork.IssuePostUpdate(
-                                            uow, 
-                                            self.parent.primary_base_mapper, 
+                                            uow,
+                                            self.parent.primary_base_mapper,
                                             True)
 
                 uow.dependencies.update([
@@ -677,8 +677,8 @@ class ManyToOneDP(DependencyProcessor):
         if self.cascade.delete or self.cascade.delete_orphan:
             for state in states:
                 history = uowcommit.get_attribute_history(
-                                        state, 
-                                        self.key, 
+                                        state,
+                                        self.key,
                                         self._passive_delete_flag)
                 if history:
                     if self.cascade.delete_orphan:
@@ -688,7 +688,7 @@ class ManyToOneDP(DependencyProcessor):
                     for child in todelete:
                         if child is None:
                             continue
-                        uowcommit.register_object(child, isdelete=True, 
+                        uowcommit.register_object(child, isdelete=True,
                                         operation="delete", prop=self.prop)
                         for c, m, st_, dct_ in self.mapper.cascade_iterator(
                                                             'delete', child):
@@ -700,13 +700,13 @@ class ManyToOneDP(DependencyProcessor):
             uowcommit.register_object(state, operation="add", prop=self.prop)
             if self.cascade.delete_orphan:
                 history = uowcommit.get_attribute_history(
-                                        state, 
-                                        self.key, 
+                                        state,
+                                        self.key,
                                         self._passive_delete_flag)
                 if history:
                     for child in history.deleted:
                         if self.hasparent(child) is False:
-                            uowcommit.register_object(child, isdelete=True, 
+                            uowcommit.register_object(child, isdelete=True,
                                         operation="delete", prop=self.prop)
 
                             for c, m, st_, dct_ in self.mapper.cascade_iterator(
@@ -720,15 +720,15 @@ class ManyToOneDP(DependencyProcessor):
                 not self.cascade.delete_orphan and \
                 not self.passive_deletes == 'all':
 
-            # post_update means we have to update our 
+            # post_update means we have to update our
             # row to not reference the child object
             # before we can DELETE the row
             for state in states:
                 self._synchronize(state, None, None, True, uowcommit)
                 if state and self.post_update:
                     history = uowcommit.get_attribute_history(
-                                                state, 
-                                                self.key, 
+                                                state,
+                                                self.key,
                                                 self._passive_delete_flag)
                     if history:
                         self._post_update(state, uowcommit, history.sum())
@@ -736,12 +736,12 @@ class ManyToOneDP(DependencyProcessor):
     def process_saves(self, uowcommit, states):
         for state in states:
             history = uowcommit.get_attribute_history(
-                                    state, 
+                                    state,
                                     self.key,
                                     attributes.PASSIVE_NO_INITIALIZE)
             if history:
                 for child in history.added:
-                    self._synchronize(state, child, None, False, 
+                    self._synchronize(state, child, None, False,
                                             uowcommit, "add")
 
                 if self.post_update:
@@ -758,7 +758,7 @@ class ManyToOneDP(DependencyProcessor):
             not uowcommit.session._contains_state(child):
             util.warn(
                 "Object of type %s not in session, %s "
-                "operation along '%s' won't proceed" % 
+                "operation along '%s' won't proceed" %
                 (mapperutil.state_class_str(child), operation, self.prop))
             return
 
@@ -766,14 +766,14 @@ class ManyToOneDP(DependencyProcessor):
             sync.clear(state, self.parent, self.prop.synchronize_pairs)
         else:
             self._verify_canload(child)
-            sync.populate(child, self.mapper, state, 
-                            self.parent, 
-                            self.prop.synchronize_pairs, 
+            sync.populate(child, self.mapper, state,
+                            self.parent,
+                            self.prop.synchronize_pairs,
                             uowcommit,
-                            False) 
+                            False)
 
 class DetectKeySwitch(DependencyProcessor):
-    """For many-to-one relationships with no one-to-many backref, 
+    """For many-to-one relationships with no one-to-many backref,
     searches for parents through the unit of work when a primary
     key has changed and updates them.
 
@@ -797,7 +797,7 @@ class DetectKeySwitch(DependencyProcessor):
 
     def per_property_flush_actions(self, uow):
         parent_saves = unitofwork.SaveUpdateAll(
-                                        uow, 
+                                        uow,
                                         self.parent.base_mapper)
         after_save = unitofwork.ProcessAll(uow, self, False, False)
         uow.dependencies.update([
@@ -836,7 +836,7 @@ class DetectKeySwitch(DependencyProcessor):
 
     def _key_switchers(self, uow, states):
         switched, notswitched = uow.memo(
-                                        ('pk_switchers', self), 
+                                        ('pk_switchers', self),
                                         lambda: (set(), set())
                                     )
 
@@ -864,29 +864,29 @@ class DetectKeySwitch(DependencyProcessor):
                     related is not None:
                     related_state = attributes.instance_state(dict_[self.key])
                     if related_state in switchers:
-                        uowcommit.register_object(state, 
-                                                    False, 
+                        uowcommit.register_object(state,
+                                                    False,
                                                     self.passive_updates)
                         sync.populate(
-                                    related_state, 
-                                    self.mapper, state, 
-                                    self.parent, self.prop.synchronize_pairs, 
+                                    related_state,
+                                    self.mapper, state,
+                                    self.parent, self.prop.synchronize_pairs,
                                     uowcommit, self.passive_updates)
 
     def _pks_changed(self, uowcommit, state):
-        return bool(state.key) and sync.source_modified(uowcommit, 
-                                    state, 
-                                    self.mapper, 
+        return bool(state.key) and sync.source_modified(uowcommit,
+                                    state,
+                                    self.mapper,
                                     self.prop.synchronize_pairs)
 
 
 class ManyToManyDP(DependencyProcessor):
 
-    def per_property_dependencies(self, uow, parent_saves, 
-                                                child_saves, 
-                                                parent_deletes, 
-                                                child_deletes, 
-                                                after_save, 
+    def per_property_dependencies(self, uow, parent_saves,
+                                                child_saves,
+                                                parent_deletes,
+                                                child_deletes,
+                                                after_save,
                                                 before_delete
                                                 ):
 
@@ -895,9 +895,9 @@ class ManyToManyDP(DependencyProcessor):
             (child_saves, after_save),
             (after_save, child_deletes),
 
-            # a rowswitch on the parent from  deleted to saved 
-            # can make this one occur, as the "save" may remove 
-            # an element from the 
+            # a rowswitch on the parent from  deleted to saved
+            # can make this one occur, as the "save" may remove
+            # an element from the
             # "deleted" list before we have a chance to
             # process its child rows
             (before_delete, parent_saves),
@@ -907,11 +907,11 @@ class ManyToManyDP(DependencyProcessor):
             (before_delete, child_saves),
         ])
 
-    def per_state_dependencies(self, uow, 
-                                    save_parent, 
-                                    delete_parent, 
-                                    child_action, 
-                                    after_save, before_delete, 
+    def per_state_dependencies(self, uow,
+                                    save_parent,
+                                    delete_parent,
+                                    child_action,
+                                    after_save, before_delete,
                                     isdelete, childisdelete):
         if not isdelete:
             if childisdelete:
@@ -934,25 +934,25 @@ class ManyToManyDP(DependencyProcessor):
         # TODO: no tests fail if this whole
         # thing is removed !!!!
         if not self.passive_deletes:
-            # if no passive deletes, load history on 
+            # if no passive deletes, load history on
             # the collection, so that prop_has_changes()
             # returns True
             for state in states:
                 uowcommit.get_attribute_history(
-                                        state, 
-                                        self.key, 
+                                        state,
+                                        self.key,
                                         self._passive_delete_flag)
 
     def presort_saves(self, uowcommit, states):
         if not self.passive_updates:
-            # if no passive updates, load history on 
+            # if no passive updates, load history on
             # each collection where parent has changed PK,
             # so that prop_has_changes() returns True
             for state in states:
                 if self._pks_changed(uowcommit, state):
                     history = uowcommit.get_attribute_history(
-                                        state, 
-                                        self.key, 
+                                        state,
+                                        self.key,
                                         attributes.PASSIVE_OFF)
 
         if not self.cascade.delete_orphan:
@@ -962,16 +962,16 @@ class ManyToManyDP(DependencyProcessor):
         # if delete_orphan check is turned on.
         for state in states:
             history = uowcommit.get_attribute_history(
-                                        state, 
-                                        self.key, 
+                                        state,
+                                        self.key,
                                         attributes.PASSIVE_NO_INITIALIZE)
             if history:
                 for child in history.deleted:
                     if self.hasparent(child) is False:
-                        uowcommit.register_object(child, isdelete=True, 
+                        uowcommit.register_object(child, isdelete=True,
                                             operation="delete", prop=self.prop)
                         for c, m, st_, dct_ in self.mapper.cascade_iterator(
-                                                    'delete', 
+                                                    'delete',
                                                     child):
                             uowcommit.register_object(
                                 st_, isdelete=True)
@@ -984,23 +984,23 @@ class ManyToManyDP(DependencyProcessor):
         processed = self._get_reversed_processed_set(uowcommit)
         tmp = set()
         for state in states:
-            # this history should be cached already, as 
+            # this history should be cached already, as
             # we loaded it in preprocess_deletes
             history = uowcommit.get_attribute_history(
-                                    state, 
-                                    self.key, 
+                                    state,
+                                    self.key,
                                     self._passive_delete_flag)
             if history:
                 for child in history.non_added():
                     if child is None or \
-                        (processed is not None and 
+                        (processed is not None and
                             (state, child) in processed):
                         continue
                     associationrow = {}
                     if not self._synchronize(
-                                        state, 
-                                        child, 
-                                        associationrow, 
+                                        state,
+                                        child,
+                                        associationrow,
                                         False, uowcommit, "delete"):
                         continue
                     secondary_delete.append(associationrow)
@@ -1010,7 +1010,7 @@ class ManyToManyDP(DependencyProcessor):
         if processed is not None:
             processed.update(tmp)
 
-        self._run_crud(uowcommit, secondary_insert, 
+        self._run_crud(uowcommit, secondary_insert,
                         secondary_update, secondary_delete)
 
     def process_saves(self, uowcommit, states):
@@ -1023,7 +1023,7 @@ class ManyToManyDP(DependencyProcessor):
 
         for state in states:
             need_cascade_pks = not self.passive_updates and \
-                                self._pks_changed(uowcommit, state) 
+                                self._pks_changed(uowcommit, state)
             if need_cascade_pks:
                 passive = attributes.PASSIVE_OFF
             else:
@@ -1033,45 +1033,45 @@ class ManyToManyDP(DependencyProcessor):
             if history:
                 for child in history.added:
                     if child is None or \
-                            (processed is not None and 
+                            (processed is not None and
                                 (state, child) in processed):
                         continue
                     associationrow = {}
-                    if not self._synchronize(state, 
-                                        child, 
-                                        associationrow, 
+                    if not self._synchronize(state,
+                                        child,
+                                        associationrow,
                                         False, uowcommit, "add"):
                         continue
                     secondary_insert.append(associationrow)
                 for child in history.deleted:
                     if child is None or \
-                            (processed is not None and 
+                            (processed is not None and
                             (state, child) in processed):
                         continue
                     associationrow = {}
-                    if not self._synchronize(state, 
-                                        child, 
-                                        associationrow, 
+                    if not self._synchronize(state,
+                                        child,
+                                        associationrow,
                                         False, uowcommit, "delete"):
                         continue
                     secondary_delete.append(associationrow)
 
-                tmp.update((c, state) 
+                tmp.update((c, state)
                             for c in history.added + history.deleted)
 
                 if need_cascade_pks:
 
                     for child in history.unchanged:
                         associationrow = {}
-                        sync.update(state, 
-                                    self.parent, 
-                                    associationrow, 
-                                    "old_", 
+                        sync.update(state,
+                                    self.parent,
+                                    associationrow,
+                                    "old_",
                                     self.prop.synchronize_pairs)
-                        sync.update(child, 
-                                    self.mapper, 
-                                    associationrow, 
-                                    "old_", 
+                        sync.update(child,
+                                    self.mapper,
+                                    associationrow,
+                                    "old_",
                                     self.prop.secondary_synchronize_pairs)
 
                         secondary_update.append(associationrow)
@@ -1079,18 +1079,18 @@ class ManyToManyDP(DependencyProcessor):
         if processed is not None:
             processed.update(tmp)
 
-        self._run_crud(uowcommit, secondary_insert, 
+        self._run_crud(uowcommit, secondary_insert,
                         secondary_update, secondary_delete)
 
-    def _run_crud(self, uowcommit, secondary_insert, 
+    def _run_crud(self, uowcommit, secondary_insert,
                                         secondary_update, secondary_delete):
         connection = uowcommit.transaction.connection(self.mapper)
 
         if secondary_delete:
             associationrow = secondary_delete[0]
             statement = self.secondary.delete(sql.and_(*[
-                                c == sql.bindparam(c.key, type_=c.type) 
-                                for c in self.secondary.c 
+                                c == sql.bindparam(c.key, type_=c.type)
+                                for c in self.secondary.c
                                 if c.key in associationrow
                             ]))
             result = connection.execute(statement, secondary_delete)
@@ -1099,7 +1099,7 @@ class ManyToManyDP(DependencyProcessor):
                         result.rowcount != len(secondary_delete):
                 raise exc.StaleDataError(
                         "DELETE statement on table '%s' expected to delete %d row(s); "
-                        "Only %d were matched." % 
+                        "Only %d were matched." %
                         (self.secondary.description, len(secondary_delete),
                         result.rowcount)
                     )
@@ -1107,8 +1107,8 @@ class ManyToManyDP(DependencyProcessor):
         if secondary_update:
             associationrow = secondary_update[0]
             statement = self.secondary.update(sql.and_(*[
-                            c == sql.bindparam("old_" + c.key, type_=c.type) 
-                            for c in self.secondary.c 
+                            c == sql.bindparam("old_" + c.key, type_=c.type)
+                            for c in self.secondary.c
                             if c.key in associationrow
                         ]))
             result = connection.execute(statement, secondary_update)
@@ -1116,7 +1116,7 @@ class ManyToManyDP(DependencyProcessor):
                         result.rowcount != len(secondary_update):
                 raise exc.StaleDataError(
                         "UPDATE statement on table '%s' expected to update %d row(s); "
-                        "Only %d were matched." % 
+                        "Only %d were matched." %
                         (self.secondary.description, len(secondary_update),
                         result.rowcount)
                     )
@@ -1125,7 +1125,7 @@ class ManyToManyDP(DependencyProcessor):
             statement = self.secondary.insert()
             connection.execute(statement, secondary_insert)
 
-    def _synchronize(self, state, child, associationrow, 
+    def _synchronize(self, state, child, associationrow,
                                             clearkeys, uowcommit, operation):
         if associationrow is None:
             return
@@ -1134,13 +1134,13 @@ class ManyToManyDP(DependencyProcessor):
             if not child.deleted:
                 util.warn(
                     "Object of type %s not in session, %s "
-                    "operation along '%s' won't proceed" % 
+                    "operation along '%s' won't proceed" %
                     (mapperutil.state_class_str(child), operation, self.prop))
             return False
 
         self._verify_canload(child)
 
-        sync.populate_dict(state, self.parent, associationrow, 
+        sync.populate_dict(state, self.parent, associationrow,
                                         self.prop.synchronize_pairs)
         sync.populate_dict(child, self.mapper, associationrow,
                                         self.prop.secondary_synchronize_pairs)
@@ -1149,9 +1149,9 @@ class ManyToManyDP(DependencyProcessor):
 
     def _pks_changed(self, uowcommit, state):
         return sync.source_modified(
-                            uowcommit, 
-                            state, 
-                            self.parent, 
+                            uowcommit,
+                            state,
+                            self.parent,
                             self.prop.synchronize_pairs)
 
 _direction_to_processor = {
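
ManyToManyDP._run_crud() above drives the association table with bulk
INSERT/UPDATE/DELETE: a single statement keyed on bindparam() names is executed
against a whole list of association rows, and StaleDataError is raised when the
matched rowcount disagrees with the expected count (only on dialects that
report a sane multi-row rowcount).  A minimal standalone Core sketch of that
DELETE pattern, against a hypothetical parent_child association table on
in-memory SQLite (not code from this commit):

    from sqlalchemy import (MetaData, Table, Column, Integer, create_engine,
                            bindparam, and_)

    metadata = MetaData()
    # hypothetical association table standing in for self.secondary
    parent_child = Table('parent_child', metadata,
                         Column('parent_id', Integer, primary_key=True),
                         Column('child_id', Integer, primary_key=True))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(parent_child.insert(),
                     [{'parent_id': 1, 'child_id': 10},
                      {'parent_id': 1, 'child_id': 11}])

        # one DELETE, executed for many association rows at once; values
        # arrive through bindparam() names matching the column keys, much as
        # _run_crud builds them from each associationrow dictionary
        stmt = parent_child.delete().where(and_(
            parent_child.c.parent_id == bindparam('parent_id'),
            parent_child.c.child_id == bindparam('child_id')))
        result = conn.execute(stmt,
                              [{'parent_id': 1, 'child_id': 10},
                               {'parent_id': 1, 'child_id': 11}])

        # _run_crud compares rowcount against the expected number of rows and
        # raises StaleDataError on mismatch, guarded by
        # supports_sane_multi_rowcount since many DBAPIs return -1 here
        if conn.dialect.supports_sane_multi_rowcount and result.rowcount != 2:
            raise RuntimeError("expected 2 rows deleted, matched %d"
                               % result.rowcount)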
lib/sqlalchemy/orm/interfaces.py
index 7fa8426b11b2fb2cfad40c7ce941acdeef5d6936..1b1d7dfa96f637b32fa44ae18fae74b02ccd3537 100644 (file)
@@ -61,13 +61,13 @@ class MapperProperty(object):
     attribute, as well as that attribute as it appears on individual
     instances of the class, including attribute instrumentation,
     attribute access, loading behavior, and dependency calculations.
-    
+
     The most common occurrences of :class:`.MapperProperty` are the
-    mapped :class:`.Column`, which is represented in a mapping as 
+    mapped :class:`.Column`, which is represented in a mapping as
     an instance of :class:`.ColumnProperty`,
     and a reference to another class produced by :func:`.relationship`,
     represented in the mapping as an instance of :class:`.RelationshipProperty`.
-    
+
     """
 
     cascade = ()
@@ -87,7 +87,7 @@ class MapperProperty(object):
 
         pass
 
-    def create_row_processor(self, context, path, 
+    def create_row_processor(self, context, path,
                                             mapper, row, adapter):
         """Return a 3-tuple consisting of three row processing functions.
 
@@ -153,8 +153,6 @@ class MapperProperty(object):
         """
         pass
 
-    def per_property_preprocessors(self, uow):
-        pass
 
     def is_primary(self):
         """Return True if this ``MapperProperty``'s mapper is the
@@ -263,13 +261,13 @@ class PropComparator(operators.ColumnOperators):
         """Return true if this collection contains any member that meets the
         given criterion.
 
-        The usual implementation of ``any()`` is 
+        The usual implementation of ``any()`` is
         :meth:`.RelationshipProperty.Comparator.any`.
 
-        :param criterion: an optional ClauseElement formulated against the 
+        :param criterion: an optional ClauseElement formulated against the
           member class' table or attributes.
 
-        :param \**kwargs: key/value pairs corresponding to member class attribute 
+        :param \**kwargs: key/value pairs corresponding to member class attribute
           names which will be compared via equality to the corresponding
           values.
 
@@ -281,13 +279,13 @@ class PropComparator(operators.ColumnOperators):
         """Return true if this element references a member which meets the
         given criterion.
 
-        The usual implementation of ``has()`` is 
+        The usual implementation of ``has()`` is
         :meth:`.RelationshipProperty.Comparator.has`.
 
-        :param criterion: an optional ClauseElement formulated against the 
+        :param criterion: an optional ClauseElement formulated against the
           member class' table or attributes.
 
-        :param \**kwargs: key/value pairs corresponding to member class attribute 
+        :param \**kwargs: key/value pairs corresponding to member class attribute
           names which will be compared via equality to the corresponding
           values.
 
@@ -345,12 +343,12 @@ class StrategizedProperty(MapperProperty):
 
     def setup(self, context, entity, path, adapter, **kwargs):
         self._get_context_strategy(context, path).\
-                    setup_query(context, entity, path, 
+                    setup_query(context, entity, path,
                                     adapter, **kwargs)
 
     def create_row_processor(self, context, path, mapper, row, adapter):
         return self._get_context_strategy(context, path).\
-                    create_row_processor(context, path, 
+                    create_row_processor(context, path,
                                     mapper, row, adapter)
 
     def do_init(self):
@@ -366,7 +364,7 @@ class MapperOption(object):
     """Describe a modification to a Query."""
 
     propagate_to_loaders = False
-    """if True, indicate this option should be carried along 
+    """if True, indicate this option should be carried along
     Query object generated by scalar or object lazy loaders.
     """
 
@@ -447,9 +445,9 @@ class PropertyOption(MapperOption):
                 else:
                     raise sa_exc.ArgumentError(
                         "Can't find property '%s' on any entity "
-                        "specified in this Query.  Note the full path " 
-                        "from root (%s) to target entity must be specified." 
-                        % (token, ",".join(str(x) for 
+                        "specified in this Query.  Note the full path "
+                        "from root (%s) to target entity must be specified."
+                        % (token, ",".join(str(x) for
                             x in query._mapper_entities))
                     )
             else:
@@ -474,16 +472,16 @@ class PropertyOption(MapperOption):
     def _process_paths(self, query, raiseerr):
         """reconcile the 'key' for this PropertyOption with
         the current path and entities of the query.
-        
+
         Return a list of affected paths.
-        
+
         """
         path = orm_util.PathRegistry.root
         entity = None
         paths = []
         no_result = []
 
-        # _current_path implies we're in a 
+        # _current_path implies we're in a
         # secondary load with an existing path
         current_path = list(query._current_path.path)
 
@@ -509,8 +507,8 @@ class PropertyOption(MapperOption):
 
                 if not entity:
                     entity = self._find_entity_basestring(
-                                        query, 
-                                        token, 
+                                        query,
+                                        token,
                                         raiseerr)
                     if entity is None:
                         return no_result
@@ -544,8 +542,8 @@ class PropertyOption(MapperOption):
                 if not entity:
                     entity = self._find_entity_prop_comparator(
                                             query,
-                                            prop.key, 
-                                            token.parententity, 
+                                            prop.key,
+                                            token.parententity,
                                             raiseerr)
                     if not entity:
                         return no_result
@@ -570,7 +568,7 @@ class PropertyOption(MapperOption):
                 path_element = mapper = ext_info.mapper
                 if not ext_info.is_aliased_class:
                     ac = orm_util.with_polymorphic(
-                                ext_info.mapper.base_mapper, 
+                                ext_info.mapper.base_mapper,
                                 ext_info.mapper, aliased=True)
                     ext_info = orm_util._extended_entity_info(ac)
                 path.set(query, "path_with_polymorphic", ext_info)
@@ -584,7 +582,7 @@ class PropertyOption(MapperOption):
                     )
 
         if current_path:
-            # ran out of tokens before 
+            # ran out of tokens before
             # current_path was exhausted.
             assert not tokens
             return no_result
@@ -652,7 +650,7 @@ class LoaderStrategy(object):
     def setup_query(self, context, entity, path, adapter, **kwargs):
         pass
 
-    def create_row_processor(self, context, path, mapper, 
+    def create_row_processor(self, context, path, mapper,
                                 row, adapter):
         """Return row processing functions which fulfill the contract
         specified by MapperProperty.create_row_processor.
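
The any() and has() comparators documented in this file are most commonly
reached through relationship()-bound attributes, where they are implemented by
RelationshipProperty.Comparator as the docstrings above note.  A small
self-contained usage sketch (the Parent/Child mapping is hypothetical, not part
of this commit):

    from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import relationship, sessionmaker
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        children = relationship("Child", backref="parent")

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        parent_id = Column(Integer, ForeignKey('parent.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.add(Parent(name='p1',
                       children=[Child(name='c1'), Child(name='c2')]))
    session.commit()

    # any(): collection contains at least one member meeting the criterion
    print(session.query(Parent)
                 .filter(Parent.children.any(Child.name == 'c2')).all())

    # has(): scalar reference meets the criterion; keyword arguments are
    # compared by equality against member-class attributes, per the docstring
    print(session.query(Child).filter(Child.parent.has(name='p1')).all())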
lib/sqlalchemy/orm/mapper.py
index 3339564076b20fc860a9e5f7ed979097d955d508..6c7907d497d96f9d91b40eab49a91234c05ac15b 100644 (file)
@@ -22,7 +22,7 @@ from collections import deque
 from .. import sql, util, log, exc as sa_exc, event, schema
 from ..sql import expression, visitors, operators, util as sql_util
 from . import instrumentation, attributes, \
-                        exc as orm_exc, unitofwork, events, loading
+                        exc as orm_exc, events, loading
 from .interfaces import MapperProperty
 
 from .util import _INSTRUMENTOR, _class_to_mapper, \
@@ -112,7 +112,7 @@ class Mapper(object):
         self.always_refresh = always_refresh
         self.version_id_col = version_id_col
         self.version_id_generator = version_id_generator or \
-                                        (lambda x:(x or 0) + 1)
+                                        (lambda x: (x or 0) + 1)
         self.concrete = concrete
         self.single = False
         self.inherits = inherits
@@ -124,7 +124,8 @@ class Mapper(object):
         self.batch = batch
         self.eager_defaults = eager_defaults
         self.column_prefix = column_prefix
-        self.polymorphic_on = expression._clause_element_as_expr(polymorphic_on)
+        self.polymorphic_on = expression._clause_element_as_expr(
+                                                polymorphic_on)
         self._dependency_processors = []
         self.validators = util.immutabledict()
         self.passive_updates = passive_updates
@@ -1835,34 +1836,6 @@ class Mapper(object):
             ret[t] = table_to_mapper[t]
         return ret
 
-    def _per_mapper_flush_actions(self, uow):
-        saves = unitofwork.SaveUpdateAll(uow, self.base_mapper)
-        deletes = unitofwork.DeleteAll(uow, self.base_mapper)
-        uow.dependencies.add((saves, deletes))
-
-        for dep in self._dependency_processors:
-            dep.per_property_preprocessors(uow)
-
-        for prop in self._props.values():
-            prop.per_property_preprocessors(uow)
-
-    def _per_state_flush_actions(self, uow, states, isdelete):
-
-        base_mapper = self.base_mapper
-        save_all = unitofwork.SaveUpdateAll(uow, base_mapper)
-        delete_all = unitofwork.DeleteAll(uow, base_mapper)
-        for state in states:
-            # keep saves before deletes -
-            # this ensures 'row switch' operations work
-            if isdelete:
-                action = unitofwork.DeleteState(uow, state, base_mapper)
-                uow.dependencies.add((save_all, action))
-            else:
-                action = unitofwork.SaveUpdateState(uow, state, base_mapper)
-                uow.dependencies.add((action, delete_all))
-
-            yield action
-
     def _memo(self, key, callable_):
         if key in self._memoized_values:
             return self._memoized_values[key]
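
The two methods deleted above reappear, reworked, on UOWTransaction and on the
SaveUpdateAll / DeleteAll records later in this commit.  As a rough orientation
aid only (a standalone toy sketch with stand-in classes, not SQLAlchemy code),
the inversion amounts to the unit of work driving per-mapper flush setup instead
of asking each mapper to do it:

    class ToyMapper(object):
        """Stand-in for Mapper; only what the sketch needs."""
        def __init__(self, name):
            self.name = name

    class ToyUOW(object):
        """Stand-in for UOWTransaction."""
        def __init__(self):
            self.dependencies = set()    # (before, after) ordering edges

        def _per_mapper_flush_actions(self, mapper):
            # previously spelled mapper._per_mapper_flush_actions(uow)
            saves = ('SaveUpdateAll', mapper.name)
            deletes = ('DeleteAll', mapper.name)
            self.dependencies.add((saves, deletes))   # saves sort before deletes

    uow = ToyUOW()
    uow._per_mapper_flush_actions(ToyMapper('User'))
    assert (('SaveUpdateAll', 'User'), ('DeleteAll', 'User')) in uow.dependencies
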
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index b17a589fb081e8d1359392dbbc52afe1d38ea1bf..efad54839280b46be321250d06b37049a00e48e7 100644
@@ -14,27 +14,24 @@ mapped attributes.
 from .. import sql, util, log, exc as sa_exc
 from ..sql import operators, expression
 from . import (
-    attributes, dependency, mapper, 
-    strategies, configure_mappers, relationships
-    )
-from .util import (
-    CascadeOptions, \
-        _orm_annotate, _orm_deannotate, _orm_full_deannotate,
-        _entity_info
+    attributes, mapper,
+    strategies, configure_mappers, relationships,
+    dependency
     )
+from .util import CascadeOptions, \
+        _orm_annotate, _orm_deannotate, _orm_full_deannotate, _entity_info
+
+from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY,\
+        PropComparator, StrategizedProperty
 
-from .interfaces import (
-    MANYTOMANY, MANYTOONE, MapperProperty, ONETOMANY, 
-    PropComparator, StrategizedProperty
-    )
 mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
 NoneType = type(None)
 
 from descriptor_props import CompositeProperty, SynonymProperty, \
-            ComparableProperty,ConcreteInheritedProperty
+            ComparableProperty, ConcreteInheritedProperty
 
-__all__ = ('ColumnProperty', 'CompositeProperty', 'SynonymProperty',
-           'ComparableProperty', 'RelationshipProperty', 'RelationProperty')
+__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
+           'ComparableProperty', 'RelationshipProperty', 'RelationProperty']
 
 
 class ColumnProperty(StrategizedProperty):
@@ -68,7 +65,7 @@ class ColumnProperty(StrategizedProperty):
 
         """
         self._orig_columns = [expression._labeled(c) for c in columns]
-        self.columns = [expression._labeled(_orm_full_deannotate(c)) 
+        self.columns = [expression._labeled(_orm_full_deannotate(c))
                             for c in columns]
         self.group = kwargs.pop('group', None)
         self.deferred = kwargs.pop('deferred', False)
@@ -94,7 +91,7 @@ class ColumnProperty(StrategizedProperty):
         if kwargs:
             raise TypeError(
                 "%s received unexpected keyword argument(s): %s" % (
-                    self.__class__.__name__, 
+                    self.__class__.__name__,
                     ', '.join(sorted(kwargs.keys()))))
 
         util.set_creation_order(self)
@@ -117,9 +114,9 @@ class ColumnProperty(StrategizedProperty):
             return
 
         attributes.register_descriptor(
-            mapper.class_, 
-            self.key, 
-            comparator=self.comparator_factory(self, mapper), 
+            mapper.class_,
+            self.key,
+            comparator=self.comparator_factory(self, mapper),
             parententity=mapper,
             doc=self.doc
             )
@@ -137,17 +134,17 @@ class ColumnProperty(StrategizedProperty):
 
     def copy(self):
         return ColumnProperty(
-                        deferred=self.deferred, 
-                        group=self.group, 
+                        deferred=self.deferred,
+                        group=self.group,
                         active_history=self.active_history,
                         *self.columns)
 
-    def _getcommitted(self, state, dict_, column, 
+    def _getcommitted(self, state, dict_, column,
                     passive=attributes.PASSIVE_OFF):
         return state.get_impl(self.key).\
                     get_committed_value(state, dict_, passive=passive)
 
-    def merge(self, session, source_state, source_dict, dest_state, 
+    def merge(self, session, source_state, source_dict, dest_state,
                                 dest_dict, load, _recursive):
         if not self.instrument:
             return
@@ -203,7 +200,7 @@ class RelationshipProperty(StrategizedProperty):
 
     def __init__(self, argument,
         secondary=None, primaryjoin=None,
-        secondaryjoin=None, 
+        secondaryjoin=None,
         foreign_keys=None,
         uselist=None,
         order_by=False,
@@ -221,7 +218,7 @@ class RelationshipProperty(StrategizedProperty):
         active_history=False,
         cascade_backrefs=True,
         load_on_pending=False,
-        strategy_class=None, _local_remote_pairs=None, 
+        strategy_class=None, _local_remote_pairs=None,
         query_class=None):
 
         self.uselist = uselist
@@ -270,7 +267,7 @@ class RelationshipProperty(StrategizedProperty):
             self.cascade = CascadeOptions("save-update, merge")
 
         if self.passive_deletes == 'all' and \
-                    ("delete" in self.cascade or 
+                    ("delete" in self.cascade or
                     "delete-orphan" in self.cascade):
             raise sa_exc.ArgumentError(
                             "Can't set passive_deletes='all' in conjunction "
@@ -291,9 +288,9 @@ class RelationshipProperty(StrategizedProperty):
 
     def instrument_class(self, mapper):
         attributes.register_descriptor(
-            mapper.class_, 
-            self.key, 
-            comparator=self.comparator_factory(self, mapper), 
+            mapper.class_,
+            self.key,
+            comparator=self.comparator_factory(self, mapper),
             parententity=mapper,
             doc=self.doc,
             )
@@ -344,12 +341,12 @@ class RelationshipProperty(StrategizedProperty):
 
             """
             return RelationshipProperty.Comparator(
-                                        self.property, 
-                                        self.mapper, 
+                                        self.property,
+                                        self.mapper,
                                         cls, adapter=self.adapter)
 
         def in_(self, other):
-            """Produce an IN clause - this is not implemented 
+            """Produce an IN clause - this is not implemented
             for :func:`~.orm.relationship`-based attributes at this time.
 
             """
@@ -371,7 +368,7 @@ class RelationshipProperty(StrategizedProperty):
 
               mytable.related_id == <some id>
 
-            Where ``<some id>`` is the primary key of the given 
+            Where ``<some id>`` is the primary key of the given
             object.
 
             The ``==`` operator provides partial functionality for non-
@@ -379,9 +376,9 @@ class RelationshipProperty(StrategizedProperty):
 
             * Comparisons against collections are not supported.
               Use :meth:`~.RelationshipProperty.Comparator.contains`.
-            * Compared to a scalar one-to-many, will produce a 
+            * Compared to a scalar one-to-many, will produce a
               clause that compares the target columns in the parent to
-              the given target. 
+              the given target.
             * Compared to a scalar many-to-many, an alias
               of the association table will be rendered as
               well, forming a natural join that is part of the
@@ -455,9 +452,9 @@ class RelationshipProperty(StrategizedProperty):
                 # limit this adapter to annotated only?
                 criterion = target_adapter.traverse(criterion)
 
-            # only have the "joined left side" of what we 
+            # only have the "joined left side" of what we
             # return be subject to Query adaption.  The right
-            # side of it is used for an exists() subquery and 
+            # side of it is used for an exists() subquery and
             # should not correlate or otherwise reach out
             # to anything in the enclosing query.
             if criterion is not None:
@@ -484,7 +481,7 @@ class RelationshipProperty(StrategizedProperty):
             Will produce a query like::
 
                 SELECT * FROM my_table WHERE
-                EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id 
+                EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
                 AND related.x=2)
 
             Because :meth:`~.RelationshipProperty.Comparator.any` uses
@@ -553,7 +550,7 @@ class RelationshipProperty(StrategizedProperty):
             return self._criterion_exists(criterion, **kwargs)
 
         def contains(self, other, **kwargs):
-            """Return a simple expression that tests a collection for 
+            """Return a simple expression that tests a collection for
             containment of a particular item.
 
             :meth:`~.RelationshipProperty.Comparator.contains` is
@@ -561,7 +558,7 @@ class RelationshipProperty(StrategizedProperty):
             :func:`~.orm.relationship` that implements
             one-to-many or many-to-many with ``uselist=True``.
 
-            When used in a simple one-to-many context, an 
+            When used in a simple one-to-many context, an
             expression like::
 
                 MyClass.contains(other)
@@ -644,7 +641,7 @@ class RelationshipProperty(StrategizedProperty):
                         adapt(x) == None)
                         for (x, y) in self.property.local_remote_pairs])
 
-            criterion = sql.and_(*[x==y for (x, y) in 
+            criterion = sql.and_(*[x==y for (x, y) in
                                 zip(
                                     self.property.mapper.primary_key,
                                     self.property.\
@@ -674,9 +671,9 @@ class RelationshipProperty(StrategizedProperty):
               Use
               :meth:`~.RelationshipProperty.Comparator.contains`
               in conjunction with :func:`~.expression.not_`.
-            * Compared to a scalar one-to-many, will produce a 
+            * Compared to a scalar one-to-many, will produce a
               clause that compares the target columns in the parent to
-              the given target. 
+              the given target.
             * Compared to a scalar many-to-many, an alias
               of the association table will be rendered as
               well, forming a natural join that is part of the
@@ -711,26 +708,26 @@ class RelationshipProperty(StrategizedProperty):
                 configure_mappers()
             return self.prop
 
-    def compare(self, op, value, 
-                            value_is_parent=False, 
+    def compare(self, op, value,
+                            value_is_parent=False,
                             alias_secondary=True):
         if op == operators.eq:
             if value is None:
                 if self.uselist:
                     return ~sql.exists([1], self.primaryjoin)
                 else:
-                    return self._optimized_compare(None, 
+                    return self._optimized_compare(None,
                                     value_is_parent=value_is_parent,
                                     alias_secondary=alias_secondary)
             else:
-                return self._optimized_compare(value, 
+                return self._optimized_compare(value,
                                 value_is_parent=value_is_parent,
                                 alias_secondary=alias_secondary)
         else:
             return op(self.comparator, value)
 
-    def _optimized_compare(self, value, value_is_parent=False, 
-                                    adapt_source=None, 
+    def _optimized_compare(self, value, value_is_parent=False,
+                                    adapt_source=None,
                                     alias_secondary=True):
         if value is not None:
             value = attributes.instance_state(value)
@@ -742,12 +739,12 @@ class RelationshipProperty(StrategizedProperty):
     def __str__(self):
         return str(self.parent.class_.__name__) + "." + self.key
 
-    def merge(self, 
+    def merge(self,
                     session,
                     source_state,
                     source_dict,
                     dest_state,
-                    dest_dict, 
+                    dest_dict,
                     load, _recursive):
 
         if load:
@@ -812,7 +809,7 @@ class RelationshipProperty(StrategizedProperty):
                 dest_state.get_impl(self.key).set(dest_state,
                         dest_dict, obj, None)
 
-    def _value_as_iterable(self, state, dict_, key, 
+    def _value_as_iterable(self, state, dict_, key,
                                     passive=attributes.PASSIVE_OFF):
         """Return a list of tuples (state, obj) for the given
         key.
@@ -826,7 +823,7 @@ class RelationshipProperty(StrategizedProperty):
             return []
         elif hasattr(impl, 'get_collection'):
             return [
-                (attributes.instance_state(o), o) for o in 
+                (attributes.instance_state(o), o) for o in
                 impl.get_collection(state, dict_, x, passive=passive)
             ]
         else:
@@ -878,8 +875,8 @@ class RelationshipProperty(StrategizedProperty):
                 raise AssertionError("Attribute '%s' on class '%s' "
                                     "doesn't handle objects "
                                     "of type '%s'" % (
-                                        self.key, 
-                                        self.parent.class_, 
+                                        self.key,
+                                        self.parent.class_,
                                         c.__class__
                                     ))
 
@@ -907,7 +904,7 @@ class RelationshipProperty(StrategizedProperty):
 
     @util.memoized_property
     def mapper(self):
-        """Return the targeted :class:`.Mapper` for this 
+        """Return the targeted :class:`.Mapper` for this
         :class:`.RelationshipProperty`.
 
         This is a lazy-initializing static attribute.
@@ -935,8 +932,8 @@ class RelationshipProperty(StrategizedProperty):
     @util.memoized_property
     @util.deprecated("0.7", "Use .target")
     def table(self):
-        """Return the selectable linked to this 
-        :class:`.RelationshipProperty` object's target 
+        """Return the selectable linked to this
+        :class:`.RelationshipProperty` object's target
         :class:`.Mapper`."""
         return self.target
 
@@ -950,7 +947,7 @@ class RelationshipProperty(StrategizedProperty):
         super(RelationshipProperty, self).do_init()
 
     def _process_dependent_arguments(self):
-        """Convert incoming configuration arguments to their 
+        """Convert incoming configuration arguments to their
         proper form.
 
         Callables are resolved, ORM annotations removed.
@@ -980,20 +977,20 @@ class RelationshipProperty(StrategizedProperty):
         # remote_side are all columns, not strings.
         if self.order_by is not False and self.order_by is not None:
             self.order_by = [
-                    expression._only_column_elements(x, "order_by") 
+                    expression._only_column_elements(x, "order_by")
                     for x in
                     util.to_list(self.order_by)]
 
         self._user_defined_foreign_keys = \
             util.column_set(
-                    expression._only_column_elements(x, "foreign_keys") 
+                    expression._only_column_elements(x, "foreign_keys")
                     for x in util.to_column_set(
                         self._user_defined_foreign_keys
                     ))
 
         self.remote_side = \
             util.column_set(
-                    expression._only_column_elements(x, "remote_side") 
+                    expression._only_column_elements(x, "remote_side")
                     for x in
                     util.to_column_set(self.remote_side))
 
@@ -1034,7 +1031,7 @@ class RelationshipProperty(StrategizedProperty):
         self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
 
     def _check_conflicts(self):
-        """Test that this relationship is legal, warn about 
+        """Test that this relationship is legal, warn about
         inheritance conflicts."""
 
         if not self.is_primary() \
@@ -1078,7 +1075,7 @@ class RelationshipProperty(StrategizedProperty):
                        % self)
 
     def _columns_are_mapped(self, *cols):
-        """Return True if all columns in the given collection are 
+        """Return True if all columns in the given collection are
         mapped by the tables referenced by this :class:`.Relationship`.
 
         """
@@ -1092,7 +1089,7 @@ class RelationshipProperty(StrategizedProperty):
         return True
 
     def _generate_backref(self):
-        """Interpret the 'backref' instruction to create a 
+        """Interpret the 'backref' instruction to create a
         :func:`.relationship` complementary to this one."""
 
         if not self.is_primary():
@@ -1120,7 +1117,7 @@ class RelationshipProperty(StrategizedProperty):
                 pj = kwargs.pop('primaryjoin', self._join_condition.secondaryjoin)
                 sj = kwargs.pop('secondaryjoin', self._join_condition.primaryjoin)
             else:
-                pj = kwargs.pop('primaryjoin', 
+                pj = kwargs.pop('primaryjoin',
                         self._join_condition.primaryjoin_reverse_remote)
                 sj = kwargs.pop('secondaryjoin', None)
                 if sj:
@@ -1166,12 +1163,8 @@ class RelationshipProperty(StrategizedProperty):
     def _is_self_referential(self):
         return self.mapper.common_parent(self.parent)
 
-    def per_property_preprocessors(self, uow):
-        if not self.viewonly and self._dependency_processor:
-            self._dependency_processor.per_property_preprocessors(uow)
-
-    def _create_joins(self, source_polymorphic=False, 
-                            source_selectable=None, dest_polymorphic=False, 
+    def _create_joins(self, source_polymorphic=False,
+                            source_selectable=None, dest_polymorphic=False,
                             dest_selectable=None, of_type=None):
         if source_selectable is None:
             if source_polymorphic and self.parent.with_polymorphic:
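
With RelationshipProperty.per_property_preprocessors() removed above, the unit
of work (see the next file) walks mapper.relationships itself and skips
viewonly relationships.  A minimal standalone illustration of that filter,
using stand-in objects rather than the real classes:

    class ToyRelationship(object):
        """Stand-in for RelationshipProperty."""
        def __init__(self, key, viewonly=False):
            self.key = key
            self.viewonly = viewonly
            self._dependency_processor = 'dep(%s)' % key   # stand-in processor

    def preprocessors_for(relationships):
        """Yield the dependency processors the UOW would register."""
        for prop in relationships:
            if prop.viewonly:
                continue
            yield prop._dependency_processor

    rels = [ToyRelationship('addresses'), ToyRelationship('reports', viewonly=True)]
    assert list(preprocessors_for(rels)) == ['dep(addresses)']
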
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index caf7f2e8e27d1336f391ee3f7b39c12cce305844..c0423939f9db80648a7c9ac8ae91907016d1aeec 100644
@@ -14,8 +14,9 @@ organizes them in order of dependency, and executes.
 
 from .. import util, event
 from ..util import topological
-from . import attributes, interfaces, persistence, util as orm_util
-session = util.importlater("sqlalchemy.orm", "session")
+from . import attributes, persistence, util as orm_util
+
+sessionlib = util.importlater("sqlalchemy.orm", "session")
 
 def track_cascade_events(descriptor, prop):
     """Establish event listeners on object attributes which handle
@@ -25,10 +26,10 @@ def track_cascade_events(descriptor, prop):
     key = prop.key
 
     def append(state, item, initiator):
-        # process "save_update" cascade rules for when 
+        # process "save_update" cascade rules for when
         # an instance is appended to the list of another instance
 
-        sess = session._state_session(state)
+        sess = sessionlib._state_session(state)
         if sess:
             prop = state.manager.mapper._props[key]
             item_state = attributes.instance_state(item)
@@ -39,7 +40,7 @@ def track_cascade_events(descriptor, prop):
         return item
 
     def remove(state, item, initiator):
-        sess = session._state_session(state)
+        sess = sessionlib._state_session(state)
         if sess:
             prop = state.manager.mapper._props[key]
             # expunge pending orphans
@@ -50,12 +51,12 @@ def track_cascade_events(descriptor, prop):
                     sess.expunge(item)
 
     def set_(state, newvalue, oldvalue, initiator):
-        # process "save_update" cascade rules for when an instance 
+        # process "save_update" cascade rules for when an instance
         # is attached to another instance
         if oldvalue is newvalue:
             return newvalue
 
-        sess = session._state_session(state)
+        sess = sessionlib._state_session(state)
         if sess:
             prop = state.manager.mapper._props[key]
             if newvalue is not None:
@@ -85,12 +86,12 @@ class UOWTransaction(object):
     def __init__(self, session):
         self.session = session
 
-        # dictionary used by external actors to 
+        # dictionary used by external actors to
         # store arbitrary state information.
         self.attributes = {}
 
-        # dictionary of mappers to sets of 
-        # DependencyProcessors, which are also 
+        # dictionary of mappers to sets of
+        # DependencyProcessors, which are also
         # set to be part of the sorted flush actions,
         # which have that mapper as a parent.
         self.deps = util.defaultdict(set)
@@ -105,7 +106,7 @@ class UOWTransaction(object):
         # and determine if a flush action is needed
         self.presort_actions = {}
 
-        # dictionary of PostSortRec objects, each 
+        # dictionary of PostSortRec objects, each
         # one issues work during the flush within
         # a certain ordering.
         self.postsort_actions = {}
@@ -123,7 +124,7 @@ class UOWTransaction(object):
 
         # tracks InstanceStates which will be receiving
         # a "post update" call.  Keys are mappers,
-        # values are a set of states and a set of the 
+        # values are a set of states and a set of the
         # columns which should be included in the update.
         self.post_update_states = util.defaultdict(lambda: (set(), set()))
 
@@ -132,7 +133,7 @@ class UOWTransaction(object):
         return bool(self.states)
 
     def is_deleted(self, state):
-        """return true if the given state is marked as deleted 
+        """return true if the given state is marked as deleted
         within this uowtransaction."""
 
         return state in self.states and self.states[state][0]
@@ -151,9 +152,10 @@ class UOWTransaction(object):
 
         self.states[state] = (isdelete, True)
 
-    def get_attribute_history(self, state, key, 
+    def get_attribute_history(self, state, key,
                             passive=attributes.PASSIVE_NO_INITIALIZE):
-        """facade to attributes.get_state_history(), including caching of results."""
+        """facade to attributes.get_state_history(), including
+        caching of results."""
 
         hashkey = ("history", state, key)
 
@@ -163,13 +165,13 @@ class UOWTransaction(object):
 
         if hashkey in self.attributes:
             history, state_history, cached_passive = self.attributes[hashkey]
-            # if the cached lookup was "passive" and now 
+            # if the cached lookup was "passive" and now
             # we want non-passive, do a non-passive lookup and re-cache
 
             if not cached_passive & attributes.SQL_OK \
                 and passive & attributes.SQL_OK:
                 impl = state.manager[key].impl
-                history = impl.get_history(state, state.dict, 
+                history = impl.get_history(state, state.dict,
                                     attributes.PASSIVE_OFF)
                 if history and impl.uses_objects:
                     state_history = history.as_state()
@@ -197,13 +199,13 @@ class UOWTransaction(object):
         if key not in self.presort_actions:
             self.presort_actions[key] = Preprocess(processor, fromparent)
 
-    def register_object(self, state, isdelete=False, 
+    def register_object(self, state, isdelete=False,
                             listonly=False, cancel_delete=False,
                             operation=None, prop=None):
         if not self.session._contains_state(state):
             if not state.deleted and operation is not None:
                 util.warn("Object of type %s not in session, %s operation "
-                            "along '%s' will not proceed" % 
+                            "along '%s' will not proceed" %
                             (orm_util.state_class_str(state), operation, prop))
             return False
 
@@ -211,7 +213,7 @@ class UOWTransaction(object):
             mapper = state.manager.mapper
 
             if mapper not in self.mappers:
-                mapper._per_mapper_flush_actions(self)
+                self._per_mapper_flush_actions(mapper)
 
             self.mappers[mapper].add(state)
             self.states[state] = (isdelete, listonly)
@@ -226,10 +228,24 @@ class UOWTransaction(object):
         states.add(state)
         cols.update(post_update_cols)
 
+    def _per_mapper_flush_actions(self, mapper):
+        saves = SaveUpdateAll(self, mapper.base_mapper)
+        deletes = DeleteAll(self, mapper.base_mapper)
+        self.dependencies.add((saves, deletes))
+
+        for dep in mapper._dependency_processors:
+            dep.per_property_preprocessors(self)
+
+        for prop in mapper.relationships:
+            if prop.viewonly:
+                continue
+            dep = prop._dependency_processor
+            dep.per_property_preprocessors(self)
+
     @util.memoized_property
     def _mapper_for_dep(self):
-        """return a dynamic mapping of (Mapper, DependencyProcessor) to 
-        True or False, indicating if the DependencyProcessor operates 
+        """return a dynamic mapping of (Mapper, DependencyProcessor) to
+        True or False, indicating if the DependencyProcessor operates
         on objects of that Mapper.
 
         The result is stored in the dictionary persistently once
@@ -237,11 +253,11 @@ class UOWTransaction(object):
 
         """
         return util.PopulateDict(
-                    lambda tup:tup[0]._props.get(tup[1].key) is tup[1].prop
+                    lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
                 )
 
     def filter_states_for_dep(self, dep, states):
-        """Filter the given list of InstanceStates to those relevant to the 
+        """Filter the given list of InstanceStates to those relevant to the
         given DependencyProcessor.
 
         """
@@ -273,7 +289,7 @@ class UOWTransaction(object):
 
         # see if the graph of mapper dependencies has cycles.
         self.cycles = cycles = topological.find_cycles(
-                                        self.dependencies, 
+                                        self.dependencies,
                                         self.postsort_actions.values())
 
         if cycles:
@@ -319,20 +335,21 @@ class UOWTransaction(object):
         # execute
         if self.cycles:
             for set_ in topological.sort_as_subsets(
-                                            self.dependencies, 
+                                            self.dependencies,
                                             postsort_actions):
                 while set_:
                     n = set_.pop()
                     n.execute_aggregate(self, set_)
         else:
             for rec in topological.sort(
-                                    self.dependencies, 
+                                    self.dependencies,
                                     postsort_actions):
                 rec.execute(self)
 
 
     def finalize_flush_changes(self):
-        """mark processed objects as clean / deleted after a successful flush().
+        """mark processed objects as clean / deleted after a successful
+        flush().
 
         this method is called within the flush() method after the
         execute() method has succeeded and the transaction has been committed.
@@ -351,7 +368,8 @@ class IterateMappersMixin(object):
     def _mappers(self, uow):
         if self.fromparent:
             return iter(
-                m for m in self.dependency_processor.parent.self_and_descendants
+                m for m in
+                self.dependency_processor.parent.self_and_descendants
                 if uow._mapper_for_dep[(m, self.dependency_processor)]
             )
         else:
@@ -424,7 +442,8 @@ class ProcessAll(IterateMappersMixin, PostSortRec):
         self.dependency_processor = dependency_processor
         self.delete = delete
         self.fromparent = fromparent
-        uow.deps[dependency_processor.parent.base_mapper].add(dependency_processor)
+        uow.deps[dependency_processor.parent.base_mapper].\
+                    add(dependency_processor)
 
     def execute(self, uow):
         states = self._elements(uow)
@@ -471,18 +490,23 @@ class SaveUpdateAll(PostSortRec):
         assert mapper is mapper.base_mapper
 
     def execute(self, uow):
-        persistence.save_obj(self.mapper, 
+        persistence.save_obj(self.mapper,
             uow.states_for_mapper_hierarchy(self.mapper, False, False),
             uow
         )
 
+
     def per_state_flush_actions(self, uow):
-        states = list(uow.states_for_mapper_hierarchy(self.mapper, False, False))
-        for rec in self.mapper._per_state_flush_actions(
-                            uow, 
-                            states, 
-                            False):
-            yield rec
+        states = list(uow.states_for_mapper_hierarchy(
+                                    self.mapper, False, False))
+        base_mapper = self.mapper.base_mapper
+        delete_all = DeleteAll(uow, base_mapper)
+        for state in states:
+            # keep saves before deletes -
+            # this ensures 'row switch' operations work
+            action = SaveUpdateState(uow, state, base_mapper)
+            uow.dependencies.add((action, delete_all))
+            yield action
 
         for dep in uow.deps[self.mapper]:
             states_for_prop = uow.filter_states_for_dep(dep, states)
@@ -500,12 +524,16 @@ class DeleteAll(PostSortRec):
         )
 
     def per_state_flush_actions(self, uow):
-        states = list(uow.states_for_mapper_hierarchy(self.mapper, True, False))
-        for rec in self.mapper._per_state_flush_actions(
-                            uow, 
-                            states, 
-                            True):
-            yield rec
+        states = list(uow.states_for_mapper_hierarchy(
+                                    self.mapper, True, False))
+        base_mapper = self.mapper.base_mapper
+        save_all = SaveUpdateAll(uow, base_mapper)
+        for state in states:
+            # keep saves before deletes -
+            # this ensures 'row switch' operations work
+            action = DeleteState(uow, state, base_mapper)
+            uow.dependencies.add((save_all, action))
+            yield action
 
         for dep in uow.deps[self.mapper]:
             states_for_prop = uow.filter_states_for_dep(dep, states)
@@ -521,8 +549,8 @@ class ProcessState(PostSortRec):
         cls_ = self.__class__
         dependency_processor = self.dependency_processor
         delete = self.delete
-        our_recs = [r for r in recs 
-                        if r.__class__ is cls_ and 
+        our_recs = [r for r in recs
+                        if r.__class__ is cls_ and
                         r.dependency_processor is dependency_processor and
                         r.delete is delete]
         recs.difference_update(our_recs)
@@ -548,13 +576,13 @@ class SaveUpdateState(PostSortRec):
     def execute_aggregate(self, uow, recs):
         cls_ = self.__class__
         mapper = self.mapper
-        our_recs = [r for r in recs 
-                        if r.__class__ is cls_ and 
+        our_recs = [r for r in recs
+                        if r.__class__ is cls_ and
                         r.mapper is mapper]
         recs.difference_update(our_recs)
         persistence.save_obj(mapper,
-                        [self.state] + 
-                        [r.state for r in our_recs], 
+                        [self.state] +
+                        [r.state for r in our_recs],
                         uow)
 
     def __repr__(self):
@@ -571,13 +599,13 @@ class DeleteState(PostSortRec):
     def execute_aggregate(self, uow, recs):
         cls_ = self.__class__
         mapper = self.mapper
-        our_recs = [r for r in recs 
-                        if r.__class__ is cls_ and 
+        our_recs = [r for r in recs
+                        if r.__class__ is cls_ and
                         r.mapper is mapper]
         recs.difference_update(our_recs)
         states = [self.state] + [r.state for r in our_recs]
         persistence.delete_obj(mapper,
-                        [s for s in states if uow.states[s][0]], 
+                        [s for s in states if uow.states[s][0]],
                         uow)
 
     def __repr__(self):
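
The inlined per_state_flush_actions bodies above keep the same invariant the
removed Mapper._per_state_flush_actions enforced: per-state saves are ordered
before the bulk delete, and the bulk save before per-state deletes, which is
what lets primary-key "row switch" operations work.  A self-contained sketch of
that ordering using a toy topological sort (not SQLAlchemy's topological
module):

    from collections import defaultdict, deque

    def toposort(nodes, edges):
        """Kahn's algorithm; edges are (before, after) pairs."""
        indegree = dict((n, 0) for n in nodes)
        succ = defaultdict(list)
        for before, after in edges:
            succ[before].append(after)
            indegree[after] += 1
        queue = deque(n for n in nodes if not indegree[n])
        order = []
        while queue:
            n = queue.popleft()
            order.append(n)
            for m in succ[n]:
                indegree[m] -= 1
                if not indegree[m]:
                    queue.append(m)
        return order

    nodes = ['SaveUpdateState(u1)', 'DeleteAll(User)',
             'SaveUpdateAll(User)', 'DeleteState(u2)']
    edges = [('SaveUpdateState(u1)', 'DeleteAll(User)'),   # added per saved state
             ('SaveUpdateAll(User)', 'DeleteState(u2)')]   # added per deleted state
    order = toposort(nodes, edges)
    assert order.index('SaveUpdateState(u1)') < order.index('DeleteAll(User)')
    assert order.index('SaveUpdateAll(User)') < order.index('DeleteState(u2)')
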
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index 99b4d922406e436b75b48e7dc2eeef8a55bd7c84..0cc5f5fe0ebf9ad6b2d45d9eeefa306b2a288e8b 100644
@@ -335,12 +335,12 @@ class ZooMarkTest(fixtures.TestBase):
     def test_profile_1_create_tables(self):
         self.test_baseline_1_create_tables()
 
-    @profiling.function_call_count(5786, {'2.7+cextension':5683, 
+    @profiling.function_call_count(5786, {'2.7+cextension':5683,
                                             '2.6+cextension':5992})
     def test_profile_1a_populate(self):
         self.test_baseline_1a_populate()
 
-    @profiling.function_call_count(413, {'3.2':398})
+    @profiling.function_call_count(388, {'3.2':378})
     def test_profile_2_insert(self):
         self.test_baseline_2_insert()
 
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index f0f3ebb211847b02a534848e804c822522c14141..bb7b8a71db3e6238752f8e0a09e80d66083f73dd 100644
@@ -33,20 +33,20 @@ class NaturalPKTest(fixtures.MappedTest):
 
         addresses = Table('addresses', metadata,
             Column('email', String(50), primary_key=True),
-            Column('username', String(50), 
+            Column('username', String(50),
                             ForeignKey('users.username', **fk_args)),
             test_needs_fk=True)
 
         items = Table('items', metadata,
             Column('itemname', String(50), primary_key=True),
-            Column('description', String(100)), 
+            Column('description', String(100)),
             test_needs_fk=True)
 
         users_to_items = Table('users_to_items', metadata,
-            Column('username', String(50), 
+            Column('username', String(50),
                                 ForeignKey('users.username', **fk_args),
                                 primary_key=True),
-            Column('itemname', String(50), 
+            Column('itemname', String(50),
                                 ForeignKey('items.itemname', **fk_args),
                                 primary_key=True),
             test_needs_fk=True)
@@ -168,15 +168,15 @@ class NaturalPKTest(fixtures.MappedTest):
         def go():
             sess.flush()
         if not passive_updates:
-            # test passive_updates=False; 
+            # test passive_updates=False;
             #load addresses, update user, update 2 addresses
-            self.assert_sql_count(testing.db, go, 4) 
+            self.assert_sql_count(testing.db, go, 4)
         else:
             # test passive_updates=True; update user
-            self.assert_sql_count(testing.db, go, 1) 
+            self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
         assert User(username='jack', addresses=[
-                                        Address(username='jack'), 
+                                        Address(username='jack'),
                                         Address(username='jack')]) == \
                             sess.query(User).get('jack')
 
@@ -189,7 +189,6 @@ class NaturalPKTest(fixtures.MappedTest):
         u1 = sess.query(User).get('fred')
         eq_(User(username='fred', fullname='jack'), u1)
 
-
     @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
     @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
     def test_manytoone_passive(self):
@@ -198,6 +197,43 @@ class NaturalPKTest(fixtures.MappedTest):
     def test_manytoone_nonpassive(self):
         self._test_manytoone(False)
 
+    def test_manytoone_nonpassive_cold_mapping(self):
+        """test that the mapper-level m2o dependency processor
+        is set up even if the opposite side relationship
+        hasn't yet been part of a flush.
+
+        """
+        users, Address, addresses, User = (self.tables.users,
+                                self.classes.Address,
+                                self.tables.addresses,
+                                self.classes.User)
+
+        with testing.db.begin() as conn:
+            conn.execute(users.insert(),
+                username='jack', fullname='jack'
+                )
+            conn.execute(addresses.insert(),
+                email='jack1', username='jack'
+            )
+            conn.execute(addresses.insert(),
+                email='jack2', username='jack'
+                )
+
+        mapper(User, users)
+        mapper(Address, addresses, properties={
+            'user': relationship(User,
+                    passive_updates=False)
+        })
+
+        sess = create_session()
+        u1 = sess.query(User).first()
+        a1, a2 = sess.query(Address).all()
+        u1.username = 'ed'
+
+        def go():
+            sess.flush()
+        self.assert_sql_count(testing.db, go, 3)
+
     def _test_manytoone(self, passive_updates):
         users, Address, addresses, User = (self.tables.users,
                                 self.classes.Address,
@@ -206,7 +242,7 @@ class NaturalPKTest(fixtures.MappedTest):
 
         mapper(User, users)
         mapper(Address, addresses, properties={
-            'user':relationship(User, passive_updates=passive_updates)
+            'user': relationship(User, passive_updates=passive_updates)
         })
 
         sess = create_session()
@@ -238,6 +274,7 @@ class NaturalPKTest(fixtures.MappedTest):
         eq_([Address(username='ed'), Address(username='ed')],
                 sess.query(Address).all())
 
+
     @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
     @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
     def test_onetoone_passive(self):
@@ -349,9 +386,9 @@ class NaturalPKTest(fixtures.MappedTest):
     def test_manytomany_passive(self):
         self._test_manytomany(True)
 
-    # mysqldb executemany() of the association table fails to 
+    # mysqldb executemany() of the association table fails to
     # report the correct row count
-    @testing.fails_if(lambda: testing.against('mysql') 
+    @testing.fails_if(lambda: testing.against('mysql')
                             and not testing.against('+zxjdbc'))
     def test_manytomany_nonpassive(self):
         self._test_manytomany(False)
@@ -489,7 +526,7 @@ class ReversePKsTest(fixtures.MappedTest):
         session.add(a_editable)
         session.commit()
 
-        # do the switch in both directions - 
+        # do the switch in both directions -
         # one or the other should raise the error
         # based on platform dictionary ordering
         a_published.status = ARCHIVED
@@ -509,9 +546,9 @@ class ReversePKsTest(fixtures.MappedTest):
 
 
 class SelfReferentialTest(fixtures.MappedTest):
-    # mssql, mysql don't allow 
+    # mssql, mysql don't allow
     # ON UPDATE on self-referential keys
-    __unsupported_on__ = ('mssql','mysql') 
+    __unsupported_on__ = ('mssql','mysql')
 
     @classmethod
     def define_tables(cls, metadata):
@@ -596,8 +633,8 @@ class SelfReferentialTest(fixtures.MappedTest):
         Node, nodes = self.classes.Node, self.tables.nodes
 
         mapper(Node, nodes, properties={
-            'parentnode':relationship(Node, 
-                            remote_side=nodes.c.name, 
+            'parentnode':relationship(Node,
+                            remote_side=nodes.c.name,
                             passive_updates=passive)
             }
         )
@@ -686,7 +723,7 @@ class NonPKCascadeTest(fixtures.MappedTest):
         u1.username = 'ed'
         sess.flush()
         assert u1.addresses[0].username == 'ed'
-        eq_(sa.select([addresses.c.username]).execute().fetchall(), 
+        eq_(sa.select([addresses.c.username]).execute().fetchall(),
                     [('ed',), ('ed',)])
 
         sess.expunge_all()
@@ -698,14 +735,14 @@ class NonPKCascadeTest(fixtures.MappedTest):
         def go():
             sess.flush()
         if not passive_updates:
-            # test passive_updates=False; load addresses, 
+            # test passive_updates=False; load addresses,
             # update user, update 2 addresses
-            self.assert_sql_count(testing.db, go, 4) 
+            self.assert_sql_count(testing.db, go, 4)
         else:
              # test passive_updates=True; update user
             self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
-        assert User(username='jack', 
+        assert User(username='jack',
                         addresses=[Address(username='jack'),
                                     Address(username='jack')]) == \
                     sess.query(User).get(u1.id)
@@ -719,7 +756,7 @@ class NonPKCascadeTest(fixtures.MappedTest):
         a1 = sess.query(Address).get(a1.id)
         eq_(a1.username, None)
 
-        eq_(sa.select([addresses.c.username]).execute().fetchall(), 
+        eq_(sa.select([addresses.c.username]).execute().fetchall(),
                         [(None,), (None,)])
 
         u1 = sess.query(User).get(u1.id)
@@ -742,7 +779,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
             test_needs_fk=True)
 
         Table('addresses', metadata,
-                Column('username', String(50), 
+                Column('username', String(50),
                        ForeignKey('users.username', **fk_args),
                        primary_key=True
                        ),
@@ -777,7 +814,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
     def _test_o2m_change(self, passive_updates):
         """Change the PK of a related entity to another.
 
-        "on update cascade" is not involved here, so the mapper has 
+        "on update cascade" is not involved here, so the mapper has
         to do the UPDATE itself.
 
         """
@@ -945,7 +982,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
     def _test_onetomany(self, passive_updates):
         """Change the PK of a related entity via foreign key cascade.
 
-        For databases that require "on update cascade", the mapper 
+        For databases that require "on update cascade", the mapper
         has to identify the row by the new value, not the old, when
         it does the update.
 
@@ -969,7 +1006,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         sess.flush()
         eq_(a1.username, 'ed')
         eq_(a2.username, 'ed')
-        eq_(sa.select([addresses.c.username]).execute().fetchall(), 
+        eq_(sa.select([addresses.c.username]).execute().fetchall(),
                 [('ed',), ('ed',)])
 
         u1.username = 'jack'
@@ -986,7 +1023,7 @@ class JoinedInheritanceTest(fixtures.MappedTest):
     """Test cascades of pk->pk/fk on joined table inh."""
 
     # mssql doesn't allow ON UPDATE on self-referential keys
-    __unsupported_on__ = ('mssql',) 
+    __unsupported_on__ = ('mssql',)
 
     __requires__ = 'skip_mysql_on_windows',
 
@@ -1006,13 +1043,13 @@ class JoinedInheritanceTest(fixtures.MappedTest):
             Column('name', String(50), ForeignKey('person.name', **fk_args),
                                         primary_key=True),
             Column('primary_language', String(50)),
-            Column('boss_name', String(50), 
+            Column('boss_name', String(50),
                                     ForeignKey('manager.name', **fk_args)),
                                     test_needs_fk=True
         )
 
         Table('manager', metadata,
-            Column('name', String(50), 
+            Column('name', String(50),
                                     ForeignKey('person.name', **fk_args),
                                     primary_key=True),
             Column('paperwork', String(50)),
@@ -1057,12 +1094,12 @@ class JoinedInheritanceTest(fixtures.MappedTest):
                                 self.classes.Engineer,
                                 self.tables.engineer)
 
-        mapper(Person, person, polymorphic_on=person.c.type, 
+        mapper(Person, person, polymorphic_on=person.c.type,
                 polymorphic_identity='person',
                 passive_updates=passive_updates)
         mapper(Engineer, engineer, inherits=Person,
             polymorphic_identity='engineer', properties={
-            'boss':relationship(Manager, 
+            'boss':relationship(Manager,
                         primaryjoin=manager.c.name==engineer.c.boss_name,
                         passive_updates=passive_updates
                         )
@@ -1087,12 +1124,12 @@ class JoinedInheritanceTest(fixtures.MappedTest):
                                 self.classes.Engineer,
                                 self.tables.engineer)
 
-        mapper(Person, person, polymorphic_on=person.c.type, 
+        mapper(Person, person, polymorphic_on=person.c.type,
                 polymorphic_identity='person',
                         passive_updates=passive_updates)
         mapper(Engineer, engineer, inherits=Person,
                         polymorphic_identity='engineer', properties={
-            'boss':relationship(Manager, 
+            'boss':relationship(Manager,
                         primaryjoin=manager.c.name==engineer.c.boss_name,
                         passive_updates=passive_updates
                         )