]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- with InstanceState more public, underscore all its methods
authorMike Bayer <mike_mp@zzzcomputing.com>
Wed, 18 Jul 2012 20:43:02 +0000 (16:43 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Wed, 18 Jul 2012 20:43:02 +0000 (16:43 -0400)
that change object state as these aren't intended for public
use.

14 files changed:
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
test/ext/test_extendedattr.py
test/orm/inheritance/test_basic.py
test/orm/test_attributes.py
test/orm/test_query.py
test/orm/test_sync.py

index 9a1c60aa78d055bc3377634b0b37b8e2567c6a5c..d335d8996025290f42523bf2c297b806745ea26e 100644 (file)
@@ -557,7 +557,7 @@ class AttributeImpl(object):
         """set an attribute value on the given instance and 'commit' it."""
 
         dict_[self.key] = value
-        state.commit(dict_, [self.key])
+        state._commit(dict_, [self.key])
         return value
 
 class ScalarAttributeImpl(AttributeImpl):
@@ -934,7 +934,7 @@ class CollectionAttributeImpl(AttributeImpl):
 
         state.dict[self.key] = user_data
 
-        state.commit(dict_, [self.key])
+        state._commit(dict_, [self.key])
 
         if self.key in state._pending_mutations:
             # pending items exist.  issue a modified event,
index fdc7a646bf3d21169fe07dd6eecce01950d0243d..9a185c9ef0ba04e8a7a5fc89688103873d84f85c 100644 (file)
@@ -55,7 +55,7 @@ class ClassManager(dict):
         self.originals = {}
 
         self._bases = [mgr for mgr in [
-                        manager_of_class(base) 
+                        manager_of_class(base)
                         for base in self.class_.__bases__
                         if isinstance(base, type)
                  ] if mgr is not None]
@@ -103,7 +103,7 @@ class ClassManager(dict):
 
     def _instrument_init(self):
         # TODO: self.class_.__init__ is often the already-instrumented
-        # __init__ from an instrumented superclass.  We still need to make 
+        # __init__ from an instrumented superclass.  We still need to make
         # our own wrapper, but it would
         # be nice to wrap the original __init__ and not our existing wrapper
         # of such, since this adds method overhead.
@@ -189,7 +189,7 @@ class ClassManager(dict):
             self.uninstall_descriptor(key)
         del self[key]
         for cls in self.class_.__subclasses__():
-            manager = manager_of_class(cls) 
+            manager = manager_of_class(cls)
             if manager:
                 manager.uninstrument_attribute(key, True)
 
@@ -254,12 +254,12 @@ class ClassManager(dict):
 
     def new_instance(self, state=None):
         instance = self.class_.__new__(self.class_)
-        setattr(instance, self.STATE_ATTR, 
+        setattr(instance, self.STATE_ATTR,
                     state or self._state_constructor(instance, self))
         return instance
 
     def setup_instance(self, instance, state=None):
-        setattr(instance, self.STATE_ATTR, 
+        setattr(instance, self.STATE_ATTR,
                     state or self._state_constructor(instance, self))
 
     def teardown_instance(self, instance):
@@ -387,7 +387,7 @@ manager_of_class = _default_manager_getter = ClassManager.manager_getter()
 def _generate_init(class_, class_manager):
     """Build an __init__ decorator that triggers ClassManager events."""
 
-    # TODO: we should use the ClassManager's notion of the 
+    # TODO: we should use the ClassManager's notion of the
     # original '__init__' method, once ClassManager is fixed
     # to always reference that.
     original__init__ = class_.__init__
@@ -402,7 +402,7 @@ def _generate_init(class_, class_manager):
 def __init__(%(apply_pos)s):
     new_state = class_manager._new_state_if_none(%(self_arg)s)
     if new_state:
-        return new_state.initialize_instance(%(apply_kw)s)
+        return new_state._initialize_instance(%(apply_kw)s)
     else:
         return original__init__(%(apply_kw)s)
 """
index 789e7dda3935329c160f4af33bc34c50a96bd04c..ab51e8639f245ebbb472621e4c702d009362a70d 100644 (file)
@@ -78,17 +78,17 @@ def instances(query, cursor, context):
 
         if context.refresh_state and query._only_load_props \
                     and context.refresh_state in context.progress:
-            context.refresh_state.commit(
+            context.refresh_state._commit(
                     context.refresh_state.dict, query._only_load_props)
             context.progress.pop(context.refresh_state)
 
-        statelib.InstanceState.commit_all_states(
+        statelib.InstanceState._commit_all_states(
             context.progress.items(),
             session.identity_map
         )
 
-        for ii, (dict_, attrs) in context.partials.iteritems():
-            ii.commit(dict_, attrs)
+        for state, (dict_, attrs) in context.partials.iteritems():
+            state._commit(dict_, attrs)
 
         for row in rows:
             yield row
index 8b4a140f2e3c1cdfa172ca1f76faa311910d9574..4ff26cc2123bbfca82fdef55cf846386f41cba26 100644 (file)
@@ -58,8 +58,32 @@ class Mapper(_InspectionAttr):
     """Define the correlation of class attributes to database table
     columns.
 
-    Instances of this class should be constructed via the
-    :func:`~sqlalchemy.orm.mapper` function.
+    The :class:`.Mapper` object is instantiated using the
+    :func:`~sqlalchemy.orm.mapper` function.    For information
+    about instantiating new :class:`.Mapper` objects, see
+    that function's documentation.
+
+
+    When :func:`.mapper` is used
+    explicitly to link a user defined class with table
+    metadata, this is referred to as *classical mapping*.
+    Modern SQLAlchemy usage tends to favor the
+    :mod:`sqlalchemy.ext.declarative` extension for class
+    configuration, which
+    makes usage of :func:`.mapper` behind the scenes.
+
+    Given a particular class known to be mapped by the ORM,
+    the :class:`.Mapper` which maintains it can be acquired
+    using the :func:`.inspect` function::
+
+        from sqlalchemy import inspect
+
+        mapper = inspect(MyClass)
+
+    A class which was mapped by the :mod:`sqlalchemy.ext.declarative`
+    extension will also have its mapper available via the ``__mapper__``
+    attribute.
+
 
     """
     def __init__(self,
@@ -1462,24 +1486,78 @@ class Mapper(_InspectionAttr):
 
     @util.memoized_property
     def attr(self):
+        """A namespace of all :class:`.MapperProperty` objects
+        associated with this mapper.
+
+        This is an object that provides each property based on
+        its key name.  For instance, the mapper for a
+        ``User`` class which has ``User.name`` attribute would
+        provide ``mapper.attr.name``, which would be the
+        :class:`.ColumnProperty` representing the ``name``
+        column.   The namespace object can also be iterated,
+        which would yield each :class:`.MapperProperty`.
+
+        :class:`.Mapper` has several pre-filtered views
+        of this attribute which limit the types of properties
+        returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
+        :attr:`.relationships`, and :attr:`.composites`.
+
+
+        """
         if _new_mappers:
             configure_mappers()
         return util.ImmutableProperties(self._props)
 
     @_memoized_configured_property
     def synonyms(self):
+        """Return a namespace of all :class:`.SynonymProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        See also:
+
+        :attr:`.Mapper.attr` - namespace of all :class:`.MapperProperty`
+        objects.
+
+        """
         return self._filter_properties(descriptor_props.SynonymProperty)
 
     @_memoized_configured_property
     def column_attrs(self):
+        """Return a namespace of all :class:`.ColumnProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        See also:
+
+        :attr:`.Mapper.attr` - namespace of all :class:`.MapperProperty`
+        objects.
+
+        """
         return self._filter_properties(properties.ColumnProperty)
 
     @_memoized_configured_property
     def relationships(self):
+        """Return a namespace of all :class:`.RelationshipProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        See also:
+
+        :attr:`.Mapper.attr` - namespace of all :class:`.MapperProperty`
+        objects.
+
+        """
         return self._filter_properties(properties.RelationshipProperty)
 
     @_memoized_configured_property
     def composites(self):
+        """Return a namespace of all :class:`.CompositeProperty`
+        properties maintained by this :class:`.Mapper`.
+
+        See also:
+
+        :attr:`.Mapper.attr` - namespace of all :class:`.MapperProperty`
+        objects.
+
+        """
         return self._filter_properties(descriptor_props.CompositeProperty)
 
     def _filter_properties(self, type_):
index 5c33fd2d83ef7609ead1d7380ab0e6cecd7f2c6d..7872499eadd5415276f889f36e0d517d55027e85 100644 (file)
@@ -21,7 +21,7 @@ from .util import _state_mapper, state_str, _attr_as_key
 from ..sql import expression
 
 def save_obj(base_mapper, states, uowtransaction, single=False):
-    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list 
+    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list
     of objects.
 
     This is called within the context of a UOWTransaction during a
@@ -38,30 +38,30 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
         return
 
     states_to_insert, states_to_update = _organize_states_for_save(
-                                                base_mapper, 
-                                                states, 
+                                                base_mapper,
+                                                states,
                                                 uowtransaction)
 
     cached_connections = _cached_connection_dict(base_mapper)
 
     for table, mapper in base_mapper._sorted_tables.iteritems():
-        insert = _collect_insert_commands(base_mapper, uowtransaction, 
+        insert = _collect_insert_commands(base_mapper, uowtransaction,
                                 table, states_to_insert)
 
-        update = _collect_update_commands(base_mapper, uowtransaction, 
+        update = _collect_update_commands(base_mapper, uowtransaction,
                                 table, states_to_update)
 
         if update:
-            _emit_update_statements(base_mapper, uowtransaction, 
-                                    cached_connections, 
+            _emit_update_statements(base_mapper, uowtransaction,
+                                    cached_connections,
                                     mapper, table, update)
 
         if insert:
-            _emit_insert_statements(base_mapper, uowtransaction, 
-                                    cached_connections, 
+            _emit_insert_statements(base_mapper, uowtransaction,
+                                    cached_connections,
                                     table, insert)
 
-    _finalize_insert_update_commands(base_mapper, uowtransaction, 
+    _finalize_insert_update_commands(base_mapper, uowtransaction,
                                     states_to_insert, states_to_update)
 
 def post_update(base_mapper, states, uowtransaction, post_update_cols):
@@ -72,18 +72,18 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
     cached_connections = _cached_connection_dict(base_mapper)
 
     states_to_update = _organize_states_for_post_update(
-                                    base_mapper, 
+                                    base_mapper,
                                     states, uowtransaction)
 
 
     for table, mapper in base_mapper._sorted_tables.iteritems():
-        update = _collect_post_update_commands(base_mapper, uowtransaction, 
-                                            table, states_to_update, 
+        update = _collect_post_update_commands(base_mapper, uowtransaction,
+                                            table, states_to_update,
                                             post_update_cols)
 
         if update:
-            _emit_post_update_statements(base_mapper, uowtransaction, 
-                                    cached_connections, 
+            _emit_post_update_statements(base_mapper, uowtransaction,
+                                    cached_connections,
                                     mapper, table, update)
 
 def delete_obj(base_mapper, states, uowtransaction):
@@ -97,19 +97,19 @@ def delete_obj(base_mapper, states, uowtransaction):
     cached_connections = _cached_connection_dict(base_mapper)
 
     states_to_delete = _organize_states_for_delete(
-                                        base_mapper, 
+                                        base_mapper,
                                         states,
                                         uowtransaction)
 
     table_to_mapper = base_mapper._sorted_tables
 
     for table in reversed(table_to_mapper.keys()):
-        delete = _collect_delete_commands(base_mapper, uowtransaction, 
+        delete = _collect_delete_commands(base_mapper, uowtransaction,
                                 table, states_to_delete)
 
         mapper = table_to_mapper[table]
 
-        _emit_delete_statements(base_mapper, uowtransaction, 
+        _emit_delete_statements(base_mapper, uowtransaction,
                     cached_connections, mapper, table, delete)
 
     for state, state_dict, mapper, has_identity, connection \
@@ -132,7 +132,7 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
     states_to_update = []
 
     for state, dict_, mapper, connection in _connections_for_states(
-                                            base_mapper, uowtransaction, 
+                                            base_mapper, uowtransaction,
                                             states):
 
         has_identity = bool(state.key)
@@ -146,9 +146,9 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
         else:
             mapper.dispatch.before_update(mapper, connection, state)
 
-        # detect if we have a "pending" instance (i.e. has 
-        # no instance_key attached to it), and another instance 
-        # with the same identity key already exists as persistent. 
+        # detect if we have a "pending" instance (i.e. has
+        # no instance_key attached to it), and another instance
+        # with the same identity key already exists as persistent.
         # convert to an UPDATE if so.
         if not has_identity and \
             instance_key in uowtransaction.session.identity_map:
@@ -158,14 +158,14 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
             if not uowtransaction.is_deleted(existing):
                 raise orm_exc.FlushError(
                     "New instance %s with identity key %s conflicts "
-                    "with persistent instance %s" % 
+                    "with persistent instance %s" %
                     (state_str(state), instance_key,
                      state_str(existing)))
 
             base_mapper._log_debug(
                 "detected row switch for identity %s.  "
                 "will update %s, remove %s from "
-                "transaction", instance_key, 
+                "transaction", instance_key,
                 state_str(state), state_str(existing))
 
             # remove the "delete" flag from the existing element
@@ -174,28 +174,28 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
 
         if not has_identity and not row_switch:
             states_to_insert.append(
-                (state, dict_, mapper, connection, 
+                (state, dict_, mapper, connection,
                 has_identity, instance_key, row_switch)
             )
         else:
             states_to_update.append(
-                (state, dict_, mapper, connection, 
+                (state, dict_, mapper, connection,
                 has_identity, instance_key, row_switch)
             )
 
     return states_to_insert, states_to_update
 
-def _organize_states_for_post_update(base_mapper, states, 
+def _organize_states_for_post_update(base_mapper, states,
                                                 uowtransaction):
     """Make an initial pass across a set of states for UPDATE
     corresponding to post_update.
 
-    This includes obtaining key information for each state 
-    including its dictionary, mapper, the connection to use for 
+    This includes obtaining key information for each state
+    including its dictionary, mapper, the connection to use for
     the execution per state.
 
     """
-    return list(_connections_for_states(base_mapper, uowtransaction, 
+    return list(_connections_for_states(base_mapper, uowtransaction,
                                             states))
 
 def _organize_states_for_delete(base_mapper, states, uowtransaction):
@@ -209,16 +209,16 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction):
     states_to_delete = []
 
     for state, dict_, mapper, connection in _connections_for_states(
-                                            base_mapper, uowtransaction, 
+                                            base_mapper, uowtransaction,
                                             states):
 
         mapper.dispatch.before_delete(mapper, connection, state)
 
-        states_to_delete.append((state, dict_, mapper, 
+        states_to_delete.append((state, dict_, mapper,
                 bool(state.key), connection))
     return states_to_delete
 
-def _collect_insert_commands(base_mapper, uowtransaction, table, 
+def _collect_insert_commands(base_mapper, uowtransaction, table,
                                                 states_to_insert):
     """Identify sets of values to use in INSERT statements for a
     list of states.
@@ -240,7 +240,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
             if col is mapper.version_id_col:
                 params[col.key] = mapper.version_id_generator(None)
             else:
-                # pull straight from the dict for 
+                # pull straight from the dict for
                 # pending objects
                 prop = mapper._columntoproperty[col]
                 value = state_dict.get(prop.key, None)
@@ -257,11 +257,11 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
                 else:
                     params[col.key] = value
 
-        insert.append((state, state_dict, params, mapper, 
+        insert.append((state, state_dict, params, mapper,
                         connection, value_params, has_all_pks))
     return insert
 
-def _collect_update_commands(base_mapper, uowtransaction, 
+def _collect_update_commands(base_mapper, uowtransaction,
                                 table, states_to_update):
     """Identify sets of values to use in UPDATE statements for a
     list of states.
@@ -290,14 +290,14 @@ def _collect_update_commands(base_mapper, uowtransaction,
             if col is mapper.version_id_col:
                 params[col._label] = \
                     mapper._get_committed_state_attr_by_column(
-                                    row_switch or state, 
-                                    row_switch and row_switch.dict 
+                                    row_switch or state,
+                                    row_switch and row_switch.dict
                                                 or state_dict,
                                     col)
 
                 prop = mapper._columntoproperty[col]
                 history = attributes.get_state_history(
-                    state, prop.key, 
+                    state, prop.key,
                     attributes.PASSIVE_NO_INITIALIZE
                 )
                 if history.added:
@@ -307,20 +307,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
                     params[col.key] = mapper.version_id_generator(
                                                 params[col._label])
 
-                    # HACK: check for history, in case the 
+                    # HACK: check for history, in case the
                     # history is only
-                    # in a different table than the one 
+                    # in a different table than the one
                     # where the version_id_col is.
                     for prop in mapper._columntoproperty.itervalues():
                         history = attributes.get_state_history(
-                                state, prop.key, 
+                                state, prop.key,
                                 attributes.PASSIVE_NO_INITIALIZE)
                         if history.added:
                             hasdata = True
             else:
                 prop = mapper._columntoproperty[col]
                 history = attributes.get_state_history(
-                                state, prop.key, 
+                                state, prop.key,
                                 attributes.PASSIVE_NO_INITIALIZE)
                 if history.added:
                     if isinstance(history.added[0],
@@ -342,7 +342,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
                                 value = history.added[0]
                                 params[col._label] = value
                             else:
-                                # use the old value to 
+                                # use the old value to
                                 # locate the row
                                 value = history.deleted[0]
                                 params[col._label] = value
@@ -372,12 +372,12 @@ def _collect_update_commands(base_mapper, uowtransaction,
                             "Can't update table "
                             "using NULL for primary "
                             "key value")
-            update.append((state, state_dict, params, mapper, 
+            update.append((state, state_dict, params, mapper,
                             connection, value_params))
     return update
 
 
-def _collect_post_update_commands(base_mapper, uowtransaction, table, 
+def _collect_post_update_commands(base_mapper, uowtransaction, table,
                         states_to_update, post_update_cols):
     """Identify sets of values to use in UPDATE statements for a
     list of states within a post_update operation.
@@ -401,20 +401,20 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
             elif col in post_update_cols:
                 prop = mapper._columntoproperty[col]
                 history = attributes.get_state_history(
-                            state, prop.key, 
+                            state, prop.key,
                             attributes.PASSIVE_NO_INITIALIZE)
                 if history.added:
                     value = history.added[0]
                     params[col.key] = value
                     hasdata = True
         if hasdata:
-            update.append((state, state_dict, params, mapper, 
+            update.append((state, state_dict, params, mapper,
                             connection))
     return update
 
-def _collect_delete_commands(base_mapper, uowtransaction, table, 
+def _collect_delete_commands(base_mapper, uowtransaction, table,
                                 states_to_delete):
-    """Identify values to use in DELETE statements for a list of 
+    """Identify values to use in DELETE statements for a list of
     states to be deleted."""
 
     delete = util.defaultdict(list)
@@ -446,7 +446,7 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
     return delete
 
 
-def _emit_update_statements(base_mapper, uowtransaction, 
+def _emit_update_statements(base_mapper, uowtransaction,
                         cached_connections, mapper, table, update):
     """Emit UPDATE statements corresponding to value lists collected
     by _collect_update_commands()."""
@@ -484,13 +484,13 @@ def _emit_update_statements(base_mapper, uowtransaction,
 
         _postfetch(
                 mapper,
-                uowtransaction, 
-                table, 
-                state, 
-                state_dict, 
-                c.context.prefetch_cols, 
+                uowtransaction,
+                table,
+                state,
+                state_dict,
+                c.context.prefetch_cols,
                 c.context.postfetch_cols,
-                c.context.compiled_parameters[0], 
+                c.context.compiled_parameters[0],
                 value_params)
         rows += c.rowcount
 
@@ -503,11 +503,11 @@ def _emit_update_statements(base_mapper, uowtransaction,
 
     elif needs_version_id:
         util.warn("Dialect %s does not support updated rowcount "
-                "- versioning cannot be verified." % 
+                "- versioning cannot be verified." %
                 c.dialect.dialect_description,
                 stacklevel=12)
 
-def _emit_insert_statements(base_mapper, uowtransaction, 
+def _emit_insert_statements(base_mapper, uowtransaction,
                         cached_connections, table, insert):
     """Emit INSERT statements corresponding to value lists collected
     by _collect_insert_commands()."""
@@ -515,10 +515,10 @@ def _emit_insert_statements(base_mapper, uowtransaction,
     statement = base_mapper._memo(('insert', table), table.insert)
 
     for (connection, pkeys, hasvalue, has_all_pks), \
-        records in groupby(insert, 
-                            lambda rec: (rec[4], 
-                                    rec[2].keys(), 
-                                    bool(rec[5]), 
+        records in groupby(insert,
+                            lambda rec: (rec[4],
+                                    rec[2].keys(),
+                                    bool(rec[5]),
                                     rec[6])
     ):
         if has_all_pks and not hasvalue:
@@ -527,19 +527,19 @@ def _emit_insert_statements(base_mapper, uowtransaction,
             c = cached_connections[connection].\
                                 execute(statement, multiparams)
 
-            for (state, state_dict, params, mapper, 
+            for (state, state_dict, params, mapper,
                     conn, value_params, has_all_pks), \
                     last_inserted_params in \
                     zip(records, c.context.compiled_parameters):
                 _postfetch(
                         mapper,
-                        uowtransaction, 
+                        uowtransaction,
                         table,
-                        state, 
+                        state,
                         state_dict,
                         c.context.prefetch_cols,
                         c.context.postfetch_cols,
-                        last_inserted_params, 
+                        last_inserted_params,
                         value_params)
 
         else:
@@ -559,31 +559,31 @@ def _emit_insert_statements(base_mapper, uowtransaction,
 
                 if primary_key is not None:
                     # set primary key attributes
-                    for pk, col in zip(primary_key, 
+                    for pk, col in zip(primary_key,
                                     mapper._pks_by_table[table]):
                         prop = mapper._columntoproperty[col]
                         if state_dict.get(prop.key) is None:
                             # TODO: would rather say:
                             #state_dict[prop.key] = pk
                             mapper._set_state_attr_by_column(
-                                        state, 
-                                        state_dict, 
+                                        state,
+                                        state_dict,
                                         col, pk)
 
                 _postfetch(
                         mapper,
-                        uowtransaction, 
-                        table, 
-                        state, 
+                        uowtransaction,
+                        table,
+                        state,
                         state_dict,
-                        result.context.prefetch_cols, 
+                        result.context.prefetch_cols,
                         result.context.postfetch_cols,
-                        result.context.compiled_parameters[0], 
+                        result.context.compiled_parameters[0],
                         value_params)
 
 
 
-def _emit_post_update_statements(base_mapper, uowtransaction, 
+def _emit_post_update_statements(base_mapper, uowtransaction,
                             cached_connections, mapper, table, update):
     """Emit UPDATE statements corresponding to value lists collected
     by _collect_post_update_commands()."""
@@ -601,19 +601,19 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
 
     # execute each UPDATE in the order according to the original
     # list of states to guarantee row access order, but
-    # also group them into common (connection, cols) sets 
+    # also group them into common (connection, cols) sets
     # to support executemany().
     for key, grouper in groupby(
         update, lambda rec: (rec[4], rec[2].keys())
     ):
         connection = key[0]
-        multiparams = [params for state, state_dict, 
+        multiparams = [params for state, state_dict,
                                 params, mapper, conn in grouper]
         cached_connections[connection].\
                             execute(statement, multiparams)
 
 
-def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, 
+def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
                                     mapper, table, delete):
     """Emit DELETE statements corresponding to value lists collected
     by _collect_delete_commands()."""
@@ -629,9 +629,9 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
 
         if need_version_id:
             clause.clauses.append(
-                mapper.version_id_col == 
+                mapper.version_id_col ==
                 sql.bindparam(
-                        mapper.version_id_col.key, 
+                        mapper.version_id_col.key,
                         type_=mapper.version_id_col.type
                 )
             )
@@ -655,13 +655,13 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
                 if rows != len(del_objects):
                     raise orm_exc.StaleDataError(
                         "DELETE statement on table '%s' expected to "
-                        "delete %d row(s); %d were matched." % 
+                        "delete %d row(s); %d were matched." %
                         (table.description, len(del_objects), c.rowcount)
                     )
             else:
                 util.warn(
                     "Dialect %s does not support deleted rowcount "
-                    "- versioning cannot be verified." % 
+                    "- versioning cannot be verified." %
                     connection.dialect.dialect_description,
                     stacklevel=12)
                 connection.execute(statement, del_objects)
@@ -669,7 +669,7 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
             connection.execute(statement, del_objects)
 
 
-def _finalize_insert_update_commands(base_mapper, uowtransaction, 
+def _finalize_insert_update_commands(base_mapper, uowtransaction,
                             states_to_insert, states_to_update):
     """finalize state on states that have been inserted or updated,
     including calling after_insert/after_update events.
@@ -681,11 +681,11 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
 
         if mapper._readonly_props:
             readonly = state.unmodified_intersection(
-                [p.key for p in mapper._readonly_props 
+                [p.key for p in mapper._readonly_props
                     if p.expire_on_flush or p.key not in state.dict]
             )
             if readonly:
-                state.expire_attributes(state.dict, readonly)
+                state._expire_attributes(state.dict, readonly)
 
         # if eager_defaults option is enabled,
         # refresh whatever has been expired.
@@ -703,7 +703,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
         else:
             mapper.dispatch.after_update(mapper, connection, state)
 
-def _postfetch(mapper, uowtransaction, table, 
+def _postfetch(mapper, uowtransaction, table,
                 state, dict_, prefetch_cols, postfetch_cols,
                             params, value_params):
     """Expire attributes in need of newly persisted database state,
@@ -718,9 +718,9 @@ def _postfetch(mapper, uowtransaction, table,
             mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
 
     if postfetch_cols:
-        state.expire_attributes(state.dict, 
-                            [mapper._columntoproperty[c].key 
-                            for c in postfetch_cols if c in 
+        state._expire_attributes(state.dict,
+                            [mapper._columntoproperty[c].key
+                            for c in postfetch_cols if c in
                             mapper._columntoproperty]
                         )
 
@@ -728,8 +728,8 @@ def _postfetch(mapper, uowtransaction, table,
     # TODO: this still goes a little too often.  would be nice to
     # have definitive list of "columns that changed" here
     for m, equated_pairs in mapper._table_to_equated[table]:
-        sync.populate(state, m, state, m, 
-                                        equated_pairs, 
+        sync.populate(state, m, state, m,
+                                        equated_pairs,
                                         uowtransaction,
                                         mapper.passive_updates)
 
@@ -742,7 +742,7 @@ def _connections_for_states(base_mapper, uowtransaction, states):
 
     """
     # if session has a connection callable,
-    # organize individual states with the connection 
+    # organize individual states with the connection
     # to use for update
     if uowtransaction.session.connection_callable:
         connection_callable = \
@@ -789,7 +789,7 @@ class BulkUD(object):
         except KeyError:
             raise sa_exc.ArgumentError(
                             "Valid strategies for session synchronization "
-                            "are %s" % (", ".join(sorted(repr(x) 
+                            "are %s" % (", ".join(sorted(repr(x)
                                 for x in lookup.keys()))))
         else:
             return klass(*arg)
@@ -884,7 +884,7 @@ class BulkUpdate(BulkUD):
         }, synchronize_session, query, values)
 
     def _do_exec(self):
-        update_stmt = sql.update(self.primary_table, 
+        update_stmt = sql.update(self.primary_table,
                             self.context.whereclause, self.values)
 
         self.result = self.query.session.execute(
@@ -893,7 +893,7 @@ class BulkUpdate(BulkUD):
 
     def _do_post(self):
         session = self.query.session
-        session.dispatch.after_bulk_update(session, self.query, 
+        session.dispatch.after_bulk_update(session, self.query,
                                 self.context, self.result)
 
 class BulkDelete(BulkUD):
@@ -912,20 +912,20 @@ class BulkDelete(BulkUD):
         }, synchronize_session, query)
 
     def _do_exec(self):
-        delete_stmt = sql.delete(self.primary_table, 
+        delete_stmt = sql.delete(self.primary_table,
                                     self.context.whereclause)
 
-        self.result = self.query.session.execute(delete_stmt, 
+        self.result = self.query.session.execute(delete_stmt,
                                     params=self.query._params)
         self.rowcount = self.result.rowcount
 
     def _do_post(self):
         session = self.query.session
-        session.dispatch.after_bulk_delete(session, self.query, 
+        session.dispatch.after_bulk_delete(session, self.query,
                         self.context, self.result)
 
 class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
-    """BulkUD which handles UPDATEs using the "evaluate" 
+    """BulkUD which handles UPDATEs using the "evaluate"
     method of session resolution."""
 
     def _additional_evaluators(self,evaluator_compiler):
@@ -949,27 +949,27 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
             for key in to_evaluate:
                 dict_[key] = self.value_evaluators[key](obj)
 
-            state.commit(dict_, list(to_evaluate))
+            state._commit(dict_, list(to_evaluate))
 
-            # expire attributes with pending changes 
+            # expire attributes with pending changes
             # (there was no autoflush, so they are overwritten)
-            state.expire_attributes(dict_,
+            state._expire_attributes(dict_,
                             set(evaluated_keys).
                                 difference(to_evaluate))
             states.add(state)
         session._register_altered(states)
 
 class BulkDeleteEvaluate(BulkEvaluate, BulkDelete):
-    """BulkUD which handles DELETEs using the "evaluate" 
+    """BulkUD which handles DELETEs using the "evaluate"
     method of session resolution."""
 
     def _do_post_synchronize(self):
         self.query.session._remove_newly_deleted(
-                [attributes.instance_state(obj) 
+                [attributes.instance_state(obj)
                     for obj in self.matched_objects])
 
 class BulkUpdateFetch(BulkFetch, BulkUpdate):
-    """BulkUD which handles UPDATEs using the "fetch" 
+    """BulkUD which handles UPDATEs using the "fetch"
     method of session resolution."""
 
     def _do_post_synchronize(self):
@@ -990,7 +990,7 @@ class BulkUpdateFetch(BulkFetch, BulkUpdate):
         session._register_altered(states)
 
 class BulkDeleteFetch(BulkFetch, BulkDelete):
-    """BulkUD which handles DELETEs using the "fetch" 
+    """BulkUD which handles DELETEs using the "fetch"
     method of session resolution."""
 
     def _do_post_synchronize(self):
index dde0e2908fa6feb24c466d8d4cfcdbd536b4ec6a..4533bbdb062539126a92f5421bc1f4e0fc607476 100644 (file)
@@ -157,7 +157,7 @@ class ColumnProperty(StrategizedProperty):
                 impl = dest_state.get_impl(self.key)
                 impl.set(dest_state, dest_dict, value, None)
         elif dest_state.has_identity and self.key not in dest_dict:
-            dest_state.expire_attributes(dest_dict, [self.key])
+            dest_state._expire_attributes(dest_dict, [self.key])
 
     class Comparator(PropComparator):
         @util.memoized_instancemethod
index 9ce3c8628ee1e8b024bdd83cb42d94c6f2ed004e..1fb1a92853f6e47b65548eaa6a526b6f613ce961 100644 (file)
@@ -31,14 +31,14 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
                  expire_on_commit=True, **kwargs):
     """Generate a custom-configured :class:`.Session` class.
 
-    The returned object is a subclass of :class:`.Session`, which, when instantiated
-    with no arguments, uses the keyword arguments configured here as its
-    constructor arguments.
+    The returned object is a subclass of :class:`.Session`, which,
+    when instantiated with no arguments, uses the keyword arguments
+    configured here as its constructor arguments.
 
-    It is intended that the :func:`.sessionmaker()` function be called within the
-    global scope of an application, and the returned class be made available
-    to the rest of the application as the single class used to instantiate
-    sessions.
+    It is intended that the :func:`.sessionmaker()` function be called
+    within the global scope of an application, and the returned class
+    be made available to the rest of the application as the single
+    class used to instantiate sessions.
 
     e.g.::
 
@@ -66,9 +66,9 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
         Session.configure(bind=create_engine('sqlite:///foo.db'))
 
         sess = Session()
-    
+
     For options, see the constructor options for :class:`.Session`.
-    
+
     """
     kwargs['bind'] = bind
     kwargs['autoflush'] = autoflush
@@ -109,8 +109,8 @@ class SessionTransaction(object):
 
     .. versionchanged:: 0.4
         Direct usage of :class:`.SessionTransaction` is not typically
-        necessary; use the :meth:`.Session.rollback` and 
-        :meth:`.Session.commit` methods on :class:`.Session` itself to 
+        necessary; use the :meth:`.Session.rollback` and
+        :meth:`.Session.commit` methods on :class:`.Session` itself to
         control the transaction.
 
     The current instance of :class:`.SessionTransaction` for a given
@@ -120,19 +120,19 @@ class SessionTransaction(object):
     The :class:`.SessionTransaction` object is **not** thread-safe.
 
     See also:
-    
+
     :meth:`.Session.rollback`
-    
+
     :meth:`.Session.commit`
 
     :attr:`.Session.is_active`
-    
+
     :meth:`.SessionEvents.after_commit`
-    
+
     :meth:`.SessionEvents.after_rollback`
-    
+
     :meth:`.SessionEvents.after_soft_rollback`
-    
+
     .. index::
       single: thread safety; SessionTransaction
 
@@ -238,14 +238,14 @@ class SessionTransaction(object):
 
         for s in self.session.identity_map.all_states():
             if not dirty_only or s.modified or s in self._dirty:
-                s.expire(s.dict, self.session.identity_map._modified)
+                s._expire(s.dict, self.session.identity_map._modified)
 
     def _remove_snapshot(self):
         assert self._is_transaction_boundary
 
         if not self.nested and self.session.expire_on_commit:
             for s in self.session.identity_map.all_states():
-                s.expire(s.dict, self.session.identity_map._modified)
+                s._expire(s.dict, self.session.identity_map._modified)
 
     def _connection_for_bind(self, bind):
         self._assert_is_active()
@@ -419,19 +419,19 @@ class Session(object):
         '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested',
         'close', 'commit', 'connection', 'delete', 'execute', 'expire',
         'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind',
-        'is_modified', 
-        'merge', 'query', 'refresh', 'rollback', 
+        'is_modified',
+        'merge', 'query', 'refresh', 'rollback',
         'scalar')
 
 
     def __init__(self, bind=None, autoflush=True, expire_on_commit=True,
                 _enable_transaction_accounting=True,
-                 autocommit=False, twophase=False, 
+                 autocommit=False, twophase=False,
                  weak_identity_map=True, binds=None, extension=None,
                  query_cls=query.Query):
         """Construct a new Session.
 
-        See also the :func:`.sessionmaker` function which is used to 
+        See also the :func:`.sessionmaker` function which is used to
         generate a :class:`.Session`-producing callable with a given
         set of arguments.
 
@@ -450,7 +450,7 @@ class Session(object):
           by any of these methods, the ``Session`` is ready for the next usage,
           which will again acquire and maintain a new connection/transaction.
 
-        :param autoflush: When ``True``, all query operations will issue a 
+        :param autoflush: When ``True``, all query operations will issue a
            ``flush()`` call to this ``Session`` before proceeding. This is a
            convenience feature so that ``flush()`` need not be called repeatedly
            in order for database queries to retrieve results. It's typical that
@@ -498,7 +498,7 @@ class Session(object):
            attribute/object access subsequent to a completed transaction will load
            from the most recent database state.
 
-        :param extension: An optional 
+        :param extension: An optional
            :class:`~.SessionExtension` instance, or a list
            of such instances, which will receive pre- and post- commit and flush
            events, as well as a post-rollback event. **Deprecated.**
@@ -516,9 +516,9 @@ class Session(object):
             be called. This allows each database to roll back the entire
             transaction, before each transaction is committed.
 
-        :param weak_identity_map:  Defaults to ``True`` - when set to 
-           ``False``, objects placed in the :class:`.Session` will be 
-           strongly referenced until explicitly removed or the 
+        :param weak_identity_map:  Defaults to ``True`` - when set to
+           ``False``, objects placed in the :class:`.Session` will be
+           strongly referenced until explicitly removed or the
            :class:`.Session` is closed.  **Deprecated** - this option
            is obsolete.
 
@@ -575,7 +575,7 @@ class Session(object):
         transaction or nested transaction, an error is raised, unless
         ``subtransactions=True`` or ``nested=True`` is specified.
 
-        The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin` 
+        The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin`
         can create a subtransaction if a transaction is already in progress.
         For documentation on subtransactions, please see :ref:`session_subtransactions`.
 
@@ -633,7 +633,7 @@ class Session(object):
 
         By default, the :class:`.Session` also expires all database
         loaded state on all ORM-managed attributes after transaction commit.
-        This so that subsequent operations load the most recent 
+        This so that subsequent operations load the most recent
         data from the database.   This behavior can be disabled using
         the ``expire_on_commit=False`` option to :func:`.sessionmaker` or
         the :class:`.Session` constructor.
@@ -674,11 +674,11 @@ class Session(object):
 
         self.transaction.prepare()
 
-    def connection(self, mapper=None, clause=None, 
-                        bind=None, 
-                        close_with_result=False, 
+    def connection(self, mapper=None, clause=None,
+                        bind=None,
+                        close_with_result=False,
                         **kw):
-        """Return a :class:`.Connection` object corresponding to this 
+        """Return a :class:`.Connection` object corresponding to this
         :class:`.Session` object's transactional state.
 
         If this :class:`.Session` is configured with ``autocommit=False``,
@@ -686,13 +686,13 @@ class Session(object):
         is returned, or if no transaction is in progress, a new one is begun
         and the :class:`.Connection` returned (note that no transactional state
         is established with the DBAPI until the first SQL statement is emitted).
-        
+
         Alternatively, if this :class:`.Session` is configured with ``autocommit=True``,
-        an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect` 
+        an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect`
         on the underlying :class:`.Engine`.
 
         Ambiguity in multi-bind or unbound :class:`.Session` objects can be resolved through
-        any of the optional keyword arguments.   This ultimately makes usage of the 
+        any of the optional keyword arguments.   This ultimately makes usage of the
         :meth:`.get_bind` method for resolution.
 
         :param bind:
@@ -707,27 +707,27 @@ class Session(object):
           ``clause``.
 
         :param clause:
-            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, 
-            :func:`~.sql.expression.text`, 
+            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`,
+            :func:`~.sql.expression.text`,
             etc.) which will be used to locate a bind, if a bind
             cannot otherwise be identified.
 
         :param close_with_result: Passed to :meth:`Engine.connect`, indicating
           the :class:`.Connection` should be considered "single use", automatically
-          closing when the first result set is closed.  This flag only has 
+          closing when the first result set is closed.  This flag only has
           an effect if this :class:`.Session` is configured with ``autocommit=True``
           and does not already have a  transaction in progress.
 
         :param \**kw:
           Additional keyword arguments are sent to :meth:`get_bind()`,
-          allowing additional arguments to be passed to custom 
+          allowing additional arguments to be passed to custom
           implementations of :meth:`get_bind`.
 
         """
         if bind is None:
             bind = self.get_bind(mapper, clause=clause, **kw)
 
-        return self._connection_for_bind(bind, 
+        return self._connection_for_bind(bind,
                                         close_with_result=close_with_result)
 
     def _connection_for_bind(self, engine, **kwargs):
@@ -765,9 +765,9 @@ class Session(object):
         set to ``True`` so that an ``autocommit=True`` :class:`.Session`
         with no active transaction will produce a result that auto-closes
         the underlying :class:`.Connection`.
-        
+
         :param clause:
-            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, 
+            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`,
             :func:`~.sql.expression.text`, etc.) or string SQL statement to be executed.  The clause
             will also be used to locate a bind, if this :class:`.Session`
             is not bound to a single engine already, and the ``mapper``
@@ -787,10 +787,10 @@ class Session(object):
           that connection will be used.  This argument takes
           precedence over ``mapper`` and ``clause`` when locating
           a bind.
-          
+
         :param \**kw:
           Additional keyword arguments are sent to :meth:`get_bind()`,
-          allowing additional arguments to be passed to custom 
+          allowing additional arguments to be passed to custom
           implementations of :meth:`get_bind`.
 
         """
@@ -883,39 +883,39 @@ class Session(object):
 
     def get_bind(self, mapper=None, clause=None):
         """Return a "bind" to which this :class:`.Session` is bound.
-        
-        The "bind" is usually an instance of :class:`.Engine`, 
+
+        The "bind" is usually an instance of :class:`.Engine`,
         except in the case where the :class:`.Session` has been
         explicitly bound directly to a :class:`.Connection`.
 
-        For a multiply-bound or unbound :class:`.Session`, the 
-        ``mapper`` or ``clause`` arguments are used to determine the 
+        For a multiply-bound or unbound :class:`.Session`, the
+        ``mapper`` or ``clause`` arguments are used to determine the
         appropriate bind to return.
-        
+
         Note that the "mapper" argument is usually present
         when :meth:`.Session.get_bind` is called via an ORM
-        operation such as a :meth:`.Session.query`, each 
-        individual INSERT/UPDATE/DELETE operation within a 
+        operation such as a :meth:`.Session.query`, each
+        individual INSERT/UPDATE/DELETE operation within a
         :meth:`.Session.flush`, call, etc.
-        
+
         The order of resolution is:
-        
+
         1. if mapper given and session.binds is present,
            locate a bind based on mapper.
         2. if clause given and session.binds is present,
            locate a bind based on :class:`.Table` objects
            found in the given clause present in session.binds.
         3. if session.bind is present, return that.
-        4. if clause given, attempt to return a bind 
+        4. if clause given, attempt to return a bind
            linked to the :class:`.MetaData` ultimately
            associated with the clause.
         5. if mapper given, attempt to return a bind
-           linked to the :class:`.MetaData` ultimately 
+           linked to the :class:`.MetaData` ultimately
            associated with the :class:`.Table` or other
            selectable to which the mapper is mapped.
         6. No bind can be found, :class:`.UnboundExecutionError`
            is raised.
-         
+
         :param mapper:
           Optional :func:`.mapper` mapped class or instance of
           :class:`.Mapper`.   The bind can be derived from a :class:`.Mapper`
@@ -925,11 +925,11 @@ class Session(object):
           is mapped for a bind.
 
         :param clause:
-            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, 
-            :func:`~.sql.expression.text`, 
+            A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`,
+            :func:`~.sql.expression.text`,
             etc.).  If the ``mapper`` argument is not present or could not produce
             a bind, the given expression construct will be searched for a bound
-            element, typically a :class:`.Table` associated with bound 
+            element, typically a :class:`.Table` associated with bound
             :class:`.MetaData`.
 
         """
@@ -984,22 +984,22 @@ class Session(object):
     @util.contextmanager
     def no_autoflush(self):
         """Return a context manager that disables autoflush.
-        
+
         e.g.::
-        
+
             with session.no_autoflush:
-                
+
                 some_object = SomeClass()
                 session.add(some_object)
                 # won't autoflush
                 some_object.related_thing = session.query(SomeRelated).first()
-        
+
         Operations that proceed within the ``with:`` block
         will not be subject to flushes occurring upon query
         access.  This is useful when initializing a series
         of objects which involve existing database queries,
         where the uncompleted object should not yet be flushed.
-        
+
         .. versionadded:: 0.7.6
 
         """
@@ -1033,10 +1033,10 @@ class Session(object):
         mode is turned on.
 
         :param attribute_names: optional.  An iterable collection of
-          string attribute names indicating a subset of attributes to 
+          string attribute names indicating a subset of attributes to
           be refreshed.
 
-        :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query` 
+        :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query`
           as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`.
 
         """
@@ -1059,27 +1059,27 @@ class Session(object):
     def expire_all(self):
         """Expires all persistent instances within this Session.
 
-        When any attributes on a persistent instance is next accessed, 
+        When any attributes on a persistent instance is next accessed,
         a query will be issued using the
         :class:`.Session` object's current transactional context in order to
         load all expired attributes for the given instance.   Note that
-        a highly isolated transaction will return the same values as were 
+        a highly isolated transaction will return the same values as were
         previously read in that same transaction, regardless of changes
         in database state outside of that transaction.
 
-        To expire individual objects and individual attributes 
+        To expire individual objects and individual attributes
         on those objects, use :meth:`Session.expire`.
 
-        The :class:`.Session` object's default behavior is to 
+        The :class:`.Session` object's default behavior is to
         expire all state whenever the :meth:`Session.rollback`
         or :meth:`Session.commit` methods are called, so that new
         state can be loaded for the new transaction.   For this reason,
-        calling :meth:`Session.expire_all` should not be needed when 
+        calling :meth:`Session.expire_all` should not be needed when
         autocommit is ``False``, assuming the transaction is isolated.
 
         """
         for state in self.identity_map.all_states():
-            state.expire(state.dict, self.identity_map._modified)
+            state._expire(state.dict, self.identity_map._modified)
 
     def expire(self, instance, attribute_names=None):
         """Expire the attributes on an instance.
@@ -1088,14 +1088,14 @@ class Session(object):
         attribute is next accessed, a query will be issued to the
         :class:`.Session` object's current transactional context in order to
         load all expired attributes for the given instance.   Note that
-        a highly isolated transaction will return the same values as were 
+        a highly isolated transaction will return the same values as were
         previously read in that same transaction, regardless of changes
         in database state outside of that transaction.
 
         To expire all objects in the :class:`.Session` simultaneously,
         use :meth:`Session.expire_all`.
 
-        The :class:`.Session` object's default behavior is to 
+        The :class:`.Session` object's default behavior is to
         expire all state whenever the :meth:`Session.rollback`
         or :meth:`Session.commit` methods are called, so that new
         state can be loaded for the new transaction.   For this reason,
@@ -1117,7 +1117,7 @@ class Session(object):
     def _expire_state(self, state, attribute_names):
         self._validate_persistent(state)
         if attribute_names:
-            state.expire_attributes(state.dict, attribute_names)
+            state._expire_attributes(state.dict, attribute_names)
         else:
             # pre-fetch the full cascade since the expire is going to
             # remove associations
@@ -1131,7 +1131,7 @@ class Session(object):
         """Expire a state if persistent, else expunge if pending"""
 
         if state.key:
-            state.expire(state.dict, self.identity_map._modified)
+            state._expire(state.dict, self.identity_map._modified)
         elif state in self._new:
             self._new.pop(state)
             state._detach()
@@ -1210,15 +1210,15 @@ class Session(object):
                 if state.key is None:
                     state.key = instance_key
                 elif state.key != instance_key:
-                    # primary key switch. use discard() in case another 
-                    # state has already replaced this one in the identity 
+                    # primary key switch. use discard() in case another
+                    # state has already replaced this one in the identity
                     # map (see test/orm/test_naturalpks.py ReversePKsTest)
                     self.identity_map.discard(state)
                     state.key = instance_key
 
                 self.identity_map.replace(state)
 
-        statelib.InstanceState.commit_all_states(
+        statelib.InstanceState._commit_all_states(
             ((state, state.dict) for state in states),
             self.identity_map
         )
@@ -1273,8 +1273,8 @@ class Session(object):
 
         mapper = _state_mapper(state)
         for o, m, st_, dct_ in mapper.cascade_iterator(
-                                    'save-update', 
-                                    state, 
+                                    'save-update',
+                                    state,
                                     halt_on=self._contains_state):
             self._save_or_update_impl(st_)
 
@@ -1297,7 +1297,7 @@ class Session(object):
         if state in self._deleted:
             return
 
-        # ensure object is attached to allow the 
+        # ensure object is attached to allow the
         # cascade operation to load deferred attributes
         # and collections
         self._attach(state, include_before=True)
@@ -1342,8 +1342,8 @@ class Session(object):
         try:
             self.autoflush = False
             return self._merge(
-                            attributes.instance_state(instance), 
-                            attributes.instance_dict(instance), 
+                            attributes.instance_state(instance),
+                            attributes.instance_dict(instance),
                             load=load, _recursive=_recursive)
         finally:
             self.autoflush = autoflush
@@ -1381,7 +1381,7 @@ class Session(object):
             new_instance = True
 
         elif not _none_set.issubset(key[1]) or \
-                    (mapper.allow_partial_pks and 
+                    (mapper.allow_partial_pks and
                     not _none_set.issuperset(key[1])):
             merged = self.query(mapper.class_).get(key[1])
         else:
@@ -1405,14 +1405,14 @@ class Session(object):
             # version check if applicable
             if mapper.version_id_col is not None:
                 existing_version = mapper._get_state_attr_by_column(
-                            state, 
-                            state_dict, 
+                            state,
+                            state_dict,
                             mapper.version_id_col,
                             passive=attributes.PASSIVE_NO_INITIALIZE)
 
                 merged_version = mapper._get_state_attr_by_column(
-                            merged_state, 
-                            merged_dict, 
+                            merged_state,
+                            merged_dict,
                             mapper.version_id_col,
                             passive=attributes.PASSIVE_NO_INITIALIZE)
 
@@ -1434,13 +1434,13 @@ class Session(object):
             merged_state.load_options = state.load_options
 
             for prop in mapper.iterate_properties:
-                prop.merge(self, state, state_dict, 
-                                merged_state, merged_dict, 
+                prop.merge(self, state, state_dict,
+                                merged_state, merged_dict,
                                 load, _recursive)
 
         if not load:
             # remove any history
-            merged_state.commit_all(merged_dict, self.identity_map)
+            merged_state._commit_all(merged_dict, self.identity_map)
 
         if new_instance:
             merged_state.manager.dispatch.load(merged_state, None)
@@ -1527,18 +1527,18 @@ class Session(object):
         ''not'' participate in any persistence operations; its state
         for almost all purposes will remain either "transient" or
         "detached", except for the case of relationship loading.
-        
+
         Also note that backrefs will often not work as expected.
         Altering a relationship-bound attribute on the target object
         may not fire off a backref event, if the effective value
         is what was already loaded from a foreign-key-holding value.
-        
+
         The :meth:`.Session.enable_relationship_loading` method supersedes
         the ``load_on_pending`` flag on :func:`.relationship`.   Unlike
         that flag, :meth:`.Session.enable_relationship_loading` allows
         an object to remain transient while still being able to load
-        related items.   
-        
+        related items.
+
         To make a transient object associated with a :class:`.Session`
         via :meth:`.Session.enable_relationship_loading` pending, add
         it to the :class:`.Session` using :meth:`.Session.add` normally.
@@ -1550,7 +1550,7 @@ class Session(object):
         is not intended for general use.
 
         .. versionadded:: 0.8
-        
+
         """
         state = attributes.instance_state(obj)
         self._attach(state, include_before=True)
@@ -1617,7 +1617,7 @@ class Session(object):
 
         Database operations will be issued in the current transactional
         context and do not affect the state of the transaction, unless an
-        error occurs, in which case the entire transaction is rolled back. 
+        error occurs, in which case the entire transaction is rolled back.
         You may flush() as often as you like within a transaction to move
         changes from Python to the database's transaction buffer.
 
@@ -1625,11 +1625,11 @@ class Session(object):
         will create a transaction on the fly that surrounds the entire set of
         operations in the flush.
 
-        :param objects: Optional; restricts the flush operation to operate 
+        :param objects: Optional; restricts the flush operation to operate
           only on elements that are in the given collection.
-          
+
           This feature is for an extremely narrow set of use cases where
-          particular objects may need to be operated upon before the 
+          particular objects may need to be operated upon before the
           full flush() occurs.  It is not intended for general use.
 
         """
@@ -1734,39 +1734,39 @@ class Session(object):
             raise
 
 
-    def is_modified(self, instance, include_collections=True, 
+    def is_modified(self, instance, include_collections=True,
                             passive=True):
-        """Return ``True`` if the given instance has locally 
+        """Return ``True`` if the given instance has locally
         modified attributes.
 
         This method retrieves the history for each instrumented
         attribute on the instance and performs a comparison of the current
         value to its previously committed value, if any.
-        
+
         It is in effect a more expensive and accurate
-        version of checking for the given instance in the 
-        :attr:`.Session.dirty` collection; a full test for 
+        version of checking for the given instance in the
+        :attr:`.Session.dirty` collection; a full test for
         each attribute's net "dirty" status is performed.
-        
+
         E.g.::
-        
+
             return session.is_modified(someobject)
 
         .. versionchanged:: 0.8
-            When using SQLAlchemy 0.7 and earlier, the ``passive`` 
+            When using SQLAlchemy 0.7 and earlier, the ``passive``
             flag should **always** be explicitly set to ``True``,
-            else SQL loads/autoflushes may proceed which can affect 
+            else SQL loads/autoflushes may proceed which can affect
             the modified state itself:
             ``session.is_modified(someobject, passive=True)``\ .
-            In 0.8 and above, the behavior is corrected and 
+            In 0.8 and above, the behavior is corrected and
             this flag is ignored.
 
         A few caveats to this method apply:
 
-        * Instances present in the :attr:`.Session.dirty` collection may report 
-          ``False`` when tested with this method.  This is because 
+        * Instances present in the :attr:`.Session.dirty` collection may report
+          ``False`` when tested with this method.  This is because
           the object may have received change events via attribute
-          mutation, thus placing it in :attr:`.Session.dirty`, 
+          mutation, thus placing it in :attr:`.Session.dirty`,
           but ultimately the state is the same as that loaded from
           the database, resulting in no net change here.
         * Scalar attributes may not have recorded the previously set
@@ -1778,15 +1778,15 @@ class Session(object):
           it skips the expense of a SQL call if the old value isn't present,
           based on the assumption that an UPDATE of the scalar value is
           usually needed, and in those few cases where it isn't, is less
-          expensive on average than issuing a defensive SELECT. 
+          expensive on average than issuing a defensive SELECT.
 
           The "old" value is fetched unconditionally upon set only if the attribute
           container has the ``active_history`` flag set to ``True``. This flag
           is set typically for primary key attributes and scalar object references
-          that are not a simple many-to-one.  To set this flag for 
+          that are not a simple many-to-one.  To set this flag for
           any arbitrary mapped column, use the ``active_history`` argument
           with :func:`.column_property`.
-          
+
         :param instance: mapped instance to be tested for pending changes.
         :param include_collections: Indicates if multivalued collections should be
          included in the operation.  Setting this to ``False`` is a way to detect
@@ -1810,13 +1810,13 @@ class Session(object):
         for attr in state.manager.attributes:
             if \
                 (
-                    not include_collections and 
+                    not include_collections and
                     hasattr(attr.impl, 'get_collection')
                 ) or not hasattr(attr.impl, 'get_history'):
                 continue
 
             (added, unchanged, deleted) = \
-                    attr.impl.get_history(state, dict_, 
+                    attr.impl.get_history(state, dict_,
                             passive=attributes.NO_CHANGE)
 
             if added or deleted:
@@ -1827,14 +1827,14 @@ class Session(object):
     @property
     def is_active(self):
         """True if this :class:`.Session` has an active transaction.
-        
+
         This indicates if the :class:`.Session` is capable of emitting
         SQL, as from the :meth:`.Session.execute`, :meth:`.Session.query`,
-        or :meth:`.Session.flush` methods.   If False, it indicates 
+        or :meth:`.Session.flush` methods.   If False, it indicates
         that the innermost transaction has been rolled back, but enclosing
         :class:`.SessionTransaction` objects remain in the transactional
         stack, which also must be rolled back.
-        
+
         This flag is generally only useful with a :class:`.Session`
         configured in its default mode of ``autocommit=False``.
 
@@ -1844,15 +1844,15 @@ class Session(object):
 
     identity_map = None
     """A mapping of object identities to objects themselves.
-    
+
     Iterating through ``Session.identity_map.values()`` provides
-    access to the full set of persistent objects (i.e., those 
+    access to the full set of persistent objects (i.e., those
     that have row identity) currently in the session.
-    
+
     See also:
-    
+
     :func:`.identity_key` - operations involving identity keys.
-    
+
     """
 
     @property
@@ -1868,9 +1868,9 @@ class Session(object):
     @property
     def dirty(self):
         """The set of all persistent instances considered dirty.
-        
+
         E.g.::
-        
+
             some_mapped_object in session.dirty
 
         Instances are considered dirty when they were modified but not
@@ -1911,7 +1911,7 @@ _sessions = weakref.WeakValueDictionary()
 def make_transient(instance):
     """Make the given instance 'transient'.
 
-    This will remove its association with any 
+    This will remove its association with any
     session and additionally will remove its "identity key",
     such that it's as though the object were newly constructed,
     except retaining its values.   It also resets the
@@ -1919,7 +1919,7 @@ def make_transient(instance):
     had been explicitly deleted by its session.
 
     Attributes which were "expired" or deferred at the
-    instance level are reverted to undefined, and 
+    instance level are reverted to undefined, and
     will not trigger any loads.
 
     """
@@ -1928,7 +1928,7 @@ def make_transient(instance):
     if s:
         s._expunge_state(state)
 
-    # remove expired state and 
+    # remove expired state and
     # deferred callables
     state.callables.clear()
     if state.key:
index 17ffa9e7a7dba2d031723b4839d73c8ccddac3c6..9307c94da9b11011d2575cc215098ed47e4885f0 100644 (file)
@@ -190,7 +190,7 @@ class InstanceState(interfaces._InspectionAttr):
         else:
             return {}
 
-    def initialize_instance(*mixed, **kwargs):
+    def _initialize_instance(*mixed, **kwargs):
         self, instance, args = mixed[0], mixed[1], mixed[2:]
         manager = self.manager
 
@@ -278,20 +278,20 @@ class InstanceState(interfaces._InspectionAttr):
         manager.setup_instance(inst, self)
         manager.dispatch.unpickle(self, state)
 
-    def initialize(self, key):
+    def _initialize(self, key):
         """Set this attribute to an empty value or collection,
            based on the AttributeImpl in use."""
 
         self.manager.get_impl(key).initialize(self, self.dict)
 
-    def reset(self, dict_, key):
+    def _reset(self, dict_, key):
         """Remove the given attribute and any
            callables associated with it."""
 
         dict_.pop(key, None)
         self.callables.pop(key, None)
 
-    def expire_attribute_pre_commit(self, dict_, key):
+    def _expire_attribute_pre_commit(self, dict_, key):
         """a fast expire that can be called by column loaders during a load.
 
         The additional bookkeeping is finished up in commit_all().
@@ -303,14 +303,14 @@ class InstanceState(interfaces._InspectionAttr):
         dict_.pop(key, None)
         self.callables[key] = self
 
-    def set_callable(self, dict_, key, callable_):
+    def _set_callable(self, dict_, key, callable_):
         """Remove the given attribute and set the given callable
            as a loader."""
 
         dict_.pop(key, None)
         self.callables[key] = callable_
 
-    def expire(self, dict_, modified_set):
+    def _expire(self, dict_, modified_set):
         self.expired = True
         if self.modified:
             modified_set.discard(self)
@@ -335,7 +335,7 @@ class InstanceState(interfaces._InspectionAttr):
 
         self.manager.dispatch.expire(self, None)
 
-    def expire_attributes(self, dict_, attribute_names):
+    def _expire_attributes(self, dict_, attribute_names):
         pending = self.__dict__.get('_pending_mutations', None)
 
         for key in attribute_names:
@@ -451,7 +451,7 @@ class InstanceState(interfaces._InspectionAttr):
                         ))
             self.modified = True
 
-    def commit(self, dict_, keys):
+    def _commit(self, dict_, keys):
         """Commit attributes.
 
         This is used by a partial-attribute load operation to mark committed
@@ -472,7 +472,7 @@ class InstanceState(interfaces._InspectionAttr):
                             intersection(dict_):
             del self.callables[key]
 
-    def commit_all(self, dict_, instance_dict=None):
+    def _commit_all(self, dict_, instance_dict=None):
         """commit all attributes unconditionally.
 
         This is used after a flush() or a full load/refresh
@@ -487,10 +487,10 @@ class InstanceState(interfaces._InspectionAttr):
         "expired" after this step if a value was not populated in state.dict.
 
         """
-        self.commit_all_states([(self, dict_)], instance_dict)
+        self._commit_all_states([(self, dict_)], instance_dict)
 
     @classmethod
-    def commit_all_states(self, iter, instance_dict=None):
+    def _commit_all_states(self, iter, instance_dict=None):
         """Mass version of commit_all()."""
 
         for state, dict_ in iter:
index 21214af9a17023d7c990ceec0a3f3fde8b0d2142..ec8ee91084eecbf0796e193d808dcbaf93a7e6b2 100644 (file)
@@ -148,7 +148,7 @@ class ColumnLoader(LoaderStrategy):
                 return fetch_col, None, None
         else:
             def expire_for_non_present_col(state, dict_, row):
-                state.expire_attribute_pre_commit(dict_, key)
+                state._expire_attribute_pre_commit(dict_, key)
             return expire_for_non_present_col, None, None
 
 log.class_logger(ColumnLoader)
@@ -177,13 +177,13 @@ class DeferredColumnLoader(LoaderStrategy):
 
         elif not self.is_class_level:
             def set_deferred_for_local_state(state, dict_, row):
-                state.set_callable(dict_, key, LoadDeferredColumns(state, key))
+                state._set_callable(dict_, key, LoadDeferredColumns(state, key))
             return set_deferred_for_local_state, None, None
         else:
             def reset_col_for_deferred(state, dict_, row):
                 # reset state on the key so that deferred callables
                 # fire off on next access.
-                state.reset(dict_, key)
+                state._reset(dict_, key)
             return reset_col_for_deferred, None, None
 
     def init_class_attribute(self, mapper):
@@ -308,7 +308,7 @@ class NoLoader(AbstractRelationshipLoader):
 
     def create_row_processor(self, context, path, mapper, row, adapter):
         def invoke_no_load(state, dict_, row):
-            state.initialize(self.key)
+            state._initialize(self.key)
         return invoke_no_load, None, None
 
 log.class_logger(NoLoader)
@@ -596,7 +596,7 @@ class LazyLoader(AbstractRelationshipLoader):
                 # "lazyload" option on a "no load"
                 # attribute - "eager" attributes always have a
                 # class-level lazyloader installed.
-                state.set_callable(dict_, key, LoadLazyAttribute(state, key))
+                state._set_callable(dict_, key, LoadLazyAttribute(state, key))
             return set_lazy_callable, None, None
         else:
             def reset_for_lazy_callable(state, dict_, row):
@@ -608,7 +608,7 @@ class LazyLoader(AbstractRelationshipLoader):
                 # this is needed in
                 # populate_existing() types of scenarios to reset
                 # any existing state.
-                state.reset(dict_, key)
+                state._reset(dict_, key)
 
             return reset_for_lazy_callable, None, None
 
index 7143f2eed09d21f5adb1228535dabf6aee2bc9eb..286c1905ff02674885f0a759063fa364b982beb7 100644 (file)
@@ -157,7 +157,7 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
             u.email_address = 'lala@123.com'
 
             self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
-            attributes.instance_state(u).commit_all(attributes.instance_dict(u))
+            attributes.instance_state(u)._commit_all(attributes.instance_dict(u))
             self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
 
             u.user_name = 'heythere'
@@ -186,21 +186,21 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
                 assert Foo in instrumentation._instrumentation_factory._state_finders
 
             f = Foo()
-            attributes.instance_state(f).expire(attributes.instance_dict(f), set())
+            attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
             eq_(f.a, "this is a")
             eq_(f.b, 12)
 
             f.a = "this is some new a"
-            attributes.instance_state(f).expire(attributes.instance_dict(f), set())
+            attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
             eq_(f.a, "this is a")
             eq_(f.b, 12)
 
-            attributes.instance_state(f).expire(attributes.instance_dict(f), set())
+            attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
             f.a = "this is another new a"
             eq_(f.a, "this is another new a")
             eq_(f.b, 12)
 
-            attributes.instance_state(f).expire(attributes.instance_dict(f), set())
+            attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
             eq_(f.a, "this is a")
             eq_(f.b, 12)
 
@@ -208,7 +208,7 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
             eq_(f.a, None)
             eq_(f.b, 12)
 
-            attributes.instance_state(f).commit_all(attributes.instance_dict(f))
+            attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
             eq_(f.a, None)
             eq_(f.b, 12)
 
@@ -303,8 +303,8 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
             f1.bars.append(b1)
             eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b1], [], []))
 
-            attributes.instance_state(f1).commit_all(attributes.instance_dict(f1))
-            attributes.instance_state(b1).commit_all(attributes.instance_dict(b1))
+            attributes.instance_state(f1)._commit_all(attributes.instance_dict(f1))
+            attributes.instance_state(b1)._commit_all(attributes.instance_dict(b1))
 
             eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), ((), ['f1'], ()))
             eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ((), [b1], ()))
index 052f8a9bf16637eaa9522bc4900572927a414c47..6c3a34e3ef102c3179b93300497e9948c0278094 100644 (file)
@@ -1639,7 +1639,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
                                 ['counter2']) is None
 
         s1.id = 1
-        attributes.instance_state(s1).commit_all(s1.__dict__, None)
+        attributes.instance_state(s1)._commit_all(s1.__dict__, None)
         assert m._optimized_get_statement(attributes.instance_state(s1),
                                 ['counter2']) is not None
 
index 28c24ff660b7afd2a178c123b0d39fd2ae8b81b2..bff29feb630b2fb7c40f246c1168df9fdf6b3f07 100644 (file)
@@ -45,7 +45,7 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b2 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
@@ -56,7 +56,7 @@ class AttributeImplAPITest(fixtures.MappedTest):
             "Object <B at .*?> not "
             "associated with <A at .*?> on attribute 'b'",
             A.b.impl.remove,
-                attributes.instance_state(a1), 
+                attributes.instance_state(a1),
                 attributes.instance_dict(a1), b2, None
         )
 
@@ -68,14 +68,14 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b2 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
         assert a1.b is b1
 
         A.b.impl.pop(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b2, None
         )
         assert a1.b is b1
@@ -87,14 +87,14 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b1 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
         assert a1.b is b1
 
         A.b.impl.pop(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
         assert a1.b is None
@@ -107,7 +107,7 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b2 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
@@ -117,7 +117,7 @@ class AttributeImplAPITest(fixtures.MappedTest):
             ValueError,
             r"list.remove\(x\): x not in list",
             A.b.impl.remove,
-                attributes.instance_state(a1), 
+                attributes.instance_state(a1),
                 attributes.instance_dict(a1), b2, None
         )
 
@@ -129,14 +129,14 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b2 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
         assert a1.b == [b1]
 
         A.b.impl.pop(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b2, None
         )
         assert a1.b == [b1]
@@ -148,14 +148,14 @@ class AttributeImplAPITest(fixtures.MappedTest):
         b1 = B()
 
         A.b.impl.append(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
 
         assert a1.b == [b1]
 
         A.b.impl.pop(
-            attributes.instance_state(a1), 
+            attributes.instance_state(a1),
             attributes.instance_dict(a1), b1, None
         )
         assert a1.b == []
@@ -188,7 +188,7 @@ class AttributesTest(fixtures.ORMTest):
         u.email_address = 'lala@123.com'
         self.assert_(u.user_id == 7 and u.user_name == 'john'
                      and u.email_address == 'lala@123.com')
-        attributes.instance_state(u).commit_all(attributes.instance_dict(u))
+        attributes.instance_state(u)._commit_all(attributes.instance_dict(u))
         self.assert_(u.user_id == 7 and u.user_name == 'john'
                      and u.email_address == 'lala@123.com')
         u.user_name = 'heythere'
@@ -294,9 +294,9 @@ class AttributesTest(fixtures.ORMTest):
 
         instrumentation.register_class(Foo)
         instrumentation.register_class(Bar)
-        attributes.register_attribute(Foo, 
-                                    'bars', 
-                                    uselist=True, 
+        attributes.register_attribute(Foo,
+                                    'bars',
+                                    uselist=True,
                                     useobject=True)
 
         assert_raises_message(
@@ -323,28 +323,28 @@ class AttributesTest(fixtures.ORMTest):
         attributes.register_attribute(Foo, 'b', uselist=False, useobject=False)
 
         f = Foo()
-        attributes.instance_state(f).expire(attributes.instance_dict(f),
+        attributes.instance_state(f)._expire(attributes.instance_dict(f),
                 set())
         eq_(f.a, 'this is a')
         eq_(f.b, 12)
         f.a = 'this is some new a'
-        attributes.instance_state(f).expire(attributes.instance_dict(f),
+        attributes.instance_state(f)._expire(attributes.instance_dict(f),
                 set())
         eq_(f.a, 'this is a')
         eq_(f.b, 12)
-        attributes.instance_state(f).expire(attributes.instance_dict(f),
+        attributes.instance_state(f)._expire(attributes.instance_dict(f),
                 set())
         f.a = 'this is another new a'
         eq_(f.a, 'this is another new a')
         eq_(f.b, 12)
-        attributes.instance_state(f).expire(attributes.instance_dict(f),
+        attributes.instance_state(f)._expire(attributes.instance_dict(f),
                 set())
         eq_(f.a, 'this is a')
         eq_(f.b, 12)
         del f.a
         eq_(f.a, None)
         eq_(f.b, 12)
-        attributes.instance_state(f).commit_all(attributes.instance_dict(f),
+        attributes.instance_state(f)._commit_all(attributes.instance_dict(f),
                 set())
         eq_(f.a, None)
         eq_(f.b, 12)
@@ -363,7 +363,7 @@ class AttributesTest(fixtures.ORMTest):
         attributes.register_attribute(MyTest, 'b', uselist=False, useobject=False)
 
         m = MyTest()
-        attributes.instance_state(m).expire(attributes.instance_dict(m), set())
+        attributes.instance_state(m)._expire(attributes.instance_dict(m), set())
         assert 'a' not in m.__dict__
         m2 = pickle.loads(pickle.dumps(m))
         assert 'a' not in m2.__dict__
@@ -399,7 +399,7 @@ class AttributesTest(fixtures.ORMTest):
         self.assert_(u.user_id == 7 and u.user_name == 'john'
                      and u.addresses[0].email_address == 'lala@123.com')
         (u,
-         attributes.instance_state(a).commit_all(attributes.instance_dict(a)))
+         attributes.instance_state(a)._commit_all(attributes.instance_dict(a)))
         self.assert_(u.user_id == 7 and u.user_name == 'john'
                      and u.addresses[0].email_address == 'lala@123.com')
 
@@ -433,17 +433,17 @@ class AttributesTest(fixtures.ORMTest):
 
             def append(self, state, child, initiator):
                 if commit:
-                    state.commit_all(state.dict)
+                    state._commit_all(state.dict)
                 return child
 
             def remove(self, state, child, initiator):
                 if commit:
-                    state.commit_all(state.dict)
+                    state._commit_all(state.dict)
                 return child
 
             def set(self, state, child, oldchild, initiator):
                 if commit:
-                    state.commit_all(state.dict)
+                    state._commit_all(state.dict)
                 return child
 
         instrumentation.register_class(Foo)
@@ -461,26 +461,26 @@ class AttributesTest(fixtures.ORMTest):
                 return attributes.PASSIVE_NO_RESULT
             return b2
 
-        attributes.register_attribute(Foo, 'bars', 
-                               uselist=True, 
-                               useobject=True, 
+        attributes.register_attribute(Foo, 'bars',
+                               uselist=True,
+                               useobject=True,
                                callable_=loadcollection,
                                extension=[ReceiveEvents('bars')])
 
-        attributes.register_attribute(Foo, 'bar', 
-                              uselist=False, 
-                              useobject=True, 
+        attributes.register_attribute(Foo, 'bar',
+                              uselist=False,
+                              useobject=True,
                               callable_=loadscalar,
                               extension=[ReceiveEvents('bar')])
 
-        attributes.register_attribute(Foo, 'scalar', 
-                            uselist=False, 
+        attributes.register_attribute(Foo, 'scalar',
+                            uselist=False,
                             useobject=False, extension=[ReceiveEvents('scalar')])
 
 
         def create_hist():
             def hist(key, shouldmatch, fn, *arg):
-                attributes.instance_state(f1).commit_all(attributes.instance_dict(f1))
+                attributes.instance_state(f1)._commit_all(attributes.instance_dict(f1))
                 fn(*arg)
                 histories.append((shouldmatch,
                                  attributes.get_history(f1, key)))
@@ -552,7 +552,7 @@ class AttributesTest(fixtures.ORMTest):
         x.bars
         b = Bar(id=4)
         b.foos.append(x)
-        attributes.instance_state(x).expire_attributes(attributes.instance_dict(x),
+        attributes.instance_state(x)._expire_attributes(attributes.instance_dict(x),
                 ['bars'])
         assert_raises(AssertionError, b.foos.remove, x)
 
@@ -595,7 +595,7 @@ class AttributesTest(fixtures.ORMTest):
         ])
 
     def test_lazytrackparent(self):
-        """test that the "hasparent" flag works properly 
+        """test that the "hasparent" flag works properly
            when lazy loaders and backrefs are used
 
         """
@@ -616,11 +616,11 @@ class AttributesTest(fixtures.ORMTest):
         # create objects as if they'd been freshly loaded from the database (without history)
         b = Blog()
         p1 = Post()
-        attributes.instance_state(b).set_callable(attributes.instance_dict(b), 
+        attributes.instance_state(b)._set_callable(attributes.instance_dict(b),
                                                     'posts', lambda passive:[p1])
-        attributes.instance_state(p1).set_callable(attributes.instance_dict(p1), 
+        attributes.instance_state(p1)._set_callable(attributes.instance_dict(p1),
                                                     'blog', lambda passive:b)
-        p1, attributes.instance_state(b).commit_all(attributes.instance_dict(b))
+        p1, attributes.instance_state(b)._commit_all(attributes.instance_dict(b))
 
         # no orphans (called before the lazy loaders fire off)
         assert attributes.has_parent(Blog, p1, 'posts', optimistic=True)
@@ -672,11 +672,11 @@ class AttributesTest(fixtures.ORMTest):
             return "this is the bar attr"
         def func3(state, passive):
             return "this is the shared attr"
-        attributes.register_attribute(Foo, 'element', uselist=False, 
+        attributes.register_attribute(Foo, 'element', uselist=False,
                                             callable_=func1, useobject=True)
-        attributes.register_attribute(Foo, 'element2', uselist=False, 
+        attributes.register_attribute(Foo, 'element2', uselist=False,
                                             callable_=func3, useobject=True)
-        attributes.register_attribute(Bar, 'element', uselist=False, 
+        attributes.register_attribute(Bar, 'element', uselist=False,
                                             callable_=func2, useobject=True)
 
         x = Foo()
@@ -728,7 +728,7 @@ class AttributesTest(fixtures.ORMTest):
         x.element = el
         eq_(attributes.get_state_history(attributes.instance_state(x),
             'element'), ([el], (), ()))
-        attributes.instance_state(x).commit_all(attributes.instance_dict(x))
+        attributes.instance_state(x)._commit_all(attributes.instance_dict(x))
         added, unchanged, deleted = \
             attributes.get_state_history(attributes.instance_state(x),
                 'element')
@@ -761,7 +761,7 @@ class AttributesTest(fixtures.ORMTest):
         attributes.register_attribute(Bar, 'id', uselist=False,
                 useobject=True)
         x = Foo()
-        attributes.instance_state(x).commit_all(attributes.instance_dict(x))
+        attributes.instance_state(x)._commit_all(attributes.instance_dict(x))
         x.col2.append(bar4)
         eq_(attributes.get_state_history(attributes.instance_state(x),
             'col2'), ([bar4], [bar1, bar2, bar3], []))
@@ -905,12 +905,12 @@ class GetNoValueTest(fixtures.ORMTest):
         instrumentation.register_class(Foo)
         instrumentation.register_class(Bar)
         if expected is not None:
-            attributes.register_attribute(Foo, 
-                        "attr", useobject=True, 
+            attributes.register_attribute(Foo,
+                        "attr", useobject=True,
                         uselist=False, callable_=lazy_callable)
         else:
-            attributes.register_attribute(Foo, 
-                        "attr", useobject=True, 
+            attributes.register_attribute(Foo,
+                        "attr", useobject=True,
                         uselist=False)
 
         f1 = Foo()
@@ -1054,7 +1054,7 @@ class BackrefTest(fixtures.ORMTest):
         instrumentation.register_class(Port)
         instrumentation.register_class(Jack)
 
-        attributes.register_attribute(Port, 'jack', uselist=False, 
+        attributes.register_attribute(Port, 'jack', uselist=False,
                                             useobject=True, backref="port")
 
         attributes.register_attribute(Jack, 'port', uselist=False,
@@ -1227,7 +1227,7 @@ class PendingBackrefTest(fixtures.ORMTest):
         p4.blog = b
         assert called[0] == 0
         eq_(attributes.instance_state(b).
-                get_history('posts', attributes.PASSIVE_OFF), 
+                get_history('posts', attributes.PASSIVE_OFF),
                             ([p, p4], [p1, p2, p3], []))
         assert called[0] == 1
 
@@ -1250,7 +1250,7 @@ class PendingBackrefTest(fixtures.ORMTest):
         lazy_load = [p, p2]
         # lazy loaded + pending get added together.
         # This isn't seen often with the ORM due
-        # to usual practices surrounding the 
+        # to usual practices surrounding the
         # load/flush/load cycle.
         eq_(b.posts, [p, p2, p])
         eq_(called[0], 1)
@@ -1283,8 +1283,8 @@ class PendingBackrefTest(fixtures.ORMTest):
 
         b = Blog("blog 1")
         p1.blog = b
-        attributes.instance_state(b).commit_all(attributes.instance_dict(b))
-        attributes.instance_state(p1).commit_all(attributes.instance_dict(p1))
+        attributes.instance_state(b)._commit_all(attributes.instance_dict(b))
+        attributes.instance_state(p1)._commit_all(attributes.instance_dict(p1))
         assert b.posts == [Post("post 1")]
 
 class HistoryTest(fixtures.TestBase):
@@ -1295,7 +1295,7 @@ class HistoryTest(fixtures.TestBase):
 
         instrumentation.register_class(Foo)
         attributes.register_attribute(
-                    Foo, 'someattr', 
+                    Foo, 'someattr',
                     uselist=uselist,
                     useobject=useobject,
                     active_history=active_history,
@@ -1321,7 +1321,7 @@ class HistoryTest(fixtures.TestBase):
                     'someattr')
 
     def _commit_someattr(self, f):
-        attributes.instance_state(f).commit(attributes.instance_dict(f),
+        attributes.instance_state(f)._commit(attributes.instance_dict(f),
                 ['someattr'])
 
     def _someattr_committed_state(self, f):
@@ -1331,20 +1331,20 @@ class HistoryTest(fixtures.TestBase):
             attributes.instance_dict(f))
 
     def test_committed_value_init(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         eq_(self._someattr_committed_state(f), None)
 
     def test_committed_value_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 3
         eq_(self._someattr_committed_state(f), None)
 
     def test_committed_value_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 3
@@ -1352,25 +1352,25 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_committed_state(f), 3)
 
     def test_scalar_init(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         eq_(self._someattr_history(f), ((), (), ()))
 
     def test_object_init(self):
-        Foo = self._fixture(uselist=False, useobject=True, 
+        Foo = self._fixture(uselist=False, useobject=True,
                                 active_history=False)
         f = Foo()
         eq_(self._someattr_history(f), ((), (), ()))
 
     def test_object_init_active_history(self):
-        Foo = self._fixture(uselist=False, useobject=True, 
+        Foo = self._fixture(uselist=False, useobject=True,
                                 active_history=True)
         f = Foo()
         eq_(self._someattr_history(f), ((), (), ()))
 
     def test_scalar_no_init_side_effect(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         self._someattr_history(f)
@@ -1379,14 +1379,14 @@ class HistoryTest(fixtures.TestBase):
         assert 'someattr' not in attributes.instance_state(f).committed_state
 
     def test_scalar_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'hi'
         eq_(self._someattr_history(f), (['hi'], (), ()))
 
     def test_scalar_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'hi'
@@ -1394,7 +1394,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['hi'], ()))
 
     def test_scalar_set_commit_reset(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'hi'
@@ -1403,7 +1403,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), (['there'], (), ['hi']))
 
     def test_scalar_set_commit_reset_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'hi'
@@ -1413,7 +1413,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['there'], ()))
 
     def test_scalar_set_commit_reset_commit_del(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'there'
@@ -1422,14 +1422,14 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), (), ['there']))
 
     def test_scalar_set_dict(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.__dict__['someattr'] = 'new'
         eq_(self._someattr_history(f), ((), ['new'], ()))
 
     def test_scalar_set_dict_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1438,7 +1438,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), (['old'], (), ['new']))
 
     def test_scalar_set_dict_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1448,14 +1448,14 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['old'], ()))
 
     def test_scalar_set_None(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = None
         eq_(self._someattr_history(f), ([None], (), ()))
 
     def test_scalar_set_None_from_dict_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1463,7 +1463,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ([None], (), ['new']))
 
     def test_scalar_set_twice_no_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = 'one'
@@ -1472,13 +1472,13 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), (['two'], (), ()))
 
     def test_scalar_active_init(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         eq_(self._someattr_history(f), ((), (), ()))
 
     def test_scalar_active_no_init_side_effect(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         self._someattr_history(f)
@@ -1487,14 +1487,14 @@ class HistoryTest(fixtures.TestBase):
         assert 'someattr' not in attributes.instance_state(f).committed_state
 
     def test_scalar_active_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'hi'
         eq_(self._someattr_history(f), (['hi'], (), ()))
 
     def test_scalar_active_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'hi'
@@ -1502,7 +1502,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['hi'], ()))
 
     def test_scalar_active_set_commit_reset(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'hi'
@@ -1511,7 +1511,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), (['there'], (), ['hi']))
 
     def test_scalar_active_set_commit_reset_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'hi'
@@ -1521,7 +1521,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['there'], ()))
 
     def test_scalar_active_set_commit_reset_commit_del(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'there'
@@ -1530,14 +1530,14 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), (), ['there']))
 
     def test_scalar_active_set_dict(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.__dict__['someattr'] = 'new'
         eq_(self._someattr_history(f), ((), ['new'], ()))
 
     def test_scalar_active_set_dict_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1546,7 +1546,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), (['old'], (), ['new']))
 
     def test_scalar_active_set_dict_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1556,14 +1556,14 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), ['old'], ()))
 
     def test_scalar_active_set_None(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = None
         eq_(self._someattr_history(f), ([None], (), ()))
 
     def test_scalar_active_set_None_from_dict_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.__dict__['someattr'] = 'new'
@@ -1571,7 +1571,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ([None], (), ['new']))
 
     def test_scalar_active_set_twice_no_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=True)
         f = Foo()
         f.someattr = 'one'
@@ -1582,14 +1582,14 @@ class HistoryTest(fixtures.TestBase):
 
 
     def test_scalar_inplace_mutation_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
         eq_(self._someattr_history(f), ([{'a': 'b'}], (), ()))
 
     def test_scalar_inplace_mutation_set_commit(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
@@ -1597,7 +1597,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), [{'a': 'b'}], ()))
 
     def test_scalar_inplace_mutation_set_commit_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
@@ -1606,7 +1606,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ((), [{'a': 'c'}], ()))
 
     def test_scalar_inplace_mutation_set_commit_flag_modified(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
@@ -1615,7 +1615,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ([{'a': 'b'}], (), ()))
 
     def test_scalar_inplace_mutation_set_commit_set_flag_modified(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
@@ -1625,7 +1625,7 @@ class HistoryTest(fixtures.TestBase):
         eq_(self._someattr_history(f), ([{'a': 'c'}], (), ()))
 
     def test_scalar_inplace_mutation_set_commit_flag_modified_set(self):
-        Foo = self._fixture(uselist=False, useobject=False, 
+        Foo = self._fixture(uselist=False, useobject=False,
                                 active_history=False)
         f = Foo()
         f.someattr = {'a': 'b'}
@@ -1782,7 +1782,7 @@ class HistoryTest(fixtures.TestBase):
         f = Foo()
         collection = attributes.init_collection(f, 'someattr')
         collection.append_without_event(new)
-        attributes.instance_state(f).commit_all(attributes.instance_dict(f))
+        attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [new], ()))
         f.someattr = [old]
@@ -1868,7 +1868,7 @@ class HistoryTest(fixtures.TestBase):
         f.someattr.append(new)
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ([old, new], [hi], [there]))
-        attributes.instance_state(f).commit(attributes.instance_dict(f),
+        attributes.instance_state(f)._commit(attributes.instance_dict(f),
                 ['someattr'])
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [hi, old, new], ()))
@@ -1883,20 +1883,20 @@ class HistoryTest(fixtures.TestBase):
         f.__dict__['id'] = 1
         collection = attributes.init_collection(f, 'someattr')
         collection.append_without_event(new)
-        attributes.instance_state(f).commit_all(attributes.instance_dict(f))
+        attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [new], ()))
         f.someattr.append(old)
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ([old], [new], []))
-        attributes.instance_state(f).commit(attributes.instance_dict(f),
+        attributes.instance_state(f)._commit(attributes.instance_dict(f),
                 ['someattr'])
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [new, old], ()))
         f = Foo()
         collection = attributes.init_collection(f, 'someattr')
         collection.append_without_event(new)
-        attributes.instance_state(f).commit_all(attributes.instance_dict(f))
+        attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [new], ()))
         f.id = 1
@@ -1925,7 +1925,7 @@ class HistoryTest(fixtures.TestBase):
         f.someattr.append(hi)
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ([hi, there, hi], [], []))
-        attributes.instance_state(f).commit_all(attributes.instance_dict(f))
+        attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'someattr'), ((), [hi, there, hi], ()))
         f.someattr = []
@@ -2020,7 +2020,7 @@ class LazyloadHistoryTest(fixtures.TestBase):
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'bars'), ([bar4], [], []))
         lazy_load = [bar1, bar2, bar3]
-        attributes.instance_state(f).expire_attributes(attributes.instance_dict(f),
+        attributes.instance_state(f)._expire_attributes(attributes.instance_dict(f),
                 ['bars'])
         eq_(attributes.get_state_history(attributes.instance_state(f),
             'bars'), ((), [bar1, bar2, bar3], ()))
@@ -2306,8 +2306,8 @@ class ListenerTest(fixtures.ORMTest):
             (make_a, make_b),
             (make_b, make_c)
         ]
-        elements = [make_a, make_b, make_c, 
-                    instrument_a, instrument_b, instrument_c, 
+        elements = [make_a, make_b, make_c,
+                    instrument_a, instrument_b, instrument_c,
                     attr_a, attr_b, attr_c, events_a]
 
         for i, series in enumerate(all_partial_orderings(ordering, elements)):
index fcda72a8a3103f81cbe63223c34be6bdd368deec..e8d6c0901dd6660ca58d2854ac5d589152c971c1 100644 (file)
@@ -176,7 +176,7 @@ class GetTest(QueryTest):
         assert_raises(sa_exc.InvalidRequestError, q.get, (5, ))
 
     def test_get_null_pk(self):
-        """test that a mapping which can have None in a 
+        """test that a mapping which can have None in a
         PK (i.e. map to an outerjoin) works with get()."""
 
         users, addresses = self.tables.users, self.tables.addresses
@@ -317,7 +317,7 @@ class GetTest(QueryTest):
         s = create_session()
         q = s.query(User).filter(User.id==1)
         eq_(
-            str(q).replace('\n',''), 
+            str(q).replace('\n',''),
             'SELECT users.id AS users_id, users.name AS users_name FROM users WHERE users.id = ?'
             )
 
@@ -432,21 +432,21 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
         s = create_session()
 
         q = s.query(User).order_by(User.id)
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT users.id AS users_id, users.name AS users_name FROM users ORDER BY users.id",
             use_default_dialect=True)
 
         assert_raises(sa_exc.InvalidRequestError, q._no_select_modifiers, "foo")
 
         q = q.order_by(None)
-        self.assert_compile(q, 
+        self.assert_compile(q,
                 "SELECT users.id AS users_id, users.name AS users_name FROM users",
                 use_default_dialect=True)
 
         assert_raises(sa_exc.InvalidRequestError, q._no_select_modifiers, "foo")
 
         q = q.order_by(False)
-        self.assert_compile(q, 
+        self.assert_compile(q,
                 "SELECT users.id AS users_id, users.name AS users_name FROM users",
                 use_default_dialect=True)
 
@@ -498,7 +498,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
 
         create_session().query(User)
         for (py_op, sql_op) in ((operator.add, '+'), (operator.mul, '*'),
-                                (operator.sub, '-'), 
+                                (operator.sub, '-'),
                                 # Py3k
                                 #(operator.truediv, '/'),
                                 # Py2K
@@ -579,14 +579,14 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
     def test_relationship(self):
         User, Address = self.classes.User, self.classes.Address
 
-        self._test(User.addresses.any(Address.id==17), 
+        self._test(User.addresses.any(Address.id==17),
                         "EXISTS (SELECT 1 "
                         "FROM addresses "
                         "WHERE users.id = addresses.user_id AND addresses.id = :id_1)"
                     )
 
         u7 = User(id=7)
-        attributes.instance_state(u7).commit_all(attributes.instance_dict(u7))
+        attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
 
         self._test(Address.user == u7, ":param_1 = addresses.user_id")
 
@@ -600,7 +600,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
         Node = self.classes.Node
         nalias = aliased(Node)
         self._test(
-            nalias.parent.has(Node.data=='some data'), 
+            nalias.parent.has(Node.data=='some data'),
            "EXISTS (SELECT 1 FROM nodes WHERE nodes.id = nodes_1.parent_id AND nodes.data = :data_1)"
         )
 
@@ -611,14 +611,14 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
 
         # auto self-referential aliasing
         self._test(
-            Node.children.any(Node.data=='n1'), 
+            Node.children.any(Node.data=='n1'),
                 "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE "
                 "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)"
         )
 
         # needs autoaliasing
         self._test(
-            Node.children==None, 
+            Node.children==None,
             "NOT (EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE nodes.id = nodes_1.parent_id))"
         )
 
@@ -633,44 +633,44 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
         )
 
         self._test(
-            nalias.children==None, 
+            nalias.children==None,
             "NOT (EXISTS (SELECT 1 FROM nodes WHERE nodes_1.id = nodes.parent_id))"
         )
 
         self._test(
-                nalias.children.any(Node.data=='some data'), 
+                nalias.children.any(Node.data=='some data'),
                 "EXISTS (SELECT 1 FROM nodes WHERE "
                 "nodes_1.id = nodes.parent_id AND nodes.data = :data_1)")
 
         # fails, but I think I want this to fail
         #self._test(
-        #        Node.children.any(nalias.data=='some data'), 
+        #        Node.children.any(nalias.data=='some data'),
         #        "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE "
         #        "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)"
         #        )
 
         self._test(
-            nalias.parent.has(Node.data=='some data'), 
+            nalias.parent.has(Node.data=='some data'),
            "EXISTS (SELECT 1 FROM nodes WHERE nodes.id = nodes_1.parent_id AND nodes.data = :data_1)"
         )
 
         self._test(
-            Node.parent.has(Node.data=='some data'), 
+            Node.parent.has(Node.data=='some data'),
            "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE nodes_1.id = nodes.parent_id AND nodes_1.data = :data_1)"
         )
 
         self._test(
-            Node.parent == Node(id=7), 
+            Node.parent == Node(id=7),
             ":param_1 = nodes.parent_id"
         )
 
         self._test(
-            nalias.parent == Node(id=7), 
+            nalias.parent == Node(id=7),
             ":param_1 = nodes_1.parent_id"
         )
 
         self._test(
-            nalias.parent != Node(id=7), 
+            nalias.parent != Node(id=7),
             'nodes_1.parent_id != :parent_id_1 OR nodes_1.parent_id IS NULL'
         )
 
@@ -745,7 +745,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL):
                                 self.classes.Address)
 
         session = create_session()
-        s = session.query(User).filter(and_(addresses.c.email_address == bindparam('emailad'), 
+        s = session.query(User).filter(and_(addresses.c.email_address == bindparam('emailad'),
                                         Address.user_id==User.id)).statement
 
         l = list(session.query(User).instances(s.execute(emailad = 'jack@bean.com')))
@@ -816,7 +816,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL):
 
         q = session.query(User.id).filter(User.id==7).label('foo')
         self.assert_compile(
-            session.query(q), 
+            session.query(q),
             "SELECT (SELECT users.id FROM users WHERE users.id = :id_1) AS foo"
         )
 
@@ -886,7 +886,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL):
 
         s = create_session()
 
-        # TODO: do we want aliased() to detect a query and convert to subquery() 
+        # TODO: do we want aliased() to detect a query and convert to subquery()
         # automatically ?
         q1 = s.query(Address).filter(Address.email_address=='jack@bean.com')
         adalias = aliased(Address, q1.subquery())
@@ -1179,9 +1179,9 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
         )
 
         # o2o
-        eq_([Address(id=1), Address(id=3), Address(id=4)], 
+        eq_([Address(id=1), Address(id=3), Address(id=4)],
             sess.query(Address).filter(Address.dingaling==None).order_by(Address.id).all())
-        eq_([Address(id=1), Address(id=3), Address(id=4)], 
+        eq_([Address(id=1), Address(id=3), Address(id=4)],
             sess.query(Address).filter(Address.dingaling==null()).order_by(Address.id).all())
         eq_([Address(id=2), Address(id=5)], sess.query(Address).filter(Address.dingaling != None).order_by(Address.id).all())
         eq_([Address(id=2), Address(id=5)], sess.query(Address).filter(Address.dingaling != null()).order_by(Address.id).all())
@@ -1244,11 +1244,11 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
         ed = s.query(User).filter(User.name=='ed')
         jack = s.query(User).filter(User.name=='jack')
 
-        eq_(fred.union(ed).order_by(User.name).all(), 
+        eq_(fred.union(ed).order_by(User.name).all(),
             [User(name='ed'), User(name='fred')]
         )
 
-        eq_(fred.union(ed, jack).order_by(User.name).all(), 
+        eq_(fred.union(ed, jack).order_by(User.name).all(),
             [User(name='ed'), User(name='fred'), User(name='jack')]
         )
 
@@ -1274,7 +1274,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_union_literal_expressions_compile(self):
-        """test that column expressions translate during 
+        """test that column expressions translate during
             the _from_statement() portion of union(), others"""
 
         User = self.classes.User
@@ -1314,13 +1314,13 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
         for q in (q3.order_by(User.id, "anon_1_param_1"), q6.order_by(User.id, "foo")):
             eq_(q.all(),
                 [
-                    (User(id=7, name=u'jack'), u'x'), 
-                    (User(id=7, name=u'jack'), u'y'), 
-                    (User(id=8, name=u'ed'), u'x'), 
-                    (User(id=8, name=u'ed'), u'y'), 
-                    (User(id=9, name=u'fred'), u'x'), 
-                    (User(id=9, name=u'fred'), u'y'), 
-                    (User(id=10, name=u'chuck'), u'x'), 
+                    (User(id=7, name=u'jack'), u'x'),
+                    (User(id=7, name=u'jack'), u'y'),
+                    (User(id=8, name=u'ed'), u'x'),
+                    (User(id=8, name=u'ed'), u'y'),
+                    (User(id=9, name=u'fred'), u'x'),
+                    (User(id=9, name=u'fred'), u'y'),
+                    (User(id=10, name=u'chuck'), u'x'),
                     (User(id=10, name=u'chuck'), u'y')
                 ]
             )
@@ -1414,11 +1414,11 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
         fred = s.query(User).filter(User.name=='fred')
         ed = s.query(User).filter(User.name=='ed')
         jack = s.query(User).filter(User.name=='jack')
-        eq_(fred.intersect(ed, jack).all(), 
+        eq_(fred.intersect(ed, jack).all(),
             []
         )
 
-        eq_(fred.union(ed).intersect(ed.union(jack)).all(), 
+        eq_(fred.union(ed).intersect(ed.union(jack)).all(),
             [User(name='ed')]
         )
 
@@ -1433,9 +1433,9 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
 
         def go():
             eq_(
-                fred.union(ed).order_by(User.name).options(joinedload(User.addresses)).all(), 
+                fred.union(ed).order_by(User.name).options(joinedload(User.addresses)).all(),
                 [
-                    User(name='ed', addresses=[Address(), Address(), Address()]), 
+                    User(name='ed', addresses=[Address(), Address(), Address()]),
                     User(name='fred', addresses=[Address()])
                 ]
             )
@@ -1481,7 +1481,7 @@ class CountTest(QueryTest):
         s = create_session()
         # '*' is favored here as the most common character,
         # it is reported that Informix doesn't like count(1),
-        # rumors about Oracle preferring count(1) don't appear 
+        # rumors about Oracle preferring count(1) don't appear
         # to be well founded.
         self.assert_sql_execution(
                 testing.db,
@@ -1490,7 +1490,7 @@ class CountTest(QueryTest):
                     "SELECT count(*) AS count_1 FROM "
                     "(SELECT users.id AS users_id, users.name "
                     "AS users_name FROM users) AS anon_1",
-                    {} 
+                    {}
                 )
         )
 
@@ -1551,9 +1551,9 @@ class DistinctTest(QueryTest):
             create_session().query(User).order_by(User.id).distinct().all()
         )
         eq_(
-            [User(id=7), User(id=9), User(id=8),User(id=10)], 
+            [User(id=7), User(id=9), User(id=8),User(id=10)],
             create_session().query(User).distinct().order_by(desc(User.name)).all()
-        ) 
+        )
 
     def test_joined(self):
         """test that orderbys from a joined table get placed into the columns clause when DISTINCT is used"""
@@ -1897,8 +1897,8 @@ class SynonymTest(QueryTest):
                         options(joinedload(User.orders_syn)).all()
             eq_(result, [
                 User(id=7, name='jack', orders=[
-                    Order(description=u'order 1'), 
-                    Order(description=u'order 3'), 
+                    Order(description=u'order 1'),
+                    Order(description=u'order 3'),
                     Order(description=u'order 5')
                 ])
             ])
@@ -1913,8 +1913,8 @@ class SynonymTest(QueryTest):
                         options(joinedload(User.orders_syn_2)).all()
             eq_(result, [
                 User(id=7, name='jack', orders=[
-                    Order(description=u'order 1'), 
-                    Order(description=u'order 3'), 
+                    Order(description=u'order 1'),
+                    Order(description=u'order 3'),
                     Order(description=u'order 5')
                 ])
             ])
@@ -1929,8 +1929,8 @@ class SynonymTest(QueryTest):
                         options(joinedload('orders_syn_2')).all()
             eq_(result, [
                 User(id=7, name='jack', orders=[
-                    Order(description=u'order 1'), 
-                    Order(description=u'order 3'), 
+                    Order(description=u'order 1'),
+                    Order(description=u'order 3'),
                     Order(description=u'order 5')
                 ])
             ])
@@ -1969,7 +1969,7 @@ class SynonymTest(QueryTest):
             u1 = q.filter_by(**{nameprop:'jack'}).one()
 
             o = sess.query(Order).with_parent(u1, property=orderprop).all()
-            assert [Order(description="order 1"), 
+            assert [Order(description="order 1"),
                     Order(description="order 3"), Order(description="order 5")] == o
 
 
@@ -2026,7 +2026,7 @@ class ImmediateTest(_fixtures.FixtureTest):
                          sess.query(User, Address).join(User.addresses).one)
 
         # this result returns multiple rows, the first
-        # two rows being the same.  but uniquing is 
+        # two rows being the same.  but uniquing is
         # not applied for a column based result.
         assert_raises(sa.orm.exc.MultipleResultsFound,
                        sess.query(User.id).
@@ -2035,10 +2035,10 @@ class ImmediateTest(_fixtures.FixtureTest):
                        order_by(User.id).
                        one)
 
-        # test that a join which ultimately returns 
-        # multiple identities across many rows still 
-        # raises, even though the first two rows are of 
-        # the same identity and unique filtering 
+        # test that a join which ultimately returns
+        # multiple identities across many rows still
+        # raises, even though the first two rows are of
+        # the same identity and unique filtering
         # is applied ([ticket:1688])
         assert_raises(sa.orm.exc.MultipleResultsFound,
                         sess.query(User).
@@ -2198,7 +2198,7 @@ class OptionsTest(QueryTest):
 
         opt = self._option_fixture("orders.items.keywords")
         self._assert_path_result(opt, q, [
-            (User, 'orders'), 
+            (User, 'orders'),
             (User, 'orders', Order, 'items'),
             (User, 'orders', Order, 'items', Item, 'keywords')
         ])
@@ -2213,7 +2213,7 @@ class OptionsTest(QueryTest):
 
         opt = self._option_fixture(User.orders, Order.items, Item.keywords)
         self._assert_path_result(opt, q, [
-            (User, 'orders'), 
+            (User, 'orders'),
             (User, 'orders', Order, 'items'),
             (User, 'orders', Order, 'items', Item, 'keywords')
         ])
@@ -2535,7 +2535,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
             r"Mapper\|Keyword\|keywords in this Query."
         )
 
-    @testing.fails_if(lambda:True, 
+    @testing.fails_if(lambda:True,
         "PropertyOption doesn't yet check for relation/column on end result")
     def test_option_against_non_relation_basestring(self):
         Item = self.classes.Item
@@ -2547,7 +2547,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
             "does not refer to a mapped entity"
         )
 
-    @testing.fails_if(lambda:True, 
+    @testing.fails_if(lambda:True,
             "PropertyOption doesn't yet check for relation/column on end result")
     def test_option_against_multi_non_relation_basestring(self):
         Item = self.classes.Item
@@ -2649,7 +2649,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
     @classmethod
     def setup_mappers(cls):
         users, User, addresses, Address, orders, Order = (
-                    cls.tables.users, cls.classes.User, 
+                    cls.tables.users, cls.classes.User,
                     cls.tables.addresses, cls.classes.Address,
                     cls.tables.orders, cls.classes.Order)
         mapper(User, users, properties={
@@ -2678,9 +2678,9 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         key = ('loaderstrategy', (class_mapper(Item), 'keywords'))
         assert key in q._attributes
 
-    def _assert_eager_with_entity_exception(self, entity_list, options, 
+    def _assert_eager_with_entity_exception(self, entity_list, options,
                                 message):
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
                                 message,
                               create_session().query(*entity_list).options,
                               *options)
index 178d396b94e5f3ec5cdb6bc2dd0adc029cc52f3c..9134e47ce4ae0270c3b0d7bf9a1e34ee9f8354d6 100644 (file)
@@ -21,7 +21,7 @@ class AssertsUOW(object):
             uow.register_object(d, isdelete=True)
         return uow
 
-class SyncTest(fixtures.MappedTest, 
+class SyncTest(fixtures.MappedTest,
                     testing.AssertsExecutionResults, AssertsUOW):
 
     @classmethod
@@ -89,11 +89,11 @@ class SyncTest(fixtures.MappedTest,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\|A\|t1' does not map this column.",
             sync.populate,
-                a1, 
-                a_mapper, 
-                b1, 
-                b_mapper, 
-                pairs, 
+                a1,
+                a_mapper,
+                b1,
+                b_mapper,
+                pairs,
                 uowcommit, False
         )
 
@@ -105,11 +105,11 @@ class SyncTest(fixtures.MappedTest,
             "Can't execute sync rule for destination "
             r"column 't1.id'; mapper 'Mapper\|B\|t2' does not map this column.",
             sync.populate,
-                a1, 
-                a_mapper, 
-                b1, 
-                b_mapper, 
-                pairs, 
+                a1,
+                a_mapper,
+                b1,
+                b_mapper,
+                pairs,
                 uowcommit, False
         )
 
@@ -147,7 +147,7 @@ class SyncTest(fixtures.MappedTest,
     def test_update(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
         a1.obj().id = 10
-        a1.commit_all(a1.dict)
+        a1._commit_all(a1.dict)
         a1.obj().id = 12
         pairs = [(a_mapper.c.id, b_mapper.c.id,)]
         dest = {}
@@ -190,25 +190,25 @@ class SyncTest(fixtures.MappedTest,
         a1.obj().id = 10
         pairs = [(a_mapper.c.id, b_mapper.c.id,)]
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, pairs), 
+            sync.source_modified(uowcommit, a1, a_mapper, pairs),
             False
         )
 
     def test_source_modified_no_pairs(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, []), 
+            sync.source_modified(uowcommit, a1, a_mapper, []),
             False
         )
 
     def test_source_modified_modified(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
         a1.obj().id = 10
-        a1.commit_all(a1.dict)
+        a1._commit_all(a1.dict)
         a1.obj().id = 12
         pairs = [(a_mapper.c.id, b_mapper.c.id,)]
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, pairs), 
+            sync.source_modified(uowcommit, a1, a_mapper, pairs),
             True
         )