]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- mutable examples now move into sqlalchemy.ext.mutable
authorMike Bayer <mike_mp@zzzcomputing.com>
Wed, 29 Dec 2010 20:04:35 +0000 (15:04 -0500)
committerMike Bayer <mike_mp@zzzcomputing.com>
Wed, 29 Dec 2010 20:04:35 +0000 (15:04 -0500)
- streamline interfaces, get Mutable/MutableComposite to be as minimal
in usage as possible
- docs for mutable, warnings regarding mapper events being global
- move MutableType/mutable=True outwards, move orm tests to its
own module, note in all documentation
- still need more events/tests for correct pickling support of
composites, mutables.  in the case of composites its needed
even without mutation.  see [ticket:2009]

15 files changed:
doc/build/core/types.rst
doc/build/orm/examples.rst
doc/build/orm/extensions/index.rst
examples/mutable_events/__init__.py [deleted file]
examples/mutable_events/composite.py [deleted file]
examples/mutable_events/scalars.py [deleted file]
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/ext/mutable.py [new file with mode: 0644]
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/types.py
test/ext/test_mutable.py [new file with mode: 0644]
test/orm/test_legacy_mutable.py [new file with mode: 0644]
test/orm/test_transaction.py
test/orm/test_unitofwork.py

index 1910d9b7d908ec514da77bf4131916706f050082..b7121b5e102c4fe1eb22a94b25b8562ef8deed6c 100644 (file)
@@ -340,14 +340,8 @@ Marshal JSON Strings
 ^^^^^^^^^^^^^^^^^^^^^
 
 This type uses ``simplejson`` to marshal Python data structures
-to/from JSON.   Can be modified to use Python's builtin json encoder.
+to/from JSON.   Can be modified to use Python's builtin json encoder::
 
-Note that the base type is not "mutable", meaning in-place changes to 
-the value will not be detected by the ORM - you instead would need to 
-replace the existing value with a new one to detect changes.  
-The subtype ``MutableJSONEncodedDict``
-adds "mutability" to allow this, but note that "mutable" types add
-a significant performance penalty to the ORM's flush process::
 
     from sqlalchemy.types import TypeDecorator, MutableType, VARCHAR
     import simplejson
@@ -373,14 +367,57 @@ a significant performance penalty to the ORM's flush process::
             if value is not None:
                 value = simplejson.loads(value, use_decimal=True)
             return value
+
+
+Note that the base type is not "mutable", meaning in-place changes to 
+the value will not be detected by the ORM - you instead would need to 
+replace the existing value with a new one to detect changes.  To add
+support for mutability, we need to build a dictionary that detects
+changes, and combine this using the ``sqlalchemy.ext.mutable`` extension
+described in :ref:`mutable_toplevel`::
+
+    from sqlalchemy.ext.mutable import Mutable
+    
+    class MutationDict(Mutable, dict):
+        @classmethod
+        def coerce(cls, key, value):
+            """Convert plain dictionaries to MutationDict."""
+            if not isinstance(value, MutationDict):
+                if isinstance(value, dict):
+                    return MutationDict(value)
+                    
+                # this call will raise ValueError
+                return Mutable.coerce(key, value)
+            else:
+                return value
     
-    class MutableJSONEncodedDict(MutableType, JSONEncodedDict):
-        """Adds mutability to JSONEncodedDict."""
+        def __setitem__(self, key, value):
+            """Detect dictionary set events and emit change events."""
+            
+            dict.__setitem__(self, key, value)
+            self.on_change()
+
+        def __delitem__(self, key):
+            """Detect dictionary del events and emit change events."""
+
+            dict.__delitem__(self, key)
+            self.on_change()
         
-        def copy_value(self, value):
-            return simplejson.loads(
-                        simplejson.dumps(value, use_decimal=True), 
-                        use_decimal=True)
+        # additional dict methods would be overridden here
+
+The new dictionary type can be associated with JSONEncodedDict using
+an event listener established by the :meth:`.Mutable.associate_with`
+method::
+
+    MutationDict.associate_with(JSONEncodedDict)
+
+Alternatively, specific usages of ``JSONEncodedDict`` can be associated
+with ``MutationDict`` via :meth:`.Mutable.as_mutable`::
+
+    Table('mytable', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('data', MutationDict.as_mutable(JSONEncodedDict))
+    )
 
 Creating New Types
 ~~~~~~~~~~~~~~~~~~
index bf3ede6d49e7ebf78f7f5d450234829d4acbe51e..00b18bc7b87a5aab6db14729ff31a740adcbe650 100644 (file)
@@ -87,13 +87,6 @@ Location: /examples/large_collection/
 
 .. automodule:: large_collection
 
-Mutable Data Types
-------------------
-
-Location: /examples/mutable_events/
-
-.. automodule:: mutable_events
-
 Nested Sets
 -----------
 
index 05f86771cae5f1bf900d745698527e96fbababad..b6d5b27d51f536283a606a74d119d6a489dc9ee1 100644 (file)
@@ -7,11 +7,17 @@ ORM Extensions
 SQLAlchemy has a variety of ORM extensions available, which add additional
 functionality to the core behavior.
 
+The extensions build almost entirely on public core and ORM APIs and users should
+be encouraged to read their source code to further their understanding of their
+behavior.   In particular the "Horizontal Sharding", "Hybrid Attributes", and
+"Mutation Tracking" extensions are very succinct.
+
 .. toctree::
     :maxdepth: 1
 
     associationproxy
     declarative
+    mutable
     orderinglist
     horizontal_shard
     hybrid
diff --git a/examples/mutable_events/__init__.py b/examples/mutable_events/__init__.py
deleted file mode 100644 (file)
index b96f1e9..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
-Illustrates how to build and use "mutable" types, such as dictionaries and
-user-defined classes, as scalar attributes which detect in-place changes.
-These types don't make use of the "mutable=True" flag, which
-performs poorly within the ORM and is being phased out, instead allowing
-changes on data to associate change events with the parent object 
-as they happen in the same way as any other mapped data member.
-
-The example is based around the usage of the event model introduced in
-:ref:`event_toplevel`, along with the :func:`~.attributes.flag_modified` function
-which establishes the "dirty" flag on a particular mapped attribute.  These
-functions are encapsulated in a mixin called ``TrackMutationsMixin``. 
-Subclassing ``dict`` to provide "mutation tracking", then 
-applying it to a custom dictionary type, looks like::
-
-    class JSONEncodedDict(TypeDecorator):
-        "JSON dictionary type from the types documentation"
-        
-        impl = VARCHAR
-
-        def process_bind_param(self, value, dialect):
-            if value is not None:
-                value = simplejson.dumps(value, use_decimal=True)
-            return value
-
-        def process_result_value(self, value, dialect):
-            if value is not None:
-                value = simplejson.loads(value, use_decimal=True)
-            return value
-
-    class MutationDict(TrackMutationsMixin, dict):
-        "Subclass dict to send mutation events to the owning object."
-        
-        def __init__(self, other):
-            self.update(other)
-        
-        def __setitem__(self, key, value):
-            dict.__setitem__(self, key, value)
-            self.on_change()
-    
-        def __delitem__(self, key):
-            dict.__delitem__(self, key)
-            self.on_change()
-
-    # hypothetical mapping
-    Base = declarative_base()
-    class Foo(Base):
-        __tablename__ = 'foo'
-        id = Column(Integer, primary_key=True)
-        data = Column(JSONEncodedDict)
-
-    # add mutation tracking to `Foo.data` as a one off
-    MutationDict.associate_with_attribute(Foo.data)
-
-The explicit step of associating ``MutationDict`` with ``Foo.data`` can be 
-automated across a class of columns using ``associate_with_type()``::
-
-    # add mutation tracking to all mapped attributes
-    # that use JSONEncodedDict
-    MutationDict.associate_with_type(JSONEncodedDict)
-    
-All subsequent mappings will have the ``MutationDict`` wrapper applied to
-all attributes with ``JSONEncodedDict`` as their type.
-
-The example illustrates the usage of several events, including
-:meth:`.on_load`, :meth:`.on_refresh`, :meth:`.on_set`, and 
-:meth:`.on_mapper_configured`.
-
-"""
\ No newline at end of file
diff --git a/examples/mutable_events/composite.py b/examples/mutable_events/composite.py
deleted file mode 100644 (file)
index f46f28e..0000000
+++ /dev/null
@@ -1,139 +0,0 @@
-# this example is probably moving to be an extension.
-
-from sqlalchemy import event
-from sqlalchemy.orm import mapper, composite, object_mapper
-
-from sqlalchemy.util import memoized_property
-import weakref
-
-class _CompositeMutationsMixinMeta(type):
-    def __init__(cls, classname, bases, dict_):
-        cls._setup_listeners()
-        return type.__init__(cls, classname, bases, dict_)
-
-class CompositeMutationsMixin(object):
-    """Mixin that defines transparent propagation of change
-    events to a parent object.
-
-    This class might be moved to be a SQLA extension
-    due to its complexity and potential for widespread use.
-    
-    """
-    __metaclass__ = _CompositeMutationsMixinMeta
-
-    @memoized_property
-    def _parents(self):
-        """Dictionary of parent object->attribute name on the parent."""
-        
-        return weakref.WeakKeyDictionary()
-
-    def __setattr__(self, key, value):
-        object.__setattr__(self, key, value)
-        self.on_change()
-    
-    def on_change(self):
-        """Subclasses should call this method whenever change events occur."""
-        
-        for parent, key in self._parents.items():
-            
-            prop = object_mapper(parent).get_property(key)
-            for value, attr_name in zip(self.__composite_values__(), prop._attribute_keys):
-                setattr(parent, attr_name, value)
-    
-    @classmethod
-    def _listen_on_attribute(cls, attribute):
-        """Establish this type as a mutation listener for the given 
-        mapped descriptor.
-        
-        """
-        key = attribute.key
-        parent_cls = attribute.class_
-        
-        def on_load(state):
-            """Listen for objects loaded or refreshed.   
-            
-            Wrap the target data member's value with 
-            ``TrackMutationsMixin``.
-            
-            """
-            
-            val = state.dict.get(key, None)
-            if val is not None:
-                val._parents[state.obj()] = key
-
-        def on_set(target, value, oldvalue, initiator):
-            """Listen for set/replace events on the target
-            data member.
-            
-            Establish a weak reference to the parent object
-            on the incoming value, remove it for the one 
-            outgoing.
-            
-            """
-            
-            value._parents[target.obj()] = key
-            if isinstance(oldvalue, cls):
-                oldvalue._parents.pop(state.obj(), None)
-            return value
-        
-        event.listen(parent_cls, 'on_load', on_load, raw=True)
-        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
-        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
-    
-    @classmethod
-    def _setup_listeners(cls):
-        """Associate this wrapper with all future mapped compoistes
-        of the given type.
-        
-        This is a convenience method that calls ``associate_with_attribute`` automatically.
-        
-        """
-        
-        def listen_for_type(mapper, class_):
-            for prop in mapper.iterate_properties:
-                if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
-                    cls._listen_on_attribute(getattr(class_, prop.key))
-                    
-        event.listen(mapper, 'on_mapper_configured', listen_for_type)
-
-        
-if __name__ == '__main__':
-    from sqlalchemy import Column, Integer, create_engine
-    from sqlalchemy.orm import Session
-    from sqlalchemy.ext.declarative import declarative_base
-
-    class Point(CompositeMutationsMixin):
-        def __init__(self, x, y):
-            self.x = x
-            self.y = y
-        
-        def __composite_values__(self):
-            return self.x, self.y
-            
-        def __eq__(self, other):
-            return isinstance(other, Point) and \
-                other.x == self.x and \
-                other.y == self.y
-    
-    Base = declarative_base()
-    class Foo(Base):
-        __tablename__ = 'foo'
-        id = Column(Integer, primary_key=True)
-        data = composite(Point, Column('x', Integer), Column('y', Integer))
-    
-    e = create_engine('sqlite://', echo=True)
-
-    Base.metadata.create_all(e)
-
-    sess = Session(e)
-    d = Point(3, 4)
-    f1 = Foo(data=d)
-    sess.add(f1)
-    sess.commit()
-
-    f1.data.y = 5
-    sess.commit()
-
-    assert f1.data == Point(3, 5)
-
-        
\ No newline at end of file
diff --git a/examples/mutable_events/scalars.py b/examples/mutable_events/scalars.py
deleted file mode 100644 (file)
index 1c135a9..0000000
+++ /dev/null
@@ -1,153 +0,0 @@
-# this example is probably moving to be an extension.
-
-from sqlalchemy.orm.attributes import flag_modified
-from sqlalchemy import event
-from sqlalchemy.orm import mapper
-from sqlalchemy.util import memoized_property
-import weakref
-
-class TrackMutationsMixin(object):
-    """Mixin that defines transparent propagation of change
-    events to a parent object.
-    
-    This class might be moved to be a SQLA extension
-    due to its complexity and potential for widespread use.
-    
-    """
-    @memoized_property
-    def _parents(self):
-        """Dictionary of parent object->attribute name on the parent."""
-        
-        return weakref.WeakKeyDictionary()
-        
-    def on_change(self):
-        """Subclasses should call this method whenever change events occur."""
-        
-        for parent, key in self._parents.items():
-            flag_modified(parent, key)
-    
-    @classmethod
-    def associate_with_attribute(cls, attribute):
-        """Establish this type as a mutation listener for the given 
-        mapped descriptor.
-        
-        """
-        key = attribute.key
-        parent_cls = attribute.class_
-        
-        def on_load(state):
-            """Listen for objects loaded or refreshed.   
-            
-            Wrap the target data member's value with 
-            ``TrackMutationsMixin``.
-            
-            """
-            val = state.dict.get(key, None)
-            if val is not None:
-                val = cls(val)
-                state.dict[key] = val
-                val._parents[state.obj()] = key
-
-        def on_set(target, value, oldvalue, initiator):
-            """Listen for set/replace events on the target
-            data member.
-            
-            Establish a weak reference to the parent object
-            on the incoming value, remove it for the one 
-            outgoing.
-            
-            """
-            
-            if not isinstance(value, cls):
-                value = cls(value)
-            value._parents[target.obj()] = key
-            if isinstance(oldvalue, cls):
-                oldvalue._parents.pop(state.obj(), None)
-            return value
-        
-        event.listen(parent_cls, 'on_load', on_load, raw=True)
-        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
-        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
-    
-    @classmethod
-    def associate_with_type(cls, type_):
-        """Associate this wrapper with all future mapped columns 
-        of the given type.
-        
-        This is a convenience method that calls ``associate_with_attribute`` automatically.
-        
-        """
-        
-        def listen_for_type(mapper, class_):
-            for prop in mapper.iterate_properties:
-                if hasattr(prop, 'columns') and isinstance(prop.columns[0].type, type_):
-                    cls.associate_with_attribute(getattr(class_, prop.key))
-                    
-        event.listen(mapper, 'on_mapper_configured', listen_for_type)
-        
-        
-if __name__ == '__main__':
-    from sqlalchemy import Column, Integer, VARCHAR, create_engine
-    from sqlalchemy.orm import Session
-    from sqlalchemy.types import TypeDecorator
-    from sqlalchemy.ext.declarative import declarative_base
-    import simplejson
-
-    class JSONEncodedDict(TypeDecorator):
-        """Represents an immutable structure as a json-encoded string.
-    
-        Usage::
-    
-            JSONEncodedDict(255)
-        
-        """
-
-        impl = VARCHAR
-
-        def process_bind_param(self, value, dialect):
-            if value is not None:
-                value = simplejson.dumps(value, use_decimal=True)
-
-            return value
-
-        def process_result_value(self, value, dialect):
-            if value is not None:
-                value = simplejson.loads(value, use_decimal=True)
-            return value
-    
-    class MutationDict(TrackMutationsMixin, dict):
-        def __init__(self, other):
-            self.update(other)
-        
-        def __setitem__(self, key, value):
-            dict.__setitem__(self, key, value)
-            self.on_change()
-    
-        def __delitem__(self, key):
-            dict.__delitem__(self, key)
-            self.on_change()
-    
-    # TODO: do the metaclass approach the same as composite
-    MutationDict.associate_with_type(JSONEncodedDict)
-    
-    Base = declarative_base()
-    class Foo(Base):
-        __tablename__ = 'foo'
-        id = Column(Integer, primary_key=True)
-        data = Column(JSONEncodedDict)
-    
-    e = create_engine('sqlite://', echo=True)
-
-    Base.metadata.create_all(e)
-
-    sess = Session(e)
-    f1 = Foo(data={'a':'b'})
-    sess.add(f1)
-    sess.commit()
-
-    f1.data['a'] = 'c'
-    sess.commit()
-
-    assert f1.data == {'a':'c'}
-
-        
\ No newline at end of file
index ee0277b6775398df1587f1598c3ab294518476a5..1d83d4a9126e697bbffabed613ccebbded737f94 100644 (file)
@@ -247,7 +247,11 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
           "mutable types" mode in the ORM.  Be sure to read the 
           notes for :class:`.MutableType` regarding ORM 
           performance implications (default changed from ``True`` in 
-          0.7.0).   
+          0.7.0).
+          
+          .. note:: This functionality is now superseded by the
+             ``sqlalchemy.ext.mutable`` extension described in 
+             :ref:`mutable_toplevel`.
         
         :param as_tuple=False: Specify whether return results
           should be converted to tuples from lists. DBAPIs such
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
new file mode 100644 (file)
index 0000000..7dcbfd9
--- /dev/null
@@ -0,0 +1,281 @@
+"""Provide support for tracking of in-place changes to scalar values,
+which are propagated to owning parent objects.
+
+The ``mutable`` extension is a replacement for the :class:`.types.MutableType`
+class as well as the ``mutable=True`` flag available on types which subclass
+it.
+
+
+"""
+from sqlalchemy.orm.attributes import flag_modified
+from sqlalchemy import event, types
+from sqlalchemy.orm import mapper, object_mapper
+from sqlalchemy.util import memoized_property
+import weakref
+
+class Mutable(object):
+    """Mixin that defines transparent propagation of change
+    events to a parent object.
+    
+    """
+    
+    @memoized_property
+    def _parents(self):
+        """Dictionary of parent object->attribute name on the parent."""
+        
+        return weakref.WeakKeyDictionary()
+        
+    def on_change(self):
+        """Subclasses should call this method whenever change events occur."""
+        
+        for parent, key in self._parents.items():
+            flag_modified(parent, key)
+    
+    @classmethod
+    def coerce(cls, key, value):
+        """Given a value, coerce it into this type.
+        
+        By default raises ValueError.
+        """
+        if value is None:
+            return None
+        raise ValueError("Attribute '%s' accepts objects of type %s" % (key, cls))
+        
+        
+    @classmethod
+    def associate_with_attribute(cls, attribute):
+        """Establish this type as a mutation listener for the given 
+        mapped descriptor.
+        
+        """
+        key = attribute.key
+        parent_cls = attribute.class_
+        
+        def on_load(state):
+            """Listen for objects loaded or refreshed.   
+            
+            Wrap the target data member's value with 
+            ``Mutable``.
+            
+            """
+            val = state.dict.get(key, None)
+            if val is not None:
+                val = cls.coerce(key, val)
+                state.dict[key] = val
+                val._parents[state.obj()] = key
+
+        def on_set(target, value, oldvalue, initiator):
+            """Listen for set/replace events on the target
+            data member.
+            
+            Establish a weak reference to the parent object
+            on the incoming value, remove it for the one 
+            outgoing.
+            
+            """
+            
+            if not isinstance(value, cls):
+                value = cls.coerce(key, value) 
+            value._parents[target.obj()] = key
+            if isinstance(oldvalue, cls):
+                oldvalue._parents.pop(state.obj(), None)
+            return value
+        
+        event.listen(parent_cls, 'on_load', on_load, raw=True)
+        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
+        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+
+        # TODO: need a deserialize hook here
+
+    @classmethod
+    def associate_with(cls, sqltype):
+        """Associate this wrapper with all future mapped columns 
+        of the given type.
+        
+        This is a convenience method that calls ``associate_with_attribute`` automatically.
+
+        .. warning:: The listeners established by this method are *global*
+           to all mappers, and are *not* garbage collected.   Only use 
+           :meth:`.associate_with` for types that are permanent to an application,
+           not with ad-hoc types else this will cause unbounded growth
+           in memory usage.
+        
+        """
+
+        def listen_for_type(mapper, class_):
+            for prop in mapper.iterate_properties:
+                if hasattr(prop, 'columns'):
+                    if isinstance(prop.columns[0].type, sqltype):
+                        cls.associate_with_attribute(getattr(class_, prop.key))
+                        break
+                    
+        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+    
+    @classmethod
+    def as_mutable(cls, sqltype):
+        """Associate a SQL type with this mutable Python type.
+    
+        This establishes listeners that will detect ORM mappings against
+        the given type, adding mutation event trackers to those mappings.
+    
+        The type is returned, unconditionally as an instance, so that 
+        :meth:`.as_mutable` can be used inline::
+    
+            Table('mytable', metadata,
+                Column('id', Integer, primary_key=True),
+                Column('data', MyMutableType.as_mutable(PickleType))
+            )
+        
+        Note that the returned type is always an instance, even if a class
+        is given, and that only columns which are declared specifically with that
+        type instance receive additional instrumentation.
+    
+        To associate a particular mutable type with all occurrences of a 
+        particular type, use the :meth:`.Mutable.associate_with` classmethod
+        of the particular :meth:`.Mutable` subclass to establish a global
+        association.
+    
+        .. warning:: The listeners established by this method are *global*
+           to all mappers, and are *not* garbage collected.   Only use 
+           :meth:`.as_mutable` for types that are permanent to an application,
+           not with ad-hoc types else this will cause unbounded growth
+           in memory usage.
+    
+        """
+        sqltype = types.to_instance(sqltype)
+
+        def listen_for_type(mapper, class_):
+            for prop in mapper.iterate_properties:
+                if hasattr(prop, 'columns'):
+                    if prop.columns[0].type is sqltype:
+                        cls.associate_with_attribute(getattr(class_, prop.key))
+                        break
+                
+        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+        
+        return sqltype
+
+
+class _MutableCompositeMeta(type):
+    def __init__(cls, classname, bases, dict_):
+        cls._setup_listeners()
+        return type.__init__(cls, classname, bases, dict_)
+
+class MutableComposite(object):
+    """Mixin that defines transparent propagation of change
+    events on a SQLAlchemy "composite" object to its
+    owning parent or parents.
+    
+    Composite classes, in addition to meeting the usage contract
+    defined in :ref:`mapper_composite`, also define some system
+    of relaying change events to the given :meth:`.on_change` 
+    method, which will notify all parents of the change.  Below
+    the special Python method ``__setattr__`` is used to intercept
+    all changes::
+    
+        class Point(MutableComposite):
+            def __init__(self, x, y):
+                self.x = x
+                self.y = y
+
+            def __setattr__(self, key, value):
+                object.__setattr__(self, key, value)
+                self.on_change()
+        
+            def __composite_values__(self):
+                return self.x, self.y
+            
+            def __eq__(self, other):
+                return isinstance(other, Point) and \
+                    other.x == self.x and \
+                    other.y == self.y
+
+    :class:`.MutableComposite` defines a metaclass which augments
+    the creation of :class:`.MutableComposite` subclasses with an event
+    that will listen for any :func:`~.orm.composite` mappings against the 
+    new type, establishing listeners that will track parent associations.
+
+    .. warning:: The listeners established by the :class:`.MutableComposite`
+       class are *global* to all mappers, and are *not* garbage collected.   Only use 
+       :class:`.MutableComposite` for types that are permanent to an application,
+       not with ad-hoc types else this will cause unbounded growth
+       in memory usage.
+    
+    """
+    __metaclass__ = _MutableCompositeMeta
+
+    @memoized_property
+    def _parents(self):
+        """Dictionary of parent object->attribute name on the parent."""
+        
+        return weakref.WeakKeyDictionary()
+
+    def on_change(self):
+        """Subclasses should call this method whenever change events occur."""
+        
+        for parent, key in self._parents.items():
+            
+            prop = object_mapper(parent).get_property(key)
+            for value, attr_name in zip(
+                                    self.__composite_values__(), 
+                                    prop._attribute_keys):
+                setattr(parent, attr_name, value)
+    
+    @classmethod
+    def _listen_on_attribute(cls, attribute):
+        """Establish this type as a mutation listener for the given 
+        mapped descriptor.
+        
+        """
+        key = attribute.key
+        parent_cls = attribute.class_
+        
+        def on_load(state):
+            """Listen for objects loaded or refreshed.   
+            
+            Wrap the target data member's value with 
+            ``Mutable``.
+            
+            """
+            
+            val = state.dict.get(key, None)
+            if val is not None:
+                val._parents[state.obj()] = key
+
+        def on_set(target, value, oldvalue, initiator):
+            """Listen for set/replace events on the target
+            data member.
+            
+            Establish a weak reference to the parent object
+            on the incoming value, remove it for the one 
+            outgoing.
+            
+            """
+            
+            value._parents[target.obj()] = key
+            if isinstance(oldvalue, cls):
+                oldvalue._parents.pop(state.obj(), None)
+            return value
+        
+        event.listen(parent_cls, 'on_load', on_load, raw=True)
+        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
+        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+
+        # TODO: need a deserialize hook here
+    
+    @classmethod
+    def _setup_listeners(cls):
+        """Associate this wrapper with all future mapped compoistes
+        of the given type.
+        
+        This is a convenience method that calls ``associate_with_attribute`` automatically.
+        
+        """
+        
+        def listen_for_type(mapper, class_):
+            for prop in mapper.iterate_properties:
+                if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
+                    cls._listen_on_attribute(getattr(class_, prop.key))
+                    
+        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+
index 5f974e2607ec33b00aa7b2da354d54d5a91dff08..d0f871664280ee23aea0be4fa3e5c07fa7d1663d 100644 (file)
@@ -191,6 +191,7 @@ class CompositeProperty(DescriptorProperty):
         event.listen(self.parent, 'on_refresh', load_handler, raw=True)
         event.listen(self.parent, "on_expire", expire_handler, raw=True)
         
+        # TODO: need a deserialize hook here
         
     @util.memoized_property
     def _attribute_keys(self):
index 22be5f58f65f3859f6afdd1e1c2dcc94c830c106..89a84e898473af89a7e77987b4dd6b4a1f794d1a 100644 (file)
@@ -175,7 +175,9 @@ class InstanceState(object):
 
         if 'load_path' in state:
             self.load_path = interfaces.deserialize_path(state['load_path'])
-
+        
+        # TODO: need an event here, link to composite, mutable
+        
     def initialize(self, key):
         """Set this attribute to an empty value or collection, 
            based on the AttributeImpl in use."""
index f5df0236718945dd37f427d1b3709d7368e6546c..1756cf6ff69481cf4d971a38cc67ccc0e5bec9ed 100644 (file)
@@ -94,6 +94,10 @@ class TypeEngine(AbstractType):
         are serialized into strings are examples of "mutable" 
         column structures.
         
+        .. note:: This functionality is now superseded by the
+          ``sqlalchemy.ext.mutable`` extension described in 
+          :ref:`mutable_toplevel`.
+        
         When this method is overridden, :meth:`copy_value` should
         also be supplied.   The :class:`.MutableType` mixin
         is recommended as a helper.
@@ -511,10 +515,10 @@ class TypeDecorator(TypeEngine):
         objects alone.  Values such as dicts, lists which
         are serialized into strings are examples of "mutable" 
         column structures.
-        
-        When this method is overridden, :meth:`copy_value` should
-        also be supplied.   The :class:`.MutableType` mixin
-        is recommended as a helper.
+
+        .. note:: This functionality is now superseded by the
+          ``sqlalchemy.ext.mutable`` extension described in 
+          :ref:`mutable_toplevel`.
         
         """
         return self.impl.is_mutable()
@@ -528,8 +532,16 @@ class TypeDecorator(TypeEngine):
 
 class MutableType(object):
     """A mixin that marks a :class:`TypeEngine` as representing
-    a mutable Python object type.
-
+    a mutable Python object type.   This functionality is used
+    only by the ORM.
+    
+    .. note:: :class:`.MutableType` is superseded as of SQLAlchemy 0.7
+       by the ``sqlalchemy.ext.mutable`` extension described in
+       :ref:`mutable_toplevel`.   This extension provides an event
+       driven approach to in-place mutation detection that does not
+       incur the severe performance penalty of the :class:`.MutableType`
+       approach.
+       
     "mutable" means that changes can occur in place to a value 
     of this type.   Examples includes Python lists, dictionaries,
     and sets, as well as user-defined objects.  The primary
@@ -550,49 +562,28 @@ class MutableType(object):
     represent a copy and compare function for values of this
     type - implementing subclasses should override these
     appropriately.
-
-    The usage of mutable types has significant performance
-    implications when using the ORM. In order to detect changes, the
-    ORM must create a copy of the value when it is first
-    accessed, so that changes to the current value can be compared
-    against the "clean" database-loaded value. Additionally, when the
-    ORM checks to see if any data requires flushing, it must scan
-    through all instances in the session which are known to have
-    "mutable" attributes and compare the current value of each
-    one to its "clean"
-    value. So for example, if the Session contains 6000 objects (a
-    fairly large amount) and autoflush is enabled, every individual
-    execution of :class:`Query` will require a full scan of that subset of
-    the 6000 objects that have mutable attributes, possibly resulting
-    in tens of thousands of additional method calls for every query.
     
-    Note that for small numbers (< 100 in the Session at a time)
-    of objects with "mutable" values, the performance degradation is 
-    negligible.  
+    .. warning:: The usage of mutable types has significant performance
+        implications when using the ORM. In order to detect changes, the
+        ORM must create a copy of the value when it is first
+        accessed, so that changes to the current value can be compared
+        against the "clean" database-loaded value. Additionally, when the
+        ORM checks to see if any data requires flushing, it must scan
+        through all instances in the session which are known to have
+        "mutable" attributes and compare the current value of each
+        one to its "clean"
+        value. So for example, if the Session contains 6000 objects (a
+        fairly large amount) and autoflush is enabled, every individual
+        execution of :class:`Query` will require a full scan of that subset of
+        the 6000 objects that have mutable attributes, possibly resulting
+        in tens of thousands of additional method calls for every query.
     
-    It is perfectly fine to represent "mutable" data types with the
-    "mutable" flag set to False, which eliminates any performance
-    issues. It means that the ORM will only reliably detect changes
-    for values of this type if a newly modified value is of a different 
-    identity (i.e., ``id(value)``) than what was present before - 
-    i.e., instead of operations like these::
-    
-        myobject.somedict['foo'] = 'bar'
-        myobject.someset.add('bar')
-        myobject.somelist.append('bar')
-        
-    You'd instead say::
+        As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` extension is provided, which
+        allows an event driven approach to in-place mutation detection. This
+        approach should now be favored over the usage of :class:`.MutableType`
+        with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in
+        :ref:`mutable_toplevel`.
     
-        myobject.somevalue = {'foo':'bar'}
-        myobject.someset = myobject.someset.union(['bar'])
-        myobject.somelist = myobject.somelist + ['bar']
-        
-    A future release of SQLAlchemy will include instrumented
-    collection support for mutable types, such that at least usage of
-    plain Python datastructures will be able to emit events for
-    in-place changes, removing the need for pessimistic scanning for
-    changes.
-
     """
 
     def is_mutable(self):
@@ -1594,7 +1585,11 @@ class PickleType(MutableType, TypeDecorator):
           ``comparator`` argument is present.   See
           :class:`.MutableType` for details on "mutable" type
           behavior. (default changed from ``True`` in 
-          0.7.0).   
+          0.7.0).
+
+          .. note:: This functionality is now superseded by the
+             ``sqlalchemy.ext.mutable`` extension described in 
+             :ref:`mutable_toplevel`.
 
         :param comparator: a 2-arg callable predicate used
           to compare values of this type.  If left as ``None``, 
diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py
new file mode 100644 (file)
index 0000000..e9573f5
--- /dev/null
@@ -0,0 +1,238 @@
+from sqlalchemy import Integer
+from sqlalchemy.types import PickleType, TypeDecorator, VARCHAR
+from sqlalchemy.orm import mapper, Session, composite
+from sqlalchemy.orm.mapper import Mapper
+from sqlalchemy.orm.instrumentation import ClassManager
+from test.lib.schema import Table, Column
+from test.lib.testing import eq_
+from test.lib import testing
+from test.orm import _base
+import sys
+
+class _MutableDictTestBase(object):
+    @classmethod
+    def _type_fixture(cls):
+        from sqlalchemy.ext.mutable import Mutable
+        
+        # needed for pickle support
+        global MutationDict
+        
+        class MutationDict(Mutable, dict):
+            @classmethod
+            def coerce(cls, key, value):
+                if not isinstance(value, MutationDict):
+                    if isinstance(value, dict):
+                        return MutationDict(value)
+                    return Mutable.coerce(key, value)
+                else:
+                    return value
+        
+            def __getstate__(self):
+                return dict(self)
+        
+            def __setstate__(self, dict):
+                self.update(dict)
+            
+            def __setitem__(self, key, value):
+                dict.__setitem__(self, key, value)
+                self.on_change()
+    
+            def __delitem__(self, key):
+                dict.__delitem__(self, key)
+                self.on_change()
+        return MutationDict
+    
+    @testing.resolve_artifact_names
+    def setup_mappers(cls):
+        class Foo(_base.BasicEntity):
+            pass
+        
+        mapper(Foo, foo)
+
+    def teardown(self):
+        # clear out mapper events
+        Mapper.dispatch.clear()
+        ClassManager.dispatch.clear()
+        super(_MutableDictTestBase, self).teardown()
+        
+    @testing.resolve_artifact_names
+    def test_in_place_mutation(self):
+        sess = Session()
+
+        f1 = Foo(data={'a':'b'})
+        sess.add(f1)
+        sess.commit()
+
+        f1.data['a'] = 'c'
+        sess.commit()
+
+        eq_(f1.data, {'a':'c'})
+
+    @testing.resolve_artifact_names
+    def _test_non_mutable(self):
+        sess = Session()
+
+        f1 = Foo(non_mutable_data={'a':'b'})
+        sess.add(f1)
+        sess.commit()
+
+        f1.non_mutable_data['a'] = 'c'
+        sess.commit()
+
+        eq_(f1.non_mutable_data, {'a':'b'})
+
+class MutableWithScalarPickleTest(_MutableDictTestBase, _base.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        MutationDict = cls._type_fixture()
+        
+        Table('foo', metadata,
+            Column('id', Integer, primary_key=True, test_needs_pk=True),
+            Column('data', MutationDict.as_mutable(PickleType)),
+            Column('non_mutable_data', PickleType)
+        )
+    
+    def test_non_mutable(self):
+        self._test_non_mutable()
+        
+class MutableWithScalarJSONTest(_MutableDictTestBase, _base.MappedTest):
+    # json introduced in 2.6
+    __skip_if__ = lambda : sys.version_info < (2, 6),
+
+    @classmethod
+    def define_tables(cls, metadata):
+        import json
+
+        class JSONEncodedDict(TypeDecorator):
+            impl = VARCHAR
+
+            def process_bind_param(self, value, dialect):
+                if value is not None:
+                    value = json.dumps(value)
+
+                return value
+
+            def process_result_value(self, value, dialect):
+                if value is not None:
+                    value = json.loads(value)
+                return value
+        
+        MutationDict = cls._type_fixture()
+
+        Table('foo', metadata,
+            Column('id', Integer, primary_key=True, test_needs_pk=True),
+            Column('data', MutationDict.as_mutable(JSONEncodedDict)),
+            Column('non_mutable_data', JSONEncodedDict)
+        )
+
+    def test_non_mutable(self):
+        self._test_non_mutable()
+
+class MutableAssociationScalarPickleTest(_MutableDictTestBase, _base.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        MutationDict = cls._type_fixture()
+        MutationDict.associate_with(PickleType)
+        
+        Table('foo', metadata,
+            Column('id', Integer, primary_key=True, test_needs_pk=True),
+            Column('data', PickleType)
+        )
+
+class MutableAssociationScalarJSONTest(_MutableDictTestBase, _base.MappedTest):
+    # json introduced in 2.6
+    __skip_if__ = lambda : sys.version_info < (2, 6),
+
+    @classmethod
+    def define_tables(cls, metadata):
+        import json
+
+        class JSONEncodedDict(TypeDecorator):
+            impl = VARCHAR
+
+            def process_bind_param(self, value, dialect):
+                if value is not None:
+                    value = json.dumps(value)
+
+                return value
+
+            def process_result_value(self, value, dialect):
+                if value is not None:
+                    value = json.loads(value)
+                return value
+
+        MutationDict = cls._type_fixture()
+        MutationDict.associate_with(JSONEncodedDict)
+        
+        Table('foo', metadata,
+            Column('id', Integer, primary_key=True, test_needs_pk=True),
+            Column('data', JSONEncodedDict)
+        )
+        
+class MutableCompositesTest(_base.MappedTest):
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('foo', metadata,
+            Column('id', Integer, primary_key=True, test_needs_pk=True),
+            Column('x', Integer),
+            Column('y', Integer)
+        )
+
+    def teardown(self):
+        # clear out mapper events
+        Mapper.dispatch.clear()
+        ClassManager.dispatch.clear()
+        super(MutableCompositesTest, self).teardown()
+
+    @classmethod
+    def _type_fixture(cls):
+        
+        from sqlalchemy.ext.mutable import Mutable
+        from sqlalchemy.ext.mutable import MutableComposite
+        
+        global Point
+        
+        class Point(MutableComposite):
+            def __init__(self, x, y):
+                self.x = x
+                self.y = y
+
+            def __setattr__(self, key, value):
+                object.__setattr__(self, key, value)
+                self.on_change()
+        
+            def __composite_values__(self):
+                return self.x, self.y
+            
+            def __eq__(self, other):
+                return isinstance(other, Point) and \
+                    other.x == self.x and \
+                    other.y == self.y
+        return Point
+        
+    @classmethod
+    @testing.resolve_artifact_names
+    def setup_mappers(cls):
+        Point = cls._type_fixture()
+        
+        class Foo(_base.BasicEntity):
+            pass
+            
+        mapper(Foo, foo, properties={
+            'data':composite(Point, foo.c.x, foo.c.y)
+        })
+
+    @testing.resolve_artifact_names
+    def test_in_place_mutation(self):
+        sess = Session()
+        d = Point(3, 4)
+        f1 = Foo(data=d)
+        sess.add(f1)
+        sess.commit()
+
+        f1.data.y = 5
+        sess.commit()
+
+        eq_(f1.data, Point(3, 5))
+
+                
\ No newline at end of file
diff --git a/test/orm/test_legacy_mutable.py b/test/orm/test_legacy_mutable.py
new file mode 100644 (file)
index 0000000..f3d3d58
--- /dev/null
@@ -0,0 +1,353 @@
+"""Test the interaction of :class:`.MutableType` as well as the 
+``mutable=True`` flag with the ORM.
+
+For new mutability functionality, see test.ext.test_mutable.
+
+"""
+from test.lib.testing import eq_
+import operator
+from sqlalchemy.orm import mapper as orm_mapper
+
+import sqlalchemy as sa
+from sqlalchemy import Integer, String, ForeignKey
+from test.lib import testing, pickleable
+from test.lib.schema import Table, Column
+from sqlalchemy.orm import mapper, create_session, Session, attributes
+from test.lib.testing import eq_, ne_
+from test.lib.util import gc_collect
+from test.orm import _base, _fixtures
+
+class MutableTypesTest(_base.MappedTest):
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('mutable_t', metadata,
+            Column('id', Integer, primary_key=True,
+                   test_needs_autoincrement=True),
+            Column('data', sa.PickleType(mutable=True)),
+            Column('val', sa.Unicode(30)))
+
+    @classmethod
+    def setup_classes(cls):
+        class Foo(_base.BasicEntity):
+            pass
+
+    @classmethod
+    @testing.resolve_artifact_names
+    def setup_mappers(cls):
+        mapper(Foo, mutable_t)
+
+    @testing.resolve_artifact_names
+    def test_modified_status(self):
+        f1 = Foo(data = pickleable.Bar(4,5))
+        
+        session = Session()
+        session.add(f1)
+        session.commit()
+
+        f2 = session.query(Foo).first()
+        assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
+        eq_(f2.data, f1.data)
+
+        f2.data.y = 19
+        assert f2 in session.dirty
+        assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
+    
+    @testing.resolve_artifact_names
+    def test_mutations_persisted(self):
+        f1 = Foo(data = pickleable.Bar(4,5))
+        
+        session = Session()
+        session.add(f1)
+        session.commit()
+        f1.data
+        session.close()
+        
+        f2 = session.query(Foo).first()
+        f2.data.y = 19
+        session.commit()
+        f2.data
+        session.close()
+        
+        f3 = session.query(Foo).first()
+        ne_(f3.data,f1.data)
+        eq_(f3.data, pickleable.Bar(4, 19))
+        
+    @testing.resolve_artifact_names
+    def test_no_unnecessary_update(self):
+        f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
+
+        session = Session()
+        session.add(f1)
+        session.commit()
+
+        self.sql_count_(0, session.commit)
+        
+        f1.val = u'someothervalue'
+        self.assert_sql(testing.db, session.commit, [
+            ("UPDATE mutable_t SET val=:val "
+             "WHERE mutable_t.id = :mutable_t_id",
+             {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
+
+        f1.val = u'hi'
+        f1.data.x = 9
+        self.assert_sql(testing.db, session.commit, [
+            ("UPDATE mutable_t SET data=:data, val=:val "
+             "WHERE mutable_t.id = :mutable_t_id",
+             {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
+        
+    @testing.resolve_artifact_names
+    def test_mutated_state_resurrected(self):
+        f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
+
+        session = Session()
+        session.add(f1)
+        session.commit()
+
+        f1.data.y = 19
+        del f1
+
+        gc_collect()
+        assert len(session.identity_map) == 1
+
+        session.commit()
+
+        assert session.query(Foo).one().data == pickleable.Bar(4, 19)
+
+    @testing.resolve_artifact_names
+    def test_mutated_plus_scalar_state_change_resurrected(self):
+        """test that a non-mutable attribute event subsequent to
+        a mutable event prevents the object from falling into
+        resurrected state.
+        
+         """
+        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+        session = Session()
+        session.add(f1)
+        session.commit()
+        f1.data.x = 10
+        f1.data.y = 15
+        f1.val=u'some new val'
+
+        assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
+        
+        del f1
+        session.commit()
+        eq_(
+            session.query(Foo.val).all(),
+            [('some new val', )]
+        )
+
+    @testing.resolve_artifact_names
+    def test_non_mutated_state_not_resurrected(self):
+        f1 = Foo(data = pickleable.Bar(4,5))
+        
+        session = Session()
+        session.add(f1)
+        session.commit()
+        
+        session = Session()
+        f1 = session.query(Foo).first()
+        del f1
+        gc_collect()
+
+        assert len(session.identity_map) == 0
+        f1 = session.query(Foo).first()
+        assert not attributes.instance_state(f1).modified
+
+    @testing.resolve_artifact_names
+    def test_scalar_no_net_change_no_update(self):
+        """Test that a no-net-change on a scalar attribute event
+        doesn't cause an UPDATE for a mutable state.
+        
+         """
+
+        f1 = Foo(val=u'hi')
+
+        session = Session()
+        session.add(f1)
+        session.commit()
+        session.close()
+
+        f1 = session.query(Foo).first()
+        f1.val = u'hi'
+        self.sql_count_(0, session.commit)
+
+    @testing.resolve_artifact_names
+    def test_expire_attribute_set(self):
+        """test no SELECT emitted when assigning to an expired
+        mutable attribute.
+        
+        """
+        
+        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+        session = Session()
+        session.add(f1)
+        session.commit()
+        
+        assert 'data' not in f1.__dict__
+        def go():
+            f1.data = pickleable.Bar(10, 15)
+        self.sql_count_(0, go)
+        session.commit()
+        
+        eq_(f1.data.x, 10)
+
+    @testing.resolve_artifact_names
+    def test_expire_mutate(self):
+        """test mutations are detected on an expired mutable
+        attribute."""
+        
+        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+        session = Session()
+        session.add(f1)
+        session.commit()
+        
+        assert 'data' not in f1.__dict__
+        def go():
+            f1.data.x = 10
+        self.sql_count_(1, go)
+        session.commit()
+        
+        eq_(f1.data.x, 10)
+        
+    @testing.resolve_artifact_names
+    def test_deferred_attribute_set(self):
+        """test no SELECT emitted when assigning to a deferred
+        mutable attribute.
+        
+        """
+        sa.orm.clear_mappers()
+        mapper(Foo, mutable_t, properties={
+            'data':sa.orm.deferred(mutable_t.c.data)
+        })
+
+        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+        session = Session()
+        session.add(f1)
+        session.commit()
+        
+        session.close()
+        
+        f1 = session.query(Foo).first()
+        def go():
+            f1.data = pickleable.Bar(10, 15)
+        self.sql_count_(0, go)
+        session.commit()
+        
+        eq_(f1.data.x, 10)
+
+    @testing.resolve_artifact_names
+    def test_deferred_mutate(self):
+        """test mutations are detected on a deferred mutable
+        attribute."""
+        
+        sa.orm.clear_mappers()
+        mapper(Foo, mutable_t, properties={
+            'data':sa.orm.deferred(mutable_t.c.data)
+        })
+
+        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+        session = Session()
+        session.add(f1)
+        session.commit()
+        
+        session.close()
+        
+        f1 = session.query(Foo).first()
+        def go():
+            f1.data.x = 10
+        self.sql_count_(1, go)
+        session.commit()
+        
+        def go():
+            eq_(f1.data.x, 10)
+        self.sql_count_(1, go)
+
+
+class PickledDictsTest(_base.MappedTest):
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('mutable_t', metadata,
+            Column('id', Integer, primary_key=True,
+                   test_needs_autoincrement=True),
+            Column('data', 
+                sa.PickleType(comparator=operator.eq, mutable=True)))
+
+    @classmethod
+    def setup_classes(cls):
+        class Foo(_base.BasicEntity):
+            pass
+
+    @classmethod
+    @testing.resolve_artifact_names
+    def setup_mappers(cls):
+        mapper(Foo, mutable_t)
+
+    @testing.resolve_artifact_names
+    def test_dicts(self):
+        """Dictionaries may not pickle the same way twice."""
+
+        f1 = Foo()
+        f1.data = [ {
+            'personne': {'nom': u'Smith',
+                         'pers_id': 1,
+                         'prenom': u'john',
+                         'civilite': u'Mr',
+                         'int_3': False,
+                         'int_2': False,
+                         'int_1': u'23',
+                         'VenSoir': True,
+                         'str_1': u'Test',
+                         'SamMidi': False,
+                         'str_2': u'chien',
+                         'DimMidi': False,
+                         'SamSoir': True,
+                         'SamAcc': False} } ]
+
+        session = create_session(autocommit=False)
+        session.add(f1)
+        session.commit()
+
+        self.sql_count_(0, session.commit)
+
+        f1.data = [ {
+            'personne': {'nom': u'Smith',
+                         'pers_id': 1,
+                         'prenom': u'john',
+                         'civilite': u'Mr',
+                         'int_3': False,
+                         'int_2': False,
+                         'int_1': u'23',
+                         'VenSoir': True,
+                         'str_1': u'Test',
+                         'SamMidi': False,
+                         'str_2': u'chien',
+                         'DimMidi': False,
+                         'SamSoir': True,
+                         'SamAcc': False} } ]
+
+        self.sql_count_(0, session.commit)
+
+        f1.data[0]['personne']['VenSoir']= False
+        self.sql_count_(1, session.commit)
+
+        session.expunge_all()
+        f = session.query(Foo).get(f1.id)
+        eq_(f.data,
+            [ {
+            'personne': {'nom': u'Smith',
+                         'pers_id': 1,
+                         'prenom': u'john',
+                         'civilite': u'Mr',
+                         'int_3': False,
+                         'int_2': False,
+                         'int_1': u'23',
+                         'VenSoir': False,
+                         'str_1': u'Test',
+                         'SamMidi': False,
+                         'str_2': u'chien',
+                         'DimMidi': False,
+                         'SamSoir': True,
+                         'SamAcc': False} } ])
index 7f0ada49f29e3595c526c2c909a7d4a44dfc03c7..9cb9604a7fdc6352832a4d7554f49a3b6c489156 100644 (file)
@@ -23,7 +23,6 @@ class TransactionTest(FixtureTest):
             })
         mapper(Address, addresses)
 
-
     
 class FixtureDataTest(TransactionTest):
     run_inserts = 'each'
index 469464fd07f78b0d7c1411e0554deba79a166a96..9c472764ec4fc850aed6880c4c9c1a37c5d9491d 100644 (file)
@@ -252,340 +252,8 @@ class BinaryHistTest(_base.MappedTest, testing.AssertsExecutionResults):
             s.flush()
         self.assert_sql_count(testing.db, go, 0)
         
-class MutableTypesTest(_base.MappedTest):
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table('mutable_t', metadata,
-            Column('id', Integer, primary_key=True,
-                   test_needs_autoincrement=True),
-            Column('data', sa.PickleType(mutable=True)),
-            Column('val', sa.Unicode(30)))
-
-    @classmethod
-    def setup_classes(cls):
-        class Foo(_base.BasicEntity):
-            pass
-
-    @classmethod
-    @testing.resolve_artifact_names
-    def setup_mappers(cls):
-        mapper(Foo, mutable_t)
-
-    @testing.resolve_artifact_names
-    def test_modified_status(self):
-        f1 = Foo(data = pickleable.Bar(4,5))
-        
-        session = Session()
-        session.add(f1)
-        session.commit()
-
-        f2 = session.query(Foo).first()
-        assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
-        eq_(f2.data, f1.data)
-
-        f2.data.y = 19
-        assert f2 in session.dirty
-        assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
-    
-    @testing.resolve_artifact_names
-    def test_mutations_persisted(self):
-        f1 = Foo(data = pickleable.Bar(4,5))
-        
-        session = Session()
-        session.add(f1)
-        session.commit()
-        f1.data
-        session.close()
-        
-        f2 = session.query(Foo).first()
-        f2.data.y = 19
-        session.commit()
-        f2.data
-        session.close()
-        
-        f3 = session.query(Foo).first()
-        ne_(f3.data,f1.data)
-        eq_(f3.data, pickleable.Bar(4, 19))
-        
-    @testing.resolve_artifact_names
-    def test_no_unnecessary_update(self):
-        f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
-        session = Session()
-        session.add(f1)
-        session.commit()
-
-        self.sql_count_(0, session.commit)
-        
-        f1.val = u'someothervalue'
-        self.assert_sql(testing.db, session.commit, [
-            ("UPDATE mutable_t SET val=:val "
-             "WHERE mutable_t.id = :mutable_t_id",
-             {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
-
-        f1.val = u'hi'
-        f1.data.x = 9
-        self.assert_sql(testing.db, session.commit, [
-            ("UPDATE mutable_t SET data=:data, val=:val "
-             "WHERE mutable_t.id = :mutable_t_id",
-             {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
-        
-    @testing.resolve_artifact_names
-    def test_mutated_state_resurrected(self):
-        f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
-        session = Session()
-        session.add(f1)
-        session.commit()
-
-        f1.data.y = 19
-        del f1
-
-        gc_collect()
-        assert len(session.identity_map) == 1
-
-        session.commit()
-
-        assert session.query(Foo).one().data == pickleable.Bar(4, 19)
-
-    @testing.resolve_artifact_names
-    def test_mutated_plus_scalar_state_change_resurrected(self):
-        """test that a non-mutable attribute event subsequent to
-        a mutable event prevents the object from falling into
-        resurrected state.
-        
-         """
-        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
-        session = Session()
-        session.add(f1)
-        session.commit()
-        f1.data.x = 10
-        f1.data.y = 15
-        f1.val=u'some new val'
-
-        assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
-        
-        del f1
-        session.commit()
-        eq_(
-            session.query(Foo.val).all(),
-            [('some new val', )]
-        )
-
-    @testing.resolve_artifact_names
-    def test_non_mutated_state_not_resurrected(self):
-        f1 = Foo(data = pickleable.Bar(4,5))
-        
-        session = Session()
-        session.add(f1)
-        session.commit()
-        
-        session = Session()
-        f1 = session.query(Foo).first()
-        del f1
-        gc_collect()
-
-        assert len(session.identity_map) == 0
-        f1 = session.query(Foo).first()
-        assert not attributes.instance_state(f1).modified
-
-    @testing.resolve_artifact_names
-    def test_scalar_no_net_change_no_update(self):
-        """Test that a no-net-change on a scalar attribute event
-        doesn't cause an UPDATE for a mutable state.
-        
-         """
-
-        f1 = Foo(val=u'hi')
-
-        session = Session()
-        session.add(f1)
-        session.commit()
-        session.close()
-
-        f1 = session.query(Foo).first()
-        f1.val = u'hi'
-        self.sql_count_(0, session.commit)
-
-    @testing.resolve_artifact_names
-    def test_expire_attribute_set(self):
-        """test no SELECT emitted when assigning to an expired
-        mutable attribute.
         
-        """
-        
-        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
-        session = Session()
-        session.add(f1)
-        session.commit()
-        
-        assert 'data' not in f1.__dict__
-        def go():
-            f1.data = pickleable.Bar(10, 15)
-        self.sql_count_(0, go)
-        session.commit()
-        
-        eq_(f1.data.x, 10)
 
-    @testing.resolve_artifact_names
-    def test_expire_mutate(self):
-        """test mutations are detected on an expired mutable
-        attribute."""
-        
-        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
-        session = Session()
-        session.add(f1)
-        session.commit()
-        
-        assert 'data' not in f1.__dict__
-        def go():
-            f1.data.x = 10
-        self.sql_count_(1, go)
-        session.commit()
-        
-        eq_(f1.data.x, 10)
-        
-    @testing.resolve_artifact_names
-    def test_deferred_attribute_set(self):
-        """test no SELECT emitted when assigning to a deferred
-        mutable attribute.
-        
-        """
-        sa.orm.clear_mappers()
-        mapper(Foo, mutable_t, properties={
-            'data':sa.orm.deferred(mutable_t.c.data)
-        })
-
-        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
-        session = Session()
-        session.add(f1)
-        session.commit()
-        
-        session.close()
-        
-        f1 = session.query(Foo).first()
-        def go():
-            f1.data = pickleable.Bar(10, 15)
-        self.sql_count_(0, go)
-        session.commit()
-        
-        eq_(f1.data.x, 10)
-
-    @testing.resolve_artifact_names
-    def test_deferred_mutate(self):
-        """test mutations are detected on a deferred mutable
-        attribute."""
-        
-        sa.orm.clear_mappers()
-        mapper(Foo, mutable_t, properties={
-            'data':sa.orm.deferred(mutable_t.c.data)
-        })
-
-        f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
-        session = Session()
-        session.add(f1)
-        session.commit()
-        
-        session.close()
-        
-        f1 = session.query(Foo).first()
-        def go():
-            f1.data.x = 10
-        self.sql_count_(1, go)
-        session.commit()
-        
-        def go():
-            eq_(f1.data.x, 10)
-        self.sql_count_(1, go)
-        
-
-class PickledDictsTest(_base.MappedTest):
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table('mutable_t', metadata,
-            Column('id', Integer, primary_key=True,
-                   test_needs_autoincrement=True),
-            Column('data', 
-                sa.PickleType(comparator=operator.eq, mutable=True)))
-
-    @classmethod
-    def setup_classes(cls):
-        class Foo(_base.BasicEntity):
-            pass
-
-    @classmethod
-    @testing.resolve_artifact_names
-    def setup_mappers(cls):
-        mapper(Foo, mutable_t)
-
-    @testing.resolve_artifact_names
-    def test_dicts(self):
-        """Dictionaries may not pickle the same way twice."""
-
-        f1 = Foo()
-        f1.data = [ {
-            'personne': {'nom': u'Smith',
-                         'pers_id': 1,
-                         'prenom': u'john',
-                         'civilite': u'Mr',
-                         'int_3': False,
-                         'int_2': False,
-                         'int_1': u'23',
-                         'VenSoir': True,
-                         'str_1': u'Test',
-                         'SamMidi': False,
-                         'str_2': u'chien',
-                         'DimMidi': False,
-                         'SamSoir': True,
-                         'SamAcc': False} } ]
-
-        session = create_session(autocommit=False)
-        session.add(f1)
-        session.commit()
-
-        self.sql_count_(0, session.commit)
-
-        f1.data = [ {
-            'personne': {'nom': u'Smith',
-                         'pers_id': 1,
-                         'prenom': u'john',
-                         'civilite': u'Mr',
-                         'int_3': False,
-                         'int_2': False,
-                         'int_1': u'23',
-                         'VenSoir': True,
-                         'str_1': u'Test',
-                         'SamMidi': False,
-                         'str_2': u'chien',
-                         'DimMidi': False,
-                         'SamSoir': True,
-                         'SamAcc': False} } ]
-
-        self.sql_count_(0, session.commit)
-
-        f1.data[0]['personne']['VenSoir']= False
-        self.sql_count_(1, session.commit)
-
-        session.expunge_all()
-        f = session.query(Foo).get(f1.id)
-        eq_(f.data,
-            [ {
-            'personne': {'nom': u'Smith',
-                         'pers_id': 1,
-                         'prenom': u'john',
-                         'civilite': u'Mr',
-                         'int_3': False,
-                         'int_2': False,
-                         'int_1': u'23',
-                         'VenSoir': False,
-                         'str_1': u'Test',
-                         'SamMidi': False,
-                         'str_2': u'chien',
-                         'DimMidi': False,
-                         'SamSoir': True,
-                         'SamAcc': False} } ])
 
 
 class PKTest(_base.MappedTest):