^^^^^^^^^^^^^^^^^^^^^
This type uses ``simplejson`` to marshal Python data structures
-to/from JSON. Can be modified to use Python's builtin json encoder.
+to/from JSON. Can be modified to use Python's builtin json encoder::
-Note that the base type is not "mutable", meaning in-place changes to
-the value will not be detected by the ORM - you instead would need to
-replace the existing value with a new one to detect changes.
-The subtype ``MutableJSONEncodedDict``
-adds "mutability" to allow this, but note that "mutable" types add
-a significant performance penalty to the ORM's flush process::
from sqlalchemy.types import TypeDecorator, MutableType, VARCHAR
import simplejson
if value is not None:
value = simplejson.loads(value, use_decimal=True)
return value
+
+
+Note that the base type is not "mutable", meaning in-place changes to
+the value will not be detected by the ORM - you instead would need to
+replace the existing value with a new one to detect changes. To add
+support for mutability, we need to build a dictionary that detects
+changes, and combine this using the ``sqlalchemy.ext.mutable`` extension
+described in :ref:`mutable_toplevel`::
+
+ from sqlalchemy.ext.mutable import Mutable
+
+ class MutationDict(Mutable, dict):
+ @classmethod
+ def coerce(cls, key, value):
+ """Convert plain dictionaries to MutationDict."""
+ if not isinstance(value, MutationDict):
+ if isinstance(value, dict):
+ return MutationDict(value)
+
+ # this call will raise ValueError
+ return Mutable.coerce(key, value)
+ else:
+ return value
- class MutableJSONEncodedDict(MutableType, JSONEncodedDict):
- """Adds mutability to JSONEncodedDict."""
+ def __setitem__(self, key, value):
+ """Detect dictionary set events and emit change events."""
+
+ dict.__setitem__(self, key, value)
+ self.on_change()
+
+ def __delitem__(self, key):
+ """Detect dictionary del events and emit change events."""
+
+ dict.__delitem__(self, key)
+ self.on_change()
- def copy_value(self, value):
- return simplejson.loads(
- simplejson.dumps(value, use_decimal=True),
- use_decimal=True)
+ # additional dict methods would be overridden here
+
+The new dictionary type can be associated with JSONEncodedDict using
+an event listener established by the :meth:`.Mutable.associate_with`
+method::
+
+ MutationDict.associate_with(JSONEncodedDict)
+
+Alternatively, specific usages of ``JSONEncodedDict`` can be associated
+with ``MutationDict`` via :meth:`.Mutable.as_mutable`::
+
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', MutationDict.as_mutable(JSONEncodedDict))
+ )
Creating New Types
~~~~~~~~~~~~~~~~~~
.. automodule:: large_collection
-Mutable Data Types
-------------------
-
-Location: /examples/mutable_events/
-
-.. automodule:: mutable_events
-
Nested Sets
-----------
SQLAlchemy has a variety of ORM extensions available, which add additional
functionality to the core behavior.
+The extensions build almost entirely on public core and ORM APIs and users should
+be encouraged to read their source code to further their understanding of their
+behavior. In particular the "Horizontal Sharding", "Hybrid Attributes", and
+"Mutation Tracking" extensions are very succinct.
+
.. toctree::
:maxdepth: 1
associationproxy
declarative
+ mutable
orderinglist
horizontal_shard
hybrid
+++ /dev/null
-"""
-Illustrates how to build and use "mutable" types, such as dictionaries and
-user-defined classes, as scalar attributes which detect in-place changes.
-These types don't make use of the "mutable=True" flag, which
-performs poorly within the ORM and is being phased out, instead allowing
-changes on data to associate change events with the parent object
-as they happen in the same way as any other mapped data member.
-
-The example is based around the usage of the event model introduced in
-:ref:`event_toplevel`, along with the :func:`~.attributes.flag_modified` function
-which establishes the "dirty" flag on a particular mapped attribute. These
-functions are encapsulated in a mixin called ``TrackMutationsMixin``.
-Subclassing ``dict`` to provide "mutation tracking", then
-applying it to a custom dictionary type, looks like::
-
- class JSONEncodedDict(TypeDecorator):
- "JSON dictionary type from the types documentation"
-
- impl = VARCHAR
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- value = simplejson.dumps(value, use_decimal=True)
- return value
-
- def process_result_value(self, value, dialect):
- if value is not None:
- value = simplejson.loads(value, use_decimal=True)
- return value
-
- class MutationDict(TrackMutationsMixin, dict):
- "Subclass dict to send mutation events to the owning object."
-
- def __init__(self, other):
- self.update(other)
-
- def __setitem__(self, key, value):
- dict.__setitem__(self, key, value)
- self.on_change()
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self.on_change()
-
- # hypothetical mapping
- Base = declarative_base()
- class Foo(Base):
- __tablename__ = 'foo'
- id = Column(Integer, primary_key=True)
- data = Column(JSONEncodedDict)
-
- # add mutation tracking to `Foo.data` as a one off
- MutationDict.associate_with_attribute(Foo.data)
-
-The explicit step of associating ``MutationDict`` with ``Foo.data`` can be
-automated across a class of columns using ``associate_with_type()``::
-
- # add mutation tracking to all mapped attributes
- # that use JSONEncodedDict
- MutationDict.associate_with_type(JSONEncodedDict)
-
-All subsequent mappings will have the ``MutationDict`` wrapper applied to
-all attributes with ``JSONEncodedDict`` as their type.
-
-The example illustrates the usage of several events, including
-:meth:`.on_load`, :meth:`.on_refresh`, :meth:`.on_set`, and
-:meth:`.on_mapper_configured`.
-
-"""
\ No newline at end of file
+++ /dev/null
-# this example is probably moving to be an extension.
-
-from sqlalchemy import event
-from sqlalchemy.orm import mapper, composite, object_mapper
-
-from sqlalchemy.util import memoized_property
-import weakref
-
-class _CompositeMutationsMixinMeta(type):
- def __init__(cls, classname, bases, dict_):
- cls._setup_listeners()
- return type.__init__(cls, classname, bases, dict_)
-
-class CompositeMutationsMixin(object):
- """Mixin that defines transparent propagation of change
- events to a parent object.
-
- This class might be moved to be a SQLA extension
- due to its complexity and potential for widespread use.
-
- """
- __metaclass__ = _CompositeMutationsMixinMeta
-
- @memoized_property
- def _parents(self):
- """Dictionary of parent object->attribute name on the parent."""
-
- return weakref.WeakKeyDictionary()
-
- def __setattr__(self, key, value):
- object.__setattr__(self, key, value)
- self.on_change()
-
- def on_change(self):
- """Subclasses should call this method whenever change events occur."""
-
- for parent, key in self._parents.items():
-
- prop = object_mapper(parent).get_property(key)
- for value, attr_name in zip(self.__composite_values__(), prop._attribute_keys):
- setattr(parent, attr_name, value)
-
- @classmethod
- def _listen_on_attribute(cls, attribute):
- """Establish this type as a mutation listener for the given
- mapped descriptor.
-
- """
- key = attribute.key
- parent_cls = attribute.class_
-
- def on_load(state):
- """Listen for objects loaded or refreshed.
-
- Wrap the target data member's value with
- ``TrackMutationsMixin``.
-
- """
-
- val = state.dict.get(key, None)
- if val is not None:
- val._parents[state.obj()] = key
-
- def on_set(target, value, oldvalue, initiator):
- """Listen for set/replace events on the target
- data member.
-
- Establish a weak reference to the parent object
- on the incoming value, remove it for the one
- outgoing.
-
- """
-
- value._parents[target.obj()] = key
- if isinstance(oldvalue, cls):
- oldvalue._parents.pop(state.obj(), None)
- return value
-
- event.listen(parent_cls, 'on_load', on_load, raw=True)
- event.listen(parent_cls, 'on_refresh', on_load, raw=True)
- event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
-
- @classmethod
- def _setup_listeners(cls):
- """Associate this wrapper with all future mapped compoistes
- of the given type.
-
- This is a convenience method that calls ``associate_with_attribute`` automatically.
-
- """
-
- def listen_for_type(mapper, class_):
- for prop in mapper.iterate_properties:
- if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
- cls._listen_on_attribute(getattr(class_, prop.key))
-
- event.listen(mapper, 'on_mapper_configured', listen_for_type)
-
-
-if __name__ == '__main__':
- from sqlalchemy import Column, Integer, create_engine
- from sqlalchemy.orm import Session
- from sqlalchemy.ext.declarative import declarative_base
-
- class Point(CompositeMutationsMixin):
- def __init__(self, x, y):
- self.x = x
- self.y = y
-
- def __composite_values__(self):
- return self.x, self.y
-
- def __eq__(self, other):
- return isinstance(other, Point) and \
- other.x == self.x and \
- other.y == self.y
-
- Base = declarative_base()
- class Foo(Base):
- __tablename__ = 'foo'
- id = Column(Integer, primary_key=True)
- data = composite(Point, Column('x', Integer), Column('y', Integer))
-
- e = create_engine('sqlite://', echo=True)
-
- Base.metadata.create_all(e)
-
- sess = Session(e)
- d = Point(3, 4)
- f1 = Foo(data=d)
- sess.add(f1)
- sess.commit()
-
- f1.data.y = 5
- sess.commit()
-
- assert f1.data == Point(3, 5)
-
-
\ No newline at end of file
+++ /dev/null
-# this example is probably moving to be an extension.
-
-from sqlalchemy.orm.attributes import flag_modified
-from sqlalchemy import event
-from sqlalchemy.orm import mapper
-from sqlalchemy.util import memoized_property
-import weakref
-
-class TrackMutationsMixin(object):
- """Mixin that defines transparent propagation of change
- events to a parent object.
-
- This class might be moved to be a SQLA extension
- due to its complexity and potential for widespread use.
-
- """
- @memoized_property
- def _parents(self):
- """Dictionary of parent object->attribute name on the parent."""
-
- return weakref.WeakKeyDictionary()
-
- def on_change(self):
- """Subclasses should call this method whenever change events occur."""
-
- for parent, key in self._parents.items():
- flag_modified(parent, key)
-
- @classmethod
- def associate_with_attribute(cls, attribute):
- """Establish this type as a mutation listener for the given
- mapped descriptor.
-
- """
- key = attribute.key
- parent_cls = attribute.class_
-
- def on_load(state):
- """Listen for objects loaded or refreshed.
-
- Wrap the target data member's value with
- ``TrackMutationsMixin``.
-
- """
- val = state.dict.get(key, None)
- if val is not None:
- val = cls(val)
- state.dict[key] = val
- val._parents[state.obj()] = key
-
- def on_set(target, value, oldvalue, initiator):
- """Listen for set/replace events on the target
- data member.
-
- Establish a weak reference to the parent object
- on the incoming value, remove it for the one
- outgoing.
-
- """
-
- if not isinstance(value, cls):
- value = cls(value)
- value._parents[target.obj()] = key
- if isinstance(oldvalue, cls):
- oldvalue._parents.pop(state.obj(), None)
- return value
-
- event.listen(parent_cls, 'on_load', on_load, raw=True)
- event.listen(parent_cls, 'on_refresh', on_load, raw=True)
- event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
-
- @classmethod
- def associate_with_type(cls, type_):
- """Associate this wrapper with all future mapped columns
- of the given type.
-
- This is a convenience method that calls ``associate_with_attribute`` automatically.
-
- """
-
- def listen_for_type(mapper, class_):
- for prop in mapper.iterate_properties:
- if hasattr(prop, 'columns') and isinstance(prop.columns[0].type, type_):
- cls.associate_with_attribute(getattr(class_, prop.key))
-
- event.listen(mapper, 'on_mapper_configured', listen_for_type)
-
-
-if __name__ == '__main__':
- from sqlalchemy import Column, Integer, VARCHAR, create_engine
- from sqlalchemy.orm import Session
- from sqlalchemy.types import TypeDecorator
- from sqlalchemy.ext.declarative import declarative_base
- import simplejson
-
- class JSONEncodedDict(TypeDecorator):
- """Represents an immutable structure as a json-encoded string.
-
- Usage::
-
- JSONEncodedDict(255)
-
- """
-
- impl = VARCHAR
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- value = simplejson.dumps(value, use_decimal=True)
-
- return value
-
- def process_result_value(self, value, dialect):
- if value is not None:
- value = simplejson.loads(value, use_decimal=True)
- return value
-
- class MutationDict(TrackMutationsMixin, dict):
- def __init__(self, other):
- self.update(other)
-
- def __setitem__(self, key, value):
- dict.__setitem__(self, key, value)
- self.on_change()
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self.on_change()
-
- # TODO: do the metaclass approach the same as composite
- MutationDict.associate_with_type(JSONEncodedDict)
-
- Base = declarative_base()
- class Foo(Base):
- __tablename__ = 'foo'
- id = Column(Integer, primary_key=True)
- data = Column(JSONEncodedDict)
-
- e = create_engine('sqlite://', echo=True)
-
- Base.metadata.create_all(e)
-
- sess = Session(e)
- f1 = Foo(data={'a':'b'})
- sess.add(f1)
- sess.commit()
-
- f1.data['a'] = 'c'
- sess.commit()
-
- assert f1.data == {'a':'c'}
-
-
\ No newline at end of file
"mutable types" mode in the ORM. Be sure to read the
notes for :class:`.MutableType` regarding ORM
performance implications (default changed from ``True`` in
- 0.7.0).
+ 0.7.0).
+
+    .. note:: This functionality is now superseded by the
+ ``sqlalchemy.ext.mutable`` extension described in
+ :ref:`mutable_toplevel`.
:param as_tuple=False: Specify whether return results
should be converted to tuples from lists. DBAPIs such
--- /dev/null
+"""Provide support for tracking of in-place changes to scalar values,
+which are propagated to owning parent objects.
+
+The ``mutable`` extension is a replacement for the :class:`.types.MutableType`
+class as well as the ``mutable=True`` flag available on types which subclass
+it.
+
+
+"""
+from sqlalchemy.orm.attributes import flag_modified
+from sqlalchemy import event, types
+from sqlalchemy.orm import mapper, object_mapper
+from sqlalchemy.util import memoized_property
+import weakref
+
+class Mutable(object):
+ """Mixin that defines transparent propagation of change
+ events to a parent object.
+
+ """
+
+ @memoized_property
+ def _parents(self):
+ """Dictionary of parent object->attribute name on the parent."""
+
+ return weakref.WeakKeyDictionary()
+
+ def on_change(self):
+ """Subclasses should call this method whenever change events occur."""
+
+ for parent, key in self._parents.items():
+ flag_modified(parent, key)
+
+ @classmethod
+ def coerce(cls, key, value):
+ """Given a value, coerce it into this type.
+
+ By default raises ValueError.
+ """
+ if value is None:
+ return None
+ raise ValueError("Attribute '%s' accepts objects of type %s" % (key, cls))
+
+
+ @classmethod
+ def associate_with_attribute(cls, attribute):
+ """Establish this type as a mutation listener for the given
+ mapped descriptor.
+
+ """
+ key = attribute.key
+ parent_cls = attribute.class_
+
+ def on_load(state):
+ """Listen for objects loaded or refreshed.
+
+ Wrap the target data member's value with
+ ``Mutable``.
+
+ """
+ val = state.dict.get(key, None)
+ if val is not None:
+ val = cls.coerce(key, val)
+ state.dict[key] = val
+ val._parents[state.obj()] = key
+
+ def on_set(target, value, oldvalue, initiator):
+ """Listen for set/replace events on the target
+ data member.
+
+ Establish a weak reference to the parent object
+ on the incoming value, remove it for the one
+ outgoing.
+
+ """
+
+ if not isinstance(value, cls):
+ value = cls.coerce(key, value)
+ value._parents[target.obj()] = key
+ if isinstance(oldvalue, cls):
+ oldvalue._parents.pop(state.obj(), None)
+ return value
+
+ event.listen(parent_cls, 'on_load', on_load, raw=True)
+ event.listen(parent_cls, 'on_refresh', on_load, raw=True)
+ event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+
+ # TODO: need a deserialize hook here
+
+ @classmethod
+ def associate_with(cls, sqltype):
+ """Associate this wrapper with all future mapped columns
+ of the given type.
+
+ This is a convenience method that calls ``associate_with_attribute`` automatically.
+
+ .. warning:: The listeners established by this method are *global*
+ to all mappers, and are *not* garbage collected. Only use
+ :meth:`.associate_with` for types that are permanent to an application,
+ not with ad-hoc types else this will cause unbounded growth
+ in memory usage.
+
+ """
+
+ def listen_for_type(mapper, class_):
+ for prop in mapper.iterate_properties:
+ if hasattr(prop, 'columns'):
+ if isinstance(prop.columns[0].type, sqltype):
+ cls.associate_with_attribute(getattr(class_, prop.key))
+ break
+
+ event.listen(mapper, 'on_mapper_configured', listen_for_type)
+
+ @classmethod
+ def as_mutable(cls, sqltype):
+ """Associate a SQL type with this mutable Python type.
+
+ This establishes listeners that will detect ORM mappings against
+ the given type, adding mutation event trackers to those mappings.
+
+ The type is returned, unconditionally as an instance, so that
+ :meth:`.as_mutable` can be used inline::
+
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', MyMutableType.as_mutable(PickleType))
+ )
+
+ Note that the returned type is always an instance, even if a class
+ is given, and that only columns which are declared specifically with that
+ type instance receive additional instrumentation.
+
+ To associate a particular mutable type with all occurences of a
+ particular type, use the :meth:`.Mutable.associate_with` classmethod
+ of the particular :meth:`.Mutable` subclass to establish a global
+ assoiation.
+
+ .. warning:: The listeners established by this method are *global*
+ to all mappers, and are *not* garbage collected. Only use
+ :meth:`.as_mutable` for types that are permanent to an application,
+ not with ad-hoc types else this will cause unbounded growth
+ in memory usage.
+
+ """
+ sqltype = types.to_instance(sqltype)
+
+ def listen_for_type(mapper, class_):
+ for prop in mapper.iterate_properties:
+ if hasattr(prop, 'columns'):
+ if prop.columns[0].type is sqltype:
+ cls.associate_with_attribute(getattr(class_, prop.key))
+ break
+
+ event.listen(mapper, 'on_mapper_configured', listen_for_type)
+
+ return sqltype
+
+
+class _MutableCompositeMeta(type):
+    # Metaclass for MutableComposite: establishes the mapper event
+    # listeners for each new subclass at class-creation time, so that
+    # users need only subclass MutableComposite to get tracking.
+    def __init__(cls, classname, bases, dict_):
+        cls._setup_listeners()
+        return type.__init__(cls, classname, bases, dict_)
+
+class MutableComposite(object):
+ """Mixin that defines transparent propagation of change
+ events on a SQLAlchemy "composite" object to its
+ owning parent or parents.
+
+ Composite classes, in addition to meeting the usage contract
+ defined in :ref:`mapper_composite`, also define some system
+ of relaying change events to the given :meth:`.on_change`
+ method, which will notify all parents of the change. Below
+ the special Python method ``__setattr__`` is used to intercept
+ all changes::
+
+ class Point(MutableComposite):
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+ def __setattr__(self, key, value):
+ object.__setattr__(self, key, value)
+ self.on_change()
+
+ def __composite_values__(self):
+ return self.x, self.y
+
+ def __eq__(self, other):
+ return isinstance(other, Point) and \
+ other.x == self.x and \
+ other.y == self.y
+
+ :class:`.MutableComposite` defines a metaclass which augments
+ the creation of :class:`.MutableComposite` subclasses with an event
+ that will listen for any :func:`~.orm.composite` mappings against the
+ new type, establishing listeners that will track parent associations.
+
+ .. warning:: The listeners established by the :class:`.MutableComposite`
+ class are *global* to all mappers, and are *not* garbage collected. Only use
+ :class:`.MutableComposite` for types that are permanent to an application,
+ not with ad-hoc types else this will cause unbounded growth
+ in memory usage.
+
+ """
+ __metaclass__ = _MutableCompositeMeta
+
+ @memoized_property
+ def _parents(self):
+ """Dictionary of parent object->attribute name on the parent."""
+
+ return weakref.WeakKeyDictionary()
+
+ def on_change(self):
+ """Subclasses should call this method whenever change events occur."""
+
+ for parent, key in self._parents.items():
+
+ prop = object_mapper(parent).get_property(key)
+ for value, attr_name in zip(
+ self.__composite_values__(),
+ prop._attribute_keys):
+ setattr(parent, attr_name, value)
+
+ @classmethod
+ def _listen_on_attribute(cls, attribute):
+ """Establish this type as a mutation listener for the given
+ mapped descriptor.
+
+ """
+ key = attribute.key
+ parent_cls = attribute.class_
+
+ def on_load(state):
+ """Listen for objects loaded or refreshed.
+
+ Wrap the target data member's value with
+ ``Mutable``.
+
+ """
+
+ val = state.dict.get(key, None)
+ if val is not None:
+ val._parents[state.obj()] = key
+
+ def on_set(target, value, oldvalue, initiator):
+ """Listen for set/replace events on the target
+ data member.
+
+ Establish a weak reference to the parent object
+ on the incoming value, remove it for the one
+ outgoing.
+
+ """
+
+ value._parents[target.obj()] = key
+ if isinstance(oldvalue, cls):
+ oldvalue._parents.pop(state.obj(), None)
+ return value
+
+ event.listen(parent_cls, 'on_load', on_load, raw=True)
+ event.listen(parent_cls, 'on_refresh', on_load, raw=True)
+ event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+
+ # TODO: need a deserialize hook here
+
+ @classmethod
+ def _setup_listeners(cls):
+ """Associate this wrapper with all future mapped compoistes
+ of the given type.
+
+ This is a convenience method that calls ``associate_with_attribute`` automatically.
+
+ """
+
+ def listen_for_type(mapper, class_):
+ for prop in mapper.iterate_properties:
+ if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
+ cls._listen_on_attribute(getattr(class_, prop.key))
+
+ event.listen(mapper, 'on_mapper_configured', listen_for_type)
+
event.listen(self.parent, 'on_refresh', load_handler, raw=True)
event.listen(self.parent, "on_expire", expire_handler, raw=True)
+ # TODO: need a deserialize hook here
@util.memoized_property
def _attribute_keys(self):
if 'load_path' in state:
self.load_path = interfaces.deserialize_path(state['load_path'])
-
+
+ # TODO: need an event here, link to composite, mutable
+
def initialize(self, key):
"""Set this attribute to an empty value or collection,
based on the AttributeImpl in use."""
are serialized into strings are examples of "mutable"
column structures.
+    .. note:: This functionality is now superseded by the
+ ``sqlalchemy.ext.mutable`` extension described in
+ :ref:`mutable_toplevel`.
+
When this method is overridden, :meth:`copy_value` should
also be supplied. The :class:`.MutableType` mixin
is recommended as a helper.
objects alone. Values such as dicts, lists which
are serialized into strings are examples of "mutable"
column structures.
-
- When this method is overridden, :meth:`copy_value` should
- also be supplied. The :class:`.MutableType` mixin
- is recommended as a helper.
+
+    .. note:: This functionality is now superseded by the
+ ``sqlalchemy.ext.mutable`` extension described in
+ :ref:`mutable_toplevel`.
"""
return self.impl.is_mutable()
class MutableType(object):
"""A mixin that marks a :class:`TypeEngine` as representing
- a mutable Python object type.
-
+ a mutable Python object type. This functionality is used
+ only by the ORM.
+
+    .. note:: :class:`.MutableType` is superseded as of SQLAlchemy 0.7
+ by the ``sqlalchemy.ext.mutable`` extension described in
+ :ref:`mutable_toplevel`. This extension provides an event
+ driven approach to in-place mutation detection that does not
+ incur the severe performance penalty of the :class:`.MutableType`
+ approach.
+
"mutable" means that changes can occur in place to a value
of this type. Examples includes Python lists, dictionaries,
and sets, as well as user-defined objects. The primary
represent a copy and compare function for values of this
type - implementing subclasses should override these
appropriately.
-
- The usage of mutable types has significant performance
- implications when using the ORM. In order to detect changes, the
- ORM must create a copy of the value when it is first
- accessed, so that changes to the current value can be compared
- against the "clean" database-loaded value. Additionally, when the
- ORM checks to see if any data requires flushing, it must scan
- through all instances in the session which are known to have
- "mutable" attributes and compare the current value of each
- one to its "clean"
- value. So for example, if the Session contains 6000 objects (a
- fairly large amount) and autoflush is enabled, every individual
- execution of :class:`Query` will require a full scan of that subset of
- the 6000 objects that have mutable attributes, possibly resulting
- in tens of thousands of additional method calls for every query.
- Note that for small numbers (< 100 in the Session at a time)
- of objects with "mutable" values, the performance degradation is
- negligible.
+ .. warning:: The usage of mutable types has significant performance
+ implications when using the ORM. In order to detect changes, the
+ ORM must create a copy of the value when it is first
+ accessed, so that changes to the current value can be compared
+ against the "clean" database-loaded value. Additionally, when the
+ ORM checks to see if any data requires flushing, it must scan
+ through all instances in the session which are known to have
+ "mutable" attributes and compare the current value of each
+ one to its "clean"
+ value. So for example, if the Session contains 6000 objects (a
+ fairly large amount) and autoflush is enabled, every individual
+ execution of :class:`Query` will require a full scan of that subset of
+ the 6000 objects that have mutable attributes, possibly resulting
+ in tens of thousands of additional method calls for every query.
- It is perfectly fine to represent "mutable" data types with the
- "mutable" flag set to False, which eliminates any performance
- issues. It means that the ORM will only reliably detect changes
- for values of this type if a newly modified value is of a different
- identity (i.e., ``id(value)``) than what was present before -
- i.e., instead of operations like these::
-
- myobject.somedict['foo'] = 'bar'
- myobject.someset.add('bar')
- myobject.somelist.append('bar')
-
- You'd instead say::
+ As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` is provided which
+ allows an event driven approach to in-place mutation detection. This
+ approach should now be favored over the usage of :class:`.MutableType`
+ with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in
+ :ref:`mutable_toplevel`.
- myobject.somevalue = {'foo':'bar'}
- myobject.someset = myobject.someset.union(['bar'])
- myobject.somelist = myobject.somelist + ['bar']
-
- A future release of SQLAlchemy will include instrumented
- collection support for mutable types, such that at least usage of
- plain Python datastructures will be able to emit events for
- in-place changes, removing the need for pessimistic scanning for
- changes.
-
"""
def is_mutable(self):
``comparator`` argument is present. See
:class:`.MutableType` for details on "mutable" type
behavior. (default changed from ``True`` in
- 0.7.0).
+ 0.7.0).
+
+    .. note:: This functionality is now superseded by the
+ ``sqlalchemy.ext.mutable`` extension described in
+ :ref:`mutable_toplevel`.
:param comparator: a 2-arg callable predicate used
to compare values of this type. If left as ``None``,
--- /dev/null
+from sqlalchemy import Integer
+from sqlalchemy.types import PickleType, TypeDecorator, VARCHAR
+from sqlalchemy.orm import mapper, Session, composite
+from sqlalchemy.orm.mapper import Mapper
+from sqlalchemy.orm.instrumentation import ClassManager
+from test.lib.schema import Table, Column
+from test.lib.testing import eq_
+from test.lib import testing
+from test.orm import _base
+import sys
+
+class _MutableDictTestBase(object):
+ @classmethod
+ def _type_fixture(cls):
+ from sqlalchemy.ext.mutable import Mutable
+
+ # needed for pickle support
+ global MutationDict
+
+ class MutationDict(Mutable, dict):
+ @classmethod
+ def coerce(cls, key, value):
+ if not isinstance(value, MutationDict):
+ if isinstance(value, dict):
+ return MutationDict(value)
+ return Mutable.coerce(key, value)
+ else:
+ return value
+
+ def __getstate__(self):
+ return dict(self)
+
+ def __setstate__(self, dict):
+ self.update(dict)
+
+ def __setitem__(self, key, value):
+ dict.__setitem__(self, key, value)
+ self.on_change()
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+ self.on_change()
+ return MutationDict
+
+    @classmethod
+    @testing.resolve_artifact_names
+    def setup_mappers(cls):
+ class Foo(_base.BasicEntity):
+ pass
+
+ mapper(Foo, foo)
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch.clear()
+ ClassManager.dispatch.clear()
+ super(_MutableDictTestBase, self).teardown()
+
+ @testing.resolve_artifact_names
+ def test_in_place_mutation(self):
+ sess = Session()
+
+ f1 = Foo(data={'a':'b'})
+ sess.add(f1)
+ sess.commit()
+
+ f1.data['a'] = 'c'
+ sess.commit()
+
+ eq_(f1.data, {'a':'c'})
+
+ @testing.resolve_artifact_names
+ def _test_non_mutable(self):
+ sess = Session()
+
+ f1 = Foo(non_mutable_data={'a':'b'})
+ sess.add(f1)
+ sess.commit()
+
+ f1.non_mutable_data['a'] = 'c'
+ sess.commit()
+
+ eq_(f1.non_mutable_data, {'a':'b'})
+
+class MutableWithScalarPickleTest(_MutableDictTestBase, _base.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ MutationDict = cls._type_fixture()
+
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True, test_needs_pk=True),
+ Column('data', MutationDict.as_mutable(PickleType)),
+ Column('non_mutable_data', PickleType)
+ )
+
+ def test_non_mutable(self):
+ self._test_non_mutable()
+
+class MutableWithScalarJSONTest(_MutableDictTestBase, _base.MappedTest):
+ # json introduced in 2.6
+ __skip_if__ = lambda : sys.version_info < (2, 6),
+
+ @classmethod
+ def define_tables(cls, metadata):
+ import json
+
+ class JSONEncodedDict(TypeDecorator):
+ impl = VARCHAR
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = json.dumps(value)
+
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value = json.loads(value)
+ return value
+
+ MutationDict = cls._type_fixture()
+
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True, test_needs_pk=True),
+ Column('data', MutationDict.as_mutable(JSONEncodedDict)),
+ Column('non_mutable_data', JSONEncodedDict)
+ )
+
+ def test_non_mutable(self):
+ self._test_non_mutable()
+
+class MutableAssociationScalarPickleTest(_MutableDictTestBase, _base.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ MutationDict = cls._type_fixture()
+ MutationDict.associate_with(PickleType)
+
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True, test_needs_pk=True),
+ Column('data', PickleType)
+ )
+
+class MutableAssociationScalarJSONTest(_MutableDictTestBase, _base.MappedTest):
+ # json introduced in 2.6
+ __skip_if__ = lambda : sys.version_info < (2, 6),
+
+ @classmethod
+ def define_tables(cls, metadata):
+ import json
+
+ class JSONEncodedDict(TypeDecorator):
+ impl = VARCHAR
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = json.dumps(value)
+
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value = json.loads(value)
+ return value
+
+ MutationDict = cls._type_fixture()
+ MutationDict.associate_with(JSONEncodedDict)
+
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True, test_needs_pk=True),
+ Column('data', JSONEncodedDict)
+ )
+
+class MutableCompositesTest(_base.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True, test_needs_pk=True),
+ Column('x', Integer),
+ Column('y', Integer)
+ )
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch.clear()
+ ClassManager.dispatch.clear()
+ super(MutableCompositesTest, self).teardown()
+
+ @classmethod
+ def _type_fixture(cls):
+
+ from sqlalchemy.ext.mutable import Mutable
+ from sqlalchemy.ext.mutable import MutableComposite
+
+ global Point
+
+ class Point(MutableComposite):
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+ def __setattr__(self, key, value):
+ object.__setattr__(self, key, value)
+ self.on_change()
+
+ def __composite_values__(self):
+ return self.x, self.y
+
+ def __eq__(self, other):
+ return isinstance(other, Point) and \
+ other.x == self.x and \
+ other.y == self.y
+ return Point
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_mappers(cls):
+ Point = cls._type_fixture()
+
+ class Foo(_base.BasicEntity):
+ pass
+
+ mapper(Foo, foo, properties={
+ 'data':composite(Point, foo.c.x, foo.c.y)
+ })
+
+ @testing.resolve_artifact_names
+ def test_in_place_mutation(self):
+ sess = Session()
+ d = Point(3, 4)
+ f1 = Foo(data=d)
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.y = 5
+ sess.commit()
+
+ eq_(f1.data, Point(3, 5))
+
+
\ No newline at end of file
--- /dev/null
+"""Test the interaction of :class:`.MutableType` as well as the
+``mutable=True`` flag with the ORM.
+
+For new mutability functionality, see test.ext.test_mutable.
+
+"""
+from test.lib.testing import eq_
+import operator
+from sqlalchemy.orm import mapper as orm_mapper
+
+import sqlalchemy as sa
+from sqlalchemy import Integer, String, ForeignKey
+from test.lib import testing, pickleable
+from test.lib.schema import Table, Column
+from sqlalchemy.orm import mapper, create_session, Session, attributes
+from test.lib.testing import eq_, ne_
+from test.lib.util import gc_collect
+from test.orm import _base, _fixtures
+
+class MutableTypesTest(_base.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('mutable_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', sa.PickleType(mutable=True)),
+ Column('val', sa.Unicode(30)))
+
+ @classmethod
+ def setup_classes(cls):
+ class Foo(_base.BasicEntity):
+ pass
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_mappers(cls):
+ mapper(Foo, mutable_t)
+
+ @testing.resolve_artifact_names
+ def test_modified_status(self):
+ f1 = Foo(data = pickleable.Bar(4,5))
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ f2 = session.query(Foo).first()
+ assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
+ eq_(f2.data, f1.data)
+
+ f2.data.y = 19
+ assert f2 in session.dirty
+ assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
+
+ @testing.resolve_artifact_names
+ def test_mutations_persisted(self):
+ f1 = Foo(data = pickleable.Bar(4,5))
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+ f1.data
+ session.close()
+
+ f2 = session.query(Foo).first()
+ f2.data.y = 19
+ session.commit()
+ f2.data
+ session.close()
+
+ f3 = session.query(Foo).first()
+ ne_(f3.data,f1.data)
+ eq_(f3.data, pickleable.Bar(4, 19))
+
+ @testing.resolve_artifact_names
+ def test_no_unnecessary_update(self):
+ f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ self.sql_count_(0, session.commit)
+
+ f1.val = u'someothervalue'
+ self.assert_sql(testing.db, session.commit, [
+ ("UPDATE mutable_t SET val=:val "
+ "WHERE mutable_t.id = :mutable_t_id",
+ {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
+
+ f1.val = u'hi'
+ f1.data.x = 9
+ self.assert_sql(testing.db, session.commit, [
+ ("UPDATE mutable_t SET data=:data, val=:val "
+ "WHERE mutable_t.id = :mutable_t_id",
+ {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
+
+ @testing.resolve_artifact_names
+ def test_mutated_state_resurrected(self):
+ f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ f1.data.y = 19
+ del f1
+
+ gc_collect()
+ assert len(session.identity_map) == 1
+
+ session.commit()
+
+ assert session.query(Foo).one().data == pickleable.Bar(4, 19)
+
+ @testing.resolve_artifact_names
+ def test_mutated_plus_scalar_state_change_resurrected(self):
+ """test that a non-mutable attribute event subsequent to
+ a mutable event prevents the object from falling into
+ resurrected state.
+
+ """
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+ f1.data.x = 10
+ f1.data.y = 15
+ f1.val=u'some new val'
+
+ assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
+
+ del f1
+ session.commit()
+ eq_(
+ session.query(Foo.val).all(),
+ [('some new val', )]
+ )
+
+ @testing.resolve_artifact_names
+ def test_non_mutated_state_not_resurrected(self):
+ f1 = Foo(data = pickleable.Bar(4,5))
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ session = Session()
+ f1 = session.query(Foo).first()
+ del f1
+ gc_collect()
+
+ assert len(session.identity_map) == 0
+ f1 = session.query(Foo).first()
+ assert not attributes.instance_state(f1).modified
+
+ @testing.resolve_artifact_names
+ def test_scalar_no_net_change_no_update(self):
+ """Test that a no-net-change on a scalar attribute event
+ doesn't cause an UPDATE for a mutable state.
+
+ """
+
+ f1 = Foo(val=u'hi')
+
+ session = Session()
+ session.add(f1)
+ session.commit()
+ session.close()
+
+ f1 = session.query(Foo).first()
+ f1.val = u'hi'
+ self.sql_count_(0, session.commit)
+
+ @testing.resolve_artifact_names
+ def test_expire_attribute_set(self):
+ """test no SELECT emitted when assigning to an expired
+ mutable attribute.
+
+ """
+
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ assert 'data' not in f1.__dict__
+ def go():
+ f1.data = pickleable.Bar(10, 15)
+ self.sql_count_(0, go)
+ session.commit()
+
+ eq_(f1.data.x, 10)
+
+ @testing.resolve_artifact_names
+ def test_expire_mutate(self):
+ """test mutations are detected on an expired mutable
+ attribute."""
+
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ assert 'data' not in f1.__dict__
+ def go():
+ f1.data.x = 10
+ self.sql_count_(1, go)
+ session.commit()
+
+ eq_(f1.data.x, 10)
+
+ @testing.resolve_artifact_names
+ def test_deferred_attribute_set(self):
+ """test no SELECT emitted when assigning to a deferred
+ mutable attribute.
+
+ """
+ sa.orm.clear_mappers()
+ mapper(Foo, mutable_t, properties={
+ 'data':sa.orm.deferred(mutable_t.c.data)
+ })
+
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ session.close()
+
+ f1 = session.query(Foo).first()
+ def go():
+ f1.data = pickleable.Bar(10, 15)
+ self.sql_count_(0, go)
+ session.commit()
+
+ eq_(f1.data.x, 10)
+
+ @testing.resolve_artifact_names
+ def test_deferred_mutate(self):
+ """test mutations are detected on a deferred mutable
+ attribute."""
+
+ sa.orm.clear_mappers()
+ mapper(Foo, mutable_t, properties={
+ 'data':sa.orm.deferred(mutable_t.c.data)
+ })
+
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+
+ session.close()
+
+ f1 = session.query(Foo).first()
+ def go():
+ f1.data.x = 10
+ self.sql_count_(1, go)
+ session.commit()
+
+ def go():
+ eq_(f1.data.x, 10)
+ self.sql_count_(1, go)
+
+
+class PickledDictsTest(_base.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('mutable_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data',
+ sa.PickleType(comparator=operator.eq, mutable=True)))
+
+ @classmethod
+ def setup_classes(cls):
+ class Foo(_base.BasicEntity):
+ pass
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_mappers(cls):
+ mapper(Foo, mutable_t)
+
+ @testing.resolve_artifact_names
+ def test_dicts(self):
+ """Dictionaries may not pickle the same way twice."""
+
+ f1 = Foo()
+ f1.data = [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': True,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ]
+
+ session = create_session(autocommit=False)
+ session.add(f1)
+ session.commit()
+
+ self.sql_count_(0, session.commit)
+
+ f1.data = [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': True,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ]
+
+ self.sql_count_(0, session.commit)
+
+ f1.data[0]['personne']['VenSoir']= False
+ self.sql_count_(1, session.commit)
+
+ session.expunge_all()
+ f = session.query(Foo).get(f1.id)
+ eq_(f.data,
+ [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': False,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ])
})
mapper(Address, addresses)
-
class FixtureDataTest(TransactionTest):
run_inserts = 'each'
s.flush()
self.assert_sql_count(testing.db, go, 0)
-class MutableTypesTest(_base.MappedTest):
-
- @classmethod
- def define_tables(cls, metadata):
- Table('mutable_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', sa.PickleType(mutable=True)),
- Column('val', sa.Unicode(30)))
-
- @classmethod
- def setup_classes(cls):
- class Foo(_base.BasicEntity):
- pass
-
- @classmethod
- @testing.resolve_artifact_names
- def setup_mappers(cls):
- mapper(Foo, mutable_t)
-
- @testing.resolve_artifact_names
- def test_modified_status(self):
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
-
- f2 = session.query(Foo).first()
- assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
- eq_(f2.data, f1.data)
-
- f2.data.y = 19
- assert f2 in session.dirty
- assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
-
- @testing.resolve_artifact_names
- def test_mutations_persisted(self):
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
- f1.data
- session.close()
-
- f2 = session.query(Foo).first()
- f2.data.y = 19
- session.commit()
- f2.data
- session.close()
-
- f3 = session.query(Foo).first()
- ne_(f3.data,f1.data)
- eq_(f3.data, pickleable.Bar(4, 19))
-
- @testing.resolve_artifact_names
- def test_no_unnecessary_update(self):
- f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
-
- self.sql_count_(0, session.commit)
-
- f1.val = u'someothervalue'
- self.assert_sql(testing.db, session.commit, [
- ("UPDATE mutable_t SET val=:val "
- "WHERE mutable_t.id = :mutable_t_id",
- {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
-
- f1.val = u'hi'
- f1.data.x = 9
- self.assert_sql(testing.db, session.commit, [
- ("UPDATE mutable_t SET data=:data, val=:val "
- "WHERE mutable_t.id = :mutable_t_id",
- {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
-
- @testing.resolve_artifact_names
- def test_mutated_state_resurrected(self):
- f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
-
- f1.data.y = 19
- del f1
-
- gc_collect()
- assert len(session.identity_map) == 1
-
- session.commit()
-
- assert session.query(Foo).one().data == pickleable.Bar(4, 19)
-
- @testing.resolve_artifact_names
- def test_mutated_plus_scalar_state_change_resurrected(self):
- """test that a non-mutable attribute event subsequent to
- a mutable event prevents the object from falling into
- resurrected state.
-
- """
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
- f1.data.x = 10
- f1.data.y = 15
- f1.val=u'some new val'
-
- assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
-
- del f1
- session.commit()
- eq_(
- session.query(Foo.val).all(),
- [('some new val', )]
- )
-
- @testing.resolve_artifact_names
- def test_non_mutated_state_not_resurrected(self):
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
-
- session = Session()
- f1 = session.query(Foo).first()
- del f1
- gc_collect()
-
- assert len(session.identity_map) == 0
- f1 = session.query(Foo).first()
- assert not attributes.instance_state(f1).modified
-
- @testing.resolve_artifact_names
- def test_scalar_no_net_change_no_update(self):
- """Test that a no-net-change on a scalar attribute event
- doesn't cause an UPDATE for a mutable state.
-
- """
-
- f1 = Foo(val=u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
- session.close()
-
- f1 = session.query(Foo).first()
- f1.val = u'hi'
- self.sql_count_(0, session.commit)
-
- @testing.resolve_artifact_names
- def test_expire_attribute_set(self):
- """test no SELECT emitted when assigning to an expired
- mutable attribute.
- """
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- assert 'data' not in f1.__dict__
- def go():
- f1.data = pickleable.Bar(10, 15)
- self.sql_count_(0, go)
- session.commit()
-
- eq_(f1.data.x, 10)
- @testing.resolve_artifact_names
- def test_expire_mutate(self):
- """test mutations are detected on an expired mutable
- attribute."""
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- assert 'data' not in f1.__dict__
- def go():
- f1.data.x = 10
- self.sql_count_(1, go)
- session.commit()
-
- eq_(f1.data.x, 10)
-
- @testing.resolve_artifact_names
- def test_deferred_attribute_set(self):
- """test no SELECT emitted when assigning to a deferred
- mutable attribute.
-
- """
- sa.orm.clear_mappers()
- mapper(Foo, mutable_t, properties={
- 'data':sa.orm.deferred(mutable_t.c.data)
- })
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- session.close()
-
- f1 = session.query(Foo).first()
- def go():
- f1.data = pickleable.Bar(10, 15)
- self.sql_count_(0, go)
- session.commit()
-
- eq_(f1.data.x, 10)
-
- @testing.resolve_artifact_names
- def test_deferred_mutate(self):
- """test mutations are detected on a deferred mutable
- attribute."""
-
- sa.orm.clear_mappers()
- mapper(Foo, mutable_t, properties={
- 'data':sa.orm.deferred(mutable_t.c.data)
- })
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- session.close()
-
- f1 = session.query(Foo).first()
- def go():
- f1.data.x = 10
- self.sql_count_(1, go)
- session.commit()
-
- def go():
- eq_(f1.data.x, 10)
- self.sql_count_(1, go)
-
-
-class PickledDictsTest(_base.MappedTest):
-
- @classmethod
- def define_tables(cls, metadata):
- Table('mutable_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',
- sa.PickleType(comparator=operator.eq, mutable=True)))
-
- @classmethod
- def setup_classes(cls):
- class Foo(_base.BasicEntity):
- pass
-
- @classmethod
- @testing.resolve_artifact_names
- def setup_mappers(cls):
- mapper(Foo, mutable_t)
-
- @testing.resolve_artifact_names
- def test_dicts(self):
- """Dictionaries may not pickle the same way twice."""
-
- f1 = Foo()
- f1.data = [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': True,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ]
-
- session = create_session(autocommit=False)
- session.add(f1)
- session.commit()
-
- self.sql_count_(0, session.commit)
-
- f1.data = [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': True,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ]
-
- self.sql_count_(0, session.commit)
-
- f1.data[0]['personne']['VenSoir']= False
- self.sql_count_(1, session.commit)
-
- session.expunge_all()
- f = session.query(Foo).get(f1.id)
- eq_(f.data,
- [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': False,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ])
class PKTest(_base.MappedTest):