0.8.0b1
=======
- orm
+ - [removed] The legacy "mutable" system of the
+ ORM, including the MutableType class as well
+ as the mutable=True flag on PickleType
+ and postgresql.ARRAY has been removed.
+ In-place mutations are now detected by the ORM
+ using the sqlalchemy.ext.mutable extension,
+ introduced in 0.7. The removal of MutableType
+ and associated constructs removes a great
+ deal of complexity from SQLAlchemy's internals.
+ The approach performed poorly, as it required a scan
+ of the full contents of the Session at flush time
+ whenever it was in use. [ticket:2442]
+
- [feature] Major rewrite of relationship()
internals now allow join conditions which
include columns pointing to themselves
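
The changelog entry above names the ``sqlalchemy.ext.mutable`` extension as the replacement for MutableType. For orientation, a minimal sketch of the documented pattern, wiring a dict wrapper that emits change events to a PickleType column (the ``TrackedDict`` name and the table layout are illustrative, not part of this change)::

    from sqlalchemy import Table, Column, Integer, MetaData, PickleType
    from sqlalchemy.ext.mutable import Mutable

    class TrackedDict(Mutable, dict):
        """Dict subclass that reports in-place changes to the ORM."""

        @classmethod
        def coerce(cls, key, value):
            # coerce plain dicts assigned to the attribute into TrackedDict
            if not isinstance(value, TrackedDict):
                if isinstance(value, dict):
                    return TrackedDict(value)
                return Mutable.coerce(key, value)
            return value

        def __setitem__(self, key, value):
            # in-place set: flag the parent object as changed
            dict.__setitem__(self, key, value)
            self.changed()

        def __delitem__(self, key):
            # in-place delete: flag the parent object as changed
            dict.__delitem__(self, key)
            self.changed()

    metadata = MetaData()
    my_data = Table(
        'my_data', metadata,
        Column('id', Integer, primary_key=True),
        # as_mutable() associates the wrapper with this column's type
        Column('data', TrackedDict.as_mutable(PickleType)),
    )
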
PGUuid = UUID
-class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
+class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
"""Postgresql ARRAY type.
Represents values as Python lists.
"""
__visit_name__ = 'ARRAY'
- def __init__(self, item_type, mutable=False, as_tuple=False):
+ def __init__(self, item_type, as_tuple=False):
"""Construct an ARRAY.
E.g.::
``ARRAY(ARRAY(Integer))`` or such. The type mapping figures out on
the fly
- :param mutable=False: Specify whether lists passed to this
- class should be considered mutable - this enables
- "mutable types" mode in the ORM. Be sure to read the
- notes for :class:`.MutableType` regarding ORM
- performance implications (default changed from ``True`` in
- 0.7.0).
-
- .. note::
-
- This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
:param as_tuple=False: Specify whether return results
should be converted to tuples from lists. DBAPIs such
as psycopg2 return lists by default. When tuples are
- returned, the results are hashable. This flag can only
- be set to ``True`` when ``mutable`` is set to
- ``False``. (new in 0.6.5)
+ returned, the results are hashable.
"""
if isinstance(item_type, ARRAY):
if isinstance(item_type, type):
item_type = item_type()
self.item_type = item_type
- self.mutable = mutable
- if mutable and as_tuple:
- raise exc.ArgumentError(
- "mutable must be set to False if as_tuple is True."
- )
self.as_tuple = as_tuple
- def copy_value(self, value):
- if value is None:
- return None
- elif self.mutable:
- return list(value)
- else:
- return value
-
def compare_values(self, x, y):
return x == y
- def is_mutable(self):
- return self.mutable
-
def bind_processor(self, dialect):
item_proc = self.item_type.dialect_impl(dialect).bind_processor(dialect)
if item_proc:
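
With the ``mutable`` flag removed from ARRAY, ``as_tuple`` no longer carries the restriction against combining the two; a brief usage sketch (the table and column names are illustrative)::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects import postgresql

    scores = Table(
        'scores', MetaData(),
        Column('id', Integer, primary_key=True),
        # as_tuple=True converts list results from the DBAPI to tuples,
        # so loaded values are hashable: (1, 2, 3) rather than [1, 2, 3]
        Column('points', postgresql.ARRAY(Integer, as_tuple=True)),
    )
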
self.property.columns[0].type
-class MutableScalarAttributeImpl(ScalarAttributeImpl):
- """represents a scalar value-holding InstrumentedAttribute, which can
- detect changes within the value itself.
-
- """
-
- uses_objects = False
- supports_population = True
-
- def __init__(self, class_, key, callable_, dispatch,
- class_manager, copy_function=None,
- compare_function=None, **kwargs):
- super(ScalarAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- compare_function=compare_function,
- **kwargs)
- class_manager.mutable_attributes.add(key)
- if copy_function is None:
- raise sa_exc.ArgumentError(
- "MutableScalarAttributeImpl requires a copy function")
- self.copy = copy_function
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- if not dict_:
- v = state.committed_state.get(self.key, NO_VALUE)
- else:
- v = dict_.get(self.key, NO_VALUE)
-
- return History.from_scalar_attribute(self, state, v)
-
- def check_mutable_modified(self, state, dict_):
- a, u, d = self.get_history(state, dict_)
- return bool(a or d)
-
- def get(self, state, dict_, passive=PASSIVE_OFF):
- if self.key not in state.mutable_dict:
- ret = ScalarAttributeImpl.get(self, state, dict_, passive=passive)
- if ret is not PASSIVE_NO_RESULT:
- state.mutable_dict[self.key] = ret
- return ret
- else:
- return state.mutable_dict[self.key]
-
- def delete(self, state, dict_):
- ScalarAttributeImpl.delete(self, state, dict_)
- state.mutable_dict.pop(self.key)
-
- def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
- ScalarAttributeImpl.set(self, state, dict_, value,
- initiator, passive, check_old=check_old, pop=pop)
- state.mutable_dict[self.key] = value
class ScalarObjectAttributeImpl(ScalarAttributeImpl):
def register_attribute_impl(class_, key,
uselist=False, callable_=None,
- useobject=False, mutable_scalars=False,
+ useobject=False,
impl_class=None, backref=None, **kw):
manager = manager_of_class(class_)
elif useobject:
impl = ScalarObjectAttributeImpl(class_, key, callable_,
dispatch,**kw)
- elif mutable_scalars:
- impl = MutableScalarAttributeImpl(class_, key, callable_, dispatch,
- class_manager=manager, **kw)
else:
impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
class IdentityMap(dict):
def __init__(self):
- self._mutable_attrs = set()
self._modified = set()
self._wr = weakref.ref(self)
if state.modified:
self._modified.add(state)
- if state.manager.mutable_attributes:
- self._mutable_attrs.add(state)
def _manage_removed_state(self, state):
del state._instance_dict
- self._mutable_attrs.discard(state)
self._modified.discard(state)
def _dirty_states(self):
- return self._modified.union(s for s in self._mutable_attrs.copy()
- if s.modified)
+ return self._modified
def check_modified(self):
"""return True if any InstanceStates present have been marked as 'modified'."""
- if self._modified:
- return True
- else:
- for state in self._mutable_attrs.copy():
- if state.modified:
- return True
- return False
+ return bool(self._modified)
def has_key(self, key):
return key in self
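
The practical effect of dropping the ``_mutable_attrs`` scan is that an in-place change to a plain, non-``ext.mutable`` value no longer marks its parent as dirty. A runnable sketch of that behavior against an in-memory SQLite database (the ``Foo`` mapping is hypothetical)::

    from sqlalchemy import create_engine, Column, Integer, PickleType
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Foo(Base):
        __tablename__ = 'foo'
        id = Column(Integer, primary_key=True)
        data = Column(PickleType)          # note: no mutable=True

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)
    f = Foo(data={'key': 'old'})
    session.add(f)
    session.commit()

    f.data['key'] = 'new'                  # in-place mutation

    # with the per-Session scan of mutable attributes removed, this
    # change is not detected; the object is not considered dirty unless
    # the attribute is reassigned or sqlalchemy.ext.mutable is used.
    assert f not in session.dirty
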
self.factory = None # where we came from, for inheritance bookkeeping
self.info = {}
self.new_init = None
- self.mutable_attributes = set()
self.local_attrs = {}
self.originals = {}
@util.memoized_property
def _state_constructor(self):
self.dispatch.first_init(self, self.class_)
- if self.mutable_attributes:
- return state.MutableAttrInstanceState
- else:
- return state.InstanceState
+ return state.InstanceState
def manage(self):
"""Mark this instance as the manager for its class."""
del self.local_attrs[key]
self.uninstall_descriptor(key)
del self[key]
- if key in self.mutable_attributes:
- self.mutable_attributes.remove(key)
for cls in self.class_.__subclasses__():
manager = manager_of_class(cls)
if manager:
load_options = EMPTY_SET
load_path = ()
insert_order = None
- mutable_dict = None
_strong_obj = None
modified = False
expired = False
manager.dispatch.init(self, args, kwargs)
- #if manager.mutable_attributes:
- # assert self.__class__ is MutableAttrInstanceState
-
try:
return manager.original_init(*mixed[1:], **kwargs)
except:
d.update(
(k, self.__dict__[k]) for k in (
'committed_state', '_pending_mutations', 'modified', 'expired',
- 'callables', 'key', 'parents', 'load_options', 'mutable_dict',
+ 'callables', 'key', 'parents', 'load_options',
'class_',
) if k in self.__dict__
)
self.__dict__.update([
(k, state[k]) for k in (
- 'key', 'load_options', 'mutable_dict'
+ 'key', 'load_options',
) if k in state
])
self.committed_state.clear()
self.__dict__.pop('_pending_mutations', None)
- self.__dict__.pop('mutable_dict', None)
# clear out 'parents' collection. not
# entirely clear how we can best determine
def expire_attributes(self, dict_, attribute_names):
pending = self.__dict__.get('_pending_mutations', None)
- mutable_dict = self.mutable_dict
for key in attribute_names:
impl = self.manager[key].impl
dict_.pop(key, None)
self.committed_state.pop(key, None)
- if mutable_dict:
- mutable_dict.pop(key, None)
if pending:
pending.pop(key, None)
"""
class_manager = self.manager
- if class_manager.mutable_attributes:
- for key in keys:
- if key in dict_ and key in class_manager.mutable_attributes:
- self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
- else:
- self.committed_state.pop(key, None)
- else:
- for key in keys:
- self.committed_state.pop(key, None)
+ for key in keys:
+ self.committed_state.pop(key, None)
self.expired = False
if key in dict_ and callables[key] is self:
del callables[key]
- for key in self.manager.mutable_attributes:
- if key in dict_:
- self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
-
if instance_dict and self.modified:
instance_dict._modified.discard(self)
return self.state.get_history(self.key,
PASSIVE_NO_INITIALIZE)
-class MutableAttrInstanceState(InstanceState):
- """InstanceState implementation for objects that reference 'mutable'
- attributes.
-
- Has a more involved "cleanup" handler that checks mutable attributes
- for changes upon dereference, resurrecting if needed.
-
- """
-
- @util.memoized_property
- def mutable_dict(self):
- return {}
-
- def _get_modified(self, dict_=None):
- if self.__dict__.get('modified', False):
- return True
- else:
- if dict_ is None:
- dict_ = self.dict
- for key in self.manager.mutable_attributes:
- if self.manager[key].impl.check_mutable_modified(self, dict_):
- return True
- else:
- return False
-
- def _set_modified(self, value):
- self.__dict__['modified'] = value
-
- modified = property(_get_modified, _set_modified)
-
- @property
- def unmodified(self):
- """a set of keys which have no uncommitted changes"""
-
- dict_ = self.dict
-
- return set([
- key for key in self.manager
- if (key not in self.committed_state or
- (key in self.manager.mutable_attributes and
- not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
- def unmodified_intersection(self, keys):
- """Return self.unmodified.intersection(keys)."""
-
- dict_ = self.dict
-
- return set([
- key for key in keys
- if (key not in self.committed_state or
- (key in self.manager.mutable_attributes and
- not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
-
- def _is_really_none(self):
- """do a check modified/resurrect.
-
- This would be called in the extremely rare
- race condition that the weakref returned None but
- the cleanup handler had not yet established the
- __resurrect callable as its replacement.
-
- """
- if self.modified:
- self.obj = self.__resurrect
- return self.obj()
- else:
- return None
-
- def reset(self, dict_, key):
- self.mutable_dict.pop(key, None)
- InstanceState.reset(self, dict_, key)
-
- def _cleanup(self, ref):
- """weakref callback.
-
- This method may be called by an asynchronous
- gc.
-
- If the state shows pending changes, the weakref
- is replaced by the __resurrect callable which will
- re-establish an object reference on next access,
- else removes this InstanceState from the owning
- identity map, if any.
-
- """
- if self._get_modified(self.mutable_dict):
- self.obj = self.__resurrect
- else:
- instance_dict = self._instance_dict()
- if instance_dict:
- instance_dict.discard(self)
- self._dispose()
-
- def __resurrect(self):
- """A substitute for the obj() weakref function which resurrects."""
-
- # store strong ref'ed version of the object; will revert
- # to weakref when changes are persisted
- obj = self.manager.new_instance(state=self)
- self.obj = weakref.ref(obj, self._cleanup)
- self._strong_obj = obj
- obj.__dict__.update(self.mutable_dict)
-
- # re-establishes identity attributes from the key
- self.manager.dispatch.resurrect(self)
-
- return obj
class PendingCollection(object):
"""A writable placeholder for an unloaded collection.
def _register_attribute(strategy, mapper, useobject,
compare_function=None,
typecallable=None,
- copy_function=None,
- mutable_scalars=False,
uselist=False,
callable_=None,
proxy_property=None,
m.class_,
prop.key,
parent_token=prop,
- mutable_scalars=mutable_scalars,
uselist=uselist,
- copy_function=copy_function,
compare_function=compare_function,
useobject=useobject,
extension=attribute_ext,
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
- copy_function=coltype.copy_value,
- mutable_scalars=self.columns[0].type.is_mutable(),
active_history = active_history
)
_register_attribute(self, mapper, useobject=False,
compare_function=self.columns[0].type.compare_values,
- copy_function=self.columns[0].type.copy_value,
- mutable_scalars=self.columns[0].type.is_mutable(),
callable_=self._load_for_state,
expire_missing=False
)
'CLOB', 'BLOB', 'BOOLEAN', 'SMALLINT', 'INTEGER', 'DATE', 'TIME',
'String', 'Integer', 'SmallInteger', 'BigInteger', 'Numeric',
'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary',
- 'Boolean', 'Unicode', 'MutableType', 'Concatenable',
+ 'Boolean', 'Unicode', 'Concatenable',
'UnicodeText','PickleType', 'Interval', 'Enum' ]
import inspect
return x == y
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- This allows systems like the ORM to know if a column value can
- be considered 'not changed' by comparing the identity of
- objects alone. Values such as dicts, lists which
- are serialized into strings are examples of "mutable"
- column structures.
-
- .. note::
-
- This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- When this method is overridden, :meth:`copy_value` should
- also be supplied. The :class:`.MutableType` mixin
- is recommended as a helper.
-
- """
- return False
-
def get_dbapi_type(self, dbapi):
"""Return the corresponding type object from the underlying DB-API, if
any.
"""
return self.impl.get_dbapi_type(dbapi)
- def copy_value(self, value):
- """Given a value, produce a copy of it.
-
- By default this calls upon :meth:`.TypeEngine.copy_value`
- of the underlying "impl".
-
- :meth:`.copy_value` will return the object
- itself, assuming "mutability" is not enabled.
- Only the :class:`.MutableType` mixin provides a copy
- function that actually produces a new object.
- The copying function is used by the ORM when
- "mutable" types are used, to memoize the original
- version of an object as loaded from the database,
- which is then compared to the possibly mutated
- version to check for changes.
-
- Modern implementations should use the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel` for intercepting in-place
- changes to values.
-
- """
- return self.impl.copy_value(value)
-
def compare_values(self, x, y):
"""Given two values, compare them for equality.
"""
return self.impl.compare_values(x, y)
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- This allows systems like the ORM to know if a column value can
- be considered 'not changed' by comparing the identity of
- objects alone. Values such as dicts, lists which
- are serialized into strings are examples of "mutable"
- column structures.
-
- .. note::
-
- This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- """
- return self.impl.is_mutable()
-
def _adapt_expression(self, op, othertype):
"""
#todo
mapping[dialect_name] = type_
return Variant(self.impl, mapping)
-class MutableType(object):
- """A mixin that marks a :class:`.TypeEngine` as representing
- a mutable Python object type. This functionality is used
- only by the ORM.
-
- .. note::
-
- :class:`.MutableType` is superseded as of SQLAlchemy 0.7
- by the ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`. This extension provides an event
- driven approach to in-place mutation detection that does not
- incur the severe performance penalty of the :class:`.MutableType`
- approach.
-
- "mutable" means that changes can occur in place to a value
- of this type. Examples includes Python lists, dictionaries,
- and sets, as well as user-defined objects. The primary
- need for identification of "mutable" types is by the ORM,
- which applies special rules to such values in order to guarantee
- that changes are detected. These rules may have a significant
- performance impact, described below.
-
- A :class:`.MutableType` usually allows a flag called
- ``mutable=False`` to enable/disable the "mutability" flag,
- represented on this class by :meth:`is_mutable`. Examples
- include :class:`.PickleType` and
- :class:`~sqlalchemy.dialects.postgresql.base.ARRAY`. Setting
- this flag to ``True`` enables mutability-specific behavior
- by the ORM.
-
- The :meth:`copy_value` and :meth:`compare_values` functions
- represent a copy and compare function for values of this
- type - implementing subclasses should override these
- appropriately.
-
- .. warning::
-
- The usage of mutable types has significant performance
- implications when using the ORM. In order to detect changes, the
- ORM must create a copy of the value when it is first
- accessed, so that changes to the current value can be compared
- against the "clean" database-loaded value. Additionally, when the
- ORM checks to see if any data requires flushing, it must scan
- through all instances in the session which are known to have
- "mutable" attributes and compare the current value of each
- one to its "clean"
- value. So for example, if the Session contains 6000 objects (a
- fairly large amount) and autoflush is enabled, every individual
- execution of :class:`.Query` will require a full scan of that subset of
- the 6000 objects that have mutable attributes, possibly resulting
- in tens of thousands of additional method calls for every query.
-
- As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` is provided which
- allows an event driven approach to in-place mutation detection. This
- approach should now be favored over the usage of :class:`.MutableType`
- with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in
- :ref:`mutable_toplevel`.
-
- """
-
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- For :class:`.MutableType`, this method is set to
- return ``True``.
-
- """
- return True
-
- def copy_value(self, value):
- """Unimplemented."""
- raise NotImplementedError()
-
- def compare_values(self, x, y):
- """Compare *x* == *y*."""
- return x == y
def to_instance(typeobj, *arg, **kw):
if typeobj is None:
else:
return super(Enum, self).adapt(impltype, **kw)
-class PickleType(MutableType, TypeDecorator):
+class PickleType(TypeDecorator):
"""Holds Python objects, which are serialized using pickle.
PickleType builds upon the Binary type to apply Python's
the way out, allowing any pickleable Python object to be stored as
a serialized binary field.
+ To allow ORM change events to propagate for elements associated
+ with :class:`.PickleType`, see :ref:`mutable_toplevel`.
+
"""
impl = LargeBinary
def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
- pickler=None, mutable=False, comparator=None):
+ pickler=None, comparator=None):
"""
Construct a PickleType.
cPickle is not available. May be any object with
pickle-compatible ``dumps`` and ``loads`` methods.
- :param mutable: defaults to False; implements
- :meth:`AbstractType.is_mutable`. When ``True``, incoming
- objects will be compared against copies of themselves
- using the Python "equals" operator, unless the
- ``comparator`` argument is present. See
- :class:`.MutableType` for details on "mutable" type
- behavior. (default changed from ``True`` in
- 0.7.0).
-
- .. note::
-
- This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
:param comparator: a 2-arg callable predicate used
to compare values of this type. If left as ``None``,
the Python "equals" operator is used to compare values.
"""
self.protocol = protocol
self.pickler = pickler or pickle
- self.mutable = mutable
self.comparator = comparator
super(PickleType, self).__init__()
def __reduce__(self):
return PickleType, (self.protocol,
None,
- self.mutable,
self.comparator)
def bind_processor(self, dialect):
return loads(value)
return process
- def copy_value(self, value):
- if self.mutable:
- return self.pickler.loads(
- self.pickler.dumps(value, self.protocol))
- else:
- return value
-
def compare_values(self, x, y):
if self.comparator:
return self.comparator(x, y)
else:
return x == y
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- When this method is overridden, :meth:`copy_value` should
- also be supplied. The :class:`.MutableType` mixin
- is recommended as a helper.
-
- """
- return self.mutable
-
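
With ``mutable`` gone, the remaining comparison hook on PickleType is ``comparator``, the 2-arg predicate consulted by ``compare_values()`` when the ORM decides whether a value has changed at flush time. A sketch of supplying one for pickled objects that lack a useful ``__eq__`` (the ``Point`` class and mapping are illustrative)::

    from sqlalchemy import Column, Integer, PickleType
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Point(object):
        """Picklable value object without an __eq__ of its own."""
        def __init__(self, x, y):
            self.x, self.y = x, y

    class Record(Base):
        __tablename__ = 'record'
        id = Column(Integer, primary_key=True)

        # without a comparator, plain == would compare Point instances
        # by identity only and report spurious changes
        data = Column(
            PickleType(comparator=lambda a, b: a is b or (
                a is not None and b is not None and
                (a.x, a.y) == (b.x, b.y)))
        )
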
class Boolean(TypeEngine, SchemaType):
"""A bool datatype.
metadata.drop_all()
- def test_mutable_identity(self):
- metadata = MetaData(testing.db)
-
- table1 = Table("mytable", metadata,
- Column('col1', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('col2', PickleType(comparator=operator.eq, mutable=True))
- )
-
- class Foo(object):
- def __init__(self, col2):
- self.col2 = col2
-
- mapper(Foo, table1)
- metadata.create_all()
-
- session = sessionmaker()()
-
- def go():
- obj = [
- Foo({'a':1}),
- Foo({'b':1}),
- Foo({'c':1}),
- Foo({'d':1}),
- Foo({'e':1}),
- Foo({'f':1}),
- Foo({'g':1}),
- Foo({'h':1}),
- Foo({'i':1}),
- Foo({'j':1}),
- Foo({'k':1}),
- Foo({'l':1}),
- ]
-
- session.add_all(obj)
- session.commit()
-
- testing.eq_(len(session.identity_map._mutable_attrs), 12)
- testing.eq_(len(session.identity_map), 12)
- obj = None
- gc_collect()
- testing.eq_(len(session.identity_map._mutable_attrs), 0)
- testing.eq_(len(session.identity_map), 0)
-
- try:
- go()
- finally:
- metadata.drop_all()
def test_type_compile(self):
from sqlalchemy.dialects.sqlite.base import dialect as SQLiteDialect
uselist=False, useobject=False)
attributes.register_attribute(MyTest, 'email_address',
uselist=False, useobject=False)
- attributes.register_attribute(MyTest, 'some_mutable_data',
- mutable_scalars=True, copy_function=list,
- compare_function=cmp, uselist=False, useobject=False)
attributes.register_attribute(MyTest2, 'a', uselist=False,
useobject=False)
attributes.register_attribute(MyTest2, 'b', uselist=False,
o = MyTest()
o.mt2.append(MyTest2())
o.user_id=7
- o.some_mutable_data = [1,2,3]
o.mt2[0].a = 'abcde'
pk_o = pickle.dumps(o)
self.assert_(o4.user_id == 7)
self.assert_(o4.user_name is None)
self.assert_(o4.email_address is None)
- self.assert_(o4.some_mutable_data == [1,2,3])
self.assert_(len(o4.mt2) == 1)
self.assert_(o4.mt2[0].a == 'abcde')
self.assert_(o4.mt2[0].b is None)
def test_scalar_listener(self):
- # listeners on ScalarAttributeImpl and
- # MutableScalarAttributeImpl aren't used normally. test that
+ # listeners on ScalarAttributeImpl aren't used normally. test that
# they work for the benefit of user extensions
class Foo(object):
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'x', uselist=False,
- mutable_scalars=False, useobject=False,
+ useobject=False,
extension=ReceiveEvents())
- attributes.register_attribute(Foo, 'y', uselist=False,
- mutable_scalars=True, useobject=False,
- copy_function=lambda x: x, extension=ReceiveEvents())
f = Foo()
f.x = 5
f.x = 17
del f.x
- f.y = [1,2,3]
- f.y = [4,5,6]
- del f.y
eq_(results, [
('set', f, 5, attributes.NEVER_SET),
('set', f, 17, 5),
('remove', f, 17),
- ('set', f, [1,2,3], attributes.NEVER_SET),
- ('set', f, [4,5,6], [1,2,3]),
- ('remove', f, [4,5,6])
])
def test_lazytrackparent(self):
b3.element = f4
assert attributes.has_parent(Bar, f4, 'element')
- def test_mutablescalars(self):
- """test detection of changes on mutable scalar items"""
-
- class Foo(object):
- pass
-
- instrumentation.register_class(Foo)
- attributes.register_attribute(Foo, 'element', uselist=False,
- copy_function=lambda x: [y for y in x],
- mutable_scalars=True, useobject=False)
- x = Foo()
- x.element = ['one', 'two', 'three']
- attributes.instance_state(x).commit_all(attributes.instance_dict(x))
- x.element[1] = 'five'
- assert attributes.instance_state(x).modified
- instrumentation.unregister_class(Foo)
- instrumentation.register_class(Foo)
- attributes.register_attribute(Foo, 'element', uselist=False,
- useobject=False)
- x = Foo()
- x.element = ['one', 'two', 'three']
- attributes.instance_state(x).commit_all(attributes.instance_dict(x))
- x.element[1] = 'five'
- assert not attributes.instance_state(x).modified
-
def test_descriptorattributes(self):
"""changeset: 1633 broke ability to use ORM to map classes with
unusual descriptor attributes (for example, classes that inherit
eq_(self._someattr_history(f), (['two'], (), ()))
- def test_mutable_scalar_init(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- eq_(self._someattr_history(f), ((), (), ()))
-
- def test_mutable_scalar_no_init_side_effect(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- self._someattr_history(f)
- assert 'someattr' not in f.__dict__
- assert 'someattr' not in attributes.instance_state(f).committed_state
-
- def test_mutable_scalar_set(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.someattr = {'foo': 'hi'}
- eq_(self._someattr_history(f), ([{'foo': 'hi'}], (), ()))
-
- def test_mutable_scalar_set_commit(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.someattr = {'foo': 'hi'}
- self._commit_someattr(f)
- eq_(self._someattr_history(f), ((), [{'foo': 'hi'}], ()))
- eq_(attributes.instance_state(f).committed_state['someattr'],
- {'foo': 'hi'})
-
- def test_mutable_scalar_set_commit_reset(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.someattr = {'foo': 'hi'}
- self._commit_someattr(f)
- f.someattr['foo'] = 'there'
- eq_(self._someattr_history(f), ([{'foo': 'there'}], (), [{'foo': 'hi'}]))
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([{'foo': 'there'}], (), [{'foo': 'hi'}]))
-
- def test_mutable_scalar_set_commit_reset_commit(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.someattr = {'foo': 'hi'}
- self._commit_someattr(f)
- f.someattr['foo'] = 'there'
- self._commit_someattr(f)
- eq_(self._someattr_history(f), ((), [{'foo': 'there'}], ()))
-
- def test_mutable_scalar_set_dict(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.__dict__['someattr'] = {'foo': 'new'}
- eq_(self._someattr_history(f), ((), [{'foo': 'new'}], ()))
-
- def test_mutable_scalar_set_dict_set(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.__dict__['someattr'] = {'foo': 'new'}
- eq_(self._someattr_history(f), ((), [{'foo': 'new'}], ()))
- f.someattr = {'foo': 'old'}
- eq_(self._someattr_history(f), ([{'foo': 'old'}], (), [{'foo': 'new'}]))
-
- def test_mutable_scalar_set_dict_set_commit(self):
- Foo = self._fixture(uselist=False, useobject=False,
- active_history=False,
- mutable_scalars=True,copy_function=dict)
- f = Foo()
- f.__dict__['someattr'] = {'foo': 'new'}
- f.someattr = {'foo': 'old'}
- self._commit_someattr(f)
- eq_(self._someattr_history(f), ((), [{'foo': 'old'}], ()))
def test_scalar_inplace_mutation_set(self):
Foo = self._fixture(uselist=False, useobject=False,
+++ /dev/null
-"""Test the interaction of :class:`.MutableType` as well as the
-``mutable=True`` flag with the ORM.
-
-For new mutablity functionality, see test.ext.test_mutable.
-
-"""
-from test.lib.testing import eq_
-import operator
-from sqlalchemy.orm import mapper as orm_mapper
-
-import sqlalchemy as sa
-from sqlalchemy import Integer, String, ForeignKey
-from test.lib import testing, pickleable
-from test.lib.schema import Table, Column
-from sqlalchemy.orm import mapper, create_session, Session, attributes
-from test.lib.testing import eq_, ne_
-from test.lib.util import gc_collect
-from test.lib import fixtures
-from test.orm import _fixtures
-
-class MutableTypesTest(fixtures.MappedTest):
-
- @classmethod
- def define_tables(cls, metadata):
- Table('mutable_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', sa.PickleType(mutable=True)),
- Column('val', sa.Unicode(30)))
-
- @classmethod
- def setup_classes(cls):
- class Foo(cls.Basic):
- pass
-
- @classmethod
- def setup_mappers(cls):
- mutable_t, Foo = cls.tables.mutable_t, cls.classes.Foo
-
- mapper(Foo, mutable_t)
-
- def test_modified_status(self):
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
-
- f2 = session.query(Foo).first()
- assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
- eq_(f2.data, f1.data)
-
- f2.data.y = 19
- assert f2 in session.dirty
- assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
-
- def test_mutations_persisted(self):
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
- f1.data
- session.close()
-
- f2 = session.query(Foo).first()
- f2.data.y = 19
- session.commit()
- f2.data
- session.close()
-
- f3 = session.query(Foo).first()
- ne_(f3.data,f1.data)
- eq_(f3.data, pickleable.Bar(4, 19))
-
- def test_no_unnecessary_update(self):
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
-
- self.sql_count_(0, session.commit)
-
- f1.val = u'someothervalue'
- self.assert_sql(testing.db, session.commit, [
- ("UPDATE mutable_t SET val=:val "
- "WHERE mutable_t.id = :mutable_t_id",
- {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
-
- f1.val = u'hi'
- f1.data.x = 9
- self.assert_sql(testing.db, session.commit, [
- ("UPDATE mutable_t SET data=:data, val=:val "
- "WHERE mutable_t.id = :mutable_t_id",
- {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
-
- def test_mutated_state_resurrected(self):
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4,5), val = u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
-
- f1.data.y = 19
- del f1
-
- gc_collect()
- assert len(session.identity_map) == 1
-
- session.commit()
-
- assert session.query(Foo).one().data == pickleable.Bar(4, 19)
-
- def test_mutated_plus_scalar_state_change_resurrected(self):
- """test that a non-mutable attribute event subsequent to
- a mutable event prevents the object from falling into
- resurrected state.
-
- """
-
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
- f1.data.x = 10
- f1.data.y = 15
- f1.val=u'some new val'
-
- assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
-
- del f1
- session.commit()
- eq_(
- session.query(Foo.val).all(),
- [('some new val', )]
- )
-
- def test_non_mutated_state_not_resurrected(self):
- Foo = self.classes.Foo
-
- f1 = Foo(data = pickleable.Bar(4,5))
-
- session = Session()
- session.add(f1)
- session.commit()
-
- session = Session()
- f1 = session.query(Foo).first()
- del f1
- gc_collect()
-
- assert len(session.identity_map) == 0
- f1 = session.query(Foo).first()
- assert not attributes.instance_state(f1).modified
-
- def test_scalar_no_net_change_no_update(self):
- """Test that a no-net-change on a scalar attribute event
- doesn't cause an UPDATE for a mutable state.
-
- """
-
- Foo = self.classes.Foo
-
-
- f1 = Foo(val=u'hi')
-
- session = Session()
- session.add(f1)
- session.commit()
- session.close()
-
- f1 = session.query(Foo).first()
- f1.val = u'hi'
- self.sql_count_(0, session.commit)
-
- def test_expire_attribute_set(self):
- """test no SELECT emitted when assigning to an expired
- mutable attribute.
-
- """
-
- Foo = self.classes.Foo
-
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- assert 'data' not in f1.__dict__
- def go():
- f1.data = pickleable.Bar(10, 15)
- self.sql_count_(0, go)
- session.commit()
-
- eq_(f1.data.x, 10)
-
- def test_expire_mutate(self):
- """test mutations are detected on an expired mutable
- attribute."""
-
- Foo = self.classes.Foo
-
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- assert 'data' not in f1.__dict__
- def go():
- f1.data.x = 10
- self.sql_count_(1, go)
- session.commit()
-
- eq_(f1.data.x, 10)
-
- def test_deferred_attribute_set(self):
- """test no SELECT emitted when assigning to a deferred
- mutable attribute.
-
- """
-
- mutable_t, Foo = self.tables.mutable_t, self.classes.Foo
-
- sa.orm.clear_mappers()
- mapper(Foo, mutable_t, properties={
- 'data':sa.orm.deferred(mutable_t.c.data)
- })
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- session.close()
-
- f1 = session.query(Foo).first()
- def go():
- f1.data = pickleable.Bar(10, 15)
- self.sql_count_(0, go)
- session.commit()
-
- eq_(f1.data.x, 10)
-
- def test_deferred_mutate(self):
- """test mutations are detected on a deferred mutable
- attribute."""
-
- mutable_t, Foo = self.tables.mutable_t, self.classes.Foo
-
-
- sa.orm.clear_mappers()
- mapper(Foo, mutable_t, properties={
- 'data':sa.orm.deferred(mutable_t.c.data)
- })
-
- f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
- session = Session()
- session.add(f1)
- session.commit()
-
- session.close()
-
- f1 = session.query(Foo).first()
- def go():
- f1.data.x = 10
- self.sql_count_(1, go)
- session.commit()
-
- def go():
- eq_(f1.data.x, 10)
- self.sql_count_(1, go)
-
-
-class PickledDictsTest(fixtures.MappedTest):
-
- @classmethod
- def define_tables(cls, metadata):
- Table('mutable_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',
- sa.PickleType(comparator=operator.eq, mutable=True)))
-
- @classmethod
- def setup_classes(cls):
- class Foo(cls.Basic):
- pass
-
- @classmethod
- def setup_mappers(cls):
- mutable_t, Foo = cls.tables.mutable_t, cls.classes.Foo
-
- mapper(Foo, mutable_t)
-
- def test_dicts(self):
- """Dictionaries may not pickle the same way twice."""
-
- Foo = self.classes.Foo
-
-
- f1 = Foo()
- f1.data = [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': True,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ]
-
- session = create_session(autocommit=False)
- session.add(f1)
- session.commit()
-
- self.sql_count_(0, session.commit)
-
- f1.data = [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': True,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ]
-
- self.sql_count_(0, session.commit)
-
- f1.data[0]['personne']['VenSoir']= False
- self.sql_count_(1, session.commit)
-
- session.expunge_all()
- f = session.query(Foo).get(f1.id)
- eq_(f.data,
- [ {
- 'personne': {'nom': u'Smith',
- 'pers_id': 1,
- 'prenom': u'john',
- 'civilite': u'Mr',
- 'int_3': False,
- 'int_2': False,
- 'int_1': u'23',
- 'VenSoir': False,
- 'str_1': u'Test',
- 'SamMidi': False,
- 'str_2': u'chien',
- 'DimMidi': False,
- 'SamSoir': True,
- 'SamAcc': False} } ])