0.4.1
-----
+
- removed regular expression step from most statement compilations.
also fixes [ticket:833]
dialects using the old name.
- orm:
+ - Mapped classes may now define __eq__, __hash__, and __nonzero__ methods
+   with arbitrary semantics. The orm now handles all mapped instances on
+ an identity-only basis. (e.g. 'is' vs '==') [ticket:676]
+
- deferred column attributes no longer trigger a load operation when the
attribute is assigned to. in those cases, the newly assigned
value will be present in the flush's UPDATE statement unconditionally.
scoped sessions.
- session API has been solidified:
-
+
- it's an error to session.save() an object which is already persistent
[ticket:840]
- session.update() and session.delete() raise an error when updating/deleting
an instance that is already in the session with a different identity.
-
+
- session checks more carefully when determining "object X already in another session";
e.g. if you pickle a series of objects and unpickle (i.e. as in a Pylons HTTP session
or similar), they can go into a new session without any conflict
def __contains__(self, value):
for member in self.col:
+ # testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
del self.col[key]
def __contains__(self, key):
+ # testlib.pragma exempt:__hash__
return key in self.col
has_key = __contains__
def __contains__(self, value):
for member in self.col:
+ # testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
import UserDict
from sqlalchemy import util
from sqlalchemy.orm import interfaces, collections
-from sqlalchemy.orm.mapper import class_mapper
+from sqlalchemy.orm.mapper import class_mapper, identity_equal
from sqlalchemy import exceptions
super(ScalarObjectAttributeImpl, self).__init__(class_, manager, key,
callable_, trackparent=trackparent, extension=extension,
compare_function=compare_function, mutable_scalars=mutable_scalars, **kwargs)
+ if compare_function is None:
+ self.is_equal = identity_equal
def delete(self, state):
old = self.get(state)
if hasattr(attr, 'get_collection'):
self._current = current
+
if original is NO_VALUE:
- s = util.Set([])
+ s = util.IdentitySet([])
else:
- s = util.Set(original)
- self._added_items = []
- self._unchanged_items = []
- self._deleted_items = []
+ s = util.IdentitySet(original)
+
+ # FIXME: the tests have an assumption on the collection's ordering
+ self._added_items = util.OrderedIdentitySet()
+ self._unchanged_items = util.OrderedIdentitySet()
+ self._deleted_items = util.OrderedIdentitySet()
if current:
collection = attr.get_collection(state, current)
for a in collection:
if a in s:
- self._unchanged_items.append(a)
+ self._unchanged_items.add(a)
else:
- self._added_items.append(a)
+ self._added_items.add(a)
for a in s:
if a not in self._unchanged_items:
- self._deleted_items.append(a)
+ self._deleted_items.add(a)
else:
self._current = [current]
if attr.is_equal(current, original) is True:
return len(self._deleted_items) > 0 or len(self._added_items) > 0
def added_items(self):
- return self._added_items
+ return list(self._added_items)
def unchanged_items(self):
- return self._unchanged_items
+ return list(self._unchanged_items)
def deleted_items(self):
- return self._deleted_items
+ return list(self._deleted_items)
class AttributeManager(object):
"""Allow the instrumentation of object attributes."""
def remove(fn):
def remove(self, value, _sa_initiator=None):
+ # testlib.pragma exempt:__eq__
fn(self, value)
__del(self, value, _sa_initiator)
_tidy(remove)
def add(fn):
def add(self, value, _sa_initiator=None):
__set(self, value, _sa_initiator)
+ # testlib.pragma exempt:__hash__
fn(self, value)
_tidy(add)
return add
def discard(fn):
def discard(self, value, _sa_initiator=None):
+ # testlib.pragma exempt:__hash__
if value in self:
__del(self, value, _sa_initiator)
+ # testlib.pragma exempt:__hash__
fn(self, value)
_tidy(discard)
return discard
def remove(fn):
def remove(self, value, _sa_initiator=None):
+ # testlib.pragma exempt:__hash__
if value in self:
__del(self, value, _sa_initiator)
+ # testlib.pragma exempt:__hash__
fn(self, value)
_tidy(remove)
return remove
childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
if childlist is not None:
for child in childlist.deleted_items() + childlist.unchanged_items():
- if child is None or (reverse_dep and (reverse_dep, "manytomany", child, obj) in uowcommit.attributes):
+ if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
continue
associationrow = {}
self._synchronize(obj, child, associationrow, False, uowcommit)
secondary_delete.append(associationrow)
- uowcommit.attributes[(self, "manytomany", obj, child)] = True
+ uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
else:
for obj in deplist:
childlist = self.get_object_dependencies(obj, uowcommit)
if childlist is None: continue
for child in childlist.added_items():
- if child is None or (reverse_dep and (reverse_dep, "manytomany", child, obj) in uowcommit.attributes):
+ if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
continue
associationrow = {}
self._synchronize(obj, child, associationrow, False, uowcommit)
- uowcommit.attributes[(self, "manytomany", obj, child)] = True
+ uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
secondary_insert.append(associationrow)
for child in childlist.deleted_items():
- if child is None or (reverse_dep and (reverse_dep, "manytomany", child, obj) in uowcommit.attributes):
+ if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
continue
associationrow = {}
self._synchronize(obj, child, associationrow, False, uowcommit)
- uowcommit.attributes[(self, "manytomany", obj, child)] = True
+ uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
secondary_delete.append(associationrow)
if secondary_delete:
c = connection.execute(statement.values(value_params), params)
mapper._postfetch(connection, table, obj, c, c.last_updated_params(), value_params)
- updated_objects.add((obj, connection))
+ # testlib.pragma exempt:__hash__
+ updated_objects.add((id(obj), obj, connection))
rows += c.rowcount
if c.supports_sane_rowcount() and rows != len(update):
mapper._synchronizer.execute(obj, obj)
sync(mapper)
- inserted_objects.add((obj, connection))
+ # testlib.pragma exempt:__hash__
+ inserted_objects.add((id(obj), obj, connection))
if not postupdate:
- for obj, connection in inserted_objects:
+ for id_, obj, connection in inserted_objects:
for mapper in object_mapper(obj).iterate_to_root():
if 'after_insert' in mapper.extension.methods:
mapper.extension.after_insert(mapper, connection, obj)
- for obj, connection in updated_objects:
+ for id_, obj, connection in updated_objects:
for mapper in object_mapper(obj).iterate_to_root():
if 'after_update' in mapper.extension.methods:
mapper.extension.after_update(mapper, connection, obj)
for mapper in object_mapper(obj).iterate_to_root():
if 'before_delete' in mapper.extension.methods:
mapper.extension.before_delete(mapper, connection, obj)
-
+
deleted_objects = util.Set()
table_to_mapper = {}
for mapper in self.base_mapper.polymorphic_iterator():
params[col.key] = mapper.get_attr_by_column(obj, col)
if mapper.version_id_col is not None:
params[mapper.version_id_col.key] = mapper.get_attr_by_column(obj, mapper.version_id_col)
- deleted_objects.add((obj, connection))
+ # testlib.pragma exempt:__hash__
+ deleted_objects.add((id(obj), obj, connection))
for connection, del_objects in delete.iteritems():
mapper = table_to_mapper[table]
def comparator(a, b):
if c.supports_sane_multi_rowcount() and c.rowcount != len(del_objects):
raise exceptions.ConcurrentModificationError("Deleted rowcount %d does not match number of objects deleted %d" % (c.rowcount, len(del_objects)))
- for obj, connection in deleted_objects:
+ for id_, obj, connection in deleted_objects:
for mapper in object_mapper(obj).iterate_to_root():
if 'after_delete' in mapper.extension.methods:
mapper.extension.after_delete(mapper, connection, obj)
"""
if recursive is None:
- recursive=util.Set()
+ recursive=util.IdentitySet()
for prop in self.__props.values():
for c in prop.cascade_iterator(type, object, recursive, halt_on=halt_on):
yield c
"""
if recursive is None:
- recursive=util.Set()
+ recursive=util.IdentitySet()
for prop in self.__props.values():
prop.cascade_callable(type, object, callable_, recursive, halt_on=halt_on)
selectcontext.exec_with_path(self, key, populator, instance, row, ispostselect=ispostselect, isnew=isnew, **flags)
if self.non_primary:
- selectcontext.attributes[('populating_mapper', instance)] = self
+ selectcontext.attributes[('populating_mapper', id(instance))] = self
def _post_instance(self, selectcontext, instance):
post_processors = selectcontext.attributes[('post_processors', self, None)]
return hasattr(object, '_entity_name')
+def identity_equal(a, b):
+ if a is b:
+ return True
+ id_a = getattr(a, '_instance_key', None)
+ id_b = getattr(b, '_instance_key', None)
+ if id_a is None or id_b is None:
+ return False
+ return id_a == id_b
+
def object_mapper(object, entity_name=None, raiseerror=True):
"""Given an object, return the primary Mapper associated with the object instance.
proc[0](context, row)
for instance in context.identity_map.values():
- context.attributes.get(('populating_mapper', instance), object_mapper(instance))._post_instance(context, instance)
+ context.attributes.get(('populating_mapper', id(instance)), object_mapper(instance))._post_instance(context, instance)
# store new stuff in the identity map
for instance in context.identity_map.values():
appender = util.UniqueAppender(collection, 'append_without_event')
# store it in the "scratch" area, which is local to this load operation.
- selectcontext.attributes[(instance, self.key)] = appender
- result_list = selectcontext.attributes[(instance, self.key)]
+ selectcontext.attributes[('appender', id(instance), self.key)] = appender
+ result_list = selectcontext.attributes[('appender', id(instance), self.key)]
if self._should_log_debug:
self.logger.debug("eagerload list instance on %s" % mapperutil.attribute_str(instance, self.key))
else:
self.identity_map = {}
- self.new = util.Set() #OrderedSet()
- self.deleted = util.Set()
+ self.new = util.IdentitySet() #OrderedSet()
+ self.deleted = util.IdentitySet()
self.logger = logging.instance_logger(self, echoflag=session.echo_uow)
def _remove_deleted(self, obj):
"""
# a little bit of inlining for speed
- return util.Set([x for x in self.identity_map.values()
+ return util.IdentitySet([x for x in self.identity_map.values()
if x not in self.deleted
and (
x._state.modified
# create the set of all objects we want to operate upon
if objects is not None:
# specific list passed in
- objset = util.Set(objects)
+ objset = util.IdentitySet(objects)
else:
# or just everything
- objset = util.Set(self.identity_map.values()).union(self.new)
+ objset = util.IdentitySet(self.identity_map.values()).union(self.new)
# store objects whose fate has been decided
- processed = util.Set()
+ processed = util.IdentitySet()
# put all saves/updates into the flush context. detect top-level orphans and throw them into deleted.
for obj in self.new.union(dirty).intersection(objset).difference(self.deleted):
"""
mapper = object_mapper(obj)
task = self.get_task_by_mapper(mapper)
- taskelement = task._objects[obj]
+ taskelement = task._objects[id(obj)]
taskelement.isdelete = "rowswitch"
def unregister_object(self, obj):
no further operations occur upon the instance."""
mapper = object_mapper(obj)
task = self.get_task_by_mapper(mapper)
- if obj in task._objects:
+ if id(obj) in task._objects:
task.delete(obj)
def is_deleted(self, obj):
"""
try:
- rec = self._objects[obj]
+ rec = self._objects[id(obj)]
retval = False
except KeyError:
rec = UOWTaskElement(obj)
- self._objects[obj] = rec
+ self._objects[id(obj)] = rec
retval = True
if not listonly:
rec.listonly = False
"""remove the given object from this UOWTask, if present."""
try:
- del self._objects[obj]
+ del self._objects[id(obj)]
except KeyError:
pass
"""return True if the given object is contained within this UOWTask or inheriting tasks."""
for task in self.polymorphic_tasks():
- if obj in task._objects:
+ if id(obj) in task._objects:
return True
else:
return False
"""return True if the given object is marked as to be deleted within this UOWTask."""
try:
- return self._objects[obj].isdelete
+ return self._objects[id(obj)].isdelete
except KeyError:
return False
def get_dependency_task(obj, depprocessor):
try:
- dp = dependencies[obj]
+ dp = dependencies[id(obj)]
except KeyError:
- dp = dependencies.setdefault(obj, {})
+ dp = dependencies.setdefault(id(obj), {})
try:
l = dp[depprocessor]
except KeyError:
for subtask in task.polymorphic_tasks():
for taskelement in subtask.elements:
obj = taskelement.obj
- object_to_original_task[obj] = subtask
+ object_to_original_task[id(obj)] = subtask
for dep in deps_by_targettask.get(subtask, []):
# is this dependency involved in one of the cycles ?
if not dependency_in_cycles(dep):
# task
if o not in childtask:
childtask.append(o, listonly=True)
- object_to_original_task[o] = childtask
+ object_to_original_task[id(o)] = childtask
# create a tuple representing the "parent/child"
whosdep = dep.whose_dependent_on_who(obj, o)
used_tasks = util.Set()
def make_task_tree(node, parenttask, nexttasks):
- originating_task = object_to_original_task[node.item]
+ originating_task = object_to_original_task[id(node.item)]
used_tasks.add(originating_task)
t = nexttasks.get(originating_task, None)
if t is None:
t = UOWTask(self.uowtransaction, originating_task.mapper)
nexttasks[originating_task] = t
- parenttask.append(None, listonly=False, isdelete=originating_task._objects[node.item].isdelete, childtask=t)
- t.append(node.item, originating_task._objects[node.item].listonly, isdelete=originating_task._objects[node.item].isdelete)
+ parenttask.append(None, listonly=False, isdelete=originating_task._objects[id(node.item)].isdelete, childtask=t)
+ t.append(node.item, originating_task._objects[id(node.item)].listonly, isdelete=originating_task._objects[id(node.item)].isdelete)
- if node.item in dependencies:
- for depprocessor, deptask in dependencies[node.item].iteritems():
+ if id(node.item) in dependencies:
+ for depprocessor, deptask in dependencies[id(node.item)].iteritems():
t.cyclical_dependencies.add(depprocessor.branch(deptask))
nd = {}
for n in node.children:
# or "delete" members due to inheriting mappers which contain tasks
localtask = UOWTask(self.uowtransaction, t2.mapper)
for obj in t2.elements:
- localtask.append(obj, t2.listonly, isdelete=t2._objects[obj].isdelete)
+ localtask.append(obj, t2.listonly, isdelete=t2._objects[id(obj)].isdelete)
for dep in t2.dependencies:
localtask._dependencies.add(dep)
t.childtasks.insert(0, localtask)
nodes = {}
edges = _EdgeCollection()
for item in allitems + [t[0] for t in tuples] + [t[1] for t in tuples]:
- if item not in nodes:
+ if id(item) not in nodes:
node = _Node(item)
- nodes[item] = node
+ nodes[id(item)] = node
for t in tuples:
if t[0] is t[1]:
if allow_self_cycles:
- n = nodes[t[0]]
+ n = nodes[id(t[0])]
n.cycles = util.Set([n])
continue
else:
raise CircularDependencyError("Self-referential dependency detected " + repr(t))
- childnode = nodes[t[1]]
- parentnode = nodes[t[0]]
+ childnode = nodes[id(t[1])]
+ parentnode = nodes[id(t[0])]
edges.add((parentnode, childnode))
queue = []
node = queue.pop()
if not hasattr(node, '_cyclical'):
output.append(node)
- del nodes[node.item]
+ del nodes[id(node.item)]
for childnode in edges.pop_node(node):
queue.append(childnode)
return self._create_batched_tree(output)
def union(self, iterable):
result = type(self)()
+ # testlib.pragma exempt:__hash__
result._members.update(
Set(self._members.iteritems()).union(_iter_id(iterable)))
return result
def difference(self, iterable):
result = type(self)()
+ # testlib.pragma exempt:__hash__
result._members.update(
Set(self._members.iteritems()).difference(_iter_id(iterable)))
return result
def intersection(self, iterable):
result = type(self)()
+ # testlib.pragma exempt:__hash__
result._members.update(
Set(self._members.iteritems()).intersection(_iter_id(iterable)))
return result
def symmetric_difference(self, iterable):
result = type(self)()
+ # testlib.pragma exempt:__hash__
result._members.update(
Set(self._members.iteritems()).symmetric_difference(_iter_id(iterable)))
return result
yield id(item), item
+class OrderedIdentitySet(IdentitySet):
+ def __init__(self, iterable=None):
+ IdentitySet.__init__(self)
+ self._members = OrderedDict()
+ if iterable:
+ for o in iterable:
+ self.add(o)
+
+
class UniqueAppender(object):
- """appends items to a collection such that only unique items
- are added."""
+ """Only adds items to a collection once.
+
+ Additional appends() of the same object are ignored. Membership is
+    determined by identity (``is``) not equality (``==``).
+ """
def __init__(self, data, via=None):
self.data = data
- self._unique = Set()
+ self._unique = IdentitySet()
if via:
self._data_appender = getattr(data, via)
elif hasattr(data, 'append'):
'after_delete', 'after_insert', 'before_update', 'before_insert', 'after_update', 'populate_instance'])
-if __name__ == "__main__":
+class RequirementsTest(AssertMixin):
+ """Tests the contract for user classes."""
+
+ def setUpAll(self):
+ global metadata, t1, t2, t3, t4, t5, t6
+
+ metadata = MetaData(testbase.db)
+ t1 = Table('ht1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('value', String(10)))
+ t2 = Table('ht2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('ht1_id', Integer, ForeignKey('ht1.id')),
+ Column('value', String(10)))
+ t3 = Table('ht3', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('value', String(10)))
+ t4 = Table('ht4', metadata,
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht3_id', Integer, ForeignKey('ht3.id'),
+ primary_key=True))
+ t5 = Table('ht5', metadata,
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True))
+ t6 = Table('ht6', metadata,
+ Column('ht1a_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht1b_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('value', String(10)))
+ metadata.create_all()
+
+ def setUp(self):
+ clear_mappers()
+
+ def tearDownAll(self):
+ metadata.drop_all()
+
+ def test_baseclass(self):
+ class OldStyle:
+ pass
+
+ self.assertRaises(exceptions.ArgumentError, mapper, OldStyle, t1)
+
+ class NoWeakrefSupport(str):
+ pass
+
+ # TODO: is weakref support detectable without an instance?
+ #self.assertRaises(exceptions.ArgumentError, mapper, NoWeakrefSupport, t2)
+
+ def test_comparison_overrides(self):
+ """Simple tests to ensure users can supply comparison __methods__.
+
+ The suite-level test --options are better suited to detect
+ problems- they add selected __methods__ across the board on all
+ ORM tests. This test simply shoves a variety of operations
+ through the ORM to catch basic regressions early in a standard
+ test run.
+ """
+
+ # adding these methods directly to each class to avoid decoration
+ # by the testlib decorators.
+ class H1(object):
+ def __init__(self, value='abc'):
+ self.value = value
+ def __nonzero__(self):
+ return False
+ def __hash__(self):
+ return hash(self.value)
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.value == other.value
+ return False
+ class H2(object):
+ def __init__(self, value='abc'):
+ self.value = value
+ def __nonzero__(self):
+ return False
+ def __hash__(self):
+ return hash(self.value)
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.value == other.value
+ return False
+ class H3(object):
+ def __init__(self, value='abc'):
+ self.value = value
+ def __nonzero__(self):
+ return False
+ def __hash__(self):
+ return hash(self.value)
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.value == other.value
+ return False
+ class H6(object):
+ def __init__(self, value='abc'):
+ self.value = value
+ def __nonzero__(self):
+ return False
+ def __hash__(self):
+ return hash(self.value)
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.value == other.value
+ return False
+
+ mapper(H1, t1, properties={
+ 'h2s': relation(H2, backref='h1'),
+ 'h3s': relation(H3, secondary=t4, backref='h1s'),
+ 'h1s': relation(H1, secondary=t5, backref='parent_h1'),
+ 't6a': relation(H6, backref='h1a',
+ primaryjoin=t1.c.id==t6.c.ht1a_id),
+ 't6b': relation(H6, backref='h1b',
+ primaryjoin=t1.c.id==t6.c.ht1b_id),
+ })
+ mapper(H2, t2)
+ mapper(H3, t3)
+ mapper(H6, t6)
+
+ s = create_session()
+ for i in range(3):
+ h1 = H1()
+ s.save(h1)
+
+ h1.h2s.append(H2())
+ h1.h3s.extend([H3(), H3()])
+ h1.h1s.append(H1())
+
+ s.flush()
+
+ h6 = H6()
+ h6.h1a = h1
+ h6.h1b = h1
+
+ h6 = H6()
+ h6.h1a = h1
+ h6.h1b = H1()
+
+ h6.h1b.h2s.append(H2())
+
+ s.flush()
+
+ h1.h2s.extend([H2(), H2()])
+ s.flush()
+
+ h1s = s.query(H1).options(eagerload('h2s')).all()
+ self.assertEqual(len(h1s), 5)
+
+ self.assert_unordered_result(h1s, H1,
+ {'h2s': []},
+ {'h2s': []},
+ {'h2s': (H2, [{'value': 'abc'},
+ {'value': 'abc'},
+ {'value': 'abc'}])},
+ {'h2s': []},
+ {'h2s': (H2, [{'value': 'abc'}])})
+
+ h1s = s.query(H1).options(eagerload('h3s')).all()
+
+ self.assertEqual(len(h1s), 5)
+ h1s = s.query(H1).options(eagerload_all('t6a.h1b'),
+ eagerload('h2s'),
+ eagerload_all('h3s.h1s')).all()
+ self.assertEqual(len(h1s), 5)
+
+
+if __name__ == "__main__":
testbase.main()
sess.flush()
assert list(sess.execute(t1.select(), mapper=T1)) == [(1, 'some t1')]
- assert list(sess.execute(t1t3.select(), mapper=T1)) == [(1,1), (1, 2)]
+ assert rowset(sess.execute(t1t3.select(), mapper=T1)) == set([(1,1), (1, 2)])
assert list(sess.execute(t3.select(), mapper=T1)) == [(1, 'some t3'), (2, 'some other t3')]
o2 = T1(data='some other t1', id=1, t3s=[