Add bulk_replace to AssociationSet, AssociationDict
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Sun, 3 Feb 2019 18:29:06 +0000 (13:29 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Mon, 4 Feb 2019 15:34:25 +0000 (10:34 -0500)
Implemented a more comprehensive assignment operation (e.g. "bulk replace")
when using association proxy with sets or dictionaries.  Fixes the problem
of redundant proxy objects being created to replace the old ones, which
leads to excessive events and SQL, and in the case of unique constraints
will cause the flush to fail.

Fixes: #2642
Change-Id: I57ab27dd9feba057e539267722cce92254fca777
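
For illustration only (not part of the patch): the _bulk_replace hooks added
below decide what to keep, create, and remove by plain set arithmetic over the
newly assigned collection.  A minimal standalone sketch of that arithmetic,
using ordinary Python sets in place of the proxied members::

    # illustrative sketch only; "existing" stands in for the members already
    # present in the proxied collection, "values" for the collection being
    # assigned over it
    existing = {"x", "y", "z"}
    values = {"x", "z", "q"}

    constants = existing.intersection(values)  # kept; no new proxy objects
    additions = values.difference(constants)   # only these get created
    removals = existing.difference(constants)  # these get removed

    assert constants == {"x", "z"}
    assert additions == {"q"}
    assert removals == {"y"}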

doc/build/changelog/migration_13.rst
doc/build/changelog/unreleased_13/2642.rst [new file with mode: 0644]
lib/sqlalchemy/ext/associationproxy.py
test/ext/test_associationproxy.py

index 81d82850516f1bbb6c30140d4628f4668919a07f..01fbccf22ccd80c48bfcd1ae30a26c2a9ab24418 100644 (file)
@@ -683,6 +683,54 @@ Note that this change may be revised if it leads to problems.
 
 :ticket:`4268`
 
+.. _change_2642:
+
+Implemented bulk replace for sets, dicts with AssociationProxy
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Assignment of a set or dictionary to an association proxy collection should
+now work correctly, whereas before it would re-create the association proxy
+members for existing keys, leading to potential flush failures due to the
+delete+insert of the same object; it now only creates new association objects
+where appropriate::
+
+    class A(Base):
+        __tablename__ = "test_a"
+
+        id = Column(Integer, primary_key=True)
+        b_rel = relationship(
+            "B", collection_class=set, cascade="all, delete-orphan",
+        )
+        b = association_proxy("b_rel", "value", creator=lambda x: B(value=x))
+
+
+    class B(Base):
+        __tablename__ = "test_b"
+        __table_args__ = (UniqueConstraint("a_id", "value"),)
+
+        id = Column(Integer, primary_key=True)
+        a_id = Column(Integer, ForeignKey("test_a.id"), nullable=False)
+        value = Column(String)
+
+    # ...
+
+    s = Session(e)
+    a = A(b={"x", "y", "z"})
+    s.add(a)
+    s.commit()
+
+    # re-assign where one B should be deleted, one B added, two
+    # B's maintained
+    a.b = {"x", "z", "q"}
+
+    # only 'q' was added, so only one new B object.  previously
+    # all three would have been re-created leading to flush conflicts
+    # against the deleted ones.
+    assert len(s.new) == 1
+
+
+:ticket:`2642`
+
 .. _change_1103:
 
 Many-to-one backref checks for collection duplicates during remove operation
diff --git a/doc/build/changelog/unreleased_13/2642.rst b/doc/build/changelog/unreleased_13/2642.rst
new file mode 100644 (file)
index 0000000..649b109
--- /dev/null
@@ -0,0 +1,13 @@
+.. change::
+   :tags: bug, ext
+   :tickets: 2642
+
+   Implemented a more comprehensive assignment operation (e.g. "bulk replace")
+   when using association proxy with sets or dictionaries.  Fixes the problem
+   of redundant proxy objects being created to replace the old ones, which
+   leads to excessive events and SQL, and in the case of unique constraints
+   will cause the flush to fail.
+
+   .. seealso::
+
+      :ref:`change_2642`
index 12e6c1f52cf36e1b3d535a44f8d7141ad6609a02..873145b4e5d7cbb1953a3ff6ac144e39ec36f35f 100644 (file)
@@ -560,8 +560,7 @@ class AssociationProxyInstance(object):
             proxy = self.get(obj)
             assert self.collection_class is not None
             if proxy is not values:
-                proxy.clear()
-                self._set(proxy, values)
+                proxy._bulk_replace(self, values)
 
     def delete(self, obj):
         if self.owning_class is None:
@@ -959,6 +958,10 @@ class _AssociationCollection(object):
         self.lazy_collection = state["lazy_collection"]
         self.parent._inflate(self)
 
+    def _bulk_replace(self, assoc_proxy, values):
+        self.clear()
+        assoc_proxy._set(self, values)
+
 
 class _AssociationList(_AssociationCollection):
     """Generic, converting, list-to-list proxy."""
@@ -1310,6 +1313,21 @@ class _AssociationDict(_AssociationCollection):
         for key, value in kw.items():
             self[key] = value
 
+    def _bulk_replace(self, assoc_proxy, values):
+        existing = set(self)
+        constants = existing.intersection(values or ())
+        additions = set(values or ()).difference(constants)
+        removals = existing.difference(constants)
+
+        for key, member in values.items() or ():
+            if key in additions:
+                self[key] = member
+            elif key in constants:
+                self[key] = member
+
+        for key in removals:
+            del self[key]
+
     def copy(self):
         return dict(self.items())
 
@@ -1394,6 +1412,24 @@ class _AssociationSet(_AssociationCollection):
         for value in other:
             self.add(value)
 
+    def _bulk_replace(self, assoc_proxy, values):
+        existing = set(self)
+        constants = existing.intersection(values or ())
+        additions = set(values or ()).difference(constants)
+        removals = existing.difference(constants)
+
+        appender = self.add
+        remover = self.remove
+
+        for member in values or ():
+            if member in additions:
+                appender(member)
+            elif member in constants:
+                appender(member)
+
+        for member in removals:
+            remover(member)
+
     def __ior__(self, other):
         if not collections._set_binops_check_strict(self, other):
             return NotImplemented
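
The migration note above demonstrates the set case; for dictionaries, the
_AssociationDict._bulk_replace added above keeps the association object for
every key present in both the old and the new dict (only reassigning its value
in place), creates association objects only for new keys, and removes the
rest.  A hedged end-to-end sketch of that behavior, assuming a hypothetical
Parent/Child mapping that is not part of this patch::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship
    from sqlalchemy.orm.collections import attribute_mapped_collection

    Base = declarative_base()


    class Parent(Base):
        __tablename__ = "parent"

        id = Column(Integer, primary_key=True)

        # children keyed by Child.key; delete-orphan so that removed keys
        # delete their rows at flush time
        _children = relationship(
            "Child",
            collection_class=attribute_mapped_collection("key"),
            cascade="all, delete-orphan",
        )

        # dict-shaped proxy: key -> Child.value.  for dict collections the
        # creator receives (key, value)
        children = association_proxy(
            "_children", "value", creator=lambda k, v: Child(key=k, value=v)
        )


    class Child(Base):
        __tablename__ = "child"

        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey("parent.id"), nullable=False)
        key = Column(String)
        value = Column(String)


    e = create_engine("sqlite://")
    Base.metadata.create_all(e)

    s = Session(e)
    p = Parent(children={"a": "v a", "b": "v b", "c": "v c"})
    s.add(p)
    s.commit()

    # "a" and "c" keep their existing Child objects, "b" is removed,
    # only "d" requires a new Child
    p.children = {"a": "v a", "d": "v d", "c": "v c"}
    assert len(s.new) == 1
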
index 1bf84a77c3aa669df7527f8d67157c3a133df0d1..4b07a383d1f7e3f680b4c1211a48a311b4ba9e03 100644 (file)
@@ -566,6 +566,25 @@ class CustomDictTest(_CollectionOperations):
 
         assert_raises(TypeError, set, [p1.children])
 
+    def test_bulk_replace(self):
+        Parent = self.Parent
+
+        p1 = Parent("foo")
+        p1.children = {"a": "v a", "b": "v b", "c": "v c"}
+        assocs = set(p1._children.values())
+        keep_assocs = {a for a in assocs if a.foo in ("a", "c")}
+        eq_(len(keep_assocs), 2)
+        remove_assocs = {a for a in assocs if a.foo == "b"}
+
+        p1.children = {"a": "v a", "d": "v d", "c": "v c"}
+        eq_(
+            {a for a in p1._children.values() if a.foo in ("a", "c")},
+            keep_assocs,
+        )
+        assert not remove_assocs.intersection(p1._children.values())
+
+        eq_(p1.children, {"a": "v a", "d": "v d", "c": "v c"})
+
 
 class SetTest(_CollectionOperations):
     collection_class = set
@@ -819,6 +838,22 @@ class SetTest(_CollectionOperations):
                         print("got", repr(p.children))
                         raise
 
+    def test_bulk_replace(self):
+        Parent = self.Parent
+
+        p1 = Parent("foo")
+        p1.children = {"a", "b", "c"}
+        assocs = set(p1._children)
+        keep_assocs = {a for a in assocs if a.name in ("a", "c")}
+        eq_(len(keep_assocs), 2)
+        remove_assocs = {a for a in assocs if a.name == "b"}
+
+        p1.children = {"a", "c", "d"}
+        eq_({a for a in p1._children if a.name in ("a", "c")}, keep_assocs)
+        assert not remove_assocs.intersection(p1._children)
+
+        eq_(p1.children, {"a", "c", "d"})
+
 
 class CustomSetTest(SetTest):
     collection_class = SetCollection