]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
bulk_updates
author: Mike Bayer <mike_mp@zzzcomputing.com>
Thu, 6 Nov 2014 22:29:22 +0000 (17:29 -0500)
committer: Mike Bayer <mike_mp@zzzcomputing.com>
Thu, 6 Nov 2014 22:31:14 +0000 (17:31 -0500)
examples/performance/bulk_updates.py [new file with mode: 0644]

diff --git a/examples/performance/bulk_updates.py b/examples/performance/bulk_updates.py
new file mode 100644 (file)
index 0000000..9522e4b
--- /dev/null
@@ -0,0 +1,54 @@
+"""This series of tests illustrates different ways to UPDATE a large number
+of rows in bulk.
+
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, bindparam
+from sqlalchemy.orm import Session
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+    __tablename__ = "customer"
+    id = Column(Integer, primary_key=True)
+    name = Column(String(255))
+    description = Column(String(255))
+
+
+Profiler.init("bulk_updates", num=100000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+    global engine
+    engine = create_engine(dburl, echo=echo)
+    Base.metadata.drop_all(engine)
+    Base.metadata.create_all(engine)
+
+    s = Session(engine)
+    for chunk in range(0, num, 10000):
+        s.bulk_insert_mappings(Customer, [
+            {
+                'name': 'customer name %d' % i,
+                'description': 'customer description %d' % i
+            } for i in range(chunk, chunk + 10000)
+        ])
+    s.commit()
+
+
+@Profiler.profile
+def test_orm_flush(n):
+    """UPDATE statements via the ORM flush process."""
+    session = Session(bind=engine)
+    for chunk in range(0, n, 1000):
+        customers = session.query(Customer).\
+            filter(Customer.id.between(chunk, chunk + 1000)).all()
+        for customer in customers:
+            customer.description += "updated"
+        session.flush()
+    session.commit()