From 0c19d765dce89970c0395f57f15eb5b0f09c2a29 Mon Sep 17 00:00:00 2001
From: Mike Bayer
Date: Thu, 6 Nov 2014 17:29:22 -0500
Subject: [PATCH] bulk_updates

---
 examples/performance/bulk_updates.py | 54 ++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)
 create mode 100644 examples/performance/bulk_updates.py

diff --git a/examples/performance/bulk_updates.py b/examples/performance/bulk_updates.py
new file mode 100644
index 0000000000..9522e4bf5a
--- /dev/null
+++ b/examples/performance/bulk_updates.py
@@ -0,0 +1,54 @@
+"""This series of tests illustrates different ways to UPDATE a large number
+of rows in bulk.
+
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, bindparam
+from sqlalchemy.orm import Session
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+    __tablename__ = "customer"
+    id = Column(Integer, primary_key=True)
+    name = Column(String(255))
+    description = Column(String(255))
+
+
+Profiler.init("bulk_updates", num=100000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+    global engine
+    engine = create_engine(dburl, echo=echo)
+    Base.metadata.drop_all(engine)
+    Base.metadata.create_all(engine)
+
+    s = Session(engine)
+    for chunk in range(0, num, 10000):
+        s.bulk_insert_mappings(Customer, [
+            {
+                'name': 'customer name %d' % i,
+                'description': 'customer description %d' % i
+            } for i in range(chunk, chunk + 10000)
+        ])
+    s.commit()
+
+
+@Profiler.profile
+def test_orm_flush(n):
+    """UPDATE statements via the ORM flush process."""
+    session = Session(bind=engine)
+    for chunk in range(0, n, 1000):
+        customers = session.query(Customer).\
+            filter(Customer.id.between(chunk, chunk + 1000)).all()
+        for customer in customers:
+            customer.description += "updated"
+        session.flush()
+    session.commit()
-- 
2.47.3
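
The new module imports bindparam but this commit does not yet use it; only the
ORM flush variant is profiled. For context only, a Core-level bulk UPDATE along
the lines that import suggests could look like the sketch below: compile one
UPDATE statement with explicit bindparam() placeholders, then execute it with a
list of parameter dictionaries so the DBAPI can use executemany(). This is a
hypothetical sketch assuming the Customer model and module-level engine defined
in the patch above, not code from this commit; the helper name core_update_chunk
and the parameter names are invented for illustration.

    from sqlalchemy import bindparam

    # Hypothetical helper, not part of this commit: update one chunk of
    # customer rows in a single executemany() call. The bind parameter names
    # ('c_id', 'c_description') are chosen so they do not collide with the
    # column names used in the SET clause.
    def core_update_chunk(connection, start, end):
        stmt = Customer.__table__.update().\
            where(Customer.id == bindparam('c_id')).\
            values(description=bindparam('c_description'))
        connection.execute(stmt, [
            {'c_id': i, 'c_description': 'updated %d' % i}
            for i in range(start, end)
        ])

    # Usage sketch: run all chunks inside one transaction.
    # with engine.begin() as conn:
    #     for chunk in range(1, 100000, 1000):
    #         core_update_chunk(conn, chunk, chunk + 1000)

Because the statement is built once and executed with many parameter sets, this
avoids per-row ORM bookkeeping entirely, which is the trade-off the example
suite is designed to measure against the flush-based approach shown in
test_orm_flush.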