From: Mike Bayer
Date: Wed, 3 Sep 2014 23:43:00 +0000 (-0400)
Subject: Merge branch 'master' into ticket_3100
X-Git-Tag: rel_1_0_0b1~200
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=84d4651f3dd86c29a7d9cd00d97227d440219ac5;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git

Merge branch 'master' into ticket_3100
---

84d4651f3dd86c29a7d9cd00d97227d440219ac5
diff --cc examples/performance/large_resultsets.py
index 268c6dc871,0000000000..7383db7341
mode 100644,000000..100644
--- a/examples/performance/large_resultsets.py
+++ b/examples/performance/large_resultsets.py
@@@ -1,155 -1,0 +1,153 @@@
+"""In this series of tests, we are looking at time to load 1M very small
+and simple rows.
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, literal_column
+from sqlalchemy.orm import Session, Bundle
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+    __tablename__ = "customer"
+    id = Column(Integer, primary_key=True)
+    name = Column(String(255))
+    description = Column(String(255))
+
+
+def setup_database(dburl, echo, num):
+    global engine
+    engine = create_engine(dburl, echo=echo)
+    Base.metadata.drop_all(engine)
+    Base.metadata.create_all(engine)
+
+    s = Session(engine)
+    for chunk in range(0, num, 10000):
+        s.bulk_insert_mappings(Customer, [
+            {
+                'name': 'customer name %d' % i,
+                'description': 'customer description %d' % i
+            } for i in range(chunk, chunk + 10000)
+        ])
+    s.commit()
+
+
+@Profiler.profile
+def test_orm_full_objects(n):
+    """Load fully tracked objects using the ORM."""
+
+    sess = Session(engine)
+    # avoid using all() so that we don't have the overhead of building
+    # a large list of full objects in memory
+    for obj in sess.query(Customer).yield_per(1000).limit(n):
+        pass
+
+
+@Profiler.profile
+def test_orm_bundles(n):
+    """Load lightweight "bundle" objects using the ORM."""
+
+    sess = Session(engine)
+    bundle = Bundle('customer',
+                    Customer.id, Customer.name, Customer.description)
+    for row in sess.query(bundle).yield_per(10000).limit(n):
+        pass
+
+
+@Profiler.profile
+def test_orm_columns(n):
+    """Load individual columns into named tuples using the ORM."""
+
+    sess = Session(engine)
+    for row in sess.query(
+            Customer.id, Customer.name,
+            Customer.description).yield_per(10000).limit(n):
+        pass
+
+
+@Profiler.profile
+def test_core_fetchall(n):
+    """Load Core result rows using Core / fetchall."""
+
+    with engine.connect() as conn:
+        result = conn.execute(Customer.__table__.select().limit(n)).fetchall()
+        for row in result:
+            data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_core_fetchchunks_w_streaming(n):
+    """Load Core result rows using Core with fetchmany and
+    streaming results."""
+
+    with engine.connect() as conn:
+        result = conn.execution_options(stream_results=True).\
+            execute(Customer.__table__.select().limit(n))
+        while True:
+            chunk = result.fetchmany(10000)
+            if not chunk:
+                break
+            for row in chunk:
+                data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_core_fetchchunks(n):
+    """Load Core result rows using Core / fetchmany."""
+
+    with engine.connect() as conn:
+        result = conn.execute(Customer.__table__.select().limit(n))
+        while True:
+            chunk = result.fetchmany(10000)
+            if not chunk:
+                break
+            for row in chunk:
+                data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_dbapi_fetchall(n):
+    """Load DBAPI cursor rows using fetchall()"""
+
+    _test_dbapi_raw(n, True)
+
+
+@Profiler.profile
+def test_dbapi_fetchchunks(n):
+    """Load DBAPI cursor rows using fetchmany()
+    (usually doesn't limit memory)"""
+
+    _test_dbapi_raw(n, False)
+
+
+def _test_dbapi_raw(n, fetchall):
+    compiled = Customer.__table__.select().limit(n).\
+        compile(
+            dialect=engine.dialect,
+            compile_kwargs={"literal_binds": True})
+
+    sql = str(compiled)
+
-    import pdb
-    pdb.set_trace()
+    conn = engine.raw_connection()
+    cursor = conn.cursor()
+    cursor.execute(sql)
+
+    if fetchall:
+        for row in cursor.fetchall():
+            # ensure that we fully fetch!
+            data = row[0], row[1], row[2]
+    else:
+        while True:
+            chunk = cursor.fetchmany(10000)
+            if not chunk:
+                break
+            for row in chunk:
+                data = row[0], row[1], row[2]
+    conn.close()
+
+if __name__ == '__main__':
+    Profiler.main(setup_once=setup_database, num=1000000)
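
Note: the Profiler helper used throughout this file lives in examples/performance/__init__.py and is not part of this diff. As a rough orientation only, a registry-and-timer of the same general shape as the decorator/main() usage shown above might look like the following sketch; this is a hypothetical illustration (the class name reuse, the in-memory "sqlite://" URL, and the plain time.time() timing are all assumptions), not SQLAlchemy's actual harness.

    import time


    class Profiler(object):
        """Hypothetical stand-in for the real harness in
        examples/performance/__init__.py (not shown in this diff)."""

        tests = []

        @classmethod
        def profile(cls, fn):
            # register the decorated test function for later execution
            cls.tests.append(fn)
            return fn

        @classmethod
        def main(cls, setup_once, num):
            # build schema and rows once, then time each registered test
            setup_once("sqlite://", echo=False, num=num)  # assumed in-memory SQLite
            for fn in cls.tests:
                start = time.time()
                fn(num)
                print("%s : %.3f sec" % (fn.__name__, time.time() - start))

Run under any harness of this shape, the interesting output is the relative cost of loading the same rows as full ORM objects, Bundles, column tuples, Core rows, and raw DBAPI rows.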