git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
author	Mike Bayer <mike_mp@zzzcomputing.com>
	Thu, 12 Mar 2015 00:22:42 +0000 (20:22 -0400)
committer	Mike Bayer <mike_mp@zzzcomputing.com>
	Thu, 12 Mar 2015 00:31:11 +0000 (20:31 -0400)
- Added a new extension suite :mod:`sqlalchemy.ext.baked`. This
simple but unusual system allows for a dramatic savings in Python
overhead for the construction and processing of orm :class:`.Query`
objects, from query construction up through rendering of a string
SQL statement.
fixes #3054

18 files changed:
doc/build/changelog/changelog_10.rst
doc/build/changelog/migration_10.rst
doc/build/index.rst
doc/build/orm/extensions/baked.rst [new file with mode: 0644]
doc/build/orm/extensions/index.rst
examples/performance/__init__.py
examples/performance/short_selects.py
lib/sqlalchemy/ext/__init__.py
lib/sqlalchemy/ext/baked.py [new file with mode: 0644]
lib/sqlalchemy/orm/base.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/strategies.py
test/ext/test_baked.py [new file with mode: 0644]
test/orm/_fixtures.py
test/perf/orm2010.py

index ca9aa1b7e7af7246ec21ee58270365db0064ba73..4256aa20b0d7177ae0b1415f161f3b322f98cea8 100644 (file)
     series as well.  For changes that are specific to 1.0 with an emphasis
     on compatibility concerns, see :doc:`/changelog/migration_10`.
 
+    .. change::
+        :tags: feature, extensions
+        :tickets: 3054
+
+        Added a new extension suite :mod:`sqlalchemy.ext.baked`.  This
+        simple but unusual system allows for a dramatic savings in Python
+        overhead for the construction and processing of orm :class:`.Query`
+        objects, from query construction up through rendering of a string
+        SQL statement.
+
+        .. seealso::
+
+            :ref:`baked_toplevel`
+
     .. change::
         :tags: bug, postgresql
         :tickets: 3319
index e4f1e0e253d18ba3e9f5ef8a4e83b8df9fc00069..23d230e9440ccc9d35a60c518154c19bf676347a 100644 (file)
@@ -56,6 +56,41 @@ displays.
 
     :ref:`examples_performance`
 
+"Baked" Queries
+---------------
+
+The "baked" query feature is an unusual new approach which allows for
+straightforward construction and invocation of :class:`.Query` objects
+using caching, which upon successive calls features vastly reduced
+Python function call overhead (over 75%).    By specifying a
+:class:`.Query` object as a series of lambdas which are only invoked
+once, a query as a pre-compiled unit begins to be feasible::
+
+    from sqlalchemy.ext import baked
+    from sqlalchemy import bindparam
+
+    bakery = baked.bakery()
+
+    def search_for_user(session, username, email=None):
+
+        baked_query = bakery(lambda session: session.query(User))
+        baked_query += lambda q: q.filter(User.name == bindparam('username'))
+
+        baked_query += lambda q: q.order_by(User.id)
+
+        if email:
+            baked_query += lambda q: q.filter(User.email == bindparam('email'))
+
+        result = baked_query(session).params(username=username, email=email).all()
+
+        return result
+
+.. seealso::
+
+    :ref:`baked_toplevel`
+
+:ticket:`3054`
+
 .. _feature_3150:
 
 Improvements to declarative mixins, ``@declared_attr`` and related features
index 55dba45fe91ff66f76ec10eaa9587c624fa07961..1990df8e20e96191f6146ccf2432147eef2caabe 100644 (file)
@@ -42,7 +42,8 @@ of Python objects, proceed first to the tutorial.
 
 * **ORM Usage:**
   :doc:`Session Usage and Guidelines <orm/session>` |
-  :doc:`Loading Objects <orm/loading_objects>`
+  :doc:`Loading Objects <orm/loading_objects>` |
+  :doc:`Cached Query Extension <orm/extensions/baked>`
 
 * **Extending the ORM:**
   :doc:`ORM Events and Internals <orm/extending>`
diff --git a/doc/build/orm/extensions/baked.rst b/doc/build/orm/extensions/baked.rst
new file mode 100644 (file)
index 0000000..2fd930c
--- /dev/null
@@ -0,0 +1,207 @@
+.. _baked_toplevel:
+
+Baked Queries
+=============
+
+.. module:: sqlalchemy.ext.baked
+
+``baked`` provides an alternative creational pattern for
+:class:`~.query.Query` objects, which allows for caching of the object's
+construction and string-compilation steps.  This means that for a
+particular :class:`~.query.Query` building scenario that is used more than
+once, all of the Python function invocation involved in building the query
+from its initial construction up through generating a SQL string will only
+occur **once**, rather than for each time that query is built up and executed.
+
+The rationale for this system is to greatly reduce Python interpreter
+overhead for everything that occurs **before the SQL is emitted**.
+The caching of the "baked" system does **not** in any way reduce SQL calls or
+cache the **return results** from the database.  A technique that demonstrates
+the caching of the SQL calls and result sets themselves is available in
+:ref:`examples_caching`.
+
+
+.. versionadded:: 1.0.0
+
+.. note::
+
+    The :mod:`sqlalchemy.ext.baked` extension should be considered
+    **experimental** as of 1.0.0.  It provides a dramatically different system
+    of producing queries which has yet to be proven at scale.
+
+Synopsis
+--------
+
+Usage of the baked system starts by producing a so-called "bakery", which
+represents storage for a particular series of query objects::
+
+    from sqlalchemy.ext import baked
+
+    bakery = baked.bakery()
+
+The above "bakery" will store cached data in an LRU cache that defaults
+to 200 elements, noting that an ORM query will typically contain one entry
+for the ORM query as invoked, as well as one entry per database dialect for
+the SQL string.
+
+The bakery allows us to build up a :class:`~.query.Query` object by specifying
+its construction as a series of Python callables, which are typically lambdas.
+For succinct usage, it overrides the ``+=`` operator so that a typical
+query build-up looks like the following::
+
+    from sqlalchemy import bindparam
+
+    def search_for_user(session, username, email=None):
+
+        baked_query = bakery(lambda session: session.query(User))
+        baked_query += lambda q: q.filter(User.name == bindparam('username'))
+
+        baked_query += lambda q: q.order_by(User.id)
+
+        if email:
+            baked_query += lambda q: q.filter(User.email == bindparam('email'))
+
+        result = baked_query(session).params(username=username, email=email).all()
+
+        return result
+
+Following are some observations about the above code:
+
+1. The ``baked_query`` object is an instance of :class:`.BakedQuery`.  This
+   object is essentially the "builder" for a real orm :class:`~.query.Query`
+   object, but it is not itself the *actual* :class:`~.query.Query`
+   object.
+
+2. The actual :class:`~.query.Query` object is not built at all, until the
+   very end of the function when :meth:`.Result.all` is called.
+
+3. The steps that are added to the ``baked_query`` object are all expressed
+   as Python functions,  typically lambdas.  The first lambda given
+   to the :func:`.bakery` function receives a :class:`.Session` as its
+   argument.  The remaining lambdas each receive a :class:`~.query.Query`
+   as their argument.
+
+4. In the above code, even though our application may call upon
+   ``search_for_user()`` many times, and even though within each invocation
+   we build up an entirely new :class:`.BakedQuery` object,
+   *all of the lambdas are only called once*.   Each lambda is **never** called
+   a second time for as long as this query is cached in the bakery.
+
+5. The caching is achieved by storing references to the **lambda objects
+   themselves** in order to formulate a cache key; that is, the fact that the
+   Python interpreter assigns an in-Python identity to these functions is
+   what determines how to identify the query on successive runs. For
+   those invocations of ``search_for_user()`` where the ``email`` parameter
+   is specified, the callable ``lambda q: q.filter(User.email == bindparam('email'))``
+   will be part of the cache key that's retrieved; when ``email`` is
+   ``None``, this callable is not part of the cache key.
+
+6. Because the lambdas are all called only once, it is essential that no
+   variables which may change across calls are referenced **within** the
+   lambdas; instead, assuming these are values to be bound into the
+   SQL string, we use :func:`.bindparam` to construct named parameters,
+   where we apply their actual values later using :meth:`.Result.params`.
+
+Performance
+-----------
+
+The baked query probably looks a little odd, a little bit awkward and
+a little bit verbose.   However, the savings in
+Python performance for a query which is invoked lots of times in an
+application are very dramatic.   The example suite ``short_selects``
+demonstrated in :ref:`examples_performance` illustrates a comparison
+of queries which each return only one row, such as the following regular
+query::
+
+    session = Session(bind=engine)
+    for id_ in random.sample(ids, n):
+        session.query(Customer).filter(Customer.id == id_).one()
+
+compared to the equivalent "baked" query::
+
+    bakery = baked.bakery()
+    s = Session(bind=engine)
+    for id_ in random.sample(ids, n):
+        q = bakery(lambda s: s.query(Customer))
+        q += lambda q: q.filter(Customer.id == bindparam('id'))
+        q(s).params(id=id_).one()
+
+The difference in Python function call count for an iteration of 10000
+calls to each block are::
+
+    test_baked_query : test a baked query of the full entity.
+                       (10000 iterations); total fn calls 1951294
+
+    test_orm_query :   test a straight ORM query of the full entity.
+                       (10000 iterations); total fn calls 7900535
+
+In terms of number of seconds on a powerful laptop, this comes out as::
+
+    test_baked_query : test a baked query of the full entity.
+                       (10000 iterations); total time 2.174126 sec
+
+    test_orm_query :   test a straight ORM query of the full entity.
+                       (10000 iterations); total time 7.958516 sec
+
+Note that this test very intentionally features queries that only return one row.
+For queries that return many rows, the performance advantage of the baked query will have
+less and less of an impact, proportional to the time spent fetching rows.
+It is critical to keep in mind that the **baked query feature only applies to
+building the query itself, not the fetching of results**.  Using the
+baked feature is by no means a guarantee to a much faster application; it is
+only a potentially useful feature for those applications that have been measured
+as being impacted by this particular form of overhead.
+
+.. topic:: Measure twice, cut once
+
+    For background on how to profile a SQLAlchemy application, please see
+    the section :ref:`faq_performance`.  It is essential that performance
+    measurement techniques are used when attempting to improve the performance
+    of an application.
+
+
+Lazy Loading Integration
+------------------------
+
+The baked query can be integrated with SQLAlchemy's lazy loader feature
+transparently.   A future release of SQLAlchemy may enable this by default,
+as its use within lazy loading is completely transparent.    For now,
+to enable baked lazyloading for all lazyloaders systemwide, call upon
+the :func:`.bake_lazy_loaders` function.   This will impact all relationships
+that use the ``lazy='select'`` strategy as well as all use of the :func:`.lazyload`
+per-query strategy.
+
+"Baked" lazy loading may be enabled on a per-:func:`.relationship` basis
+using the ``baked_select`` loader strategy::
+
+    class MyClass(Base):
+        # ...
+
+        widgets = relationship("Widget", lazy="baked_select")
+
+The ``baked_select`` strategy is available once any part of the application
+has imported the ``sqlalchemy.ext.baked`` module.   The "bakery" used by
+this feature is local to the mapper for ``MyClass``.
+
+For per-query use, the :func:`.baked_lazyload` strategy may be used,
+which works like any other loader option.
+
+
+API Documentation
+-----------------
+
+.. autofunction:: bakery
+
+.. autoclass:: BakedQuery
+    :members:
+
+.. autoclass:: Result
+    :members:
+
+.. autofunction:: bake_lazy_loaders
+
+.. autofunction:: unbake_lazy_loaders
+
+.. autofunction:: baked_lazyload
+
+.. autofunction:: baked_lazyload_all
index f7f58e3814d36ae52ac93e41793e58e59902236c..091ceb40a5978e347b02f8c51570305260859e40 100644 (file)
@@ -17,6 +17,7 @@ behavior.   In particular the "Horizontal Sharding", "Hybrid Attributes", and
 
     associationproxy
     automap
+    baked
     declarative/index
     mutable
     orderinglist
index 88ae9b7dc6ce6d2a68a4e6001b2d3a19c64e6a3c..6264ae9f70ee392f3a93731ca03aec980eee3316 100644 (file)
@@ -6,7 +6,7 @@ profile and associated implications:
 * bulk inserts
 * individual inserts, with or without transactions
 * fetching large numbers of rows
-* running lots of small queries (TODO)
+* running lots of short queries
 
 All suites include a variety of use patterns illustrating both Core
 and ORM use, and are generally sorted in order of performance from worst
index 333fb9632354490771e71393efd7dc415dadb5b1..ef1fcff4aad94d79a43087ceba654e7faac49edf 100644 (file)
@@ -9,6 +9,7 @@ from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy import Column, Integer, String, create_engine, \
     bindparam, select
 from sqlalchemy.orm import Session, deferred
+from sqlalchemy.ext import baked
 import random
 
 Base = declarative_base()
@@ -69,6 +70,30 @@ def test_orm_query_cols_only(n):
         ).filter(Customer.id == id_).one()
 
 
+@Profiler.profile
+def test_baked_query(n):
+    """test a baked query of the full entity."""
+    bakery = baked.bakery()
+    s = Session(bind=engine)
+    for id_ in random.sample(ids, n):
+        q = bakery(lambda s: s.query(Customer))
+        q += lambda q: q.filter(Customer.id == bindparam('id'))
+        q(s).params(id=id_).one()
+
+
+@Profiler.profile
+def test_baked_query_cols_only(n):
+    """test a baked query of only the entity columns."""
+    bakery = baked.bakery()
+    s = Session(bind=engine)
+    for id_ in random.sample(ids, n):
+        q = bakery(
+            lambda s: s.query(
+                Customer.id, Customer.name, Customer.description))
+        q += lambda q: q.filter(Customer.id == bindparam('id'))
+        q(s).params(id=id_).one()
+
+
 @Profiler.profile
 def test_core_new_stmt_each_time(n):
     """test core, creating a new statement each time."""
index ff95c07d68d126d64a20365c0e397fddd8b58601..60a17c65e399786433443c00913ccecf1240d2df 100644 (file)
@@ -4,3 +4,8 @@
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .. import util as _sa_util
+
+_sa_util.dependencies.resolve_all("sqlalchemy.ext")
+
diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py
new file mode 100644 (file)
index 0000000..65d6a86
--- /dev/null
@@ -0,0 +1,499 @@
+# sqlalchemy/ext/baked.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Baked query extension.
+
+Provides a creational pattern for the :class:`.query.Query` object which
+allows the fully constructed object, Core select statement, and string
+compiled result to be fully cached.
+
+
+"""
+
+from ..orm.query import Query
+from ..orm import strategies, attributes, properties, \
+    strategy_options, util as orm_util, interfaces
+from .. import log as sqla_log
+from ..sql import util as sql_util
+from ..orm import exc as orm_exc
+from .. import exc as sa_exc
+from .. import util
+
+import copy
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class BakedQuery(object):
+    """A builder object for :class:`.query.Query` objects."""
+
+    __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'
+
+    def __init__(self, bakery, initial_fn, args=()):
+        if args:
+            self._cache_key = tuple(args)
+        else:
+            self._cache_key = ()
+        self._update_cache_key(initial_fn)
+        self.steps = [initial_fn]
+        self._spoiled = False
+        self._bakery = bakery
+
+    @classmethod
+    def bakery(cls, size=200):
+        """Construct a new bakery."""
+
+        _bakery = util.LRUCache(size)
+
+        def call(initial_fn):
+            return cls(_bakery, initial_fn)
+
+        return call
+
+    def _clone(self):
+        b1 = BakedQuery.__new__(BakedQuery)
+        b1._cache_key = self._cache_key
+        b1.steps = list(self.steps)
+        b1._bakery = self._bakery
+        b1._spoiled = self._spoiled
+        return b1
+
+    def _update_cache_key(self, fn, args=()):
+        self._cache_key += (fn.__code__,) + args
+
+    def __iadd__(self, other):
+        if isinstance(other, tuple):
+            self.add_criteria(*other)
+        else:
+            self.add_criteria(other)
+        return self
+
+    def __add__(self, other):
+        if isinstance(other, tuple):
+            return self.with_criteria(*other)
+        else:
+            return self.with_criteria(other)
+
+    def add_criteria(self, fn, *args):
+        """Add a criteria function to this :class:`.BakedQuery`.
+
+        This is equivalent to using the ``+=`` operator to
+        modify a :class:`.BakedQuery` in-place.
+
+        """
+        self._update_cache_key(fn, args)
+        self.steps.append(fn)
+        return self
+
+    def with_criteria(self, fn, *args):
+        """Add a criteria function to a :class:`.BakedQuery` cloned from this one.
+
+        This is equivalent to using the ``+`` operator to
+        produce a new :class:`.BakedQuery` with modifications.
+
+        """
+        return self._clone().add_criteria(fn, *args)
+
+    def for_session(self, session):
+        """Return a :class:`.Result` object for this :class:`.BakedQuery`.
+
+        This is equivalent to calling the :class:`.BakedQuery` as a
+        Python callable, e.g. ``result = my_baked_query(session)``.
+
+        """
+        return Result(self, session)
+
+    def __call__(self, session):
+        return self.for_session(session)
+
+    def spoil(self, full=False):
+        """Cancel any query caching that will occur on this BakedQuery object.
+
+        The BakedQuery can continue to be used normally, however additional
+        creational functions will not be cached; they will be called
+        on every invocation.
+
+        This is to support the case where a particular step in constructing
+        a baked query disqualifies the query from being cacheable, such
+        as a variant that relies upon some uncacheable value.
+
+        :param full: if False, only functions added to this
+         :class:`.BakedQuery` object subsequent to the spoil step will be
+         non-cached; the state of the :class:`.BakedQuery` up until
+         this point will be pulled from the cache.   If True, then the
+         entire :class:`.Query` object is built from scratch each
+         time, with all creational functions being called on each
+         invocation.
+
+        """
+        if not full:
+            _spoil_point = self._clone()
+            _spoil_point._cache_key += ('_query_only', )
+            self.steps = [_spoil_point._retrieve_baked_query]
+        self._spoiled = True
+        return self
+
+    def _retrieve_baked_query(self, session):
+        query = self._bakery.get(self._cache_key, None)
+        if query is None:
+            query = self._as_query(session)
+            self._bakery[self._cache_key] = query.with_session(None)
+        return query.with_session(session)
+
+    def _bake(self, session):
+        query = self._as_query(session)
+
+        context = query._compile_context()
+        self._bake_subquery_loaders(session, context)
+        context.session = None
+        context.query = query = context.query.with_session(None)
+        query._execution_options = query._execution_options.union(
+            {"compiled_cache": self._bakery}
+        )
+        # we'll be holding onto the query for some of its state,
+        # so delete some compilation-use-only attributes that can take up
+        # space
+        for attr in (
+                '_correlate', '_from_obj', '_mapper_adapter_map',
+                '_joinpath', '_joinpoint'):
+            query.__dict__.pop(attr, None)
+        self._bakery[self._cache_key] = context
+        return context
+
+    def _as_query(self, session):
+        query = self.steps[0](session)
+
+        for step in self.steps[1:]:
+            query = step(query)
+        return query
+
+    def _bake_subquery_loaders(self, session, context):
+        """convert subquery eager loaders in the cache into baked queries.
+
+        For subquery eager loading to work, all we need here is that the
+        Query point to the correct session when it is run.  However, since
+        we are "baking" anyway, we may as well also turn the query into
+        a "baked" query so that we save on performance too.
+
+        """
+        context.attributes['baked_queries'] = baked_queries = []
+        for k, v in list(context.attributes.items()):
+            if isinstance(v, Query):
+                if 'subquery' in k:
+                    bk = BakedQuery(self._bakery, lambda *args: v)
+                    bk._cache_key = self._cache_key + k
+                    bk._bake(session)
+                    baked_queries.append((k, bk._cache_key, v))
+                del context.attributes[k]
+
+    def _unbake_subquery_loaders(self, session, context, params):
+        """Retrieve subquery eager loaders stored by _bake_subquery_loaders
+        and turn them back into Result objects that will iterate just
+        like a Query object.
+
+        """
+        for k, cache_key, query in context.attributes["baked_queries"]:
+            bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess))
+            bk._cache_key = cache_key
+            context.attributes[k] = bk.for_session(session).params(**params)
+
+
+class Result(object):
+    """Invokes a :class:`.BakedQuery` against a :class:`.Session`.
+
+    The :class:`.Result` object is where the actual :class:`.query.Query`
+    object gets created, or retrieved from the cache,
+    against a target :class:`.Session`, and is then invoked for results.
+
+    """
+    __slots__ = 'bq', 'session', '_params'
+
+    def __init__(self, bq, session):
+        self.bq = bq
+        self.session = session
+        self._params = {}
+
+    def params(self, *args, **kw):
+        """Specify parameters to be replaced into the string SQL statement."""
+
+        if len(args) == 1:
+            kw.update(args[0])
+        elif len(args) > 0:
+            raise sa_exc.ArgumentError(
+                "params() takes zero or one positional argument, "
+                "which is a dictionary.")
+        self._params.update(kw)
+        return self
+
+    def _as_query(self):
+        return self.bq._as_query(self.session).params(self._params)
+
+    def __str__(self):
+        return str(self._as_query())
+
+    def __iter__(self):
+        bq = self.bq
+        if bq._spoiled:
+            return iter(self._as_query())
+
+        baked_context = bq._bakery.get(bq._cache_key, None)
+        if baked_context is None:
+            baked_context = bq._bake(self.session)
+
+        context = copy.copy(baked_context)
+        context.session = self.session
+        context.attributes = context.attributes.copy()
+
+        bq._unbake_subquery_loaders(self.session, context, self._params)
+
+        context.statement.use_labels = True
+        if context.autoflush and not context.populate_existing:
+            self.session._autoflush()
+        return context.query.params(self._params).\
+            with_session(self.session)._execute_and_instances(context)
+
+    def first(self):
+        """Return the first row.
+
+        Equivalent to :meth:`.Query.first`.
+
+        """
+        bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
+        ret = list(bq.for_session(self.session).params(self._params))
+        if len(ret) > 0:
+            return ret[0]
+        else:
+            return None
+
+    def one(self):
+        """Return exactly one result or raise an exception.
+
+        Equivalent to :meth:`.Query.one`.
+
+        """
+        ret = list(self)
+
+        l = len(ret)
+        if l == 1:
+            return ret[0]
+        elif l == 0:
+            raise orm_exc.NoResultFound("No row was found for one()")
+        else:
+            raise orm_exc.MultipleResultsFound(
+                "Multiple rows were found for one()")
+
+    def all(self):
+        """Return all rows.
+
+        Equivalent to :meth:`.Query.all`.
+
+        """
+        return list(self)
+
+    def get(self, ident):
+        """Retrieve an object based on identity.
+
+        Equivalent to :meth:`.Query.get`.
+
+        """
+
+        query = self.bq.steps[0](self.session)
+        return query._get_impl(ident, self._load_on_ident)
+
+    def _load_on_ident(self, query, key):
+        """Load the given identity key from the database."""
+
+        ident = key[1]
+
+        mapper = query._mapper_zero()
+
+        _get_clause, _get_params = mapper._get_clause
+
+        def setup(query):
+            _lcl_get_clause = _get_clause
+            q = query._clone()
+            q._get_condition()
+            q._order_by = None
+
+            # None present in ident - turn those comparisons
+            # into "IS NULL"
+            if None in ident:
+                nones = set([
+                    _get_params[col].key for col, value in
+                    zip(mapper.primary_key, ident) if value is None
+                ])
+                _lcl_get_clause = sql_util.adapt_criterion_to_null(
+                    _lcl_get_clause, nones)
+
+            _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
+            q._criterion = _lcl_get_clause
+            return q
+
+        # cache the query against a key that includes
+        # which positions in the primary key are NULL
+        # (remember, we can map to an OUTER JOIN)
+        bq = self.bq
+
+        bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))
+
+        params = dict([
+            (_get_params[primary_key].key, id_val)
+            for id_val, primary_key in zip(ident, mapper.primary_key)
+        ])
+
+        result = list(bq.for_session(self.session).params(**params))
+        l = len(result)
+        if l > 1:
+            raise orm_exc.MultipleResultsFound()
+        elif l:
+            return result[0]
+        else:
+            return None
+
+
+def bake_lazy_loaders():
+    """Enable the use of baked queries for all lazyloaders systemwide.
+
+    This operation should be safe for all lazy loaders, and will reduce
+    Python overhead for these operations.
+
+    """
+    strategies.LazyLoader._strategy_keys[:] = []
+    BakedLazyLoader._strategy_keys[:] = []
+
+    properties.RelationshipProperty.strategy_for(
+        lazy="select")(BakedLazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy=True)(BakedLazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy="baked_select")(BakedLazyLoader)
+
+
+def unbake_lazy_loaders():
+    """Disable the use of baked queries for all lazyloaders systemwide.
+
+    This operation reverts the changes produced by :func:`.bake_lazy_loaders`.
+
+    """
+    strategies.LazyLoader._strategy_keys[:] = []
+    BakedLazyLoader._strategy_keys[:] = []
+
+    properties.RelationshipProperty.strategy_for(
+        lazy="select")(strategies.LazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy=True)(strategies.LazyLoader)
+    properties.RelationshipProperty.strategy_for(
+        lazy="baked_select")(BakedLazyLoader)
+    assert strategies.LazyLoader._strategy_keys
+
+
+@sqla_log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="baked_select")
+class BakedLazyLoader(strategies.LazyLoader):
+
+    def _emit_lazyload(self, session, state, ident_key, passive):
+        q = BakedQuery(
+            self.mapper._compiled_cache,
+            lambda session: session.query(self.mapper))
+        q.add_criteria(
+            lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
+            self.parent_property)
+
+        if not self.parent_property.bake_queries:
+            q.spoil(full=True)
+
+        if self.parent_property.secondary is not None:
+            q.add_criteria(
+                lambda q:
+                q.select_from(self.mapper, self.parent_property.secondary))
+
+        pending = not state.key
+
+        # don't autoflush on pending
+        if pending or passive & attributes.NO_AUTOFLUSH:
+            q.add_criteria(lambda q: q.autoflush(False))
+
+        if state.load_path:
+            q.spoil()
+            q.add_criteria(
+                lambda q:
+                q._with_current_path(state.load_path[self.parent_property]))
+
+        if state.load_options:
+            q.spoil()
+            q.add_criteria(
+                lambda q: q._conditional_options(*state.load_options))
+
+        if self.use_get:
+            return q(session)._load_on_ident(
+                session.query(self.mapper), ident_key)
+
+        if self.parent_property.order_by:
+            q.add_criteria(
+                lambda q:
+                q.order_by(*util.to_list(self.parent_property.order_by)))
+
+        for rev in self.parent_property._reverse_property:
+            # reverse props that are MANYTOONE are loading *this*
+            # object from get(), so don't need to eager out to those.
+            if rev.direction is interfaces.MANYTOONE and \
+                rev._use_get and \
+                    not isinstance(rev.strategy, strategies.LazyLoader):
+                q.add_criteria(
+                    lambda q:
+                    q.options(
+                        strategy_options.Load(
+                            rev.parent).baked_lazyload(rev.key)))
+
+        lazy_clause, params = self._generate_lazy_clause(state, passive)
+
+        if pending:
+            if orm_util._none_set.intersection(params.values()):
+                return None
+
+        q.add_criteria(lambda q: q.filter(lazy_clause))
+        result = q(session).params(**params).all()
+        if self.uselist:
+            return result
+        else:
+            l = len(result)
+            if l:
+                if l > 1:
+                    util.warn(
+                        "Multiple rows returned with "
+                        "uselist=False for lazily-loaded attribute '%s' "
+                        % self.parent_property)
+
+                return result[0]
+            else:
+                return None
+
+
+@strategy_options.loader_option()
+def baked_lazyload(loadopt, attr):
+    """Indicate that the given attribute should be loaded using "lazy"
+    loading with a "baked" query used in the load.
+
+    """
+    return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
+
+
+@baked_lazyload._add_unbound_fn
+def baked_lazyload(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
+
+
+@baked_lazyload._add_unbound_all_fn
+def baked_lazyload_all(*keys):
+    return strategy_options._UnboundLoad._from_keys(
+        strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
+
+baked_lazyload = baked_lazyload._unbound_fn
+baked_lazyload_all = baked_lazyload_all._unbound_all_fn
+
+bakery = BakedQuery.bakery
index 01981f26f9a490916ee76b680932299a806b18aa..c259878f0b8f6ea1170f3456f7b7573fc8cc2e7b 100644 (file)
@@ -329,8 +329,7 @@ def _is_mapped_class(entity):
     return insp is not None and \
         not insp.is_clause_element and \
         (
-            insp.is_mapper
-            or insp.is_aliased_class
+            insp.is_mapper or insp.is_aliased_class
         )
 
 
index 39bc53adb85082a9fa0b88ed7be45cc9574a0b1e..6cc613baa73ad0aa5cba180620396024a7e84846 100644 (file)
@@ -27,6 +27,7 @@ from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
 from .base import (InspectionAttr, InspectionAttr,
     InspectionAttrInfo, _MappedAttribute)
 import collections
+from .. import inspect
 
 # imported later
 MapperExtension = SessionExtension = AttributeExtension = None
@@ -333,11 +334,11 @@ class PropComparator(operators.ColumnOperators):
 
     """
 
-    __slots__ = 'prop', 'property', '_parentmapper', '_adapt_to_entity'
+    __slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity'
 
     def __init__(self, prop, parentmapper, adapt_to_entity=None):
         self.prop = self.property = prop
-        self._parentmapper = parentmapper
+        self._parententity = parentmapper
         self._adapt_to_entity = adapt_to_entity
 
     def __clause_element__(self):
@@ -350,7 +351,13 @@ class PropComparator(operators.ColumnOperators):
         """Return a copy of this PropComparator which will use the given
         :class:`.AliasedInsp` to produce corresponding expressions.
         """
-        return self.__class__(self.prop, self._parentmapper, adapt_to_entity)
+        return self.__class__(self.prop, self._parententity, adapt_to_entity)
+
+    @property
+    def _parentmapper(self):
+        """Legacy accessor; the attribute is now stored as
+        _parententity for compatibility with QueryableAttribute."""
+        return inspect(self._parententity).mapper
 
     @property
     def adapter(self):
@@ -523,7 +530,9 @@ class StrategizedProperty(MapperProperty):
     @classmethod
     def strategy_for(cls, **kw):
         def decorate(dec_cls):
-            if not hasattr(dec_cls, '_strategy_keys'):
+            # ensure each subclass of the strategy has its
+            # own _strategy_keys collection
+            if '_strategy_keys' not in dec_cls.__dict__:
                 dec_cls._strategy_keys = []
             key = tuple(sorted(kw.items()))
             cls._all_strategies[cls][key] = dec_cls
index 238ac83a9c9bd089840fc67973e60e25c2c194d0..5694f72558cb3f9ba955e9efebdedf2665c1364c 100644 (file)
@@ -246,8 +246,8 @@ class ColumnProperty(StrategizedProperty):
                 return self.adapter(self.prop.columns[0])
             else:
                 return self.prop.columns[0]._annotate({
-                    "parententity": self._parentmapper,
-                    "parentmapper": self._parentmapper})
+                    "parententity": self._parententity,
+                    "parentmapper": self._parententity})
 
         def _memoized_attr_info(self):
             ce = self.__clause_element__()
index c6fdf479ed3d12b311dbb3868cc8fb6c597c4ce3..05349cf0bf3c640a8fd97f2f3bcc753fbdee6cc5 100644 (file)
@@ -26,7 +26,7 @@ from . import (
     exc as orm_exc, loading
 )
 from .base import _entity_descriptor, _is_aliased_class, \
-    _is_mapped_class, _orm_columns, _generative
+    _is_mapped_class, _orm_columns, _generative, InspectionAttr
 from .path_registry import PathRegistry
 from .util import (
     AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
@@ -831,7 +831,9 @@ class Query(object):
         :return: The object instance, or ``None``.
 
         """
+        return self._get_impl(ident, loading.load_on_ident)
 
+    def _get_impl(self, ident, fallback_fn):
         # convert composite types to individual args
         if hasattr(ident, '__composite_values__'):
             ident = ident.__composite_values__()
@@ -862,7 +864,7 @@ class Query(object):
                     return None
                 return instance
 
-        return loading.load_on_ident(self, key)
+        return fallback_fn(self, key)
 
     @_generative()
     def correlate(self, *args):
@@ -3332,7 +3334,7 @@ class _MapperEntity(_QueryEntity):
 
 
 @inspection._self_inspects
-class Bundle(object):
+class Bundle(InspectionAttr):
     """A grouping of SQL expressions that are returned by a :class:`.Query`
     under one namespace.
 
@@ -3528,14 +3530,20 @@ class _ColumnEntity(_QueryEntity):
     def __init__(self, query, column, namespace=None):
         self.expr = column
         self.namespace = namespace
+        search_entities = True
 
         if isinstance(column, util.string_types):
             column = sql.literal_column(column)
             self._label_name = column.name
+            search_entities = False
+            _entity = None
         elif isinstance(column, (
             attributes.QueryableAttribute,
             interfaces.PropComparator
         )):
+            _entity = column._parententity
+            if _entity is not None:
+                search_entities = False
             self._label_name = column.key
             column = column._query_clause_element()
             if isinstance(column, Bundle):
@@ -3558,6 +3566,7 @@ class _ColumnEntity(_QueryEntity):
             )
         else:
             self._label_name = getattr(column, 'key', None)
+            search_entities = True
 
         self.type = type_ = column.type
         if type_.hashable:
@@ -3588,30 +3597,38 @@ class _ColumnEntity(_QueryEntity):
         # leaking out their entities into the main select construct
         self.actual_froms = actual_froms = set(column._from_objects)
 
-        all_elements = [
-            elem for elem in visitors.iterate(column, {})
-            if 'parententity' in elem._annotations
-        ]
-
-        self.entities = util.unique_list([
-            elem._annotations['parententity']
-            for elem in all_elements
-            if 'parententity' in elem._annotations
-        ])
-
-        self._from_entities = set([
-            elem._annotations['parententity']
-            for elem in all_elements
-            if 'parententity' in elem._annotations
-            and actual_froms.intersection(elem._from_objects)
-        ])
-
-        if self.entities:
-            self.entity_zero = self.entities[0]
-        elif self.namespace is not None:
-            self.entity_zero = self.namespace
+        if not search_entities:
+            self.entity_zero = _entity
+            if _entity:
+                self.entities = [_entity]
+            else:
+                self.entities = []
+            self._from_entities = set(self.entities)
         else:
-            self.entity_zero = None
+            all_elements = [
+                elem for elem in visitors.iterate(column, {})
+                if 'parententity' in elem._annotations
+            ]
+
+            self.entities = util.unique_list([
+                elem._annotations['parententity']
+                for elem in all_elements
+                if 'parententity' in elem._annotations
+            ])
+
+            self._from_entities = set([
+                elem._annotations['parententity']
+                for elem in all_elements
+                if 'parententity' in elem._annotations
+                and actual_froms.intersection(elem._from_objects)
+            ])
+
+            if self.entities:
+                self.entity_zero = self.entities[0]
+            elif self.namespace is not None:
+                self.entity_zero = self.namespace
+            else:
+                self.entity_zero = None
 
     supports_single_entity = False
 
@@ -3673,10 +3690,15 @@ class _ColumnEntity(_QueryEntity):
 
 
 class QueryContext(object):
-    multi_row_eager_loaders = False
-    adapter = None
-    froms = ()
-    for_update = None
+    __slots__ = (
+        'multi_row_eager_loaders', 'adapter', 'froms', 'for_update',
+        'query', 'session', 'autoflush', 'populate_existing',
+        'invoke_all_eagers', 'version_check', 'refresh_state',
+        'primary_columns', 'secondary_columns', 'eager_order_by',
+        'eager_joins', 'create_eager_joins', 'propagate_options',
+        'attributes', 'statement', 'from_clause', 'whereclause',
+        'order_by', 'labels', '_for_update_arg', 'runid', 'partials'
+    )
 
     def __init__(self, query):
 
@@ -3693,8 +3715,13 @@ class QueryContext(object):
             self.whereclause = query._criterion
             self.order_by = query._order_by
 
+        self.multi_row_eager_loaders = False
+        self.adapter = None
+        self.froms = ()
+        self.for_update = None
         self.query = query
         self.session = query.session
+        self.autoflush = query._autoflush
         self.populate_existing = query._populate_existing
         self.invoke_all_eagers = query._invoke_all_eagers
         self.version_check = query._version_check
index afd524f7b3e81c75db17b38a9b135fb412d1fb00..e36a644da9b8fbd4f8c377c0738c611d5b290605 100644 (file)
@@ -23,7 +23,7 @@ from . import attributes
 from ..sql.util import (
     ClauseAdapter,
     join_condition, _shallow_annotate, visit_binary_product,
-    _deep_deannotate, selectables_overlap
+    _deep_deannotate, selectables_overlap, adapt_criterion_to_null
 )
 from ..sql import operators, expression, visitors
 from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY,
@@ -113,6 +113,7 @@ class RelationshipProperty(StrategizedProperty):
                  active_history=False,
                  cascade_backrefs=True,
                  load_on_pending=False,
+                 bake_queries=True,
                  strategy_class=None, _local_remote_pairs=None,
                  query_class=None,
                  info=None):
@@ -274,6 +275,15 @@ class RelationshipProperty(StrategizedProperty):
             :paramref:`~.relationship.backref` - alternative form
             of backref specification.
 
+        :param bake_queries:
+          Use the :class:`.BakedQuery` cache to cache queries used in lazy
+          loads.  True by default, as this typically improves performance
+          significantly.  Set to False to reduce ORM memory use, or
+          if unresolved stability issues are observed with the baked query
+          cache system.
+
+          .. versionadded:: 1.0.0
+
         :param cascade:
           a comma-separated list of cascade rules which determines how
           Session operations should be "cascaded" from parent to child.
@@ -802,6 +812,7 @@ class RelationshipProperty(StrategizedProperty):
         self.join_depth = join_depth
         self.local_remote_pairs = _local_remote_pairs
         self.extension = extension
+        self.bake_queries = bake_queries
         self.load_on_pending = load_on_pending
         self.comparator_factory = comparator_factory or \
             RelationshipProperty.Comparator
@@ -873,13 +884,13 @@ class RelationshipProperty(StrategizedProperty):
 
             """
             self.prop = prop
-            self._parentmapper = parentmapper
+            self._parententity = parentmapper
             self._adapt_to_entity = adapt_to_entity
             if of_type:
                 self._of_type = of_type
 
         def adapt_to_entity(self, adapt_to_entity):
-            return self.__class__(self.property, self._parentmapper,
+            return self.__class__(self.property, self._parententity,
                                   adapt_to_entity=adapt_to_entity,
                                   of_type=self._of_type)
 
@@ -931,7 +942,7 @@ class RelationshipProperty(StrategizedProperty):
             """
             return RelationshipProperty.Comparator(
                 self.property,
-                self._parentmapper,
+                self._parententity,
                 adapt_to_entity=self._adapt_to_entity,
                 of_type=cls)
 
@@ -1315,16 +1326,69 @@ class RelationshipProperty(StrategizedProperty):
         return self._optimized_compare(
             instance, value_is_parent=True, alias_secondary=alias_secondary)
 
-    def _optimized_compare(self, value, value_is_parent=False,
+    def _optimized_compare(self, state, value_is_parent=False,
                            adapt_source=None,
                            alias_secondary=True):
-        if value is not None:
-            value = attributes.instance_state(value)
-        return self._lazy_strategy.lazy_clause(
-            value,
-            reverse_direction=not value_is_parent,
-            alias_secondary=alias_secondary,
-            adapt_source=adapt_source)
+        if state is not None:
+            state = attributes.instance_state(state)
+
+        reverse_direction = not value_is_parent
+
+        if state is None:
+            return self._lazy_none_clause(
+                reverse_direction,
+                adapt_source=adapt_source)
+
+        if not reverse_direction:
+            criterion, bind_to_col = \
+                self._lazy_strategy._lazywhere, \
+                self._lazy_strategy._bind_to_col
+        else:
+            criterion, bind_to_col = \
+                self._lazy_strategy._rev_lazywhere, \
+                self._lazy_strategy._rev_bind_to_col
+
+        if reverse_direction:
+            mapper = self.mapper
+        else:
+            mapper = self.parent
+
+        dict_ = attributes.instance_dict(state.obj())
+
+        def visit_bindparam(bindparam):
+            if bindparam._identifying_key in bind_to_col:
+                bindparam.callable = \
+                    lambda: mapper._get_state_attr_by_column(
+                        state, dict_,
+                        bind_to_col[bindparam._identifying_key])
+
+        if self.secondary is not None and alias_secondary:
+            criterion = ClauseAdapter(
+                self.secondary.alias()).\
+                traverse(criterion)
+
+        criterion = visitors.cloned_traverse(
+            criterion, {}, {'bindparam': visit_bindparam})
+
+        if adapt_source:
+            criterion = adapt_source(criterion)
+        return criterion
+
+    def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
+        if not reverse_direction:
+            criterion, bind_to_col = \
+                self._lazy_strategy._lazywhere, \
+                self._lazy_strategy._bind_to_col
+        else:
+            criterion, bind_to_col = \
+                self._lazy_strategy._rev_lazywhere, \
+                self._lazy_strategy._rev_bind_to_col
+
+        criterion = adapt_criterion_to_null(criterion, bind_to_col)
+
+        if adapt_source:
+            criterion = adapt_source(criterion)
+        return criterion
 
     def __str__(self):
         return str(self.parent.class_.__name__) + "." + self.key
index 6116353331eb9163a811f68d723f5eac7c6b0d98..0b2672d66249f377ad989b074796624488d0c607 100644 (file)
@@ -353,7 +353,7 @@ class NoLoader(AbstractRelationshipLoader):
 @log.class_logger
 @properties.RelationshipProperty.strategy_for(lazy=True)
 @properties.RelationshipProperty.strategy_for(lazy="select")
-class LazyLoader(AbstractRelationshipLoader):
+class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
     """Provide loading behavior for a :class:`.RelationshipProperty`
     with "lazy=True", that is loads when first accessed.
 
@@ -421,78 +421,54 @@ class LazyLoader(AbstractRelationshipLoader):
             active_history=active_history
         )
 
-    def lazy_clause(
-            self, state, reverse_direction=False,
-            alias_secondary=False,
-            adapt_source=None,
-            passive=None):
-        if state is None:
-            return self._lazy_none_clause(
-                reverse_direction,
-                adapt_source=adapt_source)
-
-        if not reverse_direction:
-            criterion, bind_to_col = \
-                self._lazywhere, \
-                self._bind_to_col
-        else:
-            criterion, bind_to_col = \
-                self._rev_lazywhere, \
-                self._rev_bind_to_col
+    def _memoized_attr__simple_lazy_clause(self):
+        criterion, bind_to_col = (
+            self._lazywhere,
+            self._bind_to_col
+        )
 
-        if reverse_direction:
-            mapper = self.parent_property.mapper
-        else:
-            mapper = self.parent_property.parent
+        params = []
 
-        o = state.obj()  # strong ref
-        dict_ = attributes.instance_dict(o)
+        def visit_bindparam(bindparam):
+            bindparam.unique = False
+            if bindparam._identifying_key in bind_to_col:
+                params.append((
+                    bindparam.key, bind_to_col[bindparam._identifying_key],
+                    None))
+            else:
+                params.append((bindparam.key, None, bindparam.value))
+
+        criterion = visitors.cloned_traverse(
+            criterion, {}, {'bindparam': visit_bindparam}
+        )
 
-        # use the "committed state" only if we're in a flush
-        # for this state.
+        return criterion, params
 
-        if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
-            def visit_bindparam(bindparam):
-                if bindparam._identifying_key in bind_to_col:
-                    bindparam.callable = \
-                        lambda: mapper._get_committed_state_attr_by_column(
-                            state, dict_,
-                            bind_to_col[bindparam._identifying_key])
-        else:
-            def visit_bindparam(bindparam):
-                if bindparam._identifying_key in bind_to_col:
-                    bindparam.callable = \
-                        lambda: mapper._get_state_attr_by_column(
-                            state, dict_,
-                            bind_to_col[bindparam._identifying_key])
-
-        if self.parent_property.secondary is not None and alias_secondary:
-            criterion = sql_util.ClauseAdapter(
-                self.parent_property.secondary.alias()).\
-                traverse(criterion)
+    def _generate_lazy_clause(self, state, passive):
+        criterion, param_keys = self._simple_lazy_clause
 
-        criterion = visitors.cloned_traverse(
-            criterion, {}, {'bindparam': visit_bindparam})
+        if state is None:
+            return sql_util.adapt_criterion_to_null(
+                criterion, [key for key, ident, value in param_keys])
 
-        if adapt_source:
-            criterion = adapt_source(criterion)
-        return criterion
+        mapper = self.parent_property.parent
 
-    def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
-        if not reverse_direction:
-            criterion, bind_to_col = \
-                self._lazywhere, \
-                self._bind_to_col
-        else:
-            criterion, bind_to_col = \
-                self._rev_lazywhere, \
-                self._rev_bind_to_col
+        o = state.obj()  # strong ref
+        dict_ = attributes.instance_dict(o)
+
+        params = {}
+        for key, ident, value in param_keys:
+            if ident is not None:
+                if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
+                    value = mapper._get_committed_state_attr_by_column(
+                        state, dict_, ident)
+                else:
+                    value = mapper._get_state_attr_by_column(
+                        state, dict_, ident)
 
-        criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)
+            params[key] = value
 
-        if adapt_source:
-            criterion = adapt_source(criterion)
-        return criterion
+        return criterion, params
 
     def _load_for_state(self, state, passive):
         if not state.key and (
@@ -569,10 +545,9 @@ class LazyLoader(AbstractRelationshipLoader):
 
     @util.dependencies("sqlalchemy.orm.strategy_options")
     def _emit_lazyload(
-            self, strategy_options, session, state,
-            ident_key, passive):
-        q = session.query(self.mapper)._adapt_all_clauses()
+            self, strategy_options, session, state, ident_key, passive):
 
+        q = session.query(self.mapper)._adapt_all_clauses()
         if self.parent_property.secondary is not None:
             q = q.select_from(self.mapper, self.parent_property.secondary)
 
@@ -603,17 +578,15 @@ class LazyLoader(AbstractRelationshipLoader):
                 rev._use_get and \
                     not isinstance(rev.strategy, LazyLoader):
                 q = q.options(
-                    strategy_options.Load(rev.parent).
-                    lazyload(rev.key))
+                    strategy_options.Load(rev.parent).lazyload(rev.key))
 
-        lazy_clause = self.lazy_clause(state, passive=passive)
+        lazy_clause, params = self._generate_lazy_clause(
+            state, passive=passive)
 
-        if pending:
-            bind_values = sql_util.bind_values(lazy_clause)
-            if orm_util._none_set.intersection(bind_values):
-                return None
+        if pending and orm_util._none_set.intersection(params.values()):
+            return None
 
-        q = q.filter(lazy_clause)
+        q = q.filter(lazy_clause).params(params)
 
         result = q.all()
         if self.uselist:
@@ -646,7 +619,7 @@ class LazyLoader(AbstractRelationshipLoader):
             # class-level lazyloader installed.
             set_lazy_callable = InstanceState._instance_level_callable_processor(
                 mapper.class_manager,
-                LoadLazyAttribute(key), key)
+                LoadLazyAttribute(key, self._strategy_keys[0]), key)
 
             populators["new"].append((self.key, set_lazy_callable))
         elif context.populate_existing or mapper.always_refresh:
@@ -667,14 +640,15 @@ class LazyLoader(AbstractRelationshipLoader):
 class LoadLazyAttribute(object):
     """serializable loader object used by LazyLoader"""
 
-    def __init__(self, key):
+    def __init__(self, key, strategy_key=(('lazy', 'select'),)):
         self.key = key
+        self.strategy_key = strategy_key
 
     def __call__(self, state, passive=attributes.PASSIVE_OFF):
         key = self.key
         instance_mapper = state.manager.mapper
         prop = instance_mapper._props[key]
-        strategy = prop._strategies[LazyLoader]
+        strategy = prop._strategies[self.strategy_key]
 
         return strategy._load_for_state(state, passive)
 
@@ -1029,6 +1003,12 @@ class SubqueryLoader(AbstractRelationshipLoader):
         if subq is None:
             return
 
+        assert subq.session is context.session, (
+            "Subquery session doesn't refer to that of "
+            "our context.  Are there broken context caching "
+            "schemes being used?"
+        )
+
         local_cols = self.parent_property.local_columns
 
         # cache the loaded collections in the context
diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py
new file mode 100644 (file)
index 0000000..61d0fe1
--- /dev/null
@@ -0,0 +1,768 @@
+from sqlalchemy.orm import Session, subqueryload, \
+    mapper, relationship, lazyload, clear_mappers
+from sqlalchemy.testing import eq_, is_, is_not_, assert_raises
+from sqlalchemy import testing
+from test.orm import _fixtures
+from sqlalchemy.ext.baked import BakedQuery, baked_lazyload, BakedLazyLoader
+from sqlalchemy.ext import baked
+from sqlalchemy import bindparam, func
+from sqlalchemy.orm import exc as orm_exc
+import itertools
+from sqlalchemy.testing import mock
+
+
+class BakedTest(_fixtures.FixtureTest):
+    run_setup_mappers = 'once'
+    run_inserts = 'once'
+    run_deletes = None
+
+    def setup(self):
+        self.bakery = baked.bakery()
+
+
+class StateChangeTest(BakedTest):
+    @classmethod
+    def setup_mappers(cls):
+        User = cls.classes.User
+
+        mapper(User, cls.tables.users)
+
+    def _assert_cache_key(self, key, elements):
+        eq_(
+            key,
+            tuple(elem.__code__ for elem in elements)
+        )
+
+    def test_initial_key(self):
+        User = self.classes.User
+        session = Session()
+        l1 = lambda: session.query(User)
+        q1 = self.bakery(l1)
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1]
+        )
+        eq_(q1.steps, [l1])
+
+    def test_inplace_add(self):
+        User = self.classes.User
+        session = Session()
+        l1 = lambda: session.query(User)
+        l2 = lambda q: q.filter(User.name == bindparam('name'))
+        q1 = self.bakery(l1)
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1]
+        )
+        eq_(q1.steps, [l1])
+
+        q2 = q1.add_criteria(l2)
+        is_(q2, q1)
+
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1, l2]
+        )
+        eq_(q1.steps, [l1, l2])
+
+    def test_inplace_add_operator(self):
+        User = self.classes.User
+        session = Session()
+        l1 = lambda: session.query(User)
+        l2 = lambda q: q.filter(User.name == bindparam('name'))
+        q1 = self.bakery(l1)
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1]
+        )
+
+        q1 += l2
+
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1, l2]
+        )
+
+    def test_chained_add(self):
+        User = self.classes.User
+        session = Session()
+        l1 = lambda: session.query(User)
+        l2 = lambda q: q.filter(User.name == bindparam('name'))
+        q1 = self.bakery(l1)
+
+        q2 = q1.with_criteria(l2)
+        is_not_(q2, q1)
+
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1]
+        )
+        self._assert_cache_key(
+            q2._cache_key,
+            [l1, l2]
+        )
+
+    def test_chained_add_operator(self):
+        User = self.classes.User
+        session = Session()
+        l1 = lambda: session.query(User)
+        l2 = lambda q: q.filter(User.name == bindparam('name'))
+        q1 = self.bakery(l1)
+
+        q2 = q1 + l2
+        is_not_(q2, q1)
+
+        self._assert_cache_key(
+            q1._cache_key,
+            [l1]
+        )
+        self._assert_cache_key(
+            q2._cache_key,
+            [l1, l2]
+        )
+
+
+class LikeQueryTest(BakedTest):
+    @classmethod
+    def setup_mappers(cls):
+        User = cls.classes.User
+
+        mapper(User, cls.tables.users)
+
+    def test_first_no_result(self):
+        User = self.classes.User
+
+        bq = self.bakery(lambda s: s.query(User))
+        bq += lambda q: q.filter(User.name == 'asdf')
+
+        eq_(
+            bq(Session()).first(),
+            None
+        )
+
+    def test_first_multiple_result(self):
+        User = self.classes.User
+
+        bq = self.bakery(lambda s: s.query(User.id))
+        bq += lambda q: q.filter(User.name.like('%ed%')).order_by(User.id)
+
+        eq_(
+            bq(Session()).first(),
+            (8, )
+        )
+
+    def test_one_no_result(self):
+        User = self.classes.User
+
+        bq = self.bakery(lambda s: s.query(User))
+        bq += lambda q: q.filter(User.name == 'asdf')
+
+        assert_raises(
+            orm_exc.NoResultFound,
+            bq(Session()).one
+        )
+
+    def test_one_multiple_result(self):
+        User = self.classes.User
+
+        bq = self.bakery(lambda s: s.query(User))
+        bq += lambda q: q.filter(User.name.like('%ed%'))
+
+        assert_raises(
+            orm_exc.MultipleResultsFound,
+            bq(Session()).one
+        )
+
+    def test_get(self):
+        User = self.classes.User
+
+        bq = self.bakery(lambda s: s.query(User))
+
+        sess = Session()
+
+        def go():
+            u1 = bq(sess).get(7)
+            eq_(u1.name, 'jack')
+        self.assert_sql_count(testing.db, go, 1)
+
+        u1 = sess.query(User).get(7)  # noqa
+
+        def go():
+            u2 = bq(sess).get(7)
+            eq_(u2.name, 'jack')
+        self.assert_sql_count(testing.db, go, 0)
+
+        def go():
+            u2 = bq(sess).get(8)
+            eq_(u2.name, 'ed')
+        self.assert_sql_count(testing.db, go, 1)
+
+    def test_get_pk_w_null(self):
+        """Test the re-implemented get() logic using an IS NULL criterion."""
+
+        class AddressUser(object):
+            pass
+        mapper(
+            AddressUser,
+            self.tables.users.outerjoin(self.tables.addresses),
+            properties={
+                "id": self.tables.users.c.id,
+                "address_id": self.tables.addresses.c.id
+            }
+        )
+
+        bq = self.bakery(lambda s: s.query(AddressUser))
+
+        sess = Session()
+
+        def go():
+            u1 = bq(sess).get((10, None))
+            eq_(u1.name, 'chuck')
+        self.assert_sql_count(testing.db, go, 1)
+
+        u1 = sess.query(AddressUser).get((10, None))  # noqa
+
+        def go():
+            u2 = bq(sess).get((10, None))
+            eq_(u2.name, 'chuck')
+        self.assert_sql_count(testing.db, go, 0)
+
+
+class ResultTest(BakedTest):
+    __backend__ = True
+
+    @classmethod
+    def setup_mappers(cls):
+        User = cls.classes.User
+        Address = cls.classes.Address
+
+        mapper(User, cls.tables.users, properties={
+            "addresses": relationship(
+                Address, order_by=cls.tables.addresses.c.id)
+        })
+        mapper(Address, cls.tables.addresses)
+
+    def test_no_steps(self):
+        User = self.classes.User
+
+        bq = self.bakery(
+            lambda s: s.query(User.id, User.name).order_by(User.id))
+
+        for i in range(3):
+            session = Session()
+            eq_(
+                bq(session).all(),
+                [(7, 'jack'), (8, 'ed'), (9, 'fred'), (10, 'chuck')]
+            )
+
+    def test_different_limits(self):
+        User = self.classes.User
+
+        bq = self.bakery(
+            lambda s: s.query(User.id, User.name).order_by(User.id))
+
+        bq += lambda q: q.limit(bindparam('limit')).offset(bindparam('offset'))
+        session = Session()
+
+        for i in range(4):
+            for limit, offset, exp in [
+                (2, 1, [(8, 'ed'), (9, 'fred')]),
+                (3, 0, [(7, 'jack'), (8, 'ed'), (9, 'fred')]),
+                (1, 2, [(9, 'fred')])
+            ]:
+                eq_(
+                    bq(session).params(limit=limit, offset=offset).all(),
+                    exp
+                )
+
+    def test_spoiled_full_w_params(self):
+        User = self.classes.User
+
+        canary = mock.Mock()
+
+        def fn1(s):
+            canary.fn1()
+            return s.query(User.id, User.name).order_by(User.id)
+
+        def fn2(q):
+            canary.fn2()
+            return q.filter(User.id == bindparam('id'))
+
+        def fn3(q):
+            canary.fn3()
+            return q
+
+        for x in range(3):
+            bq = self.bakery(fn1)
+
+            bq += fn2
+
+            sess = Session()
+            eq_(
+                bq.spoil(full=True).add_criteria(fn3)(sess).params(id=7).all(),
+                [(7, 'jack')]
+            )
+
+        eq_(
+            canary.mock_calls,
+            [mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
+             mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
+             mock.call.fn1(), mock.call.fn2(), mock.call.fn3()]
+        )
+
+    def test_spoiled_half_w_params(self):
+        """A plain ``spoil()`` only re-runs steps added after the spoil.
+
+        fn1/fn2 form the baked (cached) portion and run a single time,
+        while fn3 is added post-spoil and runs on every iteration.
+        """
+        User = self.classes.User
+
+        # canary records which builder callables actually execute
+        canary = mock.Mock()
+
+        def fn1(s):
+            canary.fn1()
+            return s.query(User.id, User.name).order_by(User.id)
+
+        def fn2(q):
+            canary.fn2()
+            return q.filter(User.id == bindparam('id'))
+
+        def fn3(q):
+            canary.fn3()
+            return q
+
+        # NOTE(review): this bq is immediately rebuilt on every pass of
+        # the loop below; these two statements appear redundant - confirm
+        bq = self.bakery(fn1)
+
+        bq += fn2
+
+        for x in range(3):
+            bq = self.bakery(fn1)
+
+            bq += fn2
+
+            sess = Session()
+            eq_(
+                bq.spoil().add_criteria(fn3)(sess).params(id=7).all(),
+                [(7, 'jack')]
+            )
+
+        eq_(
+            canary.mock_calls,
+            # fn1/fn2 ran once (cached thereafter); fn3 ran all three times
+            [mock.call.fn1(), mock.call.fn2(),
+             mock.call.fn3(), mock.call.fn3(), mock.call.fn3()]
+        )
+
+    def test_w_new_entities(self):
+        """Test that the query can have its entities modified in
+        an arbitrary callable, and that this new entity list is preserved
+        when the query is invoked.
+
+        """
+        User = self.classes.User
+
+        bq = self.bakery(
+            lambda s: s.query(User.id, User.name))
+
+        # replace the entity list entirely with a COUNT over a subquery
+        bq += lambda q: q.from_self().with_entities(
+            func.count(User.id))
+
+        # run repeatedly so later passes exercise the cached form
+        for i in range(3):
+            session = Session()
+            eq_(
+                bq(session).all(),
+                [(4, )]
+            )
+
+    def test_conditional_step(self):
+        """Test a large series of conditionals and assert that
+        results remain correct between all of them within a series
+        of loops.
+
+        """
+        User = self.classes.User
+
+        base_bq = self.bakery(
+            lambda s: s.query(User.id, User.name))
+
+        base_bq += lambda q: q.order_by(User.id)
+
+        # outer loop repeats the whole matrix so that later passes hit
+        # the cache entries populated by earlier ones
+        for i in range(4):
+            # all 16 combinations of the four boolean conditions
+            for cond1, cond2, cond3, cond4 in itertools.product(
+                    *[(False, True) for j in range(4)]):
+                bq = base_bq._clone()
+                if cond1:
+                    bq += lambda q: q.filter(User.name != 'jack')
+                    if cond2:
+                        bq += lambda q: q.join(User.addresses)
+                    else:
+                        bq += lambda q: q.outerjoin(User.addresses)
+                elif cond3:
+                    bq += lambda q: q.filter(User.name.like('%ed%'))
+                else:
+                    bq += lambda q: q.filter(User.name == 'jack')
+
+                if cond4:
+                    # collapse the result to a single COUNT row
+                    bq += lambda q: q.from_self().with_entities(
+                        func.count(User.id))
+                sess = Session()
+                result = bq(sess).all()
+                # expected results mirror the same condition tree above
+                if cond4:
+                    if cond1:
+                        if cond2:
+                            eq_(result, [(4,)])
+                        else:
+                            eq_(result, [(5,)])
+                    elif cond3:
+                        eq_(result, [(2,)])
+                    else:
+                        eq_(result, [(1,)])
+                else:
+                    if cond1:
+                        if cond2:
+                            eq_(
+                                result,
+                                [(8, 'ed'), (8, 'ed'), (8, 'ed'),
+                                 (9, 'fred')]
+                            )
+                        else:
+                            eq_(
+                                result,
+                                [(8, 'ed'), (8, 'ed'), (8, 'ed'),
+                                 (9, 'fred'), (10, 'chuck')]
+                            )
+                    elif cond3:
+                        eq_(result, [(8, 'ed'), (9, 'fred')])
+                    else:
+                        eq_(result, [(7, 'jack')])
+
+                sess.close()
+
+    def test_conditional_step_oneline(self):
+        """Two different lambdas written on the same source line must
+        produce distinct cache entries.
+
+        """
+        User = self.classes.User
+
+        base_bq = self.bakery(
+            lambda s: s.query(User.id, User.name))
+
+        base_bq += lambda q: q.order_by(User.id)
+
+        for i in range(4):
+            for cond1 in (False, True):
+                bq = base_bq._clone()
+
+                # we were using (filename, firstlineno) as cache key,
+                # which fails for this kind of thing!
+                bq += (lambda q: q.filter(User.name != 'jack')) if cond1 else (lambda q: q.filter(User.name == 'jack'))  # noqa
+                sess = Session()
+                result = bq(sess).all()
+
+                if cond1:
+                    eq_(result, [(8, u'ed'), (9, u'fred'), (10, u'chuck')])
+                else:
+                    eq_(result, [(7, 'jack')])
+
+                sess.close()
+
+    def test_subquery_eagerloading(self):
+        """subqueryload() works with baked queries across varying
+        filter/join criteria, emitting the expected statement counts.
+
+        """
+        User = self.classes.User
+        Address = self.classes.Address
+
+        base_bq = self.bakery(
+            lambda s: s.query(User))
+
+        base_bq += lambda q: q.options(subqueryload(User.addresses))
+        base_bq += lambda q: q.order_by(User.id)
+
+        assert_result = [
+            User(id=7, addresses=[
+                Address(id=1, email_address='jack@bean.com')]),
+            User(id=8, addresses=[
+                Address(id=2, email_address='ed@wood.com'),
+                Address(id=3, email_address='ed@bettyboop.com'),
+                Address(id=4, email_address='ed@lala.com'),
+            ]),
+            User(id=9, addresses=[
+                Address(id=5)
+            ]),
+            User(id=10, addresses=[])
+        ]
+
+        # outer loop re-runs the matrix so cached entries get exercised
+        for i in range(4):
+            for cond1, cond2 in itertools.product(
+                    *[(False, True) for j in range(2)]):
+                bq = base_bq._clone()
+
+                sess = Session()
+
+                if cond1:
+                    bq += lambda q: q.filter(User.name == 'jack')
+                else:
+                    bq += lambda q: q.filter(User.name.like('%ed%'))
+
+                if cond2:
+                    # restrict to users having more than two addresses
+                    ct = func.count(Address.id).label('count')
+                    subq = sess.query(
+                        ct,
+                        Address.user_id).group_by(Address.user_id).\
+                        having(ct > 2).subquery()
+
+                    bq += lambda q: q.join(subq)
+
+                # assert_sql_count pins the total statements emitted;
+                # an empty result skips the eager-load query (count of 1)
+                if cond2:
+                    if cond1:
+                        def go():
+                            result = bq(sess).all()
+                            eq_([], result)
+                        self.assert_sql_count(testing.db, go, 1)
+                    else:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[1:2], result)
+                        self.assert_sql_count(testing.db, go, 2)
+                else:
+                    if cond1:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[0:1], result)
+                        self.assert_sql_count(testing.db, go, 2)
+                    else:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[1:3], result)
+                        self.assert_sql_count(testing.db, go, 2)
+
+                sess.close()
+
+
+class LazyLoaderTest(BakedTest):
+    """Tests for the baked lazy-load strategy (BakedLazyLoader), both as
+    a per-query option and as a mapper-level ``lazy='baked_select'``
+    setting."""
+
+    run_setup_mappers = 'each'
+
+    def _o2m_fixture(self, lazy="select"):
+        # one-to-many fixture: User.addresses with a configurable
+        # lazy-loading strategy
+        User = self.classes.User
+        Address = self.classes.Address
+
+        mapper(User, self.tables.users, properties={
+            'addresses': relationship(
+                Address, order_by=self.tables.addresses.c.id,
+                lazy=lazy)
+        })
+        mapper(Address, self.tables.addresses)
+        return User, Address
+
+    def _m2o_fixture(self):
+        # many-to-one fixture: Address.user with the default strategy
+        User = self.classes.User
+        Address = self.classes.Address
+
+        mapper(User, self.tables.users)
+        mapper(Address, self.tables.addresses, properties={
+            'user': relationship(User)
+        })
+        return User, Address
+
+    def test_strategy_lookup(self):
+        """test that the lazy loader strategies aren't getting mixed up
+        with BakedLazyLoader as a subclass.
+
+        """
+        User, Address = self._o2m_fixture()
+
+        # 'select' and True resolve to the plain (non-baked) lazy loader
+        ll = User.addresses.property._get_strategy((('lazy', 'select'),))
+        assert not isinstance(ll, BakedLazyLoader)
+        eq_(ll._strategy_keys, [(('lazy', 'select'),), (('lazy', True),)])
+
+        ll = User.addresses.property._get_strategy((('lazy', True),))
+        assert not isinstance(ll, BakedLazyLoader)
+        eq_(ll._strategy_keys, [(('lazy', 'select'),), (('lazy', True),)])
+
+        # only 'baked_select' resolves to the baked variant
+        bl = User.addresses.property._get_strategy((('lazy', 'baked_select'),))
+        assert isinstance(bl, BakedLazyLoader)
+        eq_(bl._strategy_keys, [(('lazy', 'baked_select'),)])
+
+    def test_invocation_per_state(self):
+        """test that BakedLazyLoader is getting invoked with the
+        baked_lazyload() loader.
+
+        """
+        User, Address = self._o2m_fixture()
+
+        sess = Session()
+        q = sess.query(User)
+
+        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+            u1 = q.first()
+            u1.addresses
+            # not invoked
+            eq_(el.mock_calls, [])
+
+        sess = Session()
+        q = sess.query(User).options(baked_lazyload(User.addresses))
+        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+            u1 = q.first()
+            u1.addresses
+            # invoked
+            # mock_calls[0][1][1] is the second positional argument of
+            # the first recorded call, i.e. the InstanceState
+            is_(
+                el.mock_calls[0][1][1],
+                u1._sa_instance_state
+            )
+
+    def test_invocation_per_mapper(self):
+        """test that BakedLazyLoader is getting invoked with the
+        "baked_select" lazy setting.
+
+        """
+        User, Address = self._o2m_fixture(lazy="baked_select")
+
+        sess = Session()
+        # explicit lazyload() option overrides the mapper-level setting
+        q = sess.query(User).options(lazyload(User.addresses))
+
+        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+            u1 = q.first()
+            u1.addresses
+            # not invoked
+            eq_(el.mock_calls, [])
+
+        sess = Session()
+        q = sess.query(User)
+        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+            u1 = q.first()
+            u1.addresses
+            # invoked
+            # second positional argument of the first call: InstanceState
+            is_(
+                el.mock_calls[0][1][1],
+                u1._sa_instance_state
+            )
+
+    def test_invocation_systemwide_loaders(self):
+        # bake_lazy_loaders() switches lazy loading to the baked form
+        # globally; unbake_lazy_loaders() must fully restore the default
+        baked.bake_lazy_loaders()
+        try:
+            User, Address = self._o2m_fixture()
+
+            sess = Session()
+            q = sess.query(User).options(lazyload(User.addresses))
+            with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+                u1 = q.first()
+                u1.addresses
+                # invoked
+                is_(
+                    el.mock_calls[0][1][1],
+                    u1._sa_instance_state
+                )
+        finally:
+            baked.unbake_lazy_loaders()
+
+        # after unbaking, remapped classes use the plain lazy loader again
+        clear_mappers()
+        User, Address = self._o2m_fixture()
+        sess = Session()
+        q = sess.query(User).options(lazyload(User.addresses))
+
+        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+            u1 = q.first()
+            u1.addresses
+            # not invoked
+            eq_(el.mock_calls, [])
+
+    def test_baked_lazy_loading_option_o2m(self):
+        # baked lazy loading enabled via the baked_lazyload() option
+        User, Address = self._o2m_fixture()
+        self._test_baked_lazy_loading(set_option=True)
+
+    def test_baked_lazy_loading_mapped_o2m(self):
+        # baked lazy loading enabled via mapper-level lazy='baked_select'
+        User, Address = self._o2m_fixture(lazy="baked_select")
+        self._test_baked_lazy_loading(set_option=False)
+
+    def _test_baked_lazy_loading(self, set_option):
+        # shared body for the two o2m tests above; asserts both results
+        # and emitted statement counts across a criteria matrix
+        User, Address = self.classes.User, self.classes.Address
+
+        base_bq = self.bakery(
+            lambda s: s.query(User))
+
+        if set_option:
+            base_bq += lambda q: q.options(baked_lazyload(User.addresses))
+
+        base_bq += lambda q: q.order_by(User.id)
+
+        assert_result = self.static.user_address_result
+
+        for i in range(4):
+            for cond1, cond2 in itertools.product(
+                    *[(False, True) for j in range(2)]):
+                bq = base_bq._clone()
+
+                sess = Session()
+
+                if cond1:
+                    bq += lambda q: q.filter(User.name == 'jack')
+                else:
+                    bq += lambda q: q.filter(User.name.like('%ed%'))
+
+                if cond2:
+                    # restrict to users having more than two addresses
+                    ct = func.count(Address.id).label('count')
+                    subq = sess.query(
+                        ct,
+                        Address.user_id).group_by(Address.user_id).\
+                        having(ct > 2).subquery()
+
+                    bq += lambda q: q.join(subq)
+
+                # assert_sql_count pins the total statements emitted,
+                # including the per-parent lazy loads
+                if cond2:
+                    if cond1:
+                        def go():
+                            result = bq(sess).all()
+                            eq_([], result)
+                        self.assert_sql_count(testing.db, go, 1)
+                    else:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[1:2], result)
+                        self.assert_sql_count(testing.db, go, 2)
+                else:
+                    if cond1:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[0:1], result)
+                        self.assert_sql_count(testing.db, go, 2)
+                    else:
+                        def go():
+                            result = bq(sess).all()
+                            eq_(assert_result[1:3], result)
+                        self.assert_sql_count(testing.db, go, 3)
+
+                sess.close()
+
+    def test_baked_lazy_loading_m2o(self):
+        # many-to-one baked lazy load via the baked_lazyload() option
+        User, Address = self._m2o_fixture()
+
+        base_bq = self.bakery(
+            lambda s: s.query(Address))
+
+        base_bq += lambda q: q.options(baked_lazyload(Address.user))
+        base_bq += lambda q: q.order_by(Address.id)
+
+        assert_result = self.static.address_user_result
+
+        for i in range(4):
+            for cond1 in (False, True):
+                bq = base_bq._clone()
+
+                sess = Session()
+
+                if cond1:
+                    bq += lambda q: q.filter(
+                        Address.email_address == 'jack@bean.com')
+                else:
+                    bq += lambda q: q.filter(
+                        Address.email_address.like('ed@%'))
+
+                if cond1:
+                    def go():
+                        result = bq(sess).all()
+                        eq_(assert_result[0:1], result)
+                    self.assert_sql_count(testing.db, go, 2)
+                else:
+                    def go():
+                        result = bq(sess).all()
+                        eq_(assert_result[1:4], result)
+                    self.assert_sql_count(testing.db, go, 2)
+
+                sess.close()
+
+    # additional tests:
+    # 1. m2m w lazyload
+    # 2. o2m lazyload where m2o backrefs have an eager load, test
+    # that eager load is canceled out
+    # 3. uselist = False, uselist=False assertion
+
index 0f6e522d4364c102493de7d97133c1c655945151..4a2b8993e36ef83e1d705f9c0cf339dd453d17dd 100644 (file)
@@ -294,6 +294,7 @@ class FixtureTest(fixtures.MappedTest):
     def static(self):
         return CannedResults(self)
 
+
 class CannedResults(object):
     """Built on demand, instances use mappers in effect at time of call."""
 
@@ -328,6 +329,20 @@ class CannedResults(object):
             ]),
             User(id=10, addresses=[])]
 
+    @property
+    def address_user_result(self):
+        User, Address = self.test.classes.User, self.test.classes.Address
+        u7 = User(id=7)
+        u8 = User(id=8)
+        u9 = User(id=9)
+        return [
+            Address(id=1, email_address='jack@bean.com', user=u7),
+            Address(id=2, email_address='ed@wood.com', user=u8),
+            Address(id=3, email_address='ed@bettyboop.com', user=u8),
+            Address(id=4, email_address='ed@lala.com', user=u8),
+            Address(id=5, user=u9)
+        ]
+
     @property
     def user_all_result(self):
         User, Address, Order, Item = self.test.classes.User, \
index 633315f24436e94f80effb8287849dfd5827ebea..27fcd4b354172020b3b4fe085ea9206889ca5419 100644 (file)
@@ -62,7 +62,7 @@ Base.metadata.create_all(engine)
 
 sess = Session(engine)
 
-def runit(status, factor=1):
+def runit(status, factor=1, query_runs=5):
     num_bosses = 100 * factor
     num_grunts = num_bosses * 100
 
@@ -104,7 +104,7 @@ def runit(status, factor=1):
     status("Associated grunts w/ bosses and committed")
 
     # do some heavier reading
-    for i in range(int(round(factor / 2.0))):
+    for i in range(query_runs):
         status("Heavy query run #%d" % (i + 1))
 
         report = []