Turn on caching everywhere, add logging
author    Mike Bayer <mike_mp@zzzcomputing.com>
          Sun, 7 Jun 2020 00:40:43 +0000 (20:40 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Wed, 10 Jun 2020 19:29:01 +0000 (15:29 -0400)
This fixes a variety of caching issues found by running
all tests with statement caching turned on.

The cache system now takes a more conservative approach:
any subclass of a SQL element will by default invalidate
the cache key unless it adds the flag inherit_cache=True
at the class level, or implements its own caching.
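
As a sketch, a user-defined SQL element opts back in to caching
roughly like this (hypothetical class name):

    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        # re-use ColumnClause's cache key generation; without this
        # flag, any statement containing this element is excluded
        # from the cache
        inherit_cache = True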

Add working caching to a few elements that were
previously omitted; fix some caching implementations
to suit lesser-used edge cases such as JSON casts
and array slices.

Refine the way BaseCursorResult and CursorMetaData
interact with caching; to suit cases like Alembic
modifying table structures, don't cache the cursor
metadata if it was created against a cursor.description
using non-positional matching, e.g. "select *".  If a
table re-orders, adds, or removes columns, that data
is now obsolete.
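
An illustrative sketch of the unsafe case (hypothetical table),
where column identity comes from cursor.description rather than
from the compiled statement:

    result = conn.execute(text("SELECT * FROM users"))
    # _keymap here is built by matching cursor.description names
    # at execution time; if "users" later gains, drops, or
    # re-orders columns, a cached keymap would be stale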

Additionally, we have to adapt the cursor metadata
_keymap regardless of whether we just processed
cursor.description, because if we ran against a cached
SQLCompiler we won't have the right columns in _keymap.

Other refinements to how and when we do this adaptation,
as some weird cases were exposed in the PostgreSQL
dialect, including a text() construct that names just one
column that is not actually in the statement.  Fixed that
as well, since it looks like a cut-and-paste artifact that
doesn't actually affect anything.

Fixed various issues with re-use of compiled result maps
and cursor metadata in conjunction with tables being
changed, such as a change in the order of columns.

Mappers can be cleared while the class remains, meaning
a mapper has to use itself as the cache key, not the class.

Fixed lots of bound parameter / literal issues, due to
Alembic creating a straight subclass of bindparam that
renders inline directly.  While we can update Alembic to
not do this, we have to assume other people might be doing
the same, so bindparam() now implements the
inherit_cache=True logic as well, which was a bit involved.
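
A rough sketch of the kind of subclass in question (hypothetical
name, patterned on what Alembic does):

    from sqlalchemy.sql.elements import BindParameter

    class InlineBindParam(BindParameter):
        """A straight subclass whose compiler renders it inline."""

    # under the conservative scheme, a statement containing such a
    # subclass is not cached unless the subclass opts in with
    # inherit_cache = True; bindparam()'s own cache-key logic now
    # participates in that same scheme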

Turned on cache stats in logging.
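
With echo=True, the parameter line of each logged statement is now
prefixed with its cache statistics; illustrative output (timing
made up):

    INSERT INTO users (name, fullname) VALUES (?, ?)
    [generated in 0.00021s] ('jack', 'Jack Jones')

The "[...]" placeholders added throughout the tutorial doctests
below stand in for this prefix.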

Includes a fix to subqueryloader which moves all setup to
the create_row_processor() phase and eliminates any storage
within the compiled context.  This includes some changes to
the create_row_processor() signature and a revision of the
technique used to determine if the loader can participate
in polymorphic queries, which is also applied to
selectin loading.

DML update.values() and ordered_values() now coerce the
keys, as we have tests that pass an arbitrary class here
which only implements __clause_element__(), so the key
can't be cached unless it is coerced.  This in turn changed
how composite attributes support bulk UPDATE, to use the
standard approach of ClauseElement with annotations that
are parsed in the ORM context.
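
A sketch of the pattern those tests exercise (hypothetical class):

    class NameKey:
        # only __clause_element__() is provided; update.values()
        # now coerces this key to the underlying Column so the
        # statement can produce a stable cache key
        def __clause_element__(self):
            return users.c.name

    stmt = users.update().values({NameKey(): "new name"})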

Memory profiling successfully caught that the Session
from Query was getting passed into _statement_20(),
so that was a big win for that test suite.

Apparently Compiler had .execute() and .scalar() methods
stuck on it; these date back to version 0.4, and there was
a single test in the PostgreSQL dialect tests that
exercised them for no apparent reason.  Removed these
methods, as well as the concept of a Compiler holding onto
a "bind".
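
The removed pattern looked roughly like this 0.4-era usage
(illustrative):

    compiled = stmt.compile(bind=engine)  # Compiler kept a "bind"
    result = compiled.execute()           # removed in this commit

    # the ordinary spelling:
    with engine.connect() as conn:
        result = conn.execute(stmt)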

Fixes: #5386
Change-Id: I990b43aab96b42665af1b2187ad6020bee778784

55 files changed:
doc/build/changelog/unreleased_14/bind_removed_from_compiler.rst [new file with mode: 0644]
doc/build/core/tutorial.rst
doc/build/orm/tutorial.rst
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/create.py
lib/sqlalchemy/engine/cursor.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/future/selectable.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/context.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/path_registry.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/sql/__init__.py
lib/sqlalchemy/sql/annotation.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/dml.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/functions.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/sql/traversals.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/sql/visitors.py
lib/sqlalchemy/testing/assertsql.py
test/aaa_profiling/test_memusage.py
test/aaa_profiling/test_misc.py
test/aaa_profiling/test_orm.py
test/aaa_profiling/test_resultset.py
test/dialect/postgresql/test_query.py
test/engine/test_execute.py
test/engine/test_logging.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_single.py
test/orm/test_cache_key.py
test/orm/test_subquery_relations.py
test/orm/test_update_delete.py
test/perf/orm2010.py
test/sql/test_compare.py
test/sql/test_compiler.py
test/sql/test_operators.py
test/sql/test_update.py

diff --git a/doc/build/changelog/unreleased_14/bind_removed_from_compiler.rst b/doc/build/changelog/unreleased_14/bind_removed_from_compiler.rst
new file mode 100644 (file)
index 0000000..77c02f5
--- /dev/null
@@ -0,0 +1,8 @@
+.. change::
+    :tags: change, engine
+
+    Removed the concept of a bound engine from the :class:`.Compiler` object,
+    and removed the ``.execute()`` and ``.scalar()`` methods from :class:`.Compiler`.
+    These were essentially forgotten methods from over a decade ago and had no
+    practical use, and it's not appropriate for the :class:`.Compiler` object
+    itself to be maintaining a reference to an :class:`.Engine`.
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index a504f85785b61d1506bf0229fd5eb3fb05f9d76e..7db84c7d63b325a00297d22b784a5c81fc2713de 100644 (file)
@@ -157,7 +157,7 @@ each table first before creating, so it's safe to call multiple times:
         fullname VARCHAR,
         PRIMARY KEY (id)
     )
-    ()
+    [...] ()
     COMMIT
     CREATE TABLE addresses (
         id INTEGER NOT NULL,
@@ -166,7 +166,7 @@ each table first before creating, so it's safe to call multiple times:
         PRIMARY KEY (id),
         FOREIGN KEY(user_id) REFERENCES users (id)
     )
-    ()
+    [...] ()
     COMMIT
 
 .. note::
@@ -263,7 +263,7 @@ checked out DBAPI connection resource. Lets feed it our
 
     >>> result = conn.execute(ins)
     {opensql}INSERT INTO users (name, fullname) VALUES (?, ?)
-    ('jack', 'Jack Jones')
+    [...] ('jack', 'Jack Jones')
     COMMIT
 
 So the INSERT statement was now issued to the database. Although we got
@@ -325,7 +325,7 @@ and use it in the "normal" way:
     >>> ins = users.insert()
     >>> conn.execute(ins, id=2, name='wendy', fullname='Wendy Williams')
     {opensql}INSERT INTO users (id, name, fullname) VALUES (?, ?, ?)
-    (2, 'wendy', 'Wendy Williams')
+    [...] (2, 'wendy', 'Wendy Williams')
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -349,7 +349,7 @@ inserted, as we do here to add some email addresses:
     ...    {'user_id': 2, 'email_address' : 'wendy@aol.com'},
     ... ])
     {opensql}INSERT INTO addresses (user_id, email_address) VALUES (?, ?)
-    ((1, 'jack@yahoo.com'), (1, 'jack@msn.com'), (2, 'www@www.org'), (2, 'wendy@aol.com'))
+    [...] ((1, 'jack@yahoo.com'), (1, 'jack@msn.com'), (2, 'www@www.org'), (2, 'wendy@aol.com'))
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -384,7 +384,7 @@ statements is the :func:`_expression.select` function:
     >>> result = conn.execute(s)
     {opensql}SELECT users.id, users.name, users.fullname
     FROM users
-    ()
+    [...] ()
 
 Above, we issued a basic :func:`_expression.select` call, placing the ``users`` table
 within the COLUMNS clause of the select, and then executing. SQLAlchemy
@@ -414,7 +414,7 @@ of these tuples as rows are fetched is through tuple assignment:
     {sql}>>> result = conn.execute(s)
     SELECT users.id, users.name, users.fullname
     FROM users
-    ()
+    [...] ()
 
     {stop}>>> for id, name, fullname in result:
     ...     print("name:", name, "; fullname: ", fullname)
@@ -430,7 +430,7 @@ access:
     {sql}>>> result = conn.execute(s)
     SELECT users.id, users.name, users.fullname
     FROM users
-    ()
+    [...] ()
 
     {stop}>>> for row in result:
     ...     print("name:", row.name, "; fullname: ", row.fullname)
@@ -446,7 +446,7 @@ progammatically generated, or contains non-ascii characters, the
     {sql}>>> result = conn.execute(s)
     SELECT users.id, users.name, users.fullname
     FROM users
-    ()
+    [...] ()
 
     {stop}>>> row = result.fetchone()
     >>> print("name:", row._mapping['name'], "; fullname:", row._mapping['fullname'])
@@ -490,7 +490,7 @@ collection:
     ...     print("name:", row._mapping[users.c.name], "; fullname:", row._mapping[users.c.fullname])
     SELECT users.id, users.name, users.fullname
     FROM users
-    ()
+    [...] ()
     {stop}name: jack ; fullname: Jack Jones
     name: wendy ; fullname: Wendy Williams
 
@@ -528,7 +528,7 @@ the ``c`` attribute of the :class:`~sqlalchemy.schema.Table` object:
     {sql}>>> result = conn.execute(s)
     SELECT users.name, users.fullname
     FROM users
-    ()
+    [...] ()
     {stop}>>> for row in result:
     ...     print(row)
     (u'jack', u'Jack Jones')
@@ -546,7 +546,7 @@ our :func:`_expression.select` statement:
     ...     print(row)
     SELECT users.id, users.name, users.fullname, addresses.id, addresses.user_id, addresses.email_address
     FROM users, addresses
-    ()
+    [...] ()
     {stop}(1, u'jack', u'Jack Jones', 1, 1, u'jack@yahoo.com')
     (1, u'jack', u'Jack Jones', 2, 1, u'jack@msn.com')
     (1, u'jack', u'Jack Jones', 3, 2, u'www@www.org')
@@ -571,7 +571,7 @@ WHERE clause.  We do that using :meth:`_expression.Select.where`:
        addresses.user_id, addresses.email_address
     FROM users, addresses
     WHERE users.id = addresses.user_id
-    ()
+    [...] ()
     {stop}(1, u'jack', u'Jack Jones', 1, 1, u'jack@yahoo.com')
     (1, u'jack', u'Jack Jones', 2, 1, u'jack@msn.com')
     (2, u'wendy', u'Wendy Williams', 3, 2, u'www@www.org')
@@ -801,7 +801,7 @@ not have a name:
     FROM users, addresses
     WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
     (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
-    (', ', 'm', 'z', '%@aol.com', '%@msn.com')
+    [...] (', ', 'm', 'z', '%@aol.com', '%@msn.com')
     {stop}[(u'Wendy Williams, wendy@aol.com',)]
 
 Once again, SQLAlchemy figured out the FROM clause for our statement. In fact
@@ -830,7 +830,7 @@ A shortcut to using :func:`.and_` is to chain together multiple
     FROM users, addresses
     WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
     (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
-    (', ', 'm', 'z', '%@aol.com', '%@msn.com')
+    [...] (', ', 'm', 'z', '%@aol.com', '%@msn.com')
     {stop}[(u'Wendy Williams, wendy@aol.com',)]
 
 The way that we can build up a :func:`_expression.select` construct through successive
@@ -865,7 +865,7 @@ unchanged.  Below, we create a :func:`_expression.text` object and execute it:
     FROM users, addresses
     WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
     (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
-    ('m', 'z', '%@aol.com', '%@msn.com')
+    [...] ('m', 'z', '%@aol.com', '%@msn.com')
     {stop}[(u'Wendy Williams, wendy@aol.com',)]
 
 Above, we can see that bound parameters are specified in
@@ -944,7 +944,7 @@ result column names in the textual SQL:
     {opensql}SELECT users.id, addresses.id, users.id, users.name,
         addresses.email_address AS email
     FROM users JOIN addresses ON users.id=addresses.user_id WHERE users.id = 1
-    ()
+    [...] ()
     {stop}
 
 Above, there's three columns in the result that are named "id", but since
@@ -1020,7 +1020,7 @@ need to refer to any pre-established :class:`_schema.Table` metadata:
     FROM users, addresses
     WHERE users.id = addresses.user_id AND users.name BETWEEN 'm' AND 'z'
     AND (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
-    ('%@aol.com', '%@msn.com')
+    [...] ('%@aol.com', '%@msn.com')
     {stop}[(u'Wendy Williams, wendy@aol.com',)]
 
 .. versionchanged:: 1.0.0
@@ -1074,7 +1074,7 @@ be quoted:
     WHERE users.id = addresses.user_id
     AND users.name BETWEEN 'm' AND 'z'
     AND (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
-    (', ', '%@aol.com', '%@msn.com')
+    [...] (', ', '%@aol.com', '%@msn.com')
     {stop}[(u'Wendy Williams, wendy@aol.com',)]
 
 Ordering or Grouping by a Label
@@ -1101,7 +1101,7 @@ are rendered fully:
     {sql}>>> conn.execute(stmt).fetchall()
     SELECT addresses.user_id, count(addresses.id) AS num_addresses
     FROM addresses GROUP BY addresses.user_id ORDER BY addresses.user_id, num_addresses
-    ()
+    [...] ()
     {stop}[(1, 2), (2, 2)]
 
 We can use modifiers like :func:`.asc` or :func:`.desc` by passing the string
@@ -1118,7 +1118,7 @@ name:
     {sql}>>> conn.execute(stmt).fetchall()
     SELECT addresses.user_id, count(addresses.id) AS num_addresses
     FROM addresses GROUP BY addresses.user_id ORDER BY addresses.user_id, num_addresses DESC
-    ()
+    [...] ()
     {stop}[(1, 2), (2, 2)]
 
 Note that the string feature here is very much tailored to when we have
@@ -1141,7 +1141,7 @@ by a column name that appears more than once:
     users_2.name, users_2.fullname
     FROM users AS users_1, users AS users_2
     WHERE users_1.name > users_2.name ORDER BY users_1.name
-    ()
+    [...] ()
     {stop}[(2, u'wendy', u'Wendy Williams', 1, u'jack', u'Jack Jones')]
 
 
@@ -1193,7 +1193,7 @@ once for each address.   We create two :class:`_expression.Alias` constructs aga
         AND users.id = addresses_2.user_id
         AND addresses_1.email_address = ?
         AND addresses_2.email_address = ?
-    ('jack@msn.com', 'jack@yahoo.com')
+    [...] ('jack@msn.com', 'jack@yahoo.com')
     {stop}[(1, u'jack', u'Jack Jones')]
 
 Note that the :class:`_expression.Alias` construct generated the names ``addresses_1`` and
@@ -1235,7 +1235,7 @@ by making :class:`.Subquery` of the entire statement:
             AND addresses_1.email_address = ?
             AND addresses_2.email_address = ?) AS anon_1
     WHERE users.id = anon_1.id
-    ('jack@msn.com', 'jack@yahoo.com')
+    [...] ('jack@msn.com', 'jack@yahoo.com')
     {stop}[(u'jack',)]
 
 .. versionchanged:: 1.4 Added the :class:`.Subquery` object and created more of a
@@ -1291,7 +1291,7 @@ here we make use of the :meth:`_expression.Select.select_from` method:
     {sql}>>> conn.execute(s).fetchall()
     SELECT users.fullname
     FROM users JOIN addresses ON addresses.email_address LIKE users.name || ?
-    ('%',)
+    [...] ('%',)
     {stop}[(u'Jack Jones',), (u'Jack Jones',), (u'Wendy Williams',)]
 
 The :meth:`_expression.FromClause.outerjoin` method creates ``LEFT OUTER JOIN`` constructs,
@@ -1350,7 +1350,7 @@ typically acquires using the :meth:`_expression.Select.cte` method on a
      SELECT addresses.id, addresses.user_id, addresses.email_address
     FROM addresses, anon_1
     WHERE addresses.user_id = anon_1.id ORDER BY addresses.id
-    ('wendy',)
+    [...] ('wendy',)
     {stop}[(3, 2, 'www@www.org'), (4, 2, 'wendy@aol.com')]
 
 The CTE construct is a great way to provide a source of rows that is
@@ -1388,7 +1388,7 @@ this form looks like:
      SELECT addresses.id, addresses.user_id, addresses.email_address
     FROM addresses, anon_1
     WHERE addresses.user_id = anon_1.id ORDER BY addresses.id
-    ()
+    [...] ()
     {stop}[(1, 1, 'jack@yahoo.com'), (2, 1, 'jack@msn.com'), (3, 2, 'www@www.org'), (4, 2, 'wendy@aol.com')]
 
 
@@ -1421,7 +1421,7 @@ at execution time, as here where it converts to positional for SQLite:
     SELECT users.id, users.name, users.fullname
     FROM users
     WHERE users.name = ?
-    ('wendy',)
+    [...] ('wendy',)
     {stop}[(2, u'wendy', u'Wendy Williams')]
 
 Another important aspect of :func:`.bindparam` is that it may be assigned a
@@ -1436,7 +1436,7 @@ off to the database:
     SELECT users.id, users.name, users.fullname
     FROM users
     WHERE users.name LIKE ? || '%'
-    ('wendy',)
+    [...] ('wendy',)
     {stop}[(2, u'wendy', u'Wendy Williams')]
 
 
@@ -1462,7 +1462,7 @@ single named value is needed in the execute parameters:
     FROM users LEFT OUTER JOIN addresses ON users.id = addresses.user_id
     WHERE users.name LIKE ? || '%' OR addresses.email_address LIKE ? || '@%'
     ORDER BY addresses.id
-    ('jack', 'jack')
+    [...] ('jack', 'jack')
     {stop}[(1, u'jack', u'Jack Jones', 1, 1, u'jack@yahoo.com'), (1, u'jack', u'Jack Jones', 2, 1, u'jack@msn.com')]
 
 .. seealso::
@@ -1531,7 +1531,7 @@ not important in this case:
     ...     ).scalar()
     {opensql}SELECT max(addresses.email_address) AS maxemail
     FROM addresses
-    ()
+    [...] ()
     {stop}u'www@www.org'
 
 Databases such as PostgreSQL and Oracle which support functions that return
@@ -1648,7 +1648,7 @@ object as arguments:
     >>> conn.execute(s).fetchall()
     {opensql}SELECT CAST(users.id AS VARCHAR) AS id
     FROM users
-    ()
+    [...] ()
     {stop}[('1',), ('2',)]
 
 The :func:`.cast` function is used not just when converting between datatypes,
@@ -1724,7 +1724,7 @@ module level functions :func:`_expression.union` and
     SELECT addresses.id, addresses.user_id, addresses.email_address
     FROM addresses
     WHERE addresses.email_address LIKE ? ORDER BY email_address
-    ('foo@bar.com', '%@yahoo.com')
+    [...] ('foo@bar.com', '%@yahoo.com')
     {stop}[(1, 1, u'jack@yahoo.com')]
 
 Also available, though not supported on all databases, are
@@ -1750,7 +1750,7 @@ Also available, though not supported on all databases, are
     SELECT addresses.id, addresses.user_id, addresses.email_address
     FROM addresses
     WHERE addresses.email_address LIKE ?
-    ('%@%.com', '%@msn.com')
+    [...] ('%@%.com', '%@msn.com')
     {stop}[(1, 1, u'jack@yahoo.com'), (4, 2, u'wendy@aol.com')]
 
 A common issue with so-called "compound" selectables arises due to the fact
@@ -1788,7 +1788,7 @@ want the "union" to be stated as a subquery:
     SELECT addresses.id, addresses.user_id, addresses.email_address
     FROM addresses
     WHERE addresses.email_address LIKE ?
-    ('%@yahoo.com', '%@msn.com', '%@msn.com')
+    [...] ('%@yahoo.com', '%@msn.com', '%@msn.com')
     {stop}[(1, 1, u'jack@yahoo.com')]
 
 .. seealso::
@@ -1868,7 +1868,7 @@ other column within another :func:`_expression.select`:
     FROM addresses
     WHERE users.id = addresses.user_id) AS anon_1
     FROM users
-    ()
+    [...] ()
     {stop}[(u'jack', 2), (u'wendy', 2)]
 
 To apply a non-anonymous column name to our scalar select, we create
@@ -1884,7 +1884,7 @@ it using :meth:`_expression.SelectBase.label` instead:
     FROM addresses
     WHERE users.id = addresses.user_id) AS address_count
     FROM users
-    ()
+    [...] ()
     {stop}[(u'jack', 2), (u'wendy', 2)]
 
 .. seealso::
@@ -1918,7 +1918,7 @@ still have at least one FROM clause of its own.  For example:
         FROM addresses
         WHERE addresses.user_id = users.id
         AND addresses.email_address = ?)
-    ('jack@yahoo.com',)
+    [...] ('jack@yahoo.com',)
     {stop}[(u'jack',)]
 
 Auto-correlation will usually do what's expected, however it can also be controlled.
@@ -1943,7 +1943,7 @@ may be correlated:
      WHERE users.id = (SELECT users.id
      FROM users
      WHERE users.id = addresses.user_id AND users.name = ?)
-     ('jack',)
+     [...] ('jack',)
      {stop}[(u'jack', u'jack@yahoo.com'), (u'jack', u'jack@msn.com')]
 
 To entirely disable a statement from correlating, we can pass ``None``
@@ -1962,7 +1962,7 @@ as the argument:
      WHERE users.id = (SELECT users.id
       FROM users
       WHERE users.name = ?)
-    ('wendy',)
+    [...] ('wendy',)
     {stop}[(u'wendy',)]
 
 We can also control correlation via exclusion, using the :meth:`_expression.Select.correlate_except`
@@ -1985,7 +1985,7 @@ by telling it to correlate all FROM clauses except for ``users``:
      WHERE users.id = (SELECT users.id
      FROM users
      WHERE users.id = addresses.user_id AND users.name = ?)
-     ('jack',)
+     [...] ('jack',)
      {stop}[(u'jack', u'jack@yahoo.com'), (u'jack', u'jack@msn.com')]
 
 .. _lateral_selects:
@@ -2070,7 +2070,7 @@ Ordering is done by passing column expressions to the
     >>> conn.execute(stmt).fetchall()
     {opensql}SELECT users.name
     FROM users ORDER BY users.name
-    ()
+    [...] ()
     {stop}[(u'jack',), (u'wendy',)]
 
 Ascending or descending can be controlled using the :meth:`_expression.ColumnElement.asc`
@@ -2082,7 +2082,7 @@ and :meth:`_expression.ColumnElement.desc` modifiers:
     >>> conn.execute(stmt).fetchall()
     {opensql}SELECT users.name
     FROM users ORDER BY users.name DESC
-    ()
+    [...] ()
     {stop}[(u'wendy',), (u'jack',)]
 
 Grouping refers to the GROUP BY clause, and is usually used in conjunction
@@ -2099,7 +2099,7 @@ This is provided via the :meth:`_expression.SelectBase.group_by` method:
     FROM users JOIN addresses
         ON users.id = addresses.user_id
     GROUP BY users.name
-    ()
+    [...] ()
     {stop}[(u'jack', 2), (u'wendy', 2)]
 
 HAVING can be used to filter results on an aggregate value, after GROUP BY has
@@ -2118,7 +2118,7 @@ method:
         ON users.id = addresses.user_id
     GROUP BY users.name
     HAVING length(users.name) > ?
-    (4,)
+    [...] (4,)
     {stop}[(u'wendy', 2)]
 
 A common system of dealing with duplicates in composed SELECT statements
@@ -2135,7 +2135,7 @@ is the DISTINCT modifier.  A simple DISTINCT clause can be added using the
     {opensql}SELECT DISTINCT users.name
     FROM users, addresses
     WHERE (addresses.email_address LIKE '%' || users.name || '%')
-    ()
+    [...] ()
     {stop}[(u'jack',), (u'wendy',)]
 
 Most database backends support a system of limiting how many rows
@@ -2156,7 +2156,7 @@ into the current backend's methodology:
     {opensql}SELECT users.name, addresses.email_address
     FROM users JOIN addresses ON users.id = addresses.user_id
      LIMIT ? OFFSET ?
-    (1, 1)
+    [...] (1, 1)
     {stop}[(u'jack', u'jack@msn.com')]
 
 
@@ -2181,7 +2181,7 @@ as a value:
     ...             values(fullname="Fullname: " + users.c.name)
     >>> conn.execute(stmt)
     {opensql}UPDATE users SET fullname=(? || users.name)
-    ('Fullname: ',)
+    [...] ('Fullname: ',)
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2210,7 +2210,7 @@ as in the example below:
     ...        {'id':6, '_name':'name3'},
     ...     ])
     {opensql}INSERT INTO users (id, name) VALUES (?, (? || ?))
-    ((4, 'name1', ' .. name'), (5, 'name2', ' .. name'), (6, 'name3', ' .. name'))
+    [...] ((4, 'name1', ' .. name'), (5, 'name2', ' .. name'), (6, 'name3', ' .. name'))
     COMMIT
     <sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2226,7 +2226,7 @@ that can be specified:
 
     >>> conn.execute(stmt)
     {opensql}UPDATE users SET name=? WHERE users.name = ?
-    ('ed', 'jack')
+    [...] ('ed', 'jack')
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2246,7 +2246,7 @@ used to achieve this:
     ...     {'oldname':'jim', 'newname':'jake'},
     ...     ])
     {opensql}UPDATE users SET name=? WHERE users.name = ?
-    (('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim'))
+    [...] (('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim'))
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2268,7 +2268,7 @@ subquery using :meth:`_expression.Select.scalar_subquery`:
         FROM addresses
         WHERE addresses.user_id = users.id
         LIMIT ? OFFSET ?)
-    (1, 0)
+    [...] (1, 0)
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2385,13 +2385,13 @@ Finally, a delete.  This is accomplished easily enough using the
 
     >>> conn.execute(addresses.delete())
     {opensql}DELETE FROM addresses
-    ()
+    [...] ()
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
     >>> conn.execute(users.delete().where(users.c.name > 'm'))
     {opensql}DELETE FROM users WHERE users.name > ?
-    ('m',)
+    [...] ('m',)
     COMMIT
     {stop}<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x...>
 
@@ -2439,7 +2439,7 @@ The value is available as :attr:`_engine.CursorResult.rowcount`:
 
     >>> result = conn.execute(users.delete())
     {opensql}DELETE FROM users
-    ()
+    [...] ()
     COMMIT
     {stop}>>> result.rowcount
     1
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index c08daa7fd6fcef612e6b02b60b33fe3db61adf46..8ea8af4a27b8f62a481be44dc37499ad6dbec5cc 100644 (file)
@@ -210,16 +210,16 @@ the actual ``CREATE TABLE`` statement:
 
     >>> Base.metadata.create_all(engine)
     PRAGMA main.table_info("users")
-    ()
+    [...] ()
     PRAGMA temp.table_info("users")
-    ()
+    [...] ()
     CREATE TABLE users (
         id INTEGER NOT NULL, name VARCHAR,
         fullname VARCHAR,
         nickname VARCHAR,
         PRIMARY KEY (id)
     )
-    ()
+    [...] ()
     COMMIT
 
 .. topic:: Minimal Table Descriptions vs. Full Descriptions
@@ -373,7 +373,7 @@ added:
     {sql}>>> our_user = session.query(User).filter_by(name='ed').first() # doctest:+NORMALIZE_WHITESPACE
     BEGIN (implicit)
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('ed', 'Ed Jones', 'edsnickname')
+    [...] ('ed', 'Ed Jones', 'edsnickname')
     SELECT users.id AS users_id,
             users.name AS users_name,
             users.fullname AS users_fullname,
@@ -381,7 +381,7 @@ added:
     FROM users
     WHERE users.name = ?
      LIMIT ? OFFSET ?
-    ('ed', 1, 0)
+    [...] ('ed', 1, 0)
     {stop}>>> our_user
     <User(name='ed', fullname='Ed Jones', nickname='edsnickname')>
 
@@ -448,13 +448,13 @@ three new ``User`` objects we've added:
 
     {sql}>>> session.commit()
     UPDATE users SET nickname=? WHERE users.id = ?
-    ('eddie', 1)
+    [...] ('eddie', 1)
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('wendy', 'Wendy Williams', 'windy')
+    [...] ('wendy', 'Wendy Williams', 'windy')
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('mary', 'Mary Contrary', 'mary')
+    [...] ('mary', 'Mary Contrary', 'mary')
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('fred', 'Fred Flintstone', 'freddy')
+    [...] ('fred', 'Fred Flintstone', 'freddy')
     COMMIT
 
 :meth:`~.Session.commit` flushes the remaining changes to the
@@ -475,7 +475,7 @@ If we look at Ed's ``id`` attribute, which earlier was ``None``, it now has a va
             users.nickname AS users_nickname
     FROM users
     WHERE users.id = ?
-    (1,)
+    [...] (1,)
     {stop}1
 
 After the :class:`~sqlalchemy.orm.session.Session` inserts new rows in the
@@ -519,16 +519,16 @@ Querying the session, we can see that they're flushed into the current transacti
 
     {sql}>>> session.query(User).filter(User.name.in_(['Edwardo', 'fakeuser'])).all()
     UPDATE users SET name=? WHERE users.id = ?
-    ('Edwardo', 1)
+    [...] ('Edwardo', 1)
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('fakeuser', 'Invalid', '12345')
+    [...] ('fakeuser', 'Invalid', '12345')
     SELECT users.id AS users_id,
             users.name AS users_name,
             users.fullname AS users_fullname,
             users.nickname AS users_nickname
     FROM users
     WHERE users.name IN (?, ?)
-    ('Edwardo', 'fakeuser')
+    [...] ('Edwardo', 'fakeuser')
     {stop}[<User(name='Edwardo', fullname='Ed Jones', nickname='eddie')>, <User(name='fakeuser', fullname='Invalid', nickname='12345')>]
 
 Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and
@@ -548,7 +548,7 @@ Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and
             users.nickname AS users_nickname
     FROM users
     WHERE users.id = ?
-    (1,)
+    [...] (1,)
     {stop}u'ed'
     >>> fake_user in session
     False
@@ -564,7 +564,7 @@ issuing a SELECT illustrates the changes made to the database:
             users.nickname AS users_nickname
     FROM users
     WHERE users.name IN (?, ?)
-    ('ed', 'fakeuser')
+    [...] ('ed', 'fakeuser')
     {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>]
 
 .. _ormtutorial_querying:
@@ -590,7 +590,7 @@ returned:
             users.fullname AS users_fullname,
             users.nickname AS users_nickname
     FROM users ORDER BY users.id
-    ()
+    [...] ()
     {stop}ed Ed Jones
     wendy Wendy Williams
     mary Mary Contrary
@@ -609,7 +609,7 @@ is expressed as tuples:
     SELECT users.name AS users_name,
             users.fullname AS users_fullname
     FROM users
-    ()
+    [...] ()
     {stop}ed Ed Jones
     wendy Wendy Williams
     mary Mary Contrary
@@ -630,7 +630,7 @@ class:
             users.fullname AS users_fullname,
             users.nickname AS users_nickname
     FROM users
-    ()
+    [...] ()
     {stop}<User(name='ed', fullname='Ed Jones', nickname='eddie')> ed
     <User(name='wendy', fullname='Wendy Williams', nickname='windy')> wendy
     <User(name='mary', fullname='Mary Contrary', nickname='mary')> mary
@@ -647,7 +647,7 @@ is mapped to one (such as ``User.name``):
     ...    print(row.name_label)
     SELECT users.name AS name_label
     FROM users
-    (){stop}
+    [...] (){stop}
     ed
     wendy
     mary
@@ -669,7 +669,7 @@ entities are present in the call to :meth:`~.Session.query`, can be controlled u
             user_alias.fullname AS user_alias_fullname,
             user_alias.nickname AS user_alias_nickname
     FROM users AS user_alias
-    (){stop}
+    [...] (){stop}
     <User(name='ed', fullname='Ed Jones', nickname='eddie')>
     <User(name='wendy', fullname='Wendy Williams', nickname='windy')>
     <User(name='mary', fullname='Mary Contrary', nickname='mary')>
@@ -689,7 +689,7 @@ conjunction with ORDER BY:
             users.nickname AS users_nickname
     FROM users ORDER BY users.id
     LIMIT ? OFFSET ?
-    (2, 1){stop}
+    [...] (2, 1){stop}
     <User(name='wendy', fullname='Wendy Williams', nickname='windy')>
     <User(name='mary', fullname='Mary Contrary', nickname='mary')>
 
@@ -703,7 +703,7 @@ and filtering results, which is accomplished either with
     ...    print(name)
     SELECT users.name AS users_name FROM users
     WHERE users.fullname = ?
-    ('Ed Jones',)
+    [...] ('Ed Jones',)
     {stop}ed
 
 ...or :func:`~sqlalchemy.orm.query.Query.filter`, which uses more flexible SQL
@@ -717,7 +717,7 @@ operators with the class-level attributes on your mapped class:
     ...    print(name)
     SELECT users.name AS users_name FROM users
     WHERE users.fullname = ?
-    ('Ed Jones',)
+    [...] ('Ed Jones',)
     {stop}ed
 
 The :class:`~sqlalchemy.orm.query.Query` object is fully **generative**, meaning
@@ -739,7 +739,7 @@ users named "ed" with a full name of "Ed Jones", you can call
             users.nickname AS users_nickname
     FROM users
     WHERE users.name = ? AND users.fullname = ?
-    ('ed', 'Ed Jones')
+    [...] ('ed', 'Ed Jones')
     {stop}<User(name='ed', fullname='Ed Jones', nickname='eddie')>
 
 Common Filter Operators
@@ -861,7 +861,7 @@ database results.  Here's a brief tour:
               users.nickname AS users_nickname
       FROM users
       WHERE users.name LIKE ? ORDER BY users.id
-      ('%ed',)
+      [...] ('%ed',)
       {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>,
             <User(name='fred', fullname='Fred Flintstone', nickname='freddy')>]
 
@@ -893,7 +893,7 @@ database results.  Here's a brief tour:
       FROM users
       WHERE users.name LIKE ? ORDER BY users.id
        LIMIT ? OFFSET ?
-      ('%ed', 1, 0)
+      [...] ('%ed', 1, 0)
       {stop}<User(name='ed', fullname='Ed Jones', nickname='eddie')>
 
 * :meth:`_query.Query.one()` fully fetches all rows, and if not
@@ -937,7 +937,7 @@ database results.  Here's a brief tour:
       SELECT users.id AS users_id
       FROM users
       WHERE users.name = ? ORDER BY users.id
-      ('ed',)
+      [...] ('ed',)
       {stop}1
 
 .. _orm_tutorial_literal_sql:
@@ -965,7 +965,7 @@ by most applicable methods.  For example,
             users.nickname AS users_nickname
     FROM users
     WHERE id<224 ORDER BY id
-    ()
+    [...] ()
     {stop}ed
     wendy
     mary
@@ -985,7 +985,7 @@ method:
             users.nickname AS users_nickname
     FROM users
     WHERE id<? and name=? ORDER BY users.id
-    (224, 'fred')
+    [...] (224, 'fred')
     {stop}<User(name='fred', fullname='Fred Flintstone', nickname='freddy')>
 
 To use an entirely string-based statement, a :func:`_expression.text` construct
@@ -999,7 +999,7 @@ returned by the SQL statement based on column name::
     {sql}>>> session.query(User).from_statement(
     ...  text("SELECT * FROM users where name=:name")).params(name='ed').all()
     SELECT * FROM users where name=?
-    ('ed',)
+    [...] ('ed',)
     {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>]
 
 For better targeting of mapped columns to a textual SELECT, as well as  to
@@ -1013,7 +1013,7 @@ columns are passed in the desired order to :meth:`_expression.TextClause.columns
     >>> stmt = stmt.columns(User.name, User.id, User.fullname, User.nickname)
     {sql}>>> session.query(User).from_statement(stmt).params(name='ed').all()
     SELECT name, id, fullname, nickname FROM users where name=?
-    ('ed',)
+    [...] ('ed',)
     {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>]
 
 When selecting from a :func:`_expression.text` construct, the :class:`_query.Query`
@@ -1028,7 +1028,7 @@ any other case:
     {sql}>>> session.query(User.id, User.name).\
     ...          from_statement(stmt).params(name='ed').all()
     SELECT name, id FROM users where name=?
-    ('ed',)
+    [...] ('ed',)
     {stop}[(1, u'ed')]
 
 .. seealso::
@@ -1052,7 +1052,7 @@ counting called :meth:`~sqlalchemy.orm.query.Query.count()`:
                     users.nickname AS users_nickname
     FROM users
     WHERE users.name LIKE ?) AS anon_1
-    ('%ed',)
+    [...] ('%ed',)
     {stop}2
 
 .. sidebar:: Counting on ``count()``
@@ -1085,7 +1085,7 @@ use it to return the count of each distinct user name:
     {sql}>>> session.query(func.count(User.name), User.name).group_by(User.name).all()
     SELECT count(users.name) AS count_1, users.name AS users_name
     FROM users GROUP BY users.name
-    ()
+    [...] ()
     {stop}[(1, u'ed'), (1, u'fred'), (1, u'mary'), (1, u'wendy')]
 
 To achieve our simple ``SELECT count(*) FROM table``, we can apply it as:
@@ -1095,7 +1095,7 @@ To achieve our simple ``SELECT count(*) FROM table``, we can apply it as:
     {sql}>>> session.query(func.count('*')).select_from(User).scalar()
     SELECT count(?) AS count_1
     FROM users
-    ('*',)
+    [...] ('*',)
     {stop}4
 
 The usage of :meth:`_query.Query.select_from` can be removed if we express the count in terms
@@ -1106,7 +1106,7 @@ of the ``User`` primary key directly:
     {sql}>>> session.query(func.count(User.id)).scalar()
     SELECT count(users.id) AS count_1
     FROM users
-    ()
+    [...] ()
     {stop}4
 
 .. _orm_tutorial_relationship:
@@ -1223,7 +1223,7 @@ already been created:
         PRIMARY KEY (id),
          FOREIGN KEY(user_id) REFERENCES users (id)
     )
-    ()
+    [...] ()
     COMMIT
 
 Working with Related Objects
@@ -1272,11 +1272,11 @@ known as **cascading**:
     >>> session.add(jack)
     {sql}>>> session.commit()
     INSERT INTO users (name, fullname, nickname) VALUES (?, ?, ?)
-    ('jack', 'Jack Bean', 'gjffdd')
+    [...] ('jack', 'Jack Bean', 'gjffdd')
     INSERT INTO addresses (email_address, user_id) VALUES (?, ?)
-    ('jack@google.com', 5)
+    [...] ('jack@google.com', 5)
     INSERT INTO addresses (email_address, user_id) VALUES (?, ?)
-    ('j25@yahoo.com', 5)
+    [...] ('j25@yahoo.com', 5)
     COMMIT
 
 Querying for Jack, we get just Jack back.  No SQL is yet issued for Jack's addresses:
@@ -1292,7 +1292,7 @@ Querying for Jack, we get just Jack back.  No SQL is yet issued for Jack's addre
             users.nickname AS users_nickname
     FROM users
     WHERE users.name = ?
-    ('jack',)
+    [...] ('jack',)
 
     {stop}>>> jack
     <User(name='jack', fullname='Jack Bean', nickname='gjffdd')>
@@ -1308,7 +1308,7 @@ Let's look at the ``addresses`` collection.  Watch the SQL:
             addresses.user_id AS addresses_user_id
     FROM addresses
     WHERE ? = addresses.user_id ORDER BY addresses.id
-    (5,)
+    [...] (5,)
     {stop}[<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
 
 When we accessed the ``addresses`` collection, SQL was suddenly issued. This
@@ -1349,7 +1349,7 @@ Below we load the ``User`` and ``Address`` entities at once using this method:
     FROM users, addresses
     WHERE users.id = addresses.user_id
             AND addresses.email_address = ?
-    ('jack@google.com',)
+    [...] ('jack@google.com',)
     {stop}<User(name='jack', fullname='Jack Bean', nickname='gjffdd')>
     <Address(email_address='jack@google.com')>
 
@@ -1367,7 +1367,7 @@ using the :meth:`_query.Query.join` method:
             users.nickname AS users_nickname
     FROM users JOIN addresses ON users.id = addresses.user_id
     WHERE addresses.email_address = ?
-    ('jack@google.com',)
+    [...] ('jack@google.com',)
     {stop}[<User(name='jack', fullname='Jack Bean', nickname='gjffdd')>]
 
 :meth:`_query.Query.join` knows how to join between ``User``
@@ -1435,7 +1435,7 @@ distinct email addresses at the same time:
             ON users.id = addresses_2.user_id
     WHERE addresses_1.email_address = ?
             AND addresses_2.email_address = ?
-    ('jack@google.com', 'j25@yahoo.com')
+    [...] ('jack@google.com', 'j25@yahoo.com')
     {stop}jack jack@google.com j25@yahoo.com
 
 In addition to using the :meth:`_orm.PropComparator.of_type` method, it is
@@ -1496,7 +1496,7 @@ accessible through an attribute called ``c``:
         FROM addresses GROUP BY addresses.user_id) AS anon_1
         ON users.id = anon_1.user_id
     ORDER BY users.id
-    ('*',)
+    [...] ('*',)
     {stop}<User(name='ed', fullname='Ed Jones', nickname='eddie')> None
     <User(name='wendy', fullname='Wendy Williams', nickname='windy')> None
     <User(name='mary', fullname='Mary Contrary', nickname='mary')> None
@@ -1534,7 +1534,7 @@ to associate an "alias" of a mapped class to a subquery:
         FROM addresses
         WHERE addresses.email_address != ?) AS anon_1
         ON users.id = anon_1.user_id
-    ('j25@yahoo.com',)
+    [...] ('j25@yahoo.com',)
     {stop}<User(name='jack', fullname='Jack Bean', nickname='gjffdd')>
     <Address(email_address='jack@google.com')>
 
@@ -1559,7 +1559,7 @@ There is an explicit EXISTS construct, which looks like this:
     WHERE EXISTS (SELECT *
     FROM addresses
     WHERE addresses.user_id = users.id)
-    ()
+    [...] ()
     {stop}jack
 
 The :class:`~sqlalchemy.orm.query.Query` features several operators which make
@@ -1576,7 +1576,7 @@ usage of EXISTS automatically. Above, the statement can be expressed along the
     WHERE EXISTS (SELECT 1
     FROM addresses
     WHERE users.id = addresses.user_id)
-    ()
+    [...] ()
     {stop}jack
 
 :meth:`~.RelationshipProperty.Comparator.any` takes criterion as well, to limit the rows matched:
@@ -1591,7 +1591,7 @@ usage of EXISTS automatically. Above, the statement can be expressed along the
     WHERE EXISTS (SELECT 1
     FROM addresses
     WHERE users.id = addresses.user_id AND addresses.email_address LIKE ?)
-    ('%google%',)
+    [...] ('%google%',)
     {stop}jack
 
 :meth:`~.RelationshipProperty.Comparator.has` is the same operator as
@@ -1609,7 +1609,7 @@ usage of EXISTS automatically. Above, the statement can be expressed along the
     WHERE NOT (EXISTS (SELECT 1
     FROM users
     WHERE users.id = addresses.user_id AND users.name = ?))
-    ('jack',)
+    [...] ('jack',)
     {stop}[]
 
 Common Relationship Operators
@@ -1685,14 +1685,14 @@ at once:
             users.nickname AS users_nickname
     FROM users
     WHERE users.name = ?
-    ('jack',)
+    [...] ('jack',)
     SELECT addresses.user_id AS addresses_user_id,
             addresses.id AS addresses_id,
             addresses.email_address AS addresses_email_address
     FROM addresses
     WHERE addresses.user_id IN (?)
     ORDER BY addresses.id
-    (5,)
+    [...] (5,)
     {stop}>>> jack
     <User(name='jack', fullname='Jack Bean', nickname='gjffdd')>
 
@@ -1728,7 +1728,7 @@ will emit the extra join regardless:
     FROM users
         LEFT OUTER JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id
     WHERE users.name = ? ORDER BY addresses_1.id
-    ('jack',)
+    [...] ('jack',)
 
     {stop}>>> jack
     <User(name='jack', fullname='Jack Bean', nickname='gjffdd')>
@@ -1792,7 +1792,7 @@ attribute:
             addresses.user_id AS addresses_user_id
     FROM addresses JOIN users ON users.id = addresses.user_id
     WHERE users.name = ?
-    ('jack',)
+    [...] ('jack',)
 
     {stop}>>> jacks_addresses
     [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
@@ -1814,9 +1814,9 @@ in the session, then we'll issue a ``count`` query to see that no rows remain:
     >>> session.delete(jack)
     {sql}>>> session.query(User).filter_by(name='jack').count()
     UPDATE addresses SET user_id=? WHERE addresses.id = ?
-    ((None, 1), (None, 2))
+    [...] ((None, 1), (None, 2))
     DELETE FROM users WHERE users.id = ?
-    (5,)
+    [...] (5,)
     SELECT count(*) AS count_1
     FROM (SELECT users.id AS users_id,
             users.name AS users_name,
@@ -1824,7 +1824,7 @@ in the session, then we'll issue a ``count`` query to see that no rows remain:
             users.nickname AS users_nickname
     FROM users
     WHERE users.name = ?) AS anon_1
-    ('jack',)
+    [...] ('jack',)
     {stop}0
 
 So far, so good.  How about Jack's ``Address`` objects ?
@@ -1840,7 +1840,7 @@ So far, so good.  How about Jack's ``Address`` objects ?
                     addresses.user_id AS addresses_user_id
     FROM addresses
     WHERE addresses.email_address IN (?, ?)) AS anon_1
-    ('jack@google.com', 'j25@yahoo.com')
+    [...] ('jack@google.com', 'j25@yahoo.com')
     {stop}2
 
 Uh oh, they're still there ! Analyzing the flush SQL, we can see that the
@@ -1914,7 +1914,7 @@ being deleted:
             users.nickname AS users_nickname
     FROM users
     WHERE users.id = ?
-    (5,)
+    [...] (5,)
     {stop}
 
     # remove one Address (lazy load fires off)
@@ -1924,7 +1924,7 @@ being deleted:
             addresses.user_id AS addresses_user_id
     FROM addresses
     WHERE ? = addresses.user_id
-    (5,)
+    [...] (5,)
     {stop}
 
     # only one address remains
@@ -1932,14 +1932,14 @@ being deleted:
     ...     Address.email_address.in_(['jack@google.com', 'j25@yahoo.com'])
     ... ).count()
     DELETE FROM addresses WHERE addresses.id = ?
-    (2,)
+    [...] (2,)
     SELECT count(*) AS count_1
     FROM (SELECT addresses.id AS addresses_id,
                     addresses.email_address AS addresses_email_address,
                     addresses.user_id AS addresses_user_id
     FROM addresses
     WHERE addresses.email_address IN (?, ?)) AS anon_1
-    ('jack@google.com', 'j25@yahoo.com')
+    [...] ('jack@google.com', 'j25@yahoo.com')
     {stop}1
 
 Deleting Jack will delete both Jack and the remaining ``Address`` associated
@@ -1951,9 +1951,9 @@ with the user:
 
     {sql}>>> session.query(User).filter_by(name='jack').count()
     DELETE FROM addresses WHERE addresses.id = ?
-    (1,)
+    [...] (1,)
     DELETE FROM users WHERE users.id = ?
-    (5,)
+    [...] (5,)
     SELECT count(*) AS count_1
     FROM (SELECT users.id AS users_id,
                     users.name AS users_name,
@@ -1961,7 +1961,7 @@ with the user:
                     users.nickname AS users_nickname
     FROM users
     WHERE users.name = ?) AS anon_1
-    ('jack',)
+    [...] ('jack',)
     {stop}0
 
     {sql}>>> session.query(Address).filter(
@@ -1973,7 +1973,7 @@ with the user:
                     addresses.user_id AS addresses_user_id
     FROM addresses
     WHERE addresses.email_address IN (?, ?)) AS anon_1
-    ('jack@google.com', 'j25@yahoo.com')
+    [...] ('jack@google.com', 'j25@yahoo.com')
     {stop}0
 
 .. topic:: More on Cascades
@@ -2086,7 +2086,7 @@ Create new tables:
         PRIMARY KEY (id),
         UNIQUE (keyword)
     )
-    ()
+    [...] ()
     COMMIT
     CREATE TABLE posts (
         id INTEGER NOT NULL,
@@ -2096,7 +2096,7 @@ Create new tables:
         PRIMARY KEY (id),
         FOREIGN KEY(user_id) REFERENCES users (id)
     )
-    ()
+    [...] ()
     COMMIT
     CREATE TABLE post_keywords (
         post_id INTEGER NOT NULL,
@@ -2105,7 +2105,7 @@ Create new tables:
         FOREIGN KEY(post_id) REFERENCES posts (id),
         FOREIGN KEY(keyword_id) REFERENCES keywords (id)
     )
-    ()
+    [...] ()
     COMMIT
 
 Usage is not too different from what we've been doing.  Let's give Wendy some blog posts:
@@ -2121,7 +2121,7 @@ Usage is not too different from what we've been doing.  Let's give Wendy some bl
             users.nickname AS users_nickname
     FROM users
     WHERE users.name = ?
-    ('wendy',)
+    [...] ('wendy',)
     {stop}
     >>> post = BlogPost("Wendy's Blog Post", "This is a test", wendy)
     >>> session.add(post)
@@ -2144,13 +2144,13 @@ keyword string 'firstpost'":
     ...             filter(BlogPost.keywords.any(keyword='firstpost')).\
     ...             all()
     INSERT INTO keywords (keyword) VALUES (?)
-    ('wendy',)
+    [...] ('wendy',)
     INSERT INTO keywords (keyword) VALUES (?)
-    ('firstpost',)
+    [...] ('firstpost',)
     INSERT INTO posts (user_id, headline, body) VALUES (?, ?, ?)
-    (2, "Wendy's Blog Post", 'This is a test')
+    [...] (2, "Wendy's Blog Post", 'This is a test')
     INSERT INTO post_keywords (post_id, keyword_id) VALUES (?, ?)
-    (...)
+    [...] (...)
     SELECT posts.id AS posts_id,
             posts.user_id AS posts_user_id,
             posts.headline AS posts_headline,
@@ -2161,7 +2161,7 @@ keyword string 'firstpost'":
         WHERE posts.id = post_keywords.post_id
             AND keywords.id = post_keywords.keyword_id
             AND keywords.keyword = ?)
-    ('firstpost',)
+    [...] ('firstpost',)
     {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', nickname='windy')>)]
 
 If we want to look up posts owned by the user ``wendy``, we can tell
@@ -2183,7 +2183,7 @@ the query to narrow down to that ``User`` object as a parent:
         WHERE posts.id = post_keywords.post_id
             AND keywords.id = post_keywords.keyword_id
             AND keywords.keyword = ?))
-    (2, 'firstpost')
+    [...] (2, 'firstpost')
     {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', nickname='windy')>)]
 
 Or we can use Wendy's own ``posts`` relationship, which is a "dynamic"
@@ -2204,7 +2204,7 @@ relationship, to query straight from there:
         WHERE posts.id = post_keywords.post_id
             AND keywords.id = post_keywords.keyword_id
             AND keywords.keyword = ?))
-    (2, 'firstpost')
+    [...] (2, 'firstpost')
     {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', nickname='windy')>)]
 
 Further Reference
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 441e77a378eb0dcf61c456c4703a67c45d3e573d..24e2d13d8d2448db3789926216722fc8ee8c5729 100644 (file)
@@ -3681,7 +3681,7 @@ class PGDialect(default.DefaultDialect):
             WHERE t.typtype = 'd'
         """
 
-        s = sql.text(SQL_DOMAINS).columns(attname=sqltypes.Unicode)
+        s = sql.text(SQL_DOMAINS)
         c = connection.execution_options(future_result=True).execute(s)
 
         domains = {}
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index a36f4eee288776c6002e54bff39432b634a3c0fa..3e02a29fec42626c7e30e095b34ac443e5db919b 100644 (file)
@@ -1175,46 +1175,17 @@ class Connection(Connectable):
         )
 
         compiled_cache = execution_options.get(
-            "compiled_cache", self.dialect._compiled_cache
+            "compiled_cache", self.engine._compiled_cache
         )
 
-        if compiled_cache is not None:
-            elem_cache_key = elem._generate_cache_key()
-        else:
-            elem_cache_key = None
-
-        if elem_cache_key:
-            cache_key, extracted_params = elem_cache_key
-            key = (
-                dialect,
-                cache_key,
-                tuple(keys),
-                bool(schema_translate_map),
-                inline,
-            )
-            compiled_sql = compiled_cache.get(key)
-
-            if compiled_sql is None:
-                compiled_sql = elem.compile(
-                    dialect=dialect,
-                    cache_key=elem_cache_key,
-                    column_keys=keys,
-                    inline=inline,
-                    schema_translate_map=schema_translate_map,
-                    linting=self.dialect.compiler_linting
-                    | compiler.WARN_LINTING,
-                )
-                compiled_cache[key] = compiled_sql
-        else:
-            extracted_params = None
-            compiled_sql = elem.compile(
-                dialect=dialect,
-                column_keys=keys,
-                inline=inline,
-                schema_translate_map=schema_translate_map,
-                linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
-            )
-
+        compiled_sql, extracted_params, cache_hit = elem._compile_w_cache(
+            dialect=dialect,
+            compiled_cache=compiled_cache,
+            column_keys=keys,
+            inline=inline,
+            schema_translate_map=schema_translate_map,
+            linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
+        )
         ret = self._execute_context(
             dialect,
             dialect.execution_ctx_cls._init_compiled,
@@ -1225,6 +1196,7 @@ class Connection(Connectable):
             distilled_params,
             elem,
             extracted_params,
+            cache_hit=cache_hit,
         )
         if has_events:
             self.dispatch.after_execute(
@@ -1389,7 +1361,8 @@ class Connection(Connectable):
         statement,
         parameters,
         execution_options,
-        *args
+        *args,
+        **kw
     ):
         """Create an :class:`.ExecutionContext` and execute, returning
         a :class:`_engine.CursorResult`."""
@@ -1407,7 +1380,7 @@ class Connection(Connectable):
                 conn = self._revalidate_connection()
 
             context = constructor(
-                dialect, self, conn, execution_options, *args
+                dialect, self, conn, execution_options, *args, **kw
             )
         except (exc.PendingRollbackError, exc.ResourceClosedError):
             raise
@@ -1455,32 +1428,21 @@ class Connection(Connectable):
 
             self.engine.logger.info(statement)
 
-            stats = context._get_cache_stats()
+            stats = context._get_cache_stats()
 
             if not self.engine.hide_parameters:
-                # TODO: I love the stats but a ton of tests that are hardcoded.
-                # to certain log output are failing.
                 self.engine.logger.info(
-                    "%r",
+                    "[%s] %r",
+                    stats,
                     sql_util._repr_params(
                         parameters, batches=10, ismulti=context.executemany
                     ),
                 )
-                # self.engine.logger.info(
-                #    "[%s] %r",
-                #    stats,
-                #    sql_util._repr_params(
-                #        parameters, batches=10, ismulti=context.executemany
-                #    ),
-                # )
             else:
                 self.engine.logger.info(
-                    "[SQL parameters hidden due to hide_parameters=True]"
+                    "[%s] [SQL parameters hidden due to hide_parameters=True]"
+                    % (stats,)
                 )
-                # self.engine.logger.info(
-                #    "[%s] [SQL parameters hidden due to hide_parameters=True]"
-                #    % (stats,)
-                # )
 
         evt_handled = False
         try:
@@ -2369,6 +2331,7 @@ class Engine(Connectable, log.Identified):
         url,
         logging_name=None,
         echo=None,
+        query_cache_size=500,
         execution_options=None,
         hide_parameters=False,
     ):
@@ -2379,14 +2342,43 @@ class Engine(Connectable, log.Identified):
             self.logging_name = logging_name
         self.echo = echo
         self.hide_parameters = hide_parameters
+        if query_cache_size != 0:
+            self._compiled_cache = util.LRUCache(
+                query_cache_size, size_alert=self._lru_size_alert
+            )
+        else:
+            self._compiled_cache = None
         log.instance_logger(self, echoflag=echo)
         if execution_options:
             self.update_execution_options(**execution_options)
 
+    def _lru_size_alert(self, cache):
+        if self._should_log_info:
+            self.logger.info(
+                "Compiled cache size pruning from %d items to %d.  "
+                "Increase cache size to reduce the frequency of pruning.",
+                len(cache),
+                cache.capacity,
+            )
+
     @property
     def engine(self):
         return self
 
+    def clear_compiled_cache(self):
+        """Clear the compiled cache associated with the dialect.
+
+        This applies **only** to the built-in cache that is established
+        via the :paramref:`.create_engine.query_cache_size` parameter.
+        It will not impact any dictionary caches that were passed via the
+        :paramref:`.Connection.execution_options.query_cache` parameter.
+
+        .. versionadded:: 1.4
+
+        """
+        if self._compiled_cache:
+            self._compiled_cache.clear()
+
     def update_execution_options(self, **opt):
         r"""Update the default execution_options dictionary
         of this :class:`_engine.Engine`.
@@ -2874,6 +2866,7 @@ class OptionEngineMixin(object):
         self.dialect = proxied.dialect
         self.logging_name = proxied.logging_name
         self.echo = proxied.echo
+        self._compiled_cache = proxied._compiled_cache
         self.hide_parameters = proxied.hide_parameters
         log.instance_logger(self, echoflag=self.echo)
 
diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py
index 4c912349ea7fa2606cac0dd7acfd3e80c2b33ee5..9bf72eb068c1a50dd9152358f629869de7bbbc35 100644 (file)
@@ -436,7 +436,13 @@ def create_engine(url, **kwargs):
         .. versionadded:: 1.2.3
 
     :param query_cache_size: size of the cache used to cache the SQL string
-     form of queries.  Defaults to zero, which disables caching.
+     form of queries.  Set to zero to disable caching.
+
+     The cache is pruned of its least recently used items when its size reaches
+     N * 1.5.  Defaults to 500, meaning the cache will always store at least
+     500 SQL statements when filled, and will grow up to 750 items at which
+     point it is pruned back down to 500 by removing the 250 least recently
+     used items.
 
      Caching is accomplished on a per-statement basis by generating a
      cache key that represents the statement's structure, then generating
@@ -446,6 +452,11 @@ def create_engine(url, **kwargs):
      bypass the cache.   SQL logging will indicate statistics for each
      statement whether or not it were pull from the cache.
 
+     .. note:: some ORM functions related to unit-of-work persistence as well
+        as some attribute loading strategies will make use of individual
+        per-mapper caches outside of the main cache.
+
+
      .. seealso::
 
         ``engine_caching`` - TODO: this will be an upcoming section describing
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
index d03d79df72bcdb530f230f56791bd7b3a6d5f134..abffe0d1f196d88d1a387c72008a974423e7166b 100644 (file)
@@ -51,6 +51,7 @@ class CursorResultMetaData(ResultMetaData):
         "_keys",
         "_tuplefilter",
         "_translated_indexes",
+        "_safe_for_cache"
         # don't need _unique_filters support here for now.  Can be added
         # if a need arises.
     )
@@ -104,11 +105,11 @@ class CursorResultMetaData(ResultMetaData):
         return new_metadata
 
     def _adapt_to_context(self, context):
-        """When using a cached result metadata against a new context,
-        we need to rewrite the _keymap so that it has the specific
-        Column objects in the new context inside of it.  this accommodates
-        for select() constructs that contain anonymized columns and
-        are cached.
+        """When using a cached Compiled construct that has a _result_map,
+        for a new statement that used the cached Compiled, we need to ensure
+        the keymap has the Column objects from our new statement as keys.
+        So here we rewrite keymap with new entries for the new columns
+        as matched to those of the cached statement.
 
         """
         if not context.compiled._result_columns:
@@ -124,14 +125,15 @@ class CursorResultMetaData(ResultMetaData):
         # to the result map.
         md = self.__class__.__new__(self.__class__)
 
-        md._keymap = self._keymap.copy()
+        md._keymap = dict(self._keymap)
 
         # match up new columns positionally to the result columns
         for existing, new in zip(
             context.compiled._result_columns,
             invoked_statement._exported_columns_iterator(),
         ):
-            md._keymap[new] = md._keymap[existing[RM_NAME]]
+            if existing[RM_NAME] in md._keymap:
+                md._keymap[new] = md._keymap[existing[RM_NAME]]
 
         md.case_sensitive = self.case_sensitive
         md._processors = self._processors
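The rematch above reduces to a small dictionary exercise; a sketch with
plain strings standing in for Column objects and keymap records:

    # metadata keyed by the cached statement's columns
    keymap = {"cached_a": ("rec_a",), "cached_b": ("rec_b",)}
    cached_result_columns = ["cached_a", "cached_b"]  # RM_NAME entries
    new_statement_columns = ["new_a", "new_b"]  # same positions, new objects

    new_keymap = dict(keymap)
    for existing, new in zip(cached_result_columns, new_statement_columns):
        if existing in new_keymap:  # the guard added in this patch
            new_keymap[new] = new_keymap[existing]

    assert new_keymap["new_a"] == ("rec_a",)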
@@ -147,6 +149,7 @@ class CursorResultMetaData(ResultMetaData):
         self._tuplefilter = None
         self._translated_indexes = None
         self.case_sensitive = dialect.case_sensitive
+        self._safe_for_cache = False
 
         if context.result_column_struct:
             (
@@ -341,6 +344,10 @@ class CursorResultMetaData(ResultMetaData):
             self._keys = [elem[0] for elem in result_columns]
             # pure positional 1-1 case; doesn't need to read
             # the names from cursor.description
+
+            # this metadata is safe to cache because we are guaranteed
+            # to have the columns in the same order for new executions
+            self._safe_for_cache = True
             return [
                 (
                     idx,
@@ -359,9 +366,12 @@ class CursorResultMetaData(ResultMetaData):
                 for idx, rmap_entry in enumerate(result_columns)
             ]
         else:
+
             # name-based or text-positional cases, where we need
             # to read cursor.description names
+
             if textual_ordered:
+                self._safe_for_cache = True
                 # textual positional case
                 raw_iterator = self._merge_textual_cols_by_position(
                     context, cursor_description, result_columns
@@ -369,6 +379,9 @@ class CursorResultMetaData(ResultMetaData):
             elif num_ctx_cols:
                 # compiled SQL with a mismatch of description cols
                 # vs. compiled cols, or textual w/ unordered columns
+                # the order of columns can change if the query is
+                # against a "select *", so not safe to cache
+                self._safe_for_cache = False
                 raw_iterator = self._merge_cols_by_name(
                     context,
                     cursor_description,
@@ -376,7 +389,9 @@ class CursorResultMetaData(ResultMetaData):
                     loose_column_name_matching,
                 )
             else:
-                # no compiled SQL, just a raw string
+                # no compiled SQL, just a raw string, order of columns
+                # can change for "select *"
+                self._safe_for_cache = False
                 raw_iterator = self._merge_cols_by_none(
                     context, cursor_description
                 )
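Why a "select *" result is unsafe to cache can be seen with a plain DBAPI
cursor; a runnable sketch using sqlite3:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (id INTEGER, name TEXT)")
    cur = conn.execute("SELECT * FROM t")
    print([d[0] for d in cur.description])  # ['id', 'name']

    # an Alembic-style migration changes the table; the same SQL string
    # now yields a different cursor.description, so previously cached
    # metadata would be stale
    conn.execute("ALTER TABLE t ADD COLUMN created_at TEXT")
    cur = conn.execute("SELECT * FROM t")
    print([d[0] for d in cur.description])  # ['id', 'name', 'created_at']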
@@ -1152,7 +1167,6 @@ class BaseCursorResult(object):
 
     out_parameters = None
     _metadata = None
-    _metadata_from_cache = False
     _soft_closed = False
     closed = False
 
@@ -1209,33 +1223,38 @@ class BaseCursorResult(object):
     def _init_metadata(self, context, cursor_description):
         if context.compiled:
             if context.compiled._cached_metadata:
-                cached_md = self.context.compiled._cached_metadata
-                self._metadata_from_cache = True
-
-                # result rewrite/ adapt step.  two translations can occur here.
-                # one is if we are invoked against a cached statement, we want
-                # to rewrite the ResultMetaData to reflect the column objects
-                # that are in our current selectable, not the cached one.  the
-                # other is, the CompileState can return an alternative Result
-                # object.   Finally, CompileState might want to tell us to not
-                # actually do the ResultMetaData adapt step if it in fact has
-                # changed the selected columns in any case.
-                compiled = context.compiled
-                if (
-                    compiled
-                    and not compiled._rewrites_selected_columns
-                    and compiled.statement is not context.invoked_statement
-                ):
-                    cached_md = cached_md._adapt_to_context(context)
-
-                self._metadata = metadata = cached_md
-
+                metadata = self.context.compiled._cached_metadata
             else:
-                self._metadata = (
-                    metadata
-                ) = context.compiled._cached_metadata = self._cursor_metadata(
-                    self, cursor_description
-                )
+                metadata = self._cursor_metadata(self, cursor_description)
+                if metadata._safe_for_cache:
+                    context.compiled._cached_metadata = metadata
+
+            # result rewrite / adapt step.  this is to suit the case where
+            # we are invoked against a cached Compiled object; we want to
+            # rewrite the ResultMetaData to reflect the Column objects
+            # that are in our current SQL statement object, not the one
+            # that is associated with the cached Compiled object.
+            # the Compiled object may also tell us not to actually do this
+            # step; this is to support the ORM, which is going to produce
+            # a new Result object in any case and will be using the cached
+            # Column objects against this database result, so we don't
+            # want to rewrite them.
+            #
+            # Basically this step suits the use case where the end user
+            # is using Core SQL expressions and is accessing columns in the
+            # result row using row._mapping[table.c.column].
+            compiled = context.compiled
+            if (
+                compiled
+                and compiled._result_columns
+                and context.cache_hit
+                and not compiled._rewrites_selected_columns
+                and compiled.statement is not context.invoked_statement
+            ):
+                metadata = metadata._adapt_to_context(context)
+
+            self._metadata = metadata
+
         else:
             self._metadata = metadata = self._cursor_metadata(
                 self, cursor_description
index c682a8ee11b745093eea568d52f87e44678e8c2c..4d516e97c573d86dafa36fa2584d9903b5e60950 100644 (file)
@@ -230,7 +230,6 @@ class DefaultDialect(interfaces.Dialect):
         supports_native_boolean=None,
         max_identifier_length=None,
         label_length=None,
-        query_cache_size=0,
         # int() is because the @deprecated_params decorator cannot accommodate
         # the direct reference to the "NO_LINTING" object
         compiler_linting=int(compiler.NO_LINTING),
@@ -262,10 +261,6 @@ class DefaultDialect(interfaces.Dialect):
         if supports_native_boolean is not None:
             self.supports_native_boolean = supports_native_boolean
         self.case_sensitive = case_sensitive
-        if query_cache_size != 0:
-            self._compiled_cache = util.LRUCache(query_cache_size)
-        else:
-            self._compiled_cache = None
 
         self._user_defined_max_identifier_length = max_identifier_length
         if self._user_defined_max_identifier_length:
@@ -794,6 +789,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         parameters,
         invoked_statement,
         extracted_parameters,
+        cache_hit=False,
     ):
         """Initialize execution context for a Compiled construct."""
 
@@ -804,6 +800,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         self.extracted_parameters = extracted_parameters
         self.invoked_statement = invoked_statement
         self.compiled = compiled
+        self.cache_hit = cache_hit
 
         self.execution_options = execution_options
 
@@ -1027,13 +1024,15 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
 
     def _get_cache_stats(self):
         if self.compiled is None:
-            return "raw SQL"
+            return "raw sql"
 
         now = time.time()
         if self.compiled.cache_key is None:
-            return "gen %.5fs" % (now - self.compiled._gen_time,)
+            return "no key %.5fs" % (now - self.compiled._gen_time,)
+        elif self.cache_hit:
+            return "cached for %.4gs" % (now - self.compiled._gen_time,)
         else:
-            return "cached %.5fs" % (now - self.compiled._gen_time,)
+            return "generated in %.5fs" % (now - self.compiled._gen_time,)
 
     @util.memoized_property
     def engine(self):
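For reference, the four annotations _get_cache_stats() can now attach to
SQL logging, formatted per the code above with invented timings:

    examples = [
        "raw sql",                  # no Compiled object involved
        "no key 0.00010s",          # compiled, but no cache key generated
        "cached for 12.34s",        # Compiled was pulled from the cache
        "generated in 0.00052s",    # Compiled built fresh on this execution
    ]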
index f95a30fda09318b42a5a5ff2532a2aba8a346e3e..4f40637c5b38a39a2979b5b3468913cebf19697a 100644 (file)
@@ -412,7 +412,6 @@ class Result(object):
         result = self.session.execute(
             statement, params, execution_options=execution_options
         )
-
         if result._attributes.get("is_single_entity", False):
             result = result.scalars()
 
index 407ec963320ad44058cec9b738a0fe93b423899c..53fc7c107208e28ed5556dbd289687936f8a3515 100644 (file)
@@ -11,6 +11,7 @@ class Select(_LegacySelect):
     _is_future = True
     _setup_joins = ()
     _legacy_setup_joins = ()
+    inherit_cache = True
 
     @classmethod
     def _create_select(cls, *entities):
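The inherit_cache flag seen here is the same opt-in that third-party
subclasses of SQL elements now need; a minimal sketch:

    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        """Adds no state that affects SQL compilation, so it can safely
        reuse the parent class's cache key logic."""

        # without this, any statement using MyColumn is treated as
        # uncacheable and bypasses the compiled cache
        inherit_cache = True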
index 262a1efc91a0423f9492f5e7ecbafd97db0c8fbd..bf07061c682405c39984dbc08f2cd960be137f83 100644 (file)
@@ -85,16 +85,16 @@ class QueryableAttribute(
         self,
         class_,
         key,
+        parententity,
         impl=None,
         comparator=None,
-        parententity=None,
         of_type=None,
     ):
         self.class_ = class_
         self.key = key
+        self._parententity = parententity
         self.impl = impl
         self.comparator = comparator
-        self._parententity = parententity
         self._of_type = of_type
 
         manager = manager_of_class(class_)
@@ -197,9 +197,13 @@ class QueryableAttribute(
     @util.memoized_property
     def expression(self):
         return self.comparator.__clause_element__()._annotate(
-            {"orm_key": self.key}
+            {"orm_key": self.key, "entity_namespace": self._entity_namespace}
         )
 
+    @property
+    def _entity_namespace(self):
+        return self._parententity
+
     @property
     def _annotations(self):
         return self.__clause_element__()._annotations
@@ -230,9 +234,9 @@ class QueryableAttribute(
         return QueryableAttribute(
             self.class_,
             self.key,
-            self.impl,
-            self.comparator.of_type(entity),
             self._parententity,
+            impl=self.impl,
+            comparator=self.comparator.of_type(entity),
             of_type=inspection.inspect(entity),
         )
 
@@ -301,6 +305,8 @@ class InstrumentedAttribute(QueryableAttribute):
 
     """
 
+    inherit_cache = True
+
     def __set__(self, instance, value):
         self.impl.set(
             instance_state(instance), instance_dict(instance), value, None
@@ -320,6 +326,11 @@ class InstrumentedAttribute(QueryableAttribute):
             return self.impl.get(instance_state(instance), dict_)
 
 
+HasEntityNamespace = util.namedtuple(
+    "HasEntityNamespace", ["entity_namespace"]
+)
+
+
 def create_proxied_attribute(descriptor):
     """Create an QueryableAttribute / user descriptor hybrid.
 
@@ -364,6 +375,15 @@ def create_proxied_attribute(descriptor):
                 and getattr(self.class_, self.key).impl.uses_objects
             )
 
+        @property
+        def _entity_namespace(self):
+            if hasattr(self._comparator, "_parententity"):
+                return self._comparator._parententity
+            else:
+                # used by hybrid attributes which try to remain
+                # agnostic of any ORM concepts like mappers
+                return HasEntityNamespace(self.class_)
+
         @property
         def property(self):
             return self.comparator.property
index a16db66f6d28e871445b2aab05ce39317ec6e75f..588b83571499db3762b654fec2618e7eae11d815 100644 (file)
@@ -63,6 +63,8 @@ class QueryContext(object):
         "post_load_paths",
         "identity_token",
         "yield_per",
+        "loaders_require_buffering",
+        "loaders_require_uniquing",
     )
 
     class default_load_options(Options):
@@ -80,21 +82,23 @@ class QueryContext(object):
     def __init__(
         self,
         compile_state,
+        statement,
         session,
         load_options,
         execution_options=None,
         bind_arguments=None,
     ):
-
         self.load_options = load_options
         self.execution_options = execution_options or _EMPTY_DICT
         self.bind_arguments = bind_arguments or _EMPTY_DICT
         self.compile_state = compile_state
-        self.query = query = compile_state.select_statement
+        self.query = statement
         self.session = session
+        self.loaders_require_buffering = False
+        self.loaders_require_uniquing = False
 
         self.propagated_loader_options = {
-            o for o in query._with_options if o.propagate_to_loaders
+            o for o in statement._with_options if o.propagate_to_loaders
         }
 
         self.attributes = dict(compile_state.attributes)
@@ -237,6 +241,7 @@ class ORMCompileState(CompileState):
         )
         querycontext = QueryContext(
             compile_state,
+            statement,
             session,
             load_options,
             execution_options,
@@ -278,8 +283,6 @@ class ORMFromStatementCompileState(ORMCompileState):
     _has_orm_entities = False
     multi_row_eager_loaders = False
     compound_eager_adapter = None
-    loaders_require_buffering = False
-    loaders_require_uniquing = False
 
     @classmethod
     def create_for_statement(cls, statement_container, compiler, **kw):
@@ -386,8 +389,6 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
     _has_orm_entities = False
     multi_row_eager_loaders = False
     compound_eager_adapter = None
-    loaders_require_buffering = False
-    loaders_require_uniquing = False
 
     correlate = None
     _where_criteria = ()
@@ -416,7 +417,14 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
 
         self = cls.__new__(cls)
 
-        self.select_statement = select_statement
+        if select_statement._execution_options:
+            # execution options should not impact the compilation of a
+            # query, and at the moment subqueryloader is putting some things
+            # in here that we explicitly don't want stuck in a cache.
+            self.select_statement = select_statement._clone()
+            self.select_statement._execution_options = util.immutabledict()
+        else:
+            self.select_statement = select_statement
 
         # indicates this select() came from Query.statement
         self.for_statement = (
@@ -654,6 +662,8 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
             )
             self._setup_with_polymorphics()
 
+        # entities will also set up polymorphic adapters for mappers
+        # that have with_polymorphic configured
         _QueryEntity.to_compile_state(self, query._raw_columns)
         return self
 
@@ -1810,10 +1820,12 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
                 self._where_criteria += (single_crit,)
 
 
-def _column_descriptions(query_or_select_stmt):
-    ctx = ORMSelectCompileState._create_entities_collection(
-        query_or_select_stmt
-    )
+def _column_descriptions(query_or_select_stmt, compile_state=None):
+    if compile_state is None:
+        compile_state = ORMSelectCompileState._create_entities_collection(
+            query_or_select_stmt
+        )
+    ctx = compile_state
     return [
         {
             "name": ent._label_name,
@@ -2097,6 +2109,7 @@ class _MapperEntity(_QueryEntity):
             only_load_props = refresh_state = None
 
         _instance = loading._instance_processor(
+            self,
             self.mapper,
             context,
             result,
index 027f2521b164b5fafc56f8b9c1942b831ef06b1b..39cf86e34fb59bf17228504b3e927724d8f13bb7 100644 (file)
@@ -411,7 +411,6 @@ class CompositeProperty(DescriptorProperty):
         def expression(self):
             clauses = self.clauses._annotate(
                 {
-                    "bundle": True,
                     "parententity": self._parententity,
                     "parentmapper": self._parententity,
                     "orm_key": self.prop.key,
index 6c0f5d3ef4f6c692bc885841a4bf06f9d705746e..9782d92b749f5e11c3ddf473d42c228256ffe7ab 100644 (file)
@@ -158,7 +158,7 @@ class MapperProperty(
         """
 
     def create_row_processor(
-        self, context, path, mapper, result, adapter, populators
+        self, context, query_entity, path, mapper, result, adapter, populators
     ):
         """Produce row processing functions and append to the given
         set of populators lists.
@@ -539,7 +539,7 @@ class StrategizedProperty(MapperProperty):
         "_wildcard_token",
         "_default_path_loader_key",
     )
-
+    inherit_cache = True
     strategy_wildcard_key = None
 
     def _memoized_attr__wildcard_token(self):
@@ -600,7 +600,7 @@ class StrategizedProperty(MapperProperty):
         )
 
     def create_row_processor(
-        self, context, path, mapper, result, adapter, populators
+        self, context, query_entity, path, mapper, result, adapter, populators
     ):
         loader = self._get_context_loader(context, path)
         if loader and loader.strategy:
@@ -608,7 +608,14 @@ class StrategizedProperty(MapperProperty):
         else:
             strat = self.strategy
         strat.create_row_processor(
-            context, path, loader, mapper, result, adapter, populators
+            context,
+            query_entity,
+            path,
+            loader,
+            mapper,
+            result,
+            adapter,
+            populators,
         )
 
     def do_init(self):
@@ -668,7 +675,7 @@ class StrategizedProperty(MapperProperty):
         )
 
 
-class ORMOption(object):
+class ORMOption(HasCacheKey):
     """Base class for option objects that are passed to ORM queries.
 
     These options may be consumed by :meth:`.Query.options`,
@@ -696,7 +703,7 @@ class ORMOption(object):
     _is_compile_state = False
 
 
-class LoaderOption(HasCacheKey, ORMOption):
+class LoaderOption(ORMOption):
     """Describe a loader modification to an ORM statement at compilation time.
 
     .. versionadded:: 1.4
@@ -736,9 +743,6 @@ class UserDefinedOption(ORMOption):
     def __init__(self, payload=None):
         self.payload = payload
 
-    def _gen_cache_key(self, *arg, **kw):
-        return ()
-
 
 @util.deprecated_cls(
     "1.4",
@@ -855,7 +859,15 @@ class LoaderStrategy(object):
         """
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         """Establish row processing functions for a given QueryContext.
 
index 424ed5dfeead71f8705ef4414051d64a77325eb9..a33e1b77d5250f347c01ba2028af58f6082e56a1 100644 (file)
@@ -72,8 +72,8 @@ def instances(cursor, context):
         )
 
         if context.yield_per and (
-            context.compile_state.loaders_require_buffering
-            or context.compile_state.loaders_require_uniquing
+            context.loaders_require_buffering
+            or context.loaders_require_uniquing
         ):
             raise sa_exc.InvalidRequestError(
                 "Can't use yield_per with eager loaders that require uniquing "
@@ -545,6 +545,7 @@ def _warn_for_runid_changed(state):
 
 
 def _instance_processor(
+    query_entity,
     mapper,
     context,
     result,
@@ -648,6 +649,7 @@ def _instance_processor(
                         # to see if one fits
                         prop.create_row_processor(
                             context,
+                            query_entity,
                             path,
                             mapper,
                             result,
@@ -667,7 +669,7 @@ def _instance_processor(
     populators = {key: list(value) for key, value in cached_populators.items()}
     for prop in getters["todo"]:
         prop.create_row_processor(
-            context, path, mapper, result, adapter, populators
+            context, query_entity, path, mapper, result, adapter, populators
         )
 
     propagated_loader_options = context.propagated_loader_options
@@ -925,6 +927,7 @@ def _instance_processor(
         _instance = _decorate_polymorphic_switch(
             _instance,
             context,
+            query_entity,
             mapper,
             result,
             path,
@@ -1081,6 +1084,7 @@ def _validate_version_id(mapper, state, dict_, row, getter):
 def _decorate_polymorphic_switch(
     instance_fn,
     context,
+    query_entity,
     mapper,
     result,
     path,
@@ -1112,6 +1116,7 @@ def _decorate_polymorphic_switch(
                 return False
 
             return _instance_processor(
+                query_entity,
                 sub_mapper,
                 context,
                 result,
index c4cb89c0382b8ff5827888e116234aff11cb84dc..bec6da74d464d230e85604cfb43b0988b169594d 100644 (file)
@@ -720,7 +720,7 @@ class Mapper(
         return self
 
     _cache_key_traversal = [
-        ("class_", visitors.ExtendedInternalTraversal.dp_plain_obj)
+        ("mapper", visitors.ExtendedInternalTraversal.dp_plain_obj),
     ]
 
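The motivation for keying on the Mapper rather than the class: a mapper can
be disposed while its class lives on, and a replacement mapper for the same
class must not collide with stale cache entries.  A toy model with plain
objects standing in for Mapper instances:

    class User(object):
        pass

    old_mapper = object()  # stand-in for a Mapper that was later disposed
    new_mapper = object()  # stand-in for its replacement, same class

    cache = {old_mapper: "compiled info for the old configuration"}

    # keyed by mapper identity, the new mapper starts clean; keyed by
    # User, it would have inherited the stale entry
    assert new_mapper not in cache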
     @property
index 2e59417132df451323f9bb9519ec3fe86f955e80..ac7a64c308a6518172f375115b08bfeab02009f2 100644 (file)
@@ -216,6 +216,8 @@ class RootRegistry(PathRegistry):
 
     """
 
+    inherit_cache = True
+
     path = natural_path = ()
     has_entity = False
     is_aliased_class = False
@@ -248,6 +250,8 @@ class PathToken(HasCacheKey, str):
 class TokenRegistry(PathRegistry):
     __slots__ = ("token", "parent", "path", "natural_path")
 
+    inherit_cache = True
+
     def __init__(self, parent, token):
         token = PathToken.intern(token)
 
@@ -280,6 +284,7 @@ class TokenRegistry(PathRegistry):
 
 class PropRegistry(PathRegistry):
     is_unnatural = False
+    inherit_cache = True
 
     def __init__(self, parent, prop):
         # restate this path in terms of the
@@ -439,6 +444,7 @@ class AbstractEntityRegistry(PathRegistry):
 class SlotsEntityRegistry(AbstractEntityRegistry):
     # for aliased class, return lightweight, no-cycles created
     # version
+    inherit_cache = True
 
     __slots__ = (
         "key",
@@ -454,6 +460,8 @@ class CachingEntityRegistry(AbstractEntityRegistry, dict):
     # for long lived mapper, return dict based caching
     # version that creates reference cycles
 
+    inherit_cache = True
+
     def __getitem__(self, entity):
         if isinstance(entity, (int, slice)):
             return self.path[entity]
index 19d43d354dd6db049c92fdb26e89fa337c65eba7..8393eaf74d0ea2672dddede0980ac8380bf18525 100644 (file)
@@ -38,6 +38,7 @@ from ..sql.base import Options
 from ..sql.dml import DeleteDMLState
 from ..sql.dml import UpdateDMLState
 from ..sql.elements import BooleanClauseList
+from ..sql.util import _entity_namespace_key
 
 
 def _bulk_insert(
@@ -1820,8 +1821,12 @@ class BulkUDCompileState(CompileState):
                     if isinstance(k, util.string_types):
                         desc = sql.util._entity_namespace_key(mapper, k)
                         values.extend(desc._bulk_update_tuples(v))
-                    elif isinstance(k, attributes.QueryableAttribute):
-                        values.extend(k._bulk_update_tuples(v))
+                    elif "entity_namespace" in k._annotations:
+                        k_anno = k._annotations
+                        attr = _entity_namespace_key(
+                            k_anno["entity_namespace"], k_anno["orm_key"]
+                        )
+                        values.extend(attr._bulk_update_tuples(v))
                     else:
                         values.append((k, v))
                 else:
index 02f0752a54152c55de78eb162fbbaf0ee202b89b..5fb3beca3c160cbad5b42f8a0fe5496bab4bc4d9 100644 (file)
@@ -45,6 +45,7 @@ class ColumnProperty(StrategizedProperty):
     """
 
     strategy_wildcard_key = "column"
+    inherit_cache = True
 
     __slots__ = (
         "_orig_columns",
index 284ea9d72bc29db57636e490ddf7901d362d5386..cdad55320c337d19d326216dfaa99428ca31bc6a 100644 (file)
@@ -61,6 +61,7 @@ from ..sql.selectable import LABEL_STYLE_NONE
 from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
 from ..sql.selectable import SelectStatementGrouping
 from ..sql.util import _entity_namespace_key
+from ..sql.visitors import InternalTraversal
 from ..util import collections_abc
 
 __all__ = ["Query", "QueryContext", "aliased"]
@@ -423,6 +424,7 @@ class Query(
                 _label_style=self._label_style,
                 compile_options=compile_options,
             )
+            stmt.__dict__.pop("session", None)
 
         stmt._propagate_attrs = self._propagate_attrs
         return stmt
@@ -1725,7 +1727,6 @@ class Query(
 
         """
         from_entity = self._filter_by_zero()
-
         if from_entity is None:
             raise sa_exc.InvalidRequestError(
                 "Can't use filter_by when the first entity '%s' of a query "
@@ -2900,7 +2901,10 @@ class Query(
             compile_state = self._compile_state(for_statement=False)
 
             context = QueryContext(
-                compile_state, self.session, self.load_options
+                compile_state,
+                compile_state.statement,
+                self.session,
+                self.load_options,
             )
 
         result = loading.instances(result_proxy, context)
@@ -3376,7 +3380,12 @@ class Query(
 
     def _compile_context(self, for_statement=False):
         compile_state = self._compile_state(for_statement=for_statement)
-        context = QueryContext(compile_state, self.session, self.load_options)
+        context = QueryContext(
+            compile_state,
+            compile_state.statement,
+            self.session,
+            self.load_options,
+        )
 
         return context
 
@@ -3397,6 +3406,11 @@ class FromStatement(SelectStatementGrouping, Executable):
 
     _for_update_arg = None
 
+    _traverse_internals = [
+        ("_raw_columns", InternalTraversal.dp_clauseelement_list),
+        ("element", InternalTraversal.dp_clauseelement),
+    ] + Executable._executable_traverse_internals
+
     def __init__(self, entities, element):
         self._raw_columns = [
             coercions.expect(
index 683f2b97876c840b6507408850f2b12b98e50d2a..bedc5415369bf612dce987ed308030bee272bbe2 100644 (file)
@@ -107,6 +107,7 @@ class RelationshipProperty(StrategizedProperty):
     """
 
     strategy_wildcard_key = "relationship"
+    inherit_cache = True
 
     _persistence_only = dict(
         passive_deletes=False,
index f67c23aab87bdd8b8c91e4c61d15b6899c6a9fd6..5f039aff71ee453529ccbb05b753b9cd9ecd4881 100644 (file)
@@ -25,6 +25,7 @@ from .base import _DEFER_FOR_STATE
 from .base import _RAISE_FOR_STATE
 from .base import _SET_DEFERRED_EXPIRED
 from .context import _column_descriptions
+from .context import ORMCompileState
 from .interfaces import LoaderStrategy
 from .interfaces import StrategizedProperty
 from .session import _state_session
@@ -156,7 +157,15 @@ class UninstrumentedColumnLoader(LoaderStrategy):
             column_collection.append(c)
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         pass
 
@@ -224,7 +233,15 @@ class ColumnLoader(LoaderStrategy):
         )
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         # look through list of columns represented here
         # to see which, if any, is present in the row.
@@ -281,7 +298,15 @@ class ExpressionColumnLoader(ColumnLoader):
         memoized_populators[self.parent_property] = fetch
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         # look through list of columns represented here
         # to see which, if any, is present in the row.
@@ -332,7 +357,15 @@ class DeferredColumnLoader(LoaderStrategy):
         self.group = self.parent_property.group
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
 
         # for a DeferredColumnLoader, this method is only used during a
@@ -542,7 +575,15 @@ class NoLoader(AbstractRelationshipLoader):
         )
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         def invoke_no_load(state, dict_, row):
             if self.uselist:
@@ -985,7 +1026,15 @@ class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
                 return None
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         key = self.key
 
@@ -1039,12 +1088,27 @@ class PostLoader(AbstractRelationshipLoader):
     """A relationship loader that emits a second SELECT statement."""
 
     def _immediateload_create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         return self.parent_property._get_strategy(
             (("lazy", "immediate"),)
         ).create_row_processor(
-            context, path, loadopt, mapper, result, adapter, populators
+            context,
+            query_entity,
+            path,
+            loadopt,
+            mapper,
+            result,
+            adapter,
+            populators,
         )
 
 
@@ -1057,21 +1121,16 @@ class ImmediateLoader(PostLoader):
             (("lazy", "select"),)
         ).init_class_attribute(mapper)
 
-    def setup_query(
+    def create_row_processor(
         self,
-        compile_state,
-        entity,
+        context,
+        query_entity,
         path,
         loadopt,
+        mapper,
+        result,
         adapter,
-        column_collection=None,
-        parentmapper=None,
-        **kwargs
-    ):
-        pass
-
-    def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        populators,
     ):
         def load_immediate(state, dict_, row):
             state.get_impl(self.key).get(state, dict_)
@@ -1093,120 +1152,6 @@ class SubqueryLoader(PostLoader):
             (("lazy", "select"),)
         ).init_class_attribute(mapper)
 
-    def setup_query(
-        self,
-        compile_state,
-        entity,
-        path,
-        loadopt,
-        adapter,
-        column_collection=None,
-        parentmapper=None,
-        **kwargs
-    ):
-        if (
-            not compile_state.compile_options._enable_eagerloads
-            or compile_state.compile_options._for_refresh_state
-        ):
-            return
-
-        compile_state.loaders_require_buffering = True
-
-        path = path[self.parent_property]
-
-        # build up a path indicating the path from the leftmost
-        # entity to the thing we're subquery loading.
-        with_poly_entity = path.get(
-            compile_state.attributes, "path_with_polymorphic", None
-        )
-        if with_poly_entity is not None:
-            effective_entity = with_poly_entity
-        else:
-            effective_entity = self.entity
-
-        subq_path = compile_state.attributes.get(
-            ("subquery_path", None), orm_util.PathRegistry.root
-        )
-
-        subq_path = subq_path + path
-
-        # if not via query option, check for
-        # a cycle
-        if not path.contains(compile_state.attributes, "loader"):
-            if self.join_depth:
-                if (
-                    (
-                        compile_state.current_path.length
-                        if compile_state.current_path
-                        else 0
-                    )
-                    + path.length
-                ) / 2 > self.join_depth:
-                    return
-            elif subq_path.contains_mapper(self.mapper):
-                return
-
-        (
-            leftmost_mapper,
-            leftmost_attr,
-            leftmost_relationship,
-        ) = self._get_leftmost(subq_path)
-
-        orig_query = compile_state.attributes.get(
-            ("orig_query", SubqueryLoader), compile_state.select_statement
-        )
-
-        # generate a new Query from the original, then
-        # produce a subquery from it.
-        left_alias = self._generate_from_original_query(
-            compile_state,
-            orig_query,
-            leftmost_mapper,
-            leftmost_attr,
-            leftmost_relationship,
-            entity.entity_zero,
-        )
-
-        # generate another Query that will join the
-        # left alias to the target relationships.
-        # basically doing a longhand
-        # "from_self()".  (from_self() itself not quite industrial
-        # strength enough for all contingencies...but very close)
-
-        q = query.Query(effective_entity)
-
-        def set_state_options(compile_state):
-            compile_state.attributes.update(
-                {
-                    ("orig_query", SubqueryLoader): orig_query,
-                    ("subquery_path", None): subq_path,
-                }
-            )
-
-        q = q._add_context_option(set_state_options, None)._disable_caching()
-
-        q = q._set_enable_single_crit(False)
-        to_join, local_attr, parent_alias = self._prep_for_joins(
-            left_alias, subq_path
-        )
-
-        q = q.add_columns(*local_attr)
-        q = self._apply_joins(
-            q, to_join, left_alias, parent_alias, effective_entity
-        )
-
-        q = self._setup_options(q, subq_path, orig_query, effective_entity)
-        q = self._setup_outermost_orderby(q)
-
-        # add new query to attributes to be picked up
-        # by create_row_processor
-        # NOTE: be sure to consult baked.py for some hardcoded logic
-        # about this structure as well
-        assert q.session is None
-        path.set(
-            compile_state.attributes, "subqueryload_data", {"query": q},
-        )
-
     def _get_leftmost(self, subq_path):
         subq_path = subq_path.path
         subq_mapper = orm_util._class_to_mapper(subq_path[0])
@@ -1267,27 +1212,34 @@ class SubqueryLoader(PostLoader):
                 q,
                 *{
                     ent["entity"]
-                    for ent in _column_descriptions(orig_query)
+                    for ent in _column_descriptions(
+                        orig_query, compile_state=orig_compile_state
+                    )
                     if ent["entity"] is not None
                 }
             )
 
-        # for column information, look to the compile state that is
-        # already being passed through
-        compile_state = orig_compile_state
-
         # select from the identity columns of the outer (specifically, these
-        # are the 'local_cols' of the property).  This will remove
-        # other columns from the query that might suggest the right entity
-        # which is why we do _set_select_from above.
-        target_cols = compile_state._adapt_col_list(
+        # are the 'local_cols' of the property).  This will remove other
+        # columns from the query that might suggest the right entity, which
+        # is why we set select_from above.   The attributes we have are
+        # coerced and adapted using the original query's adapter, which is
+        # needed only for the case of adapting a subclass column to
+        # that of a polymorphic selectable, e.g. we have
+        # Engineer.primary_language and the entity is Person.  All other
+        # adaptations, e.g. from_self, select_entity_from(), will occur
+        # within the new query when it compiles, as the compile_state we are
+        # using here is only a partial one.  If the subqueryload is from a
+        # with_polymorphic() or other aliased() object, left_attr will already
+        # be the correct attributes so no adaptation is needed.
+        target_cols = orig_compile_state._adapt_col_list(
             [
-                sql.coercions.expect(sql.roles.ByOfRole, o)
+                sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
                 for o in leftmost_attr
             ],
-            compile_state._get_current_adapter(),
+            orig_compile_state._get_current_adapter(),
         )
-        q._set_entities(target_cols)
+        q._raw_columns = target_cols
 
         distinct_target_key = leftmost_relationship.distinct_target_key
 
@@ -1461,13 +1413,13 @@ class SubqueryLoader(PostLoader):
             "_data",
         )
 
-        def __init__(self, context, subq_info):
+        def __init__(self, context, subq):
             # avoid creating a cycle by not storing the context,
             # even though keeping it would be preferable
             self.session = context.session
             self.execution_options = context.execution_options
             self.load_options = context.load_options
-            self.subq = subq_info["query"]
+            self.subq = subq
             self._data = None
 
         def get(self, key, default):
@@ -1499,12 +1451,148 @@ class SubqueryLoader(PostLoader):
             if self._data is None:
                 self._load()
 
+    def _setup_query_from_rowproc(
+        self, context, path, entity, loadopt, adapter,
+    ):
+        compile_state = context.compile_state
+        if (
+            not compile_state.compile_options._enable_eagerloads
+            or compile_state.compile_options._for_refresh_state
+        ):
+            return
+
+        context.loaders_require_buffering = True
+
+        path = path[self.parent_property]
+
+        # build up a path indicating the path from the leftmost
+        # entity to the thing we're subquery loading.
+        with_poly_entity = path.get(
+            compile_state.attributes, "path_with_polymorphic", None
+        )
+        if with_poly_entity is not None:
+            effective_entity = with_poly_entity
+        else:
+            effective_entity = self.entity
+
+        subq_path = context.query._execution_options.get(
+            ("subquery_path", None), orm_util.PathRegistry.root
+        )
+
+        subq_path = subq_path + path
+
+        # if not via query option, check for
+        # a cycle
+        if not path.contains(compile_state.attributes, "loader"):
+            if self.join_depth:
+                if (
+                    (
+                        compile_state.current_path.length
+                        if compile_state.current_path
+                        else 0
+                    )
+                    + path.length
+                ) / 2 > self.join_depth:
+                    return
+            elif subq_path.contains_mapper(self.mapper):
+                return
+
+        (
+            leftmost_mapper,
+            leftmost_attr,
+            leftmost_relationship,
+        ) = self._get_leftmost(subq_path)
+
+        # use the current query being invoked, not the compile state
+        # one.  this is so that we get the current parameters.  however,
+        # it means we can't use the existing compile state; we have to make
+        # a new one.  another approach would be to use the compiled
+        # query but swap in the new parameters, which seems only marginally
+        # faster but more complicated
+        orig_query = context.query._execution_options.get(
+            ("orig_query", SubqueryLoader), context.query
+        )
+
+        # make a new compile_state for the query that's probably cached, but
+        # we're sort of undoing a bit of that caching :(
+        compile_state_cls = ORMCompileState._get_plugin_class_for_plugin(
+            orig_query, "orm"
+        )
+
+        # this would create the full blown compile state, which we don't
+        # need
+        # orig_compile_state = compile_state_cls.create_for_statement(
+        # orig_query, None)
+
+        # this is the "quicker" version, however it's not clear how
+        # much of this we need.  in particular, I can't get a test to
+        # fail if the "set_base_alias" is missing, and I'm not sure why.
+        orig_compile_state = compile_state_cls._create_entities_collection(
+            orig_query
+        )
+
+        # generate a new Query from the original, then
+        # produce a subquery from it.
+        left_alias = self._generate_from_original_query(
+            orig_compile_state,
+            orig_query,
+            leftmost_mapper,
+            leftmost_attr,
+            leftmost_relationship,
+            entity,
+        )
+
+        # generate another Query that will join the
+        # left alias to the target relationships.
+        # basically doing a longhand
+        # "from_self()".  (from_self() itself not quite industrial
+        # strength enough for all contingencies...but very close)
+
+        q = query.Query(effective_entity)
+
+        q._execution_options = q._execution_options.union(
+            {
+                ("orig_query", SubqueryLoader): orig_query,
+                ("subquery_path", None): subq_path,
+            }
+        )
+
+        q = q._set_enable_single_crit(False)
+        to_join, local_attr, parent_alias = self._prep_for_joins(
+            left_alias, subq_path
+        )
+
+        q = q.add_columns(*local_attr)
+        q = self._apply_joins(
+            q, to_join, left_alias, parent_alias, effective_entity
+        )
+
+        q = self._setup_options(q, subq_path, orig_query, effective_entity)
+        q = self._setup_outermost_orderby(q)
+
+        return q
+
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         if context.refresh_state:
             return self._immediateload_create_row_processor(
-                context, path, loadopt, mapper, result, adapter, populators
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
             )
 
         if not self.parent.class_manager[self.key].impl.supports_population:
@@ -1513,16 +1601,27 @@ class SubqueryLoader(PostLoader):
                 "population - eager loading cannot be applied." % self
             )
 
-        path = path[self.parent_property]
+        # a little dance here as the "path" is still something that only
+        # semi-tracks the exact series of things we are loading, still not
+        # telling us about with_polymorphic() and stuff like that when it's at
+        # the root.  the initial MapperEntity is more accurate for this case.
+        if len(path) == 1:
+            if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
+                return
+        elif not orm_util._entity_isa(path[-1], self.parent):
+            return
 
-        subq_info = path.get(context.attributes, "subqueryload_data")
+        subq = self._setup_query_from_rowproc(
+            context, path, path[-1], loadopt, adapter,
+        )
 
-        if subq_info is None:
+        if subq is None:
             return
 
-        subq = subq_info["query"]
-
         assert subq.session is None
+
+        path = path[self.parent_property]
+
         local_cols = self.parent_property.local_columns
 
         # cache the loaded collections in the context
@@ -1530,7 +1629,7 @@ class SubqueryLoader(PostLoader):
         # call upon create_row_processor again
         collections = path.get(context.attributes, "collections")
         if collections is None:
-            collections = self._SubqCollections(context, subq_info)
+            collections = self._SubqCollections(context, subq)
             path.set(context.attributes, "collections", collections)
 
         if adapter:
@@ -1634,7 +1733,6 @@ class JoinedLoader(AbstractRelationshipLoader):
         if not compile_state.compile_options._enable_eagerloads:
             return
         elif self.uselist:
-            compile_state.loaders_require_uniquing = True
             compile_state.multi_row_eager_loaders = True
 
         path = path[self.parent_property]
@@ -2142,7 +2240,15 @@ class JoinedLoader(AbstractRelationshipLoader):
             return False
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         if not self.parent.class_manager[self.key].impl.supports_population:
             raise sa_exc.InvalidRequestError(
@@ -2150,6 +2256,9 @@ class JoinedLoader(AbstractRelationshipLoader):
                 "population - eager loading cannot be applied." % self
             )
 
+        if self.uselist:
+            context.loaders_require_uniquing = True
+
         our_path = path[self.parent_property]
 
         eager_adapter = self._create_eager_adapter(
@@ -2160,6 +2269,7 @@ class JoinedLoader(AbstractRelationshipLoader):
             key = self.key
 
             _instance = loading._instance_processor(
+                query_entity,
                 self.mapper,
                 context,
                 result,
@@ -2177,7 +2287,14 @@ class JoinedLoader(AbstractRelationshipLoader):
             self.parent_property._get_strategy(
                 (("lazy", "select"),)
             ).create_row_processor(
-                context, path, loadopt, mapper, result, adapter, populators
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
             )
 
     def _create_collection_loader(self, context, key, _instance, populators):
@@ -2382,11 +2499,26 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
         return util.preloaded.ext_baked.bakery(size=50)
 
     def create_row_processor(
-        self, context, path, loadopt, mapper, result, adapter, populators
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
     ):
         if context.refresh_state:
             return self._immediateload_create_row_processor(
-                context, path, loadopt, mapper, result, adapter, populators
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
             )
 
         if not self.parent.class_manager[self.key].impl.supports_population:
@@ -2395,13 +2527,20 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
                 "population - eager loading cannot be applied." % self
             )
 
+        # a little dance here as the "path" is still something that only
+        # semi-tracks the exact series of things we are loading, still not
+        # telling us about with_polymorphic() and stuff like that when it's at
+        # the root.  the initial MapperEntity is more accurate for this case.
+        if len(path) == 1:
+            if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
+                return
+        elif not orm_util._entity_isa(path[-1], self.parent):
+            return
+
         selectin_path = (
             context.compile_state.current_path or orm_util.PathRegistry.root
         ) + path
 
-        if not orm_util._entity_isa(path[-1], self.parent):
-            return
-
         if loading.PostLoad.path_exists(
             context, selectin_path, self.parent_property
         ):
@@ -2427,7 +2566,6 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
                     return
             elif selectin_path_w_prop.contains_mapper(self.mapper):
                 return
-
         loading.PostLoad.callable_for_path(
             context,
             selectin_path,
@@ -2543,7 +2681,39 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
                 )
             )
 
-        orig_query = context.query
+        # a test which exercises what these comments talk about is
+        # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
+        #
+        # effective_entity above is given to us in terms of the cached
+        # statement, namely this one:
+        orig_query = context.compile_state.select_statement
+
+        # the actual statement that was requested is this one:
+        #  context_query = context.query
+        #
+        # that's not the cached one, however.  So while it is of the identical
+        # structure, if it has entities like AliasedInsp, which we get from
+        # aliased() or with_polymorphic(), the AliasedInsp will likely be a
+        # different object identity each time, and will not match up
+        # hashing-wise to the corresponding AliasedInsp that's in the
+        # cached query, meaning it won't match on paths and loader lookups
+        # and loaders like this one will be skipped if it is used in options.
+        #
+        # Now we want to transfer loader options from the parent query to the
+        # "selectinload" query we're about to run.   Which query do we transfer
+        # the options from?  We use the cached query, because the options in
+        # that query will be in terms of the effective entity we were just
+        # handed.
+        #
+        # But now the selectinload/ baked query we are running is *also*
+        # cached.  What if it's cached and running from some previous iteration
+        # of that AliasedInsp?  Well in that case it will also use the previous
+        # iteration of the loader options.   If the baked query expires and
+        # gets generated again, it will be handed the current effective_entity
+        # and the current _with_options, again in terms of whatever
+        # compile_state.select_statement happens to be right now, so the
+        # query will still be internally consistent and loader callables
+        # will be correctly invoked.
 
         q._add_lazyload_options(
             orig_query._with_options, path[self.parent_property]
index 85f4f85d1ee723760ea83a389dbf21389d8e0016..f7a97bfe5e9d50b1b355ce816f9c8b052d46c222 100644 (file)
@@ -1187,9 +1187,9 @@ class Bundle(ORMColumnsClauseRole, SupportsCloneAnnotations, InspectionAttr):
         return cloned
 
     def __clause_element__(self):
-        annotations = self._annotations.union(
-            {"bundle": self, "entity_namespace": self}
-        )
+        # ensure existing entity_namespace remains
+        annotations = {"bundle": self, "entity_namespace": self}
+        annotations.update(self._annotations)
         return expression.ClauseList(
             _literal_as_text_role=roles.ColumnsClauseRole,
             group=False,
@@ -1258,6 +1258,8 @@ class _ORMJoin(expression.Join):
 
     __visit_name__ = expression.Join.__visit_name__
 
+    inherit_cache = True
+
     def __init__(
         self,
         left,
index 78de80734923c66e280f7ba17a67fa233506c7e4..a25c1b0832d53bfd750b0928f56422eae1e9b79e 100644 (file)
@@ -100,6 +100,7 @@ def __go(lcls):
     from .elements import AnnotatedColumnElement
     from .elements import ClauseList  # noqa
     from .selectable import AnnotatedFromClause  # noqa
+    from .traversals import _preconfigure_traversals
 
     from . import base
     from . import coercions
@@ -122,6 +123,8 @@ def __go(lcls):
     _prepare_annotations(FromClause, AnnotatedFromClause)
     _prepare_annotations(ClauseList, Annotated)
 
+    _preconfigure_traversals(ClauseElement)
+
     _sa_util.preloaded.import_prefix("sqlalchemy.sql")
 
     from . import naming  # noqa
index 08ed121d306207c8014e339e2f8105f77d273937..8a0d6ec280ce656b823edba97ec01503d3b73129 100644 (file)
@@ -338,6 +338,15 @@ def _new_annotation_type(cls, base_cls):
         anno_cls._traverse_internals = list(cls._traverse_internals) + [
             ("_annotations", InternalTraversal.dp_annotations_key)
         ]
+    elif cls.__dict__.get("inherit_cache", False):
+        anno_cls._traverse_internals = list(cls._traverse_internals) + [
+            ("_annotations", InternalTraversal.dp_annotations_key)
+        ]
+
+    # some classes include this even if they have their own
+    # _traverse_internals, e.g. BindParameter; add it if present.
+    if cls.__dict__.get("inherit_cache", False):
+        anno_cls.inherit_cache = True
 
     anno_cls._is_column_operators = issubclass(cls, operators.ColumnOperators)
 
index 5dd3b519ac6bed1cdb60707e060e95d76a0fb20c..5f2ce8f14fcb0a43669b766cc1b7e15df9f3a4f0 100644 (file)
@@ -624,18 +624,13 @@ class Executable(Generative):
     _bind = None
     _with_options = ()
     _with_context_options = ()
-    _cache_enable = True
 
     _executable_traverse_internals = [
         ("_with_options", ExtendedInternalTraversal.dp_has_cache_key_list),
         ("_with_context_options", ExtendedInternalTraversal.dp_plain_obj),
-        ("_cache_enable", ExtendedInternalTraversal.dp_plain_obj),
+        ("_propagate_attrs", ExtendedInternalTraversal.dp_propagate_attrs),
     ]
 
-    @_generative
-    def _disable_caching(self):
-        self._cache_enable = HasCacheKey()
-
     @_generative
     def options(self, *options):
         """Apply options to this statement.
index 2519438d1b757b6182a57195a31a0b1bc07c508f..61178291ad2d066f458b463c35e4b002eaf58e72 100644 (file)
@@ -373,6 +373,8 @@ class Compiled(object):
 
     _cached_metadata = None
 
+    _result_columns = None
+
     schema_translate_map = None
 
     execution_options = util.immutabledict()
@@ -433,7 +435,6 @@ class Compiled(object):
         self,
         dialect,
         statement,
-        bind=None,
         schema_translate_map=None,
         render_schema_translate=False,
         compile_kwargs=util.immutabledict(),
@@ -463,7 +464,6 @@ class Compiled(object):
         """
 
         self.dialect = dialect
-        self.bind = bind
         self.preparer = self.dialect.identifier_preparer
         if schema_translate_map:
             self.schema_translate_map = schema_translate_map
@@ -527,24 +527,6 @@ class Compiled(object):
         """Return the bind params for this compiled object."""
         return self.construct_params()
 
-    def execute(self, *multiparams, **params):
-        """Execute this compiled object."""
-
-        e = self.bind
-        if e is None:
-            raise exc.UnboundExecutionError(
-                "This Compiled object is not bound to any Engine "
-                "or Connection.",
-                code="2afi",
-            )
-        return e._execute_compiled(self, multiparams, params)
-
-    def scalar(self, *multiparams, **params):
-        """Execute this compiled object and return the result's
-        scalar value."""
-
-        return self.execute(*multiparams, **params).scalar()
-
 
 class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)):
     """Produces DDL specification for TypeEngine objects."""
@@ -687,6 +669,13 @@ class SQLCompiler(Compiled):
 
     insert_prefetch = update_prefetch = ()
 
+    _cache_key_bind_match = None
+    """a mapping that will relate the BindParameter object we compile
+    to those that are part of the extracted collection of parameters
+    in the cache key, if we were given a cache key.
+
+    """
+
     def __init__(
         self,
         dialect,
@@ -717,6 +706,9 @@ class SQLCompiler(Compiled):
 
         self.cache_key = cache_key
 
+        if cache_key:
+            self._cache_key_bind_match = {b: b for b in cache_key[1]}
+
         # compile INSERT/UPDATE defaults/sequences inlined (no pre-
         # execute)
         self.inline = inline or getattr(statement, "_inline", False)
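The cache key handed to the compiler is a two-part value: element [0] is the hashable key itself, element [1] the collection of BindParameter objects extracted while the key was generated, which is what seeds the _cache_key_bind_match identity map above. A rough sketch using the private _generate_cache_key() API, for illustration only:

    from sqlalchemy import bindparam, column

    stmt = column("x") == bindparam("val", value=5)
    key, extracted = stmt._generate_cache_key()
    # extracted holds the BindParameter objects captured while the key
    # was generated; seed an identity map the way the compiler does
    ckbm = {b: b for b in extracted}
    print(len(ckbm))  # 1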
@@ -875,8 +867,9 @@ class SQLCompiler(Compiled):
                     replace_context=err,
                 )
 
+            ckbm = self._cache_key_bind_match
             resolved_extracted = {
-                b.key: extracted
+                ckbm[b]: extracted
                 for b, extracted in zip(orig_extracted, extracted_parameters)
             }
         else:
@@ -907,7 +900,7 @@ class SQLCompiler(Compiled):
                 else:
                     if resolved_extracted:
                         value_param = resolved_extracted.get(
-                            bindparam.key, bindparam
+                            bindparam, bindparam
                         )
                     else:
                         value_param = bindparam
@@ -936,9 +929,7 @@ class SQLCompiler(Compiled):
                         )
 
                 if resolved_extracted:
-                    value_param = resolved_extracted.get(
-                        bindparam.key, bindparam
-                    )
+                    value_param = resolved_extracted.get(bindparam, bindparam)
                 else:
                     value_param = bindparam
 
@@ -2021,6 +2012,19 @@ class SQLCompiler(Compiled):
                     )
 
         self.binds[bindparam.key] = self.binds[name] = bindparam
+
+        # if we are given a cache key that we're going to match against,
+        # relate the bindparam here to one that is most likely present
+        # in the "extracted params" portion of the cache key.  this is used
+        # to set up a positional mapping that is used to determine the
+        # correct parameters for a subsequent use of this compiled with
+        # a different set of parameter values.   here, we accommodate for
+        # parameters that may have been cloned both before and after the cache
+        # key was been generated.
+        ckbm = self._cache_key_bind_match
+        if ckbm:
+            ckbm.update({bp: bindparam for bp in bindparam._cloned_set})
+
         if bindparam.isoutparam:
             self.has_out_parameters = True
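Because a bound parameter may be cloned between the time the cache key is generated and the time the statement is compiled, the map is updated with every member of _cloned_set, so a lookup keyed by either generation of the parameter lands on the one actually compiled. A stand-alone illustration of that pattern (toy classes, not SQLAlchemy API):

    class Bind(object):
        # toy stand-in for BindParameter's cloning behavior
        def __init__(self, key, cloned_set=None):
            self._cloned_set = set(cloned_set or ()) | {self}
            self.key = key

        def clone(self):
            return Bind(self.key, self._cloned_set)

    original = Bind("x")                  # captured in the cache key
    ckbm = {b: b for b in [original]}     # seeded as in __init__ above
    later = original.clone()              # what the compiler now sees
    ckbm.update({bp: later for bp in later._cloned_set})
    assert ckbm[original] is later        # the old identity resolves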
 
index 5690306516da366caafbaee372aba784193fc732..d3730b12457e286af633731fd6a8b64cf1d572b0 100644 (file)
@@ -28,6 +28,9 @@ class _DDLCompiles(ClauseElement):
 
         return dialect.ddl_compiler(dialect, self, **kw)
 
+    def _compile_w_cache(self, *arg, **kw):
+        raise NotImplementedError()
+
 
 class DDLElement(roles.DDLRole, Executable, _DDLCompiles):
     """Base class for DDL expression constructs.
index a82641d77c9b8c63491af6c890a22560fc4b358e..50b2a935a74c51ebe804d52cee295a2865b7de93 100644 (file)
@@ -641,7 +641,7 @@ class ValuesBase(UpdateBase):
         if self._preserve_parameter_order:
             arg = [
                 (
-                    k,
+                    coercions.expect(roles.DMLColumnRole, k),
                     coercions.expect(
                         roles.ExpressionElementRole,
                         v,
@@ -654,7 +654,7 @@ class ValuesBase(UpdateBase):
             self._ordered_values = arg
         else:
             arg = {
-                k: coercions.expect(
+                coercions.expect(roles.DMLColumnRole, k): coercions.expect(
                     roles.ExpressionElementRole,
                     v,
                     type_=NullType(),
@@ -772,6 +772,7 @@ class Insert(ValuesBase):
         ]
         + HasPrefixes._has_prefixes_traverse_internals
         + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
     )
 
     @ValuesBase._constructor_20_deprecations(
@@ -997,6 +998,7 @@ class Update(DMLWhereBase, ValuesBase):
         ]
         + HasPrefixes._has_prefixes_traverse_internals
         + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
     )
 
     @ValuesBase._constructor_20_deprecations(
@@ -1187,7 +1189,7 @@ class Update(DMLWhereBase, ValuesBase):
             )
         arg = [
             (
-                k,
+                coercions.expect(roles.DMLColumnRole, k),
                 coercions.expect(
                     roles.ExpressionElementRole,
                     v,
@@ -1238,6 +1240,7 @@ class Delete(DMLWhereBase, UpdateBase):
         ]
         + HasPrefixes._has_prefixes_traverse_internals
         + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
     )
 
     @ValuesBase._constructor_20_deprecations(
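The new coercions.expect(roles.DMLColumnRole, k) calls exist because values() and ordered_values() keys are not necessarily strings or Column objects: any object providing __clause_element__() is accepted, and it must be resolved up front or the statement cannot produce a cache key. A hedged sketch of such a key (ColumnProxy is an illustrative name):

    from sqlalchemy import Column, Integer, MetaData, Table

    t = Table("t", MetaData(), Column("x", Integer))

    class ColumnProxy(object):
        # an arbitrary object resolvable to a column; the coercion
        # unwraps it so the key itself never enters the cache key
        def __init__(self, col):
            self._col = col

        def __clause_element__(self):
            return self._col

    stmt = t.update().values({ColumnProxy(t.c.x): 7})
    print(stmt)  # roughly: UPDATE t SET x=:x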
index 8e1b623a73f46918bd9ec2abbefba1e16a1c984d..60c816ee6d1dea1fbb75d9b7647002f996464259 100644 (file)
@@ -381,6 +381,7 @@ class ClauseElement(
         try:
             traverse_internals = self._traverse_internals
         except AttributeError:
+            # user-defined classes may not have a _traverse_internals
             return
 
         for attrname, obj, meth in _copy_internals.run_generated_dispatch(
@@ -410,6 +411,7 @@ class ClauseElement(
         try:
             traverse_internals = self._traverse_internals
         except AttributeError:
+            # user-defined classes may not have a _traverse_internals
             return []
 
         return itertools.chain.from_iterable(
@@ -516,10 +518,62 @@ class ClauseElement(
                 dialect = bind.dialect
             elif self.bind:
                 dialect = self.bind.dialect
-                bind = self.bind
             else:
                 dialect = default.StrCompileDialect()
-        return self._compiler(dialect, bind=bind, **kw)
+
+        return self._compiler(dialect, **kw)
+
+    def _compile_w_cache(
+        self,
+        dialect,
+        compiled_cache=None,
+        column_keys=None,
+        inline=False,
+        schema_translate_map=None,
+        **kw
+    ):
+        if compiled_cache is not None:
+            elem_cache_key = self._generate_cache_key()
+        else:
+            elem_cache_key = None
+
+        cache_hit = False
+
+        if elem_cache_key:
+            cache_key, extracted_params = elem_cache_key
+            key = (
+                dialect,
+                cache_key,
+                tuple(column_keys),
+                bool(schema_translate_map),
+                inline,
+            )
+            compiled_sql = compiled_cache.get(key)
+
+            if compiled_sql is None:
+                compiled_sql = self._compiler(
+                    dialect,
+                    cache_key=elem_cache_key,
+                    column_keys=column_keys,
+                    inline=inline,
+                    schema_translate_map=schema_translate_map,
+                    **kw
+                )
+                compiled_cache[key] = compiled_sql
+            else:
+                cache_hit = True
+        else:
+            extracted_params = None
+            compiled_sql = self._compiler(
+                dialect,
+                cache_key=elem_cache_key,
+                column_keys=column_keys,
+                inline=inline,
+                schema_translate_map=schema_translate_map,
+                **kw
+            )
+
+        return compiled_sql, extracted_params, cache_hit
 
     def _compiler(self, dialect, **kw):
         """Return a compiler appropriate for this ClauseElement, given a
@@ -1035,6 +1089,10 @@ class BindParameter(roles.InElementRole, ColumnElement):
     _is_bind_parameter = True
     _key_is_anon = False
 
+    # BindParameter implements its own _gen_cache_key() method; however,
+    # we check subclasses for this flag, else no cache key is generated
+    inherit_cache = True
+
     def __init__(
         self,
         key,
@@ -1396,6 +1454,13 @@ class BindParameter(roles.InElementRole, ColumnElement):
         return c
 
     def _gen_cache_key(self, anon_map, bindparams):
+        _gen_cache_ok = self.__class__.__dict__.get("inherit_cache", False)
+
+        if not _gen_cache_ok:
+            if anon_map is not None:
+                anon_map[NO_CACHE] = True
+            return None
+
         idself = id(self)
         if idself in anon_map:
             return (anon_map[idself], self.__class__)
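The _gen_cache_ok check reads the flag through self.__class__.__dict__ rather than normal attribute lookup on purpose: a subclass must set inherit_cache on itself, and merely inheriting the True value from BindParameter does not count as opting in. In plain Python terms:

    class Base(object):
        inherit_cache = True

    class Sub(Base):
        pass

    # ordinary attribute access sees the inherited flag ...
    assert Sub.inherit_cache is True
    # ... but the class's own namespace does not, so Sub is treated
    # as not having opted in to caching
    assert Sub.__dict__.get("inherit_cache", False) is False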
@@ -2082,6 +2147,7 @@ class ClauseList(
     roles.InElementRole,
     roles.OrderByRole,
     roles.ColumnsClauseRole,
+    roles.DMLColumnRole,
     ClauseElement,
 ):
     """Describe a list of clauses, separated by an operator.
@@ -2174,6 +2240,7 @@ class ClauseList(
 
 class BooleanClauseList(ClauseList, ColumnElement):
     __visit_name__ = "clauselist"
+    inherit_cache = True
 
     _tuple_values = False
 
@@ -3428,6 +3495,8 @@ class CollectionAggregate(UnaryExpression):
 
 
 class AsBoolean(WrapsColumnExpression, UnaryExpression):
+    inherit_cache = True
+
     def __init__(self, element, operator, negate):
         self.element = element
         self.type = type_api.BOOLEANTYPE
@@ -3474,6 +3543,7 @@ class BinaryExpression(ColumnElement):
         ("operator", InternalTraversal.dp_operator),
         ("negate", InternalTraversal.dp_operator),
         ("modifiers", InternalTraversal.dp_plain_dict),
+        ("type", InternalTraversal.dp_type,),  # affects JSON CAST operators
     ]
 
     _is_implicitly_boolean = True
@@ -3482,41 +3552,6 @@ class BinaryExpression(ColumnElement):
 
     """
 
-    def _gen_cache_key(self, anon_map, bindparams):
-        # inlined for performance
-
-        idself = id(self)
-
-        if idself in anon_map:
-            return (anon_map[idself], self.__class__)
-        else:
-            # inline of
-            # id_ = anon_map[idself]
-            anon_map[idself] = id_ = str(anon_map.index)
-            anon_map.index += 1
-
-        if self._cache_key_traversal is NO_CACHE:
-            anon_map[NO_CACHE] = True
-            return None
-
-        result = (id_, self.__class__)
-
-        return result + (
-            ("left", self.left._gen_cache_key(anon_map, bindparams)),
-            ("right", self.right._gen_cache_key(anon_map, bindparams)),
-            ("operator", self.operator),
-            ("negate", self.negate),
-            (
-                "modifiers",
-                tuple(
-                    (key, self.modifiers[key])
-                    for key in sorted(self.modifiers)
-                )
-                if self.modifiers
-                else None,
-            ),
-        )
-
     def __init__(
         self, left, right, operator, type_=None, negate=None, modifiers=None
     ):
@@ -3587,15 +3622,30 @@ class Slice(ColumnElement):
     __visit_name__ = "slice"
 
     _traverse_internals = [
-        ("start", InternalTraversal.dp_plain_obj),
-        ("stop", InternalTraversal.dp_plain_obj),
-        ("step", InternalTraversal.dp_plain_obj),
+        ("start", InternalTraversal.dp_clauseelement),
+        ("stop", InternalTraversal.dp_clauseelement),
+        ("step", InternalTraversal.dp_clauseelement),
     ]
 
-    def __init__(self, start, stop, step):
-        self.start = start
-        self.stop = stop
-        self.step = step
+    def __init__(self, start, stop, step, _name=None):
+        self.start = coercions.expect(
+            roles.ExpressionElementRole,
+            start,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
+        self.stop = coercions.expect(
+            roles.ExpressionElementRole,
+            stop,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
+        self.step = coercions.expect(
+            roles.ExpressionElementRole,
+            step,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
         self.type = type_api.NULLTYPE
 
     def self_group(self, against=None):
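Earlier in this file, the new ClauseElement._compile_w_cache() method implements the core lookup: the dialect, the statement's cache key, and the compile options together form the dictionary key; a miss compiles and stores, and the caller receives the extracted parameters plus a cache-hit flag. A minimal stand-alone sketch of the same flow, with compile_fresh standing in for self._compiler():

    def compile_w_cache(stmt_key, extracted, dialect, cache, column_keys,
                        compile_fresh):
        # stmt_key/extracted correspond to _generate_cache_key();
        # compile_fresh stands in for self._compiler() (illustrative)
        if stmt_key is None:
            return compile_fresh(), None, False
        key = (dialect, stmt_key, tuple(column_keys))
        compiled = cache.get(key)
        if compiled is None:
            cache[key] = compiled = compile_fresh()
            return compiled, extracted, False
        return compiled, extracted, True

    def make():
        return object()  # pretend compilation

    cache = {}
    c1, _, hit1 = compile_w_cache("KEY", [], "sqlite", cache, ["x"], make)
    c2, _, hit2 = compile_w_cache("KEY", [], "sqlite", cache, ["x"], make)
    assert c1 is c2 and not hit1 and hit2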
index 6b1172eba49cc68357bc7d424c0a895619e09bb9..7b723f371a26ca791c185758dd5ecaee398fc553 100644 (file)
@@ -744,6 +744,7 @@ class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
 
     coerce_arguments = True
     _register = False
+    inherit_cache = True
 
     def __init__(self, *args, **kwargs):
         parsed_args = kwargs.pop("_parsed_args", None)
@@ -808,6 +809,8 @@ class next_value(GenericFunction):
 
 
 class AnsiFunction(GenericFunction):
+    inherit_cache = True
+
     def __init__(self, *args, **kwargs):
         GenericFunction.__init__(self, *args, **kwargs)
 
@@ -815,6 +818,8 @@ class AnsiFunction(GenericFunction):
 class ReturnTypeFromArgs(GenericFunction):
     """Define a function whose return type is the same as its arguments."""
 
+    inherit_cache = True
+
     def __init__(self, *args, **kwargs):
         args = [
             coercions.expect(
@@ -832,30 +837,34 @@ class ReturnTypeFromArgs(GenericFunction):
 
 class coalesce(ReturnTypeFromArgs):
     _has_args = True
+    inherit_cache = True
 
 
 class max(ReturnTypeFromArgs):  # noqa
-    pass
+    inherit_cache = True
 
 
 class min(ReturnTypeFromArgs):  # noqa
-    pass
+    inherit_cache = True
 
 
 class sum(ReturnTypeFromArgs):  # noqa
-    pass
+    inherit_cache = True
 
 
 class now(GenericFunction):  # noqa
     type = sqltypes.DateTime
+    inherit_cache = True
 
 
 class concat(GenericFunction):
     type = sqltypes.String
+    inherit_cache = True
 
 
 class char_length(GenericFunction):
     type = sqltypes.Integer
+    inherit_cache = True
 
     def __init__(self, arg, **kwargs):
         GenericFunction.__init__(self, arg, **kwargs)
@@ -863,6 +872,7 @@ class char_length(GenericFunction):
 
 class random(GenericFunction):
     _has_args = True
+    inherit_cache = True
 
 
 class count(GenericFunction):
@@ -887,6 +897,7 @@ class count(GenericFunction):
 
     """
     type = sqltypes.Integer
+    inherit_cache = True
 
     def __init__(self, expression=None, **kwargs):
         if expression is None:
@@ -896,38 +907,47 @@ class count(GenericFunction):
 
 class current_date(AnsiFunction):
     type = sqltypes.Date
+    inherit_cache = True
 
 
 class current_time(AnsiFunction):
     type = sqltypes.Time
+    inherit_cache = True
 
 
 class current_timestamp(AnsiFunction):
     type = sqltypes.DateTime
+    inherit_cache = True
 
 
 class current_user(AnsiFunction):
     type = sqltypes.String
+    inherit_cache = True
 
 
 class localtime(AnsiFunction):
     type = sqltypes.DateTime
+    inherit_cache = True
 
 
 class localtimestamp(AnsiFunction):
     type = sqltypes.DateTime
+    inherit_cache = True
 
 
 class session_user(AnsiFunction):
     type = sqltypes.String
+    inherit_cache = True
 
 
 class sysdate(AnsiFunction):
     type = sqltypes.DateTime
+    inherit_cache = True
 
 
 class user(AnsiFunction):
     type = sqltypes.String
+    inherit_cache = True
 
 
 class array_agg(GenericFunction):
@@ -951,6 +971,7 @@ class array_agg(GenericFunction):
     """
 
     type = sqltypes.ARRAY
+    inherit_cache = True
 
     def __init__(self, *args, **kwargs):
         args = [
@@ -978,6 +999,7 @@ class OrderedSetAgg(GenericFunction):
     :meth:`.FunctionElement.within_group` method."""
 
     array_for_multi_clause = False
+    inherit_cache = True
 
     def within_group_type(self, within_group):
         func_clauses = self.clause_expr.element
@@ -1000,6 +1022,8 @@ class mode(OrderedSetAgg):
 
     """
 
+    inherit_cache = True
+
 
 class percentile_cont(OrderedSetAgg):
     """implement the ``percentile_cont`` ordered-set aggregate function.
@@ -1016,6 +1040,7 @@ class percentile_cont(OrderedSetAgg):
     """
 
     array_for_multi_clause = True
+    inherit_cache = True
 
 
 class percentile_disc(OrderedSetAgg):
@@ -1033,6 +1058,7 @@ class percentile_disc(OrderedSetAgg):
     """
 
     array_for_multi_clause = True
+    inherit_cache = True
 
 
 class rank(GenericFunction):
@@ -1048,6 +1074,7 @@ class rank(GenericFunction):
     """
 
     type = sqltypes.Integer()
+    inherit_cache = True
 
 
 class dense_rank(GenericFunction):
@@ -1063,6 +1090,7 @@ class dense_rank(GenericFunction):
     """
 
     type = sqltypes.Integer()
+    inherit_cache = True
 
 
 class percent_rank(GenericFunction):
@@ -1078,6 +1106,7 @@ class percent_rank(GenericFunction):
     """
 
     type = sqltypes.Numeric()
+    inherit_cache = True
 
 
 class cume_dist(GenericFunction):
@@ -1093,6 +1122,7 @@ class cume_dist(GenericFunction):
     """
 
     type = sqltypes.Numeric()
+    inherit_cache = True
 
 
 class cube(GenericFunction):
@@ -1109,6 +1139,7 @@ class cube(GenericFunction):
 
     """
     _has_args = True
+    inherit_cache = True
 
 
 class rollup(GenericFunction):
@@ -1125,6 +1156,7 @@ class rollup(GenericFunction):
 
     """
     _has_args = True
+    inherit_cache = True
 
 
 class grouping_sets(GenericFunction):
@@ -1158,3 +1190,4 @@ class grouping_sets(GenericFunction):
 
     """
     _has_args = True
+    inherit_cache = True
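The same flag applies to user-defined functions built on the GenericFunction extension point, following the pattern of the built-ins above. A brief sketch (array_length is an illustrative name, not a shipped function):

    from sqlalchemy import Integer, column, func
    from sqlalchemy.sql import functions

    class array_length(functions.GenericFunction):
        type = Integer()
        # opt in to the parent class's cache key generation, as the
        # built-in functions above now do
        inherit_cache = True

    print(func.array_length(column("a")))  # array_length(a)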
index ee411174cd9b6a0471821281cdfd900f5297951a..29ca81d26d27691334cafc3c9501510470a251ce 100644 (file)
@@ -1013,6 +1013,8 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause):
 
     __visit_name__ = "column"
 
+    inherit_cache = True
+
     def __init__(self, *args, **kwargs):
         r"""
         Construct a new ``Column`` object.
index a95fc561ae1bbac34afab4baafbe5093c41181cd..54f293967a1af5c6fc954376335fc1d6d55da1ee 100644 (file)
@@ -61,6 +61,8 @@ if util.TYPE_CHECKING:
 
 
 class _OffsetLimitParam(BindParameter):
+    inherit_cache = True
+
     @property
     def _limit_offset_value(self):
         return self.effective_value
@@ -1426,6 +1428,8 @@ class Alias(roles.DMLTableRole, AliasedReturnsRows):
 
     __visit_name__ = "alias"
 
+    inherit_cache = True
+
     @classmethod
     def _factory(cls, selectable, name=None, flat=False):
         """Return an :class:`_expression.Alias` object.
@@ -1500,6 +1504,8 @@ class Lateral(AliasedReturnsRows):
     __visit_name__ = "lateral"
     _is_lateral = True
 
+    inherit_cache = True
+
     @classmethod
     def _factory(cls, selectable, name=None):
         """Return a :class:`_expression.Lateral` object.
@@ -1626,7 +1632,7 @@ class CTE(Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows):
         AliasedReturnsRows._traverse_internals
         + [
             ("_cte_alias", InternalTraversal.dp_clauseelement),
-            ("_restates", InternalTraversal.dp_clauseelement_unordered_set),
+            ("_restates", InternalTraversal.dp_clauseelement_list),
             ("recursive", InternalTraversal.dp_boolean),
         ]
         + HasPrefixes._has_prefixes_traverse_internals
@@ -1651,7 +1657,7 @@ class CTE(Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows):
         name=None,
         recursive=False,
         _cte_alias=None,
-        _restates=frozenset(),
+        _restates=(),
         _prefixes=None,
         _suffixes=None,
     ):
@@ -1692,7 +1698,7 @@ class CTE(Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows):
             self.element.union(other),
             name=self.name,
             recursive=self.recursive,
-            _restates=self._restates.union([self]),
+            _restates=self._restates + (self,),
             _prefixes=self._prefixes,
             _suffixes=self._suffixes,
         )
@@ -1702,7 +1708,7 @@ class CTE(Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows):
             self.element.union_all(other),
             name=self.name,
             recursive=self.recursive,
-            _restates=self._restates.union([self]),
+            _restates=self._restates + (self,),
             _prefixes=self._prefixes,
             _suffixes=self._suffixes,
         )
@@ -1918,6 +1924,8 @@ class Subquery(AliasedReturnsRows):
 
     _is_subquery = True
 
+    inherit_cache = True
+
     @classmethod
     def _factory(cls, selectable, name=None):
         """Return a :class:`.Subquery` object.
@@ -3783,15 +3791,15 @@ class Select(
             ("_group_by_clauses", InternalTraversal.dp_clauseelement_list,),
             ("_setup_joins", InternalTraversal.dp_setup_join_tuple,),
             ("_legacy_setup_joins", InternalTraversal.dp_setup_join_tuple,),
-            ("_correlate", InternalTraversal.dp_clauseelement_unordered_set),
-            (
-                "_correlate_except",
-                InternalTraversal.dp_clauseelement_unordered_set,
-            ),
+            ("_correlate", InternalTraversal.dp_clauseelement_list),
+            ("_correlate_except", InternalTraversal.dp_clauseelement_list,),
+            ("_limit_clause", InternalTraversal.dp_clauseelement),
+            ("_offset_clause", InternalTraversal.dp_clauseelement),
             ("_for_update_arg", InternalTraversal.dp_clauseelement),
             ("_distinct", InternalTraversal.dp_boolean),
             ("_distinct_on", InternalTraversal.dp_clauseelement_list),
             ("_label_style", InternalTraversal.dp_plain_obj),
+            ("_is_future", InternalTraversal.dp_boolean),
         ]
         + HasPrefixes._has_prefixes_traverse_internals
         + HasSuffixes._has_suffixes_traverse_internals
@@ -4522,7 +4530,7 @@ class Select(
         if fromclauses and fromclauses[0] is None:
             self._correlate = ()
         else:
-            self._correlate = set(self._correlate).union(
+            self._correlate = self._correlate + tuple(
                 coercions.expect(roles.FromClauseRole, f) for f in fromclauses
             )
 
@@ -4560,7 +4568,7 @@ class Select(
         if fromclauses and fromclauses[0] is None:
             self._correlate_except = ()
         else:
-            self._correlate_except = set(self._correlate_except or ()).union(
+            self._correlate_except = (self._correlate_except or ()) + tuple(
                 coercions.expect(roles.FromClauseRole, f) for f in fromclauses
             )
 
@@ -4866,6 +4874,7 @@ class ScalarSelect(roles.InElementRole, Generative, Grouping):
     _from_objects = []
     _is_from_container = True
     _is_implicitly_boolean = False
+    inherit_cache = True
 
     def __init__(self, element):
         self.element = element
@@ -4899,6 +4908,7 @@ class Exists(UnaryExpression):
     """
 
     _from_objects = []
+    inherit_cache = True
 
     def __init__(self, *args, **kwargs):
         """Construct a new :class:`_expression.Exists` against an existing
index 732b775f629a7b4317e7b98faccf2dc66c25651c..9cd9d50582b45384dfccb182528e1ecf3d05ffc1 100644 (file)
@@ -2616,26 +2616,10 @@ class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
                 return_type = self.type
                 if self.type.zero_indexes:
                     index = slice(index.start + 1, index.stop + 1, index.step)
-                index = Slice(
-                    coercions.expect(
-                        roles.ExpressionElementRole,
-                        index.start,
-                        name=self.expr.key,
-                        type_=type_api.INTEGERTYPE,
-                    ),
-                    coercions.expect(
-                        roles.ExpressionElementRole,
-                        index.stop,
-                        name=self.expr.key,
-                        type_=type_api.INTEGERTYPE,
-                    ),
-                    coercions.expect(
-                        roles.ExpressionElementRole,
-                        index.step,
-                        name=self.expr.key,
-                        type_=type_api.INTEGERTYPE,
-                    ),
+                slice_ = Slice(
+                    index.start, index.stop, index.step, _name=self.expr.key
                 )
+                return operators.getitem, slice_, return_type
             else:
                 if self.type.zero_indexes:
                     index += 1
@@ -2647,7 +2631,7 @@ class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
                         self.type.__class__, **adapt_kw
                     )
 
-            return operators.getitem, index, return_type
+                return operators.getitem, index, return_type
 
         def contains(self, *arg, **kw):
             raise NotImplementedError(
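With Slice now performing its own coercions, the ARRAY comparator above shrinks to a single constructor call, and slice bounds become ordinary bound parameters that participate in the cache key. A hedged usage sketch against the PostgreSQL dialect, using 1.4-style select():

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import postgresql

    t = Table("t", MetaData(), Column("arr", postgresql.ARRAY(Integer)))
    stmt = select(t.c.arr[2:5])
    # renders roughly:
    #   SELECT t.arr[%(arr_1)s:%(arr_2)s] AS anon_1 FROM t
    print(stmt.compile(dialect=postgresql.dialect()))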
index 68281f33d1048915dcdc85256c65ef2aa55cc830..ed0bfa27a27fb8a5686b941963e2c579ae0cfeaa 100644 (file)
@@ -19,6 +19,7 @@ NO_CACHE = util.symbol("no_cache")
 CACHE_IN_PLACE = util.symbol("cache_in_place")
 CALL_GEN_CACHE_KEY = util.symbol("call_gen_cache_key")
 STATIC_CACHE_KEY = util.symbol("static_cache_key")
+PROPAGATE_ATTRS = util.symbol("propagate_attrs")
 ANON_NAME = util.symbol("anon_name")
 
 
@@ -31,10 +32,74 @@ def compare(obj1, obj2, **kw):
     return strategy.compare(obj1, obj2, **kw)
 
 
+def _preconfigure_traversals(target_hierarchy):
+
+    stack = [target_hierarchy]
+    while stack:
+        cls = stack.pop()
+        stack.extend(cls.__subclasses__())
+
+        if hasattr(cls, "_traverse_internals"):
+            cls._generate_cache_attrs()
+            _copy_internals.generate_dispatch(
+                cls,
+                cls._traverse_internals,
+                "_generated_copy_internals_traversal",
+            )
+            _get_children.generate_dispatch(
+                cls,
+                cls._traverse_internals,
+                "_generated_get_children_traversal",
+            )
+
+
 class HasCacheKey(object):
     _cache_key_traversal = NO_CACHE
     __slots__ = ()
 
+    @classmethod
+    def _generate_cache_attrs(cls):
+        """generate cache key dispatcher for a new class.
+
+        This sets the _generated_cache_key_traversal attribute when called,
+        so it should only be called once per class.
+
+        """
+        inherit = cls.__dict__.get("inherit_cache", False)
+
+        if inherit:
+            _cache_key_traversal = getattr(cls, "_cache_key_traversal", None)
+            if _cache_key_traversal is None:
+                try:
+                    _cache_key_traversal = cls._traverse_internals
+                except AttributeError:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    return NO_CACHE
+
+            # TODO: wouldn't we instead get this from our superclass?
+            # also, our superclass may not have this yet, but in any case,
+            # we'd generate for the superclass that has it.   this is a little
+            # more complicated, so for the moment this is a little less
+            # efficient on startup but simpler.
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls, _cache_key_traversal, "_generated_cache_key_traversal"
+            )
+        else:
+            _cache_key_traversal = cls.__dict__.get(
+                "_cache_key_traversal", None
+            )
+            if _cache_key_traversal is None:
+                _cache_key_traversal = cls.__dict__.get(
+                    "_traverse_internals", None
+                )
+                if _cache_key_traversal is None:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    return NO_CACHE
+
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls, _cache_key_traversal, "_generated_cache_key_traversal"
+            )
+
     @util.preload_module("sqlalchemy.sql.elements")
     def _gen_cache_key(self, anon_map, bindparams):
         """return an optional cache key.
@@ -72,14 +137,18 @@ class HasCacheKey(object):
         else:
             id_ = None
 
-        _cache_key_traversal = self._cache_key_traversal
-        if _cache_key_traversal is None:
-            try:
-                _cache_key_traversal = self._traverse_internals
-            except AttributeError:
-                _cache_key_traversal = NO_CACHE
+        try:
+            dispatcher = self.__class__.__dict__[
+                "_generated_cache_key_traversal"
+            ]
+        except KeyError:
+            # most of the dispatchers are generated up front
+            # in sqlalchemy/sql/__init__.py ->
+            # traversals.py -> _preconfigure_traversals().
+            # this block will generate any remaining dispatchers.
+            dispatcher = self.__class__._generate_cache_attrs()
 
-        if _cache_key_traversal is NO_CACHE:
+        if dispatcher is NO_CACHE:
             if anon_map is not None:
                 anon_map[NO_CACHE] = True
             return None
@@ -87,19 +156,13 @@ class HasCacheKey(object):
         result = (id_, self.__class__)
 
         # inline of _cache_key_traversal_visitor.run_generated_dispatch()
-        try:
-            dispatcher = self.__class__.__dict__[
-                "_generated_cache_key_traversal"
-            ]
-        except KeyError:
-            dispatcher = _cache_key_traversal_visitor.generate_dispatch(
-                self, _cache_key_traversal, "_generated_cache_key_traversal"
-            )
 
         for attrname, obj, meth in dispatcher(
             self, _cache_key_traversal_visitor
         ):
             if obj is not None:
+                # TODO: see if C code can help here as Python lacks an
+                # efficient switch construct
                 if meth is CACHE_IN_PLACE:
                     # cache in place is always going to be a Python
                     # tuple, dict, list, etc. so we can do a boolean check
@@ -116,6 +179,15 @@ class HasCacheKey(object):
                         attrname,
                         obj._gen_cache_key(anon_map, bindparams),
                     )
+                elif meth is PROPAGATE_ATTRS:
+                    if obj:
+                        result += (
+                            attrname,
+                            obj["compile_state_plugin"],
+                            obj["plugin_subject"]._gen_cache_key(
+                                anon_map, bindparams
+                            ),
+                        )
                 elif meth is InternalTraversal.dp_annotations_key:
                     # obj is here is the _annotations dict.   however,
                     # we want to use the memoized cache key version of it.
@@ -332,6 +404,8 @@ class _CacheKey(ExtendedInternalTraversal):
     visit_type = STATIC_CACHE_KEY
     visit_anon_name = ANON_NAME
 
+    visit_propagate_attrs = PROPAGATE_ATTRS
+
     def visit_inspectable(self, attrname, obj, parent, anon_map, bindparams):
         return (attrname, inspect(obj)._gen_cache_key(anon_map, bindparams))
 
@@ -445,10 +519,16 @@ class _CacheKey(ExtendedInternalTraversal):
     def visit_setup_join_tuple(
         self, attrname, obj, parent, anon_map, bindparams
     ):
+        is_legacy = "legacy" in attrname
+
         return tuple(
             (
-                target._gen_cache_key(anon_map, bindparams),
-                onclause._gen_cache_key(anon_map, bindparams)
+                target
+                if is_legacy and isinstance(target, str)
+                else target._gen_cache_key(anon_map, bindparams),
+                onclause
+                if is_legacy and isinstance(onclause, str)
+                else onclause._gen_cache_key(anon_map, bindparams)
                 if onclause is not None
                 else None,
                 from_._gen_cache_key(anon_map, bindparams)
@@ -711,6 +791,11 @@ class _CopyInternals(InternalTraversal):
             for sequence in element
         ]
 
+    def visit_propagate_attrs(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return element
+
 
 _copy_internals = _CopyInternals()
 
@@ -782,6 +867,9 @@ class _GetChildren(InternalTraversal):
     def visit_dml_multi_values(self, element, **kw):
         return ()
 
+    def visit_propagate_attrs(self, element, **kw):
+        return ()
+
 
 _get_children = _GetChildren()
 
@@ -916,6 +1004,13 @@ class TraversalComparatorStrategy(InternalTraversal, util.MemoizedSlots):
         ):
             return COMPARE_FAILED
 
+    def visit_propagate_attrs(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return self.compare_inner(
+            left.get("plugin_subject", None), right.get("plugin_subject", None)
+        )
+
     def visit_has_cache_key_list(
         self, attrname, left_parent, left, right_parent, right, **kw
     ):
index ccda21e1120328bed585c6a31664581a7e5c7c60..fe3634baddd61017efb0adffa492d388531b8fbe 100644 (file)
@@ -555,7 +555,12 @@ class TypeEngine(Traversible):
     def _static_cache_key(self):
         names = util.get_cls_kwargs(self.__class__)
         return (self.__class__,) + tuple(
-            (k, self.__dict__[k])
+            (
+                k,
+                self.__dict__[k]._static_cache_key
+                if isinstance(self.__dict__[k], TypeEngine)
+                else self.__dict__[k],
+            )
             for k in names
             if k in self.__dict__ and not k.startswith("_")
         )
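The recursion added above covers types that compose other types, where an attribute value is itself a TypeEngine and must contribute its own cache key rather than a raw instance compared by identity. An illustration using the private attribute (shown for demonstration only):

    from sqlalchemy import ARRAY, Integer

    # item_type is itself a TypeEngine; its _static_cache_key is now
    # embedded in the parent type's key
    print(ARRAY(Integer())._static_cache_key)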
index 5de68f504fc779fb9f4abe72592bc8ef9c0dc40f..904702003035421f8a5429be76924da718c5d9e0 100644 (file)
@@ -217,18 +217,23 @@ class InternalTraversal(util.with_metaclass(_InternalTraversalType, object)):
         try:
             dispatcher = target.__class__.__dict__[generate_dispatcher_name]
         except KeyError:
+            # most of the dispatchers are generated up front
+            # in sqlalchemy/sql/__init__.py ->
+            # traversals.py -> _preconfigure_traversals().
+            # this block will generate any remaining dispatchers.
             dispatcher = self.generate_dispatch(
-                target, internal_dispatch, generate_dispatcher_name
+                target.__class__, internal_dispatch, generate_dispatcher_name
             )
         return dispatcher(target, self)
 
     def generate_dispatch(
-        self, target, internal_dispatch, generate_dispatcher_name
+        self, target_cls, internal_dispatch, generate_dispatcher_name
     ):
         dispatcher = _generate_dispatcher(
             self, internal_dispatch, generate_dispatcher_name
         )
-        setattr(target.__class__, generate_dispatcher_name, dispatcher)
+        # assert isinstance(target_cls, type)
+        setattr(target_cls, generate_dispatcher_name, dispatcher)
         return dispatcher
 
     dp_has_cache_key = symbol("HC")
@@ -263,10 +268,6 @@ class InternalTraversal(util.with_metaclass(_InternalTraversalType, object)):
 
     """
 
-    dp_clauseelement_unordered_set = symbol("CU")
-    """Visit an unordered set of :class:`_expression.ClauseElement`
-    objects. """
-
     dp_fromclause_ordered_set = symbol("CO")
     """Visit an ordered set of :class:`_expression.FromClause` objects. """
 
@@ -414,6 +415,10 @@ class InternalTraversal(util.with_metaclass(_InternalTraversalType, object)):
 
     """
 
+    dp_propagate_attrs = symbol("PA")
+    """Visit the propagate attrs dict.   this hardcodes to the particular
+    elements we care about right now."""
+
 
 class ExtendedInternalTraversal(InternalTraversal):
     """defines additional symbols that are useful in caching applications.
index 7988b4ec9c219bba149dae959df1e2461da3bf4d..48cbb4694c03fb4e75a9f6830276c10afb47c722 100644 (file)
@@ -97,13 +97,13 @@ class CompiledSQL(SQLMatchRule):
         else:
             map_ = None
 
-        if isinstance(context.compiled.statement, _DDLCompiles):
+        if isinstance(execute_observed.clauseelement, _DDLCompiles):
 
-            compiled = context.compiled.statement.compile(
+            compiled = execute_observed.clauseelement.compile(
                 dialect=compare_dialect, schema_translate_map=map_
             )
         else:
-            compiled = context.compiled.statement.compile(
+            compiled = execute_observed.clauseelement.compile(
                 dialect=compare_dialect,
                 column_keys=context.compiled.column_keys,
                 inline=context.compiled.inline,
index 92b16d4975981ba970dbb6e3d1ac21c3c60a7518..68281031708ba0d0bbf0c5a137754c04f82d1e39 100644 (file)
@@ -140,7 +140,9 @@ def profile_memory(
                         )
 
                     if assert_no_sessions:
-                        assert len(_sessions) == 0, "sessions remain"
+                        assert len(_sessions) == 0, "%d sessions remain" % (
+                            len(_sessions),
+                        )
 
                     # queue.put(('samples', samples))
 
@@ -186,6 +188,27 @@ def profile_memory(
                 else:
                     queue.put(("result", True, "success"))
 
+        def run_plain(*func_args):
+            import queue as _queue
+
+            q = _queue.Queue()
+            profile(q, func_args)
+
+            while True:
+                row = q.get()
+                typ = row[0]
+                if typ == "samples":
+                    print("sample gc sizes:", row[1])
+                elif typ == "status":
+                    print(row[1])
+                elif typ == "result":
+                    break
+                else:
+                    assert False, "can't parse row"
+            assert row[1], row[2]
+
+        # return run_plain
+
         def run_in_process(*func_args):
             queue = multiprocessing.Queue()
             proc = multiprocessing.Process(
@@ -221,7 +244,15 @@ class EnsureZeroed(fixtures.ORMTest):
     def setup(self):
         _sessions.clear()
         _mapper_registry.clear()
-        self.engine = engines.testing_engine(options={"use_reaper": False})
+
+        # enable query caching, but make the cache small so that the tests
+        # don't take too long.  issues w/ caching include making sure
+        # sessions don't get stuck inside of it.  it will also make tests
+        # like test_mapper_reset take a long time, because mappers are very
+        # much a part of what's in the cache.
+        self.engine = engines.testing_engine(
+            options={"use_reaper": False, "query_cache_size": 10}
+        )
 
 
 class MemUsageTest(EnsureZeroed):
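The query_cache_size option used above is the engine-level knob for the LRU statement cache; an application would set it the same way, assuming the create_engine parameter of that name:

    from sqlalchemy import create_engine

    # a small cache evicts entries aggressively; the memory tests rely
    # on this so cached mappers and sessions can be garbage collected
    engine = create_engine("sqlite://", query_cache_size=10)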
@@ -771,6 +802,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             sess = Session()
             sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
             sess.close()
+            del sess
 
         try:
             go()
@@ -948,7 +980,9 @@ class MemUsageWBackendTest(EnsureZeroed):
                 sess.delete(a)
             sess.flush()
 
-            # don't need to clear_mappers()
+            # mappers necessarily find themselves in the compiled cache,
+            # so to allow them to be GC'ed clear out the cache
+            self.engine.clear_compiled_cache()
             del B
             del A
 
@@ -1154,6 +1188,28 @@ class CycleTest(_fixtures.FixtureTest):
 
         go()
 
+    def test_proxied_attribute(self):
+        from sqlalchemy.ext import hybrid
+
+        users = self.tables.users
+
+        class Foo(object):
+            @hybrid.hybrid_property
+            def user_name(self):
+                return self.name
+
+        mapper(Foo, users)
+
+        # unfortunately there are a lot of cycles with an aliased()
+        # for now; however, calling upon __clause_element__() does not
+        # seem to make it worse, which is what this test is checking
+        @assert_cycles(68)
+        def go():
+            a1 = aliased(Foo)
+            a1.user_name.__clause_element__()
+
+        go()
+
     def test_raise_from(self):
         @assert_cycles()
         def go():
index 761bf46636e0ed0aa7153a523a694eec5b6f493c..585d6d5b1b1eb5a9975bdd1802f1612814bb620f 100644 (file)
@@ -111,6 +111,7 @@ class CacheKeyTest(fixtures.TestBase):
         current_key = None
         for stmt in stmt_fixture_one:
             key = stmt._generate_cache_key()
+            assert key is not None
             if current_key:
                 eq_(key, current_key)
             else:
@@ -121,6 +122,7 @@ class CacheKeyTest(fixtures.TestBase):
         current_key = None
         for stmt in stmt_fixture_one:
             key = stmt._generate_cache_key()
+            assert key is not None
             if current_key:
                 eq_(key, current_key)
             else:
index 188e8e929aa4370dfdd4e3f02c00a7b36c3de41a..8f06220e2876ee07b1fe907775d53da43603090c 100644 (file)
@@ -18,13 +18,29 @@ from sqlalchemy.orm import relationship
 from sqlalchemy.orm import selectinload
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import sessionmaker
+from sqlalchemy.testing import config
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import profiling
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 
 
-class MergeTest(fixtures.MappedTest):
+class NoCache(object):
+    run_setup_bind = "each"
+
+    @classmethod
+    def setup_class(cls):
+        super(NoCache, cls).setup_class()
+        cls._cache = config.db._compiled_cache
+        config.db._compiled_cache = None
+
+    @classmethod
+    def teardown_class(cls):
+        config.db._compiled_cache = cls._cache
+        super(NoCache, cls).teardown_class()
+
+
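The NoCache mixin zeroes out the engine-wide compiled cache so that call-count profiling is not skewed by warm-cache hits; the per-connection equivalent used elsewhere in these tests is the compiled_cache execution option:

    import sqlalchemy as sa

    engine = sa.create_engine("sqlite://")
    # disable statement caching for this connection only, as the
    # result-set profiling tests below do
    with engine.connect().execution_options(compiled_cache=None) as conn:
        conn.execute(sa.text("select 1"))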
+class MergeTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -140,7 +156,7 @@ class MergeTest(fixtures.MappedTest):
         self.assert_sql_count(testing.db, go2, 2)
 
 
-class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
+class LoadManyToOneFromIdentityTest(NoCache, fixtures.MappedTest):
 
     """test overhead associated with many-to-one fetches.
 
@@ -239,7 +255,7 @@ class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
         go()
 
 
-class MergeBackrefsTest(fixtures.MappedTest):
+class MergeBackrefsTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -333,7 +349,7 @@ class MergeBackrefsTest(fixtures.MappedTest):
             s.merge(a)
 
 
-class DeferOptionsTest(fixtures.MappedTest):
+class DeferOptionsTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -397,7 +413,7 @@ class DeferOptionsTest(fixtures.MappedTest):
         ).all()
 
 
-class AttributeOverheadTest(fixtures.MappedTest):
+class AttributeOverheadTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -476,7 +492,7 @@ class AttributeOverheadTest(fixtures.MappedTest):
         go()
 
 
-class SessionTest(fixtures.MappedTest):
+class SessionTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -542,7 +558,7 @@ class SessionTest(fixtures.MappedTest):
         go()
 
 
-class QueryTest(fixtures.MappedTest):
+class QueryTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -604,7 +620,7 @@ class QueryTest(fixtures.MappedTest):
         go()
 
 
-class SelectInEagerLoadTest(fixtures.MappedTest):
+class SelectInEagerLoadTest(NoCache, fixtures.MappedTest):
     """basic test for selectin() loading, which uses a baked query.
 
     if the baked query starts spoiling due to some bug in cache keys,
@@ -695,7 +711,7 @@ class SelectInEagerLoadTest(fixtures.MappedTest):
         go()
 
 
-class JoinedEagerLoadTest(fixtures.MappedTest):
+class JoinedEagerLoadTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -880,7 +896,7 @@ class JoinedEagerLoadTest(fixtures.MappedTest):
         go()
 
 
-class JoinConditionTest(fixtures.DeclarativeMappedTest):
+class JoinConditionTest(NoCache, fixtures.DeclarativeMappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -969,7 +985,7 @@ class JoinConditionTest(fixtures.DeclarativeMappedTest):
         go()
 
 
-class BranchedOptionTest(fixtures.MappedTest):
+class BranchedOptionTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
@@ -1182,7 +1198,7 @@ class BranchedOptionTest(fixtures.MappedTest):
         go()
 
 
-class AnnotatedOverheadTest(fixtures.MappedTest):
+class AnnotatedOverheadTest(NoCache, fixtures.MappedTest):
     __requires__ = ("python_profiling_backend",)
 
     @classmethod
index b22676ad775dd6e393df46ef8638994928f8968a..119a5ee6a81cc147efa6bc19ffe692527245f04e 100644 (file)
@@ -86,11 +86,17 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     @profiling.function_call_count()
     def test_string(self):
-        [tuple(row) for row in t.select().execute().fetchall()]
+        with testing.db.connect().execution_options(
+            compiled_cache=None
+        ) as conn:
+            [tuple(row) for row in conn.execute(t.select()).fetchall()]
 
     @profiling.function_call_count()
     def test_unicode(self):
-        [tuple(row) for row in t2.select().execute().fetchall()]
+        with testing.db.connect().execution_options(
+            compiled_cache=None
+        ) as conn:
+            [tuple(row) for row in conn.execute(t2.select()).fetchall()]
 
     @profiling.function_call_count(variance=0.10)
     def test_raw_string(self):
@@ -110,13 +116,17 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     @profiling.function_call_count()
     def test_fetch_by_key_legacy(self):
-        with testing.db.connect() as conn:
+        with testing.db.connect().execution_options(
+            compiled_cache=None
+        ) as conn:
             for row in conn.execute(t.select()).fetchall():
                 [row["field%d" % fnum] for fnum in range(NUM_FIELDS)]
 
     @profiling.function_call_count()
     def test_fetch_by_key_mappings(self):
-        with testing.db.connect() as conn:
+        with testing.db.connect().execution_options(
+            compiled_cache=None
+        ) as conn:
             for row in conn.execute(t.select()).mappings().fetchall():
                 [row["field%d" % fnum] for fnum in range(NUM_FIELDS)]
 
@@ -126,7 +136,9 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
     def test_one_or_none(self, one_or_first, rows_present):
         # TODO: this is not testing the ORM level "scalar_mapping"
         # mode which has a different performance profile
-        with testing.db.connect() as conn:
+        with testing.db.connect().execution_options(
+            compiled_cache=None
+        ) as conn:
             stmt = t.select()
             if rows_present == 0:
                 stmt = stmt.where(1 == 0)
index 7f311778e7d201fa3f6172ee88923f6bcacdd482..fad2ad8f53a8a9e42b238af96132c327fedec252 100644 (file)
@@ -3,7 +3,6 @@
 import datetime
 
 from sqlalchemy import and_
-from sqlalchemy import bindparam
 from sqlalchemy import Column
 from sqlalchemy import Date
 from sqlalchemy import DateTime
@@ -52,20 +51,6 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
         self.metadata.drop_all()
         self.metadata.clear()
 
-    def test_compiled_insert(self):
-        table = Table(
-            "testtable",
-            self.metadata,
-            Column("id", Integer, primary_key=True),
-            Column("data", String(30)),
-        )
-        self.metadata.create_all()
-        ins = table.insert(
-            inline=True, values={"data": bindparam("x")}
-        ).compile()
-        ins.execute({"x": "five"}, {"x": "seven"})
-        eq_(table.select().execute().fetchall(), [(1, "five"), (2, "seven")])
-
     def test_foreignkey_missing_insert(self):
         Table("t1", self.metadata, Column("id", Integer, primary_key=True))
         t2 = Table(
@@ -602,7 +587,9 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
     def _assert_data_noautoincrement(self, table):
         engine = engines.testing_engine(options={"implicit_returning": False})
 
-        with engine.connect() as conn:
+        # turning off the cache because we are checking for compile-time
+        # warnings
+        with engine.connect().execution_options(compiled_cache=None) as conn:
             conn.execute(table.insert(), {"id": 30, "data": "d1"})
 
             with expect_warnings(
index 41f94aa980fce9f73cfc919ce85403b18495242e..3ad8aa59415a114748f11ff26a2b1811f4320378 100644 (file)
@@ -840,7 +840,7 @@ class CompiledCacheTest(fixtures.TestBase):
 
         ins = users.insert()
         with patch.object(
-            ins, "compile", Mock(side_effect=ins.compile)
+            ins, "_compiler", Mock(side_effect=ins._compiler)
         ) as compile_mock:
             cached_conn.execute(ins, {"user_name": "u1"})
             cached_conn.execute(ins, {"user_name": "u2"})
@@ -881,7 +881,7 @@ class CompiledCacheTest(fixtures.TestBase):
 
         ins = photo.insert()
         with patch.object(
-            ins, "compile", Mock(side_effect=ins.compile)
+            ins, "_compiler", Mock(side_effect=ins._compiler)
         ) as compile_mock:
             cached_conn.execute(ins, {"photo_blob": blob})
         eq_(compile_mock.call_count, 1)
@@ -908,7 +908,7 @@ class CompiledCacheTest(fixtures.TestBase):
         upd = users.update().where(users.c.user_id == bindparam("b_user_id"))
 
         with patch.object(
-            upd, "compile", Mock(side_effect=upd.compile)
+            upd, "_compiler", Mock(side_effect=upd._compiler)
         ) as compile_mock:
             cached_conn.execute(
                 upd,
index b906b87be455bd73c6aa52def6881bb0d1e60d80..af6bc1d369a4d7f2740684f9fc8e5cdf281e32e0 100644 (file)
@@ -56,7 +56,8 @@ class LogParamsTest(fixtures.TestBase):
         )
         eq_(
             self.buf.buffer[1].message,
-            "[{'data': '0'}, {'data': '1'}, {'data': '2'}, {'data': '3'}, "
+            "[raw sql] [{'data': '0'}, {'data': '1'}, {'data': '2'}, "
+            "{'data': '3'}, "
             "{'data': '4'}, {'data': '5'}, {'data': '6'}, {'data': '7'}"
             "  ... displaying 10 of 100 total bound "
             "parameter sets ...  {'data': '98'}, {'data': '99'}]",
@@ -85,7 +86,7 @@ class LogParamsTest(fixtures.TestBase):
         )
         eq_(
             self.buf.buffer[1].message,
-            "[SQL parameters hidden due to hide_parameters=True]",
+            "[raw sql] [SQL parameters hidden due to hide_parameters=True]",
         )
 
     def test_log_large_list_of_tuple(self):
@@ -96,7 +97,7 @@ class LogParamsTest(fixtures.TestBase):
         )
         eq_(
             self.buf.buffer[1].message,
-            "[('0',), ('1',), ('2',), ('3',), ('4',), ('5',), "
+            "[raw sql] [('0',), ('1',), ('2',), ('3',), ('4',), ('5',), "
             "('6',), ('7',)  ... displaying 10 of 100 total "
             "bound parameter sets ...  ('98',), ('99',)]",
         )
@@ -115,7 +116,10 @@ class LogParamsTest(fixtures.TestBase):
                 "[parameters: ([1, 2, 3], 'hi')]\n" in str(exc_info)
             )
 
-            eq_(self.buf.buffer[1].message, "([1, 2, 3], 'hi')")
+            eq_regex(
+                self.buf.buffer[1].message,
+                r"\[generated .*\] \(\[1, 2, 3\], 'hi'\)",
+            )
 
     def test_repr_params_positional_array(self):
         eq_(
@@ -223,7 +227,7 @@ class LogParamsTest(fixtures.TestBase):
 
         eq_(
             self.buf.buffer[1].message,
-            "('%s ... (4702 characters truncated) ... %s',)"
+            "[raw sql] ('%s ... (4702 characters truncated) ... %s',)"
             % (largeparam[0:149], largeparam[-149:]),
         )
 
@@ -238,8 +242,8 @@ class LogParamsTest(fixtures.TestBase):
 
         eq_(
             self.buf.buffer[1].message,
-            "('%s', '%s', '%s ... (372 characters truncated) ... %s')"
-            % (lp1, lp2, lp3[0:149], lp3[-149:]),
+            "[raw sql] ('%s', '%s', '%s ... (372 characters truncated) "
+            "... %s')" % (lp1, lp2, lp3[0:149], lp3[-149:]),
         )
 
     def test_log_large_parameter_multiple(self):
@@ -257,7 +261,8 @@ class LogParamsTest(fixtures.TestBase):
 
         eq_(
             self.buf.buffer[1].message,
-            "[('%s ... (4702 characters truncated) ... %s',), ('%s',), "
+            "[raw sql] [('%s ... (4702 characters truncated) ... %s',), "
+            "('%s',), "
             "('%s ... (372 characters truncated) ... %s',)]"
             % (lp1[0:149], lp1[-149:], lp2, lp3[0:149], lp3[-149:]),
         )
@@ -342,7 +347,7 @@ class LogParamsTest(fixtures.TestBase):
 
         eq_(
             self.buf.buffer[1].message,
-            "('%s ... (4702 characters truncated) ... %s',)"
+            "[raw sql] ('%s ... (4702 characters truncated) ... %s',)"
             % (largeparam[0:149], largeparam[-149:]),
         )
 
index e7e2530b2991b45e1096dad8e6db0500bc469c29..3c363302b11935aa3958a7e4e522bc613b80739a 100644 (file)
@@ -10,6 +10,7 @@ from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defaultload
 from sqlalchemy.orm import join
 from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import selectinload
 from sqlalchemy.orm import subqueryload
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import assert_raises
@@ -79,7 +80,7 @@ class _PolymorphicTestBase(object):
         count = {"": 14, "Polymorphic": 9}.get(self.select_type, 10)
         self.assert_sql_count(testing.db, go, count)
 
-    def test_primary_eager_aliasing_one(self):
+    def test_primary_eager_aliasing_joinedload(self):
         # For both joinedload() and subqueryload(), if the original q is
         # not loading the subclass table, the joinedload doesn't happen.
 
@@ -96,7 +97,9 @@ class _PolymorphicTestBase(object):
         count = {"": 6, "Polymorphic": 3}.get(self.select_type, 4)
         self.assert_sql_count(testing.db, go, count)
 
-    def test_primary_eager_aliasing_two(self):
+    def test_primary_eager_aliasing_subqueryload(self):
+        # test that subqueryload does not occur because the parent
+        # row cannot support it
         sess = create_session()
 
         def go():
@@ -111,6 +114,23 @@ class _PolymorphicTestBase(object):
         count = {"": 14, "Polymorphic": 7}.get(self.select_type, 8)
         self.assert_sql_count(testing.db, go, count)
 
+    def test_primary_eager_aliasing_selectinload(self):
+        # test that selectinload does not occur because the parent
+        # row cannot support it
+        sess = create_session()
+
+        def go():
+            eq_(
+                sess.query(Person)
+                .order_by(Person.person_id)
+                .options(selectinload(Engineer.machines))
+                .all(),
+                all_employees,
+            )
+
+        count = {"": 14, "Polymorphic": 7}.get(self.select_type, 8)
+        self.assert_sql_count(testing.db, go, count)
+
     def test_primary_eager_aliasing_three(self):
 
         # assert the JOINs don't over JOIN
index 101e815fe02c05d0f6ac6adb670a3b2f1dc52479..11668cd054f80c5a1d996fb9ba9467bb13f9c60c 100644 (file)
@@ -12,7 +12,6 @@ from sqlalchemy import testing
 from sqlalchemy import true
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm import Bundle
-from sqlalchemy.orm import class_mapper
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm import mapper
@@ -24,6 +23,7 @@ from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.assertsql import CompiledSQL
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 
@@ -583,23 +583,13 @@ class RelationshipFromSingleTest(
         )
 
     @classmethod
-    def setup_classes(cls):
-        class Employee(cls.Comparable):
-            pass
-
-        class Manager(Employee):
-            pass
-
-        class Stuff(cls.Comparable):
-            pass
-
-    def test_subquery_load(self):
+    def setup_mappers(cls):
         employee, employee_stuff, Employee, Stuff, Manager = (
-            self.tables.employee,
-            self.tables.employee_stuff,
-            self.classes.Employee,
-            self.classes.Stuff,
-            self.classes.Manager,
+            cls.tables.employee,
+            cls.tables.employee_stuff,
+            cls.classes.Employee,
+            cls.classes.Stuff,
+            cls.classes.Manager,
         )
 
         mapper(
@@ -616,32 +606,62 @@ class RelationshipFromSingleTest(
         )
         mapper(Stuff, employee_stuff)
 
-        sess = create_session()
-        context = (
-            sess.query(Manager)
-            .options(subqueryload("stuff"))
-            ._compile_context()
+    @classmethod
+    def setup_classes(cls):
+        class Employee(cls.Comparable):
+            pass
+
+        class Manager(Employee):
+            pass
+
+        class Stuff(cls.Comparable):
+            pass
+
+    @classmethod
+    def insert_data(cls, connection):
+        Employee, Stuff, Manager = cls.classes("Employee", "Stuff", "Manager")
+        s = Session(connection)
+
+        s.add_all(
+            [
+                Employee(
+                    name="e1", stuff=[Stuff(name="es1"), Stuff(name="es2")]
+                ),
+                Manager(
+                    name="m1", stuff=[Stuff(name="ms1"), Stuff(name="ms2")]
+                ),
+            ]
         )
-        subq = context.attributes[
-            (
-                "subqueryload_data",
-                (class_mapper(Manager), class_mapper(Manager).attrs.stuff),
-            )
-        ]["query"]
+        s.commit()
 
-        self.assert_compile(
-            subq,
-            "SELECT employee_stuff.id AS "
-            "employee_stuff_id, employee_stuff.employee"
-            "_id AS employee_stuff_employee_id, "
-            "employee_stuff.name AS "
-            "employee_stuff_name, anon_1.employee_id "
-            "AS anon_1_employee_id FROM (SELECT "
-            "employee.id AS employee_id FROM employee "
-            "WHERE employee.type IN ([POSTCOMPILE_type_1])) AS anon_1 "
-            "JOIN employee_stuff ON anon_1.employee_id "
-            "= employee_stuff.employee_id",
-            use_default_dialect=True,
+    def test_subquery_load(self):
+        Employee, Stuff, Manager = self.classes("Employee", "Stuff", "Manager")
+
+        sess = create_session()
+
+        with self.sql_execution_asserter(testing.db) as asserter:
+            sess.query(Manager).options(subqueryload("stuff")).all()
+
+        asserter.assert_(
+            CompiledSQL(
+                "SELECT employee.id AS employee_id, employee.name AS "
+                "employee_name, employee.type AS employee_type "
+                "FROM employee WHERE employee.type IN ([POSTCOMPILE_type_1])",
+                params=[{"type_1": ["manager"]}],
+            ),
+            CompiledSQL(
+                "SELECT employee_stuff.id AS "
+                "employee_stuff_id, employee_stuff.employee"
+                "_id AS employee_stuff_employee_id, "
+                "employee_stuff.name AS "
+                "employee_stuff_name, anon_1.employee_id "
+                "AS anon_1_employee_id FROM (SELECT "
+                "employee.id AS employee_id FROM employee "
+                "WHERE employee.type IN ([POSTCOMPILE_type_1])) AS anon_1 "
+                "JOIN employee_stuff ON anon_1.employee_id "
+                "= employee_stuff.employee_id",
+                params=[{"type_1": ["manager"]}],
+            ),
         )
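
The expected strings above embed [POSTCOMPILE_type_1]: the IN criteria travel as an "expanding" bound parameter that is resolved at execution time, so the cached SQL string stays stable no matter how many values are passed. A minimal sketch of the same rendering, assuming 1.4's expanding-by-default in_():

    from sqlalchemy import column, select

    c = column("type")
    stmt = select([c]).where(c.in_(["manager"]))

    # the IN contents are deferred to execution time rather than baked
    # into the SQL string; at this stage of 1.4 development the default
    # compiler prints the deferred placeholder as [POSTCOMPILE_type_1]
    print(stmt)
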
 
 
index b431ea6b256f9b09c89c620aaa429b8cb03dc276..3ade73247232b49fe4de0ac505f040be3e806ff3 100644 (file)
@@ -1,10 +1,13 @@
 from sqlalchemy import inspect
+from sqlalchemy import text
 from sqlalchemy.future import select as future_select
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm import defaultload
 from sqlalchemy.orm import defer
+from sqlalchemy.orm import join as orm_join
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm import Load
+from sqlalchemy.orm import selectinload
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import subqueryload
 from sqlalchemy.orm import with_polymorphic
@@ -71,6 +74,8 @@ class CacheKeyTest(CacheKeyFixture, _fixtures.FixtureTest):
                 defer("id"),
                 defer("*"),
                 defer(Address.id),
+                subqueryload(User.orders),
+                selectinload(User.orders),
                 joinedload(User.addresses).defer(Address.id),
                 joinedload(aliased(User).addresses).defer(Address.id),
                 joinedload(User.addresses).defer("id"),
@@ -163,6 +168,65 @@ class CacheKeyTest(CacheKeyFixture, _fixtures.FixtureTest):
                 future_select(User)
                 .join(Address, User.addresses)
                 .join_from(User, User.orders),
+                future_select(User.id, Order.id).select_from(
+                    orm_join(User, Order, User.orders)
+                ),
+            ),
+            compare_values=True,
+        )
+
+    def test_orm_query_w_orm_joins(self):
+
+        User, Address, Keyword, Order, Item = self.classes(
+            "User", "Address", "Keyword", "Order", "Item"
+        )
+
+        a1 = aliased(Address)
+
+        self._run_cache_key_fixture(
+            lambda: stmt_20(
+                Session().query(User).join(User.addresses),
+                Session().query(User).join(User.orders),
+                Session().query(User).join(User.addresses).join(User.orders),
+                Session()
+                .query(User)
+                .join("addresses")
+                .join("dingalings", from_joinpoint=True),
+                Session().query(User).join("addresses"),
+                Session().query(User).join("orders"),
+                Session().query(User).join("addresses").join("orders"),
+                Session().query(User).join(Address, User.addresses),
+                Session().query(User).join(a1, "addresses"),
+                Session().query(User).join(a1, "addresses", aliased=True),
+                Session().query(User).join(User.addresses.of_type(a1)),
+            ),
+            compare_values=True,
+        )
+
+    def test_orm_query_from_statement(self):
+        User, Address, Keyword, Order, Item = self.classes(
+            "User", "Address", "Keyword", "Order", "Item"
+        )
+
+        self._run_cache_key_fixture(
+            lambda: stmt_20(
+                Session()
+                .query(User)
+                .from_statement(text("select * from user")),
+                Session()
+                .query(User)
+                .options(selectinload(User.addresses))
+                .from_statement(text("select * from user")),
+                Session()
+                .query(User)
+                .options(subqueryload(User.addresses))
+                .from_statement(text("select * from user")),
+                Session()
+                .query(User)
+                .from_statement(text("select * from user order by id")),
+                Session()
+                .query(User.id)
+                .from_statement(text("select * from user")),
             ),
             compare_values=True,
         )
index 8b54e83bf50a284a892b6c788a6facfed02be0be..c7c07ffd3efae1fd1d23166c2b06b767641ce1a0 100644 (file)
@@ -1,13 +1,13 @@
 import sqlalchemy as sa
 from sqlalchemy import bindparam
 from sqlalchemy import ForeignKey
-from sqlalchemy import inspect
 from sqlalchemy import Integer
 from sqlalchemy import literal_column
 from sqlalchemy import select
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy.orm import aliased
+from sqlalchemy.orm import backref
 from sqlalchemy.orm import clear_mappers
 from sqlalchemy.orm import close_all_sessions
 from sqlalchemy.orm import create_session
@@ -86,6 +86,44 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         self.assert_sql_count(testing.db, go, 2)
 
+    def test_params_arent_cached(self):
+        users, Address, addresses, User = (
+            self.tables.users,
+            self.classes.Address,
+            self.tables.addresses,
+            self.classes.User,
+        )
+
+        mapper(
+            User,
+            users,
+            properties={
+                "addresses": relationship(
+                    mapper(Address, addresses),
+                    lazy="subquery",
+                    order_by=Address.id,
+                )
+            },
+        )
+        query_cache = {}
+        sess = create_session()
+
+        u1 = (
+            sess.query(User)
+            .execution_options(query_cache=query_cache)
+            .filter(User.id == 7)
+            .one()
+        )
+
+        u2 = (
+            sess.query(User)
+            .execution_options(query_cache=query_cache)
+            .filter(User.id == 8)
+            .one()
+        )
+        eq_(len(u1.addresses), 1)
+        eq_(len(u2.addresses), 3)
+
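
test_params_arent_cached() runs two differently-filtered queries through one shared cache dict and checks that each still sees its own parameter values. A self-contained Core-level analogue using the compiled_cache execution option (the table and data below are hypothetical):

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine, select

    engine = create_engine("sqlite://")
    metadata = MetaData()
    t = Table("t", metadata, Column("x", Integer))
    metadata.create_all(engine)

    cache = {}
    with engine.connect() as conn:
        conn.execute(t.insert(), [{"x": 1}, {"x": 2}])
        cached_conn = conn.execution_options(compiled_cache=cache)

        # both statements resolve to the same cache entry; the parameter
        # value from the first run must not leak into the second
        r1 = cached_conn.execute(select([t.c.x]).where(t.c.x == 1)).fetchall()
        r2 = cached_conn.execute(select([t.c.x]).where(t.c.x == 2)).fetchall()

    assert r1 == [(1,)] and r2 == [(2,)]
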
     def test_from_aliased(self):
         users, Dingaling, User, dingalings, Address, addresses = (
             self.tables.users,
@@ -2689,33 +2727,40 @@ class CyclicalInheritingEagerTestTwo(
             movies = relationship("Movie", foreign_keys=Movie.director_id)
             name = Column(String(50))
 
+    @classmethod
+    def insert_data(cls, connection):
+        Director, Movie = cls.classes("Director", "Movie")
+        s = Session(connection)
+        s.add_all([Director(movies=[Movie(title="m1"), Movie(title="m2")])])
+        s.commit()
+
     def test_from_subclass(self):
         Director = self.classes.Director
 
         s = create_session()
 
-        ctx = s.query(Director).options(subqueryload("*"))._compile_context()
-
-        q = ctx.attributes[
-            (
-                "subqueryload_data",
-                (inspect(Director), inspect(Director).attrs.movies),
-            )
-        ]["query"]
-        self.assert_compile(
-            q,
-            "SELECT movie.id AS movie_id, "
-            "persistent.id AS persistent_id, "
-            "movie.director_id AS movie_director_id, "
-            "movie.title AS movie_title, "
-            "anon_1.director_id AS anon_1_director_id "
-            "FROM (SELECT director.id AS director_id "
-            "FROM persistent JOIN director "
-            "ON persistent.id = director.id) AS anon_1 "
-            "JOIN (persistent JOIN movie "
-            "ON persistent.id = movie.id) "
-            "ON anon_1.director_id = movie.director_id",
-            dialect="default",
+        with self.sql_execution_asserter(testing.db) as asserter:
+            s.query(Director).options(subqueryload("*")).all()
+        asserter.assert_(
+            CompiledSQL(
+                "SELECT director.id AS director_id, "
+                "persistent.id AS persistent_id, director.name "
+                "AS director_name FROM persistent JOIN director "
+                "ON persistent.id = director.id"
+            ),
+            CompiledSQL(
+                "SELECT movie.id AS movie_id, "
+                "persistent.id AS persistent_id, "
+                "movie.director_id AS movie_director_id, "
+                "movie.title AS movie_title, "
+                "anon_1.director_id AS anon_1_director_id "
+                "FROM (SELECT director.id AS director_id "
+                "FROM persistent JOIN director "
+                "ON persistent.id = director.id) AS anon_1 "
+                "JOIN (persistent JOIN movie "
+                "ON persistent.id = movie.id) "
+                "ON anon_1.director_id = movie.director_id",
+            ),
         )
 
     def test_integrate(self):
@@ -2762,7 +2807,9 @@ class SubqueryloadDistinctTest(
             )
             path = Column(String(255))
             director_id = Column(Integer, ForeignKey("director.id"))
-            director = relationship(Director, backref="photos")
+            director = relationship(
+                Director, backref=backref("photos", order_by=id)
+            )
 
         class Movie(Base):
             __tablename__ = "movie"
@@ -2824,87 +2871,58 @@ class SubqueryloadDistinctTest(
 
         s = create_session(testing.db)
 
-        q = s.query(Movie).options(
-            subqueryload(Movie.director).subqueryload(Director.photos)
-        )
-        ctx = q._compile_context()
-
-        q2 = ctx.attributes[
-            (
-                "subqueryload_data",
-                (inspect(Movie), inspect(Movie).attrs.director),
+        with self.sql_execution_asserter(testing.db) as asserter:
+            result = (
+                s.query(Movie)
+                .options(
+                    subqueryload(Movie.director).subqueryload(Director.photos)
+                )
+                .all()
             )
-        ]["query"]
-        self.assert_compile(
-            q2,
-            "SELECT director.id AS director_id, "
-            "director.name AS director_name, "
-            "anon_1.movie_director_id AS anon_1_movie_director_id "
-            "FROM (SELECT%s movie.director_id AS movie_director_id "
-            "FROM movie) AS anon_1 "
-            "JOIN director ON director.id = anon_1.movie_director_id"
-            % (" DISTINCT" if expect_distinct else ""),
+        asserter.assert_(
+            CompiledSQL(
+                "SELECT movie.id AS movie_id, movie.director_id "
+                "AS movie_director_id, movie.title AS movie_title FROM movie"
+            ),
+            CompiledSQL(
+                "SELECT director.id AS director_id, "
+                "director.name AS director_name, "
+                "anon_1.movie_director_id AS anon_1_movie_director_id "
+                "FROM (SELECT%s movie.director_id AS movie_director_id "
+                "FROM movie) AS anon_1 "
+                "JOIN director ON director.id = anon_1.movie_director_id"
+                % (" DISTINCT" if expect_distinct else ""),
+            ),
+            CompiledSQL(
+                "SELECT director_photo.id AS director_photo_id, "
+                "director_photo.path AS director_photo_path, "
+                "director_photo.director_id AS director_photo_director_id, "
+                "director_1.id AS director_1_id "
+                "FROM (SELECT%s movie.director_id AS movie_director_id "
+                "FROM movie) AS anon_1 "
+                "JOIN director AS director_1 "
+                "ON director_1.id = anon_1.movie_director_id "
+                "JOIN director_photo "
+                "ON director_1.id = director_photo.director_id "
+                "ORDER BY director_photo.id"
+                % (" DISTINCT" if expect_distinct else ""),
+            ),
         )
 
-        ctx2 = q2._compile_context()
-        stmt = q2.statement
-
-        result = s.connection().execute(stmt)
-        rows = result.fetchall()
-
-        if expect_distinct:
-            eq_(rows, [(1, "Woody Allen", 1)])
-        else:
-            eq_(rows, [(1, "Woody Allen", 1), (1, "Woody Allen", 1)])
-
-        q3 = ctx2.attributes[
-            (
-                "subqueryload_data",
-                (inspect(Director), inspect(Director).attrs.photos),
-            )
-        ]["query"]
-
-        self.assert_compile(
-            q3,
-            "SELECT director_photo.id AS director_photo_id, "
-            "director_photo.path AS director_photo_path, "
-            "director_photo.director_id AS director_photo_director_id, "
-            "director_1.id AS director_1_id "
-            "FROM (SELECT%s movie.director_id AS movie_director_id "
-            "FROM movie) AS anon_1 "
-            "JOIN director AS director_1 "
-            "ON director_1.id = anon_1.movie_director_id "
-            "JOIN director_photo "
-            "ON director_1.id = director_photo.director_id"
-            % (" DISTINCT" if expect_distinct else ""),
-        )
-
-        stmt = q3.statement
-
-        result = s.connection().execute(stmt)
-
-        rows = result.fetchall()
-        if expect_distinct:
-            eq_(
-                set(tuple(t) for t in rows),
-                set([(1, "/1.jpg", 1, 1), (2, "/2.jpg", 1, 1)]),
-            )
-        else:
-            # oracle might not order the way we expect here
-            eq_(
-                set(tuple(t) for t in rows),
-                set(
-                    [
-                        (1, "/1.jpg", 1, 1),
-                        (2, "/2.jpg", 1, 1),
-                        (1, "/1.jpg", 1, 1),
-                        (2, "/2.jpg", 1, 1),
-                    ]
-                ),
-            )
-
-        movies = q.all()  # noqa
-
+        eq_(
+            [
+                (
+                    movie.title,
+                    movie.director.name,
+                    [photo.path for photo in movie.director.photos],
+                )
+                for movie in result
+            ],
+            [
+                ("Manhattan", "Woody Allen", ["/1.jpg", "/2.jpg"]),
+                ("Sweet and Lowdown", "Woody Allen", ["/1.jpg", "/2.jpg"]),
+            ],
+        )
         # check number of persistent objects in session
         eq_(len(list(s)), 5)
 
@@ -2919,24 +2937,41 @@ class SubqueryloadDistinctTest(
 
         s = create_session(testing.db)
 
-        q = s.query(Credit).options(
-            subqueryload(Credit.movie).subqueryload(Movie.director)
+        with self.sql_execution_asserter(testing.db) as asserter:
+            result = (
+                s.query(Credit)
+                .options(
+                    subqueryload(Credit.movie).subqueryload(Movie.director)
+                )
+                .all()
+            )
+        asserter.assert_(
+            CompiledSQL(
+                "SELECT credit.id AS credit_id, credit.movie_id AS "
+                "credit_movie_id FROM credit"
+            ),
+            CompiledSQL(
+                "SELECT movie.id AS movie_id, movie.director_id "
+                "AS movie_director_id, movie.title AS movie_title, "
+                "anon_1.credit_movie_id AS anon_1_credit_movie_id "
+                "FROM (SELECT DISTINCT credit.movie_id AS credit_movie_id "
+                "FROM credit) AS anon_1 JOIN movie ON movie.id = "
+                "anon_1.credit_movie_id"
+            ),
+            CompiledSQL(
+                "SELECT director.id AS director_id, director.name "
+                "AS director_name, movie_1.director_id AS movie_1_director_id "
+                "FROM (SELECT DISTINCT credit.movie_id AS credit_movie_id "
+                "FROM credit) AS anon_1 JOIN movie AS movie_1 ON "
+                "movie_1.id = anon_1.credit_movie_id JOIN director "
+                "ON director.id = movie_1.director_id"
+            ),
         )
 
-        ctx = q._compile_context()
-
-        q2 = ctx.attributes[
-            ("subqueryload_data", (inspect(Credit), Credit.movie.property))
-        ]["query"]
-        ctx2 = q2._compile_context()
-        q3 = ctx2.attributes[
-            ("subqueryload_data", (inspect(Movie), Movie.director.property))
-        ]["query"]
-
-        stmt = q3.statement
-
-        result = s.connection().execute(stmt)
-        eq_(result.fetchall(), [(1, "Woody Allen", 1), (1, "Woody Allen", 1)])
+        eq_(
+            [credit.movie.director.name for credit in result],
+            ["Woody Allen", "Woody Allen", "Woody Allen"],
+        )
 
 
 class JoinedNoLoadConflictTest(fixtures.DeclarativeMappedTest):
@@ -3196,6 +3231,13 @@ class FromSelfTest(fixtures.DeclarativeMappedTest):
     neutron is currently dependent on this use case which means others
     are too.
 
+    Additionally tests functionality related to #5386, where we use the
+    non-cached context.query, rather than
+    context.compile_state.select_statement, to generate the subquery.  This
+    is so we get the current parameters from the new statement being run, but
+    it also means we have to get a new CompileState from that query in order
+    to deal with the correct entities.
+
     """
 
     @classmethod
@@ -3213,55 +3255,147 @@ class FromSelfTest(fixtures.DeclarativeMappedTest):
             id = Column(Integer, primary_key=True)
             a_id = Column(ForeignKey("a.id"))
             a = relationship("A")
+            ds = relationship("D", order_by="D.id")
 
         class C(Base, ComparableEntity):
             __tablename__ = "c"
             id = Column(Integer, primary_key=True)
             a_id = Column(ForeignKey("a.id"))
 
+        class D(Base, ComparableEntity):
+            __tablename__ = "d"
+            id = Column(Integer, primary_key=True)
+            b_id = Column(ForeignKey("b.id"))
+
     @classmethod
     def insert_data(cls, connection):
-        A, B, C = cls.classes("A", "B", "C")
+        A, B, C, D = cls.classes("A", "B", "C", "D")
 
         s = Session(connection)
 
-        as_ = [A(id=i, cs=[C(), C()]) for i in range(1, 5)]
+        as_ = [A(id=i, cs=[C(), C()],) for i in range(1, 5)]
 
-        s.add_all([B(a=as_[0]), B(a=as_[1]), B(a=as_[2]), B(a=as_[3])])
+        s.add_all(
+            [
+                B(a=as_[0], ds=[D()]),
+                B(a=as_[1], ds=[D()]),
+                B(a=as_[2]),
+                B(a=as_[3]),
+            ]
+        )
 
         s.commit()
 
-    def test_subqload_w_from_self(self):
+    def test_subq_w_from_self_one(self):
         A, B, C = self.classes("A", "B", "C")
 
         s = Session()
 
-        q = (
-            s.query(B)
-            .join(B.a)
-            .filter(B.id < 4)
-            .filter(A.id > 1)
-            .from_self()
-            .options(subqueryload(B.a).subqueryload(A.cs))
-            .from_self()
-        )
+        cache = {}
+
+        for i in range(3):
+            q = (
+                s.query(B)
+                .execution_options(compiled_cache=cache)
+                .join(B.a)
+                .filter(B.id < 4)
+                .filter(A.id > 1)
+                .from_self()
+                .options(subqueryload(B.a).subqueryload(A.cs))
+                .from_self()
+            )
 
-        def go():
-            results = q.all()
-            eq_(
-                results,
-                [
-                    B(
-                        a=A(cs=[C(a_id=2, id=3), C(a_id=2, id=4)], id=2),
-                        a_id=2,
-                        id=2,
-                    ),
-                    B(
-                        a=A(cs=[C(a_id=3, id=5), C(a_id=3, id=6)], id=3),
-                        a_id=3,
-                        id=3,
-                    ),
-                ],
+            def go():
+                results = q.all()
+                eq_(
+                    results,
+                    [
+                        B(
+                            a=A(cs=[C(a_id=2, id=3), C(a_id=2, id=4)], id=2),
+                            a_id=2,
+                            id=2,
+                        ),
+                        B(
+                            a=A(cs=[C(a_id=3, id=5), C(a_id=3, id=6)], id=3),
+                            a_id=3,
+                            id=3,
+                        ),
+                    ],
+                )
+
+            self.assert_sql_execution(
+                testing.db,
+                go,
+                CompiledSQL(
+                    "SELECT anon_1.anon_2_b_id AS anon_1_anon_2_b_id, "
+                    "anon_1.anon_2_b_a_id AS anon_1_anon_2_b_a_id FROM "
+                    "(SELECT anon_2.b_id AS anon_2_b_id, anon_2.b_a_id "
+                    "AS anon_2_b_a_id FROM (SELECT b.id AS b_id, b.a_id "
+                    "AS b_a_id FROM b JOIN a ON a.id = b.a_id "
+                    "WHERE b.id < :id_1 AND a.id > :id_2) AS anon_2) AS anon_1"
+                ),
+                CompiledSQL(
+                    "SELECT a.id AS a_id, anon_1.anon_2_anon_3_b_a_id AS "
+                    "anon_1_anon_2_anon_3_b_a_id FROM (SELECT DISTINCT "
+                    "anon_2.anon_3_b_a_id AS anon_2_anon_3_b_a_id FROM "
+                    "(SELECT anon_3.b_id AS anon_3_b_id, anon_3.b_a_id "
+                    "AS anon_3_b_a_id FROM (SELECT b.id AS b_id, b.a_id "
+                    "AS b_a_id FROM b JOIN a ON a.id = b.a_id "
+                    "WHERE b.id < :id_1 AND a.id > :id_2) AS anon_3) "
+                    "AS anon_2) AS anon_1 JOIN a "
+                    "ON a.id = anon_1.anon_2_anon_3_b_a_id"
+                ),
+                CompiledSQL(
+                    "SELECT c.id AS c_id, c.a_id AS c_a_id, a_1.id "
+                    "AS a_1_id FROM (SELECT DISTINCT anon_2.anon_3_b_a_id AS "
+                    "anon_2_anon_3_b_a_id FROM "
+                    "(SELECT anon_3.b_id AS anon_3_b_id, anon_3.b_a_id "
+                    "AS anon_3_b_a_id FROM (SELECT b.id AS b_id, b.a_id "
+                    "AS b_a_id FROM b JOIN a ON a.id = b.a_id "
+                    "WHERE b.id < :id_1 AND a.id > :id_2) AS anon_3) "
+                    "AS anon_2) AS anon_1 JOIN a AS a_1 ON a_1.id = "
+                    "anon_1.anon_2_anon_3_b_a_id JOIN c ON a_1.id = c.a_id "
+                    "ORDER BY c.id"
+                ),
             )
 
-        self.assert_sql_count(testing.db, go, 3)
+            s.close()
+
+    def test_subq_w_from_self_two(self):
+
+        A, B, C = self.classes("A", "B", "C")
+
+        s = Session()
+        cache = {}
+
+        for i in range(3):
+
+            def go():
+                q = (
+                    s.query(B)
+                    .execution_options(compiled_cache=cache)
+                    .join(B.a)
+                    .from_self()
+                )
+                q = q.options(subqueryload(B.ds))
+
+                q.all()
+
+            self.assert_sql_execution(
+                testing.db,
+                go,
+                CompiledSQL(
+                    "SELECT anon_1.b_id AS anon_1_b_id, anon_1.b_a_id AS "
+                    "anon_1_b_a_id FROM (SELECT b.id AS b_id, b.a_id "
+                    "AS b_a_id FROM b JOIN a ON a.id = b.a_id) AS anon_1"
+                ),
+                CompiledSQL(
+                    "SELECT d.id AS d_id, d.b_id AS d_b_id, "
+                    "anon_1.anon_2_b_id AS anon_1_anon_2_b_id "
+                    "FROM (SELECT anon_2.b_id AS anon_2_b_id FROM "
+                    "(SELECT b.id AS b_id, b.a_id AS b_a_id FROM b "
+                    "JOIN a ON a.id = b.a_id) AS anon_2) AS anon_1 "
+                    "JOIN d ON anon_1.anon_2_b_id = d.b_id ORDER BY d.id"
+                ),
+            )
+            s.close()
index 5430fbffc542d60875cdb0d416f8a431ff22ceaa..12a8417ba4b29da4078e39b5aee38eb2261b0956 100644 (file)
@@ -168,8 +168,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
         s = Session()
 
         assert_raises_message(
-            exc.InvalidRequestError,
-            "Invalid expression type: 5",
+            exc.ArgumentError,
+            "SET/VALUES column expression or string key expected, got .*Thing",
             s.query(User).update,
             {Thing(): "moonbeam"},
             synchronize_session="evaluate",
index 56821977059e2586e1dfc285bec4b0717629beef..a96ba23d79389f3f8693e185d35a0f16d87bfbf7 100644 (file)
@@ -68,7 +68,7 @@ if os.path.exists("orm2010.db"):
     os.remove("orm2010.db")
 # use a file based database so that cursor.execute() has some
 # palpable overhead.
-engine = create_engine("sqlite:///orm2010.db", query_cache_size=100)
+engine = create_engine("sqlite:///orm2010.db")
 
 Base.metadata.create_all(engine)
 
@@ -194,11 +194,11 @@ def run_with_time(factor):
     def status(msg):
         print("%d - %s" % (time.time() - now, msg))
 
-    runit_persist(status, 10)
+    runit_persist(status, factor)
 
     print("Total time: %d" % (time.time() - now))
 
-    runit_query_runs(status, 10)
+    runit_query_runs(status, factor)
 
     print("Total time: %d" % (time.time() - now))
 
index 3f74bdbccc3dc2d5b58bb6bd695d8fee0ae1a90d..6aaafc716ac079f4a67f84015d1645678c829869 100644 (file)
@@ -13,6 +13,7 @@ from sqlalchemy import exists
 from sqlalchemy import extract
 from sqlalchemy import Float
 from sqlalchemy import Integer
+from sqlalchemy import literal_column
 from sqlalchemy import MetaData
 from sqlalchemy import or_
 from sqlalchemy import select
@@ -43,6 +44,7 @@ from sqlalchemy.sql.base import HasCacheKey
 from sqlalchemy.sql.elements import _label_reference
 from sqlalchemy.sql.elements import _textual_label_reference
 from sqlalchemy.sql.elements import Annotated
+from sqlalchemy.sql.elements import BindParameter
 from sqlalchemy.sql.elements import ClauseElement
 from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.elements import CollationClause
@@ -68,6 +70,8 @@ from sqlalchemy.testing import is_not_
 from sqlalchemy.testing import is_true
 from sqlalchemy.testing import ne_
 from sqlalchemy.testing.util import random_choices
+from sqlalchemy.types import ARRAY
+from sqlalchemy.types import JSON
 from sqlalchemy.util import class_hierarchy
 
 meta = MetaData()
@@ -187,6 +191,10 @@ class CoreFixtures(object):
             column("q").like("somstr", escape="\\"),
             column("q").like("somstr", escape="X"),
         ),
+        lambda: (
+            column("q", ARRAY(Integer))[3] == 5,
+            column("q", ARRAY(Integer))[3:5] == 5,
+        ),
         lambda: (
             table_a.c.a,
             table_a.c.a._annotate({"orm": True}),
@@ -233,6 +241,15 @@ class CoreFixtures(object):
             cast(column("q"), Float),
             cast(column("p"), Integer),
         ),
+        lambda: (
+            column("x", JSON)["key1"],
+            column("x", JSON)["key1"].as_boolean(),
+            column("x", JSON)["key1"].as_float(),
+            column("x", JSON)["key1"].as_integer(),
+            column("x", JSON)["key1"].as_string(),
+            column("y", JSON)["key1"].as_integer(),
+            column("y", JSON)["key1"].as_string(),
+        ),
         lambda: (
             bindparam("x"),
             bindparam("y"),
@@ -334,6 +351,11 @@ class CoreFixtures(object):
             select([table_a.c.a]),
             select([table_a.c.a, table_a.c.b]),
             select([table_a.c.b, table_a.c.a]),
+            select([table_a.c.b, table_a.c.a]).limit(5),
+            select([table_a.c.b, table_a.c.a]).limit(5).offset(10),
+            select([table_a.c.b, table_a.c.a])
+            .limit(literal_column("foobar"))
+            .offset(10),
             select([table_a.c.b, table_a.c.a]).apply_labels(),
             select([table_a.c.a]).where(table_a.c.b == 5),
             select([table_a.c.a])
@@ -827,18 +849,21 @@ class CacheKeyFixture(object):
                     ne_(a_key.key, b_key.key)
 
             # ClauseElement-specific test to ensure the cache key
-            # collected all the bound parameters
+            # collected all the bound parameters that aren't marked
+            # as "literal execute"
             if isinstance(case_a[a], ClauseElement) and isinstance(
                 case_b[b], ClauseElement
             ):
                 assert_a_params = []
                 assert_b_params = []
-                visitors.traverse(
-                    case_a[a], {}, {"bindparam": assert_a_params.append}
-                )
-                visitors.traverse(
-                    case_b[b], {}, {"bindparam": assert_b_params.append}
-                )
+
+                for elem in visitors.iterate(case_a[a]):
+                    if elem.__visit_name__ == "bindparam":
+                        assert_a_params.append(elem)
+
+                for elem in visitors.iterate(case_b[b]):
+                    if elem.__visit_name__ == "bindparam":
+                        assert_b_params.append(elem)
 
                 # note we're asserting the order of the params as well as
                 # if there are dupes or not.  ordering has to be
@@ -907,6 +932,39 @@ class CacheKeyTest(CacheKeyFixture, CoreFixtures, fixtures.TestBase):
             for fixture in fixtures_:
                 self._run_cache_key_fixture(fixture, compare_values)
 
+    def test_literal_binds(self):
+        def fixture():
+            return (
+                bindparam(None, value="x", literal_execute=True),
+                bindparam(None, value="y", literal_execute=True),
+            )
+
+        self._run_cache_key_fixture(
+            fixture, True,
+        )
+
+    def test_bindparam_subclass_nocache(self):
+        # does not implement inherit_cache
+        class _literal_bindparam(BindParameter):
+            pass
+
+        l1 = _literal_bindparam(None, value="x1")
+        is_(l1._generate_cache_key(), None)
+
+    def test_bindparam_subclass_ok_cache(self):
+        # implements inherit_cache
+        class _literal_bindparam(BindParameter):
+            inherit_cache = True
+
+        def fixture():
+            return (
+                _literal_bindparam(None, value="x1"),
+                _literal_bindparam(None, value="x2"),
+                _literal_bindparam(None),
+            )
+
+        self._run_cache_key_fixture(fixture, True)
+
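
These two tests pin down the contract for SQL element subclasses: a direct subclass yields no cache key at all unless it opts back in with inherit_cache=True. A minimal sketch of both sides (the class names below are illustrative):

    from sqlalchemy.sql.elements import BindParameter

    class _plain_param(BindParameter):
        pass  # no inherit_cache flag: treated as uncacheable

    class _caching_param(BindParameter):
        inherit_cache = True  # opt back in to BindParameter's cache key

    assert _plain_param(None, value="x")._generate_cache_key() is None
    assert _caching_param(None, value="x")._generate_cache_key() is not None
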
     def test_cache_key_unknown_traverse(self):
         class Foobar1(ClauseElement):
             _traverse_internals = [
@@ -1264,6 +1322,30 @@ class CompareClausesTest(fixtures.TestBase):
 
         is_false(l1.compare(l2))
 
+    def test_cache_key_limit_offset_values(self):
+        s1 = select([column("q")]).limit(10)
+        s2 = select([column("q")]).limit(25)
+        s3 = select([column("q")]).limit(25).offset(5)
+        s4 = select([column("q")]).limit(25).offset(18)
+        s5 = select([column("q")]).limit(7).offset(12)
+        s6 = select([column("q")]).limit(literal_column("q")).offset(12)
+
+        for should_eq_left, should_eq_right in [(s1, s2), (s3, s4), (s3, s5)]:
+            eq_(
+                should_eq_left._generate_cache_key().key,
+                should_eq_right._generate_cache_key().key,
+            )
+
+        for shouldnt_eq_left, shouldnt_eq_right in [
+            (s1, s3),
+            (s5, s6),
+            (s2, s3),
+        ]:
+            ne_(
+                shouldnt_eq_left._generate_cache_key().key,
+                shouldnt_eq_right._generate_cache_key().key,
+            )
+
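
The equalities above hold because limit/offset integers travel as bound parameters, so only a structural change such as the literal_column() limit alters the key. In sketch form:

    from sqlalchemy import column, select

    s1 = select([column("q")]).limit(10)
    s2 = select([column("q")]).limit(25)

    # the limit value rides as a bound parameter (roughly
    # "SELECT q LIMIT :param_1"), so only the structure is hashed
    assert s1._generate_cache_key().key == s2._generate_cache_key().key
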
     def test_compare_labels(self):
         is_true(column("q").label(None).compare(column("q").label(None)))
 
index 9881d1247ceaf8facebc20fc8e21b5635766034a..c12543f82ae8f10f07531ad838088459a276ae35 100644 (file)
@@ -75,6 +75,7 @@ from sqlalchemy.sql import elements
 from sqlalchemy.sql import label
 from sqlalchemy.sql import operators
 from sqlalchemy.sql import table
+from sqlalchemy.sql import util as sql_util
 from sqlalchemy.sql.elements import BooleanClauseList
 from sqlalchemy.sql.expression import ClauseList
 from sqlalchemy.sql.expression import HasPrefixes
@@ -85,7 +86,9 @@ from sqlalchemy.testing import eq_
 from sqlalchemy.testing import eq_ignore_whitespace
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
+from sqlalchemy.testing import is_true
 from sqlalchemy.testing import mock
+from sqlalchemy.testing import ne_
 from sqlalchemy.util import u
 
 table1 = table(
@@ -3493,6 +3496,140 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase):
             extracted_parameters=s1_cache_key[1],
         )
 
+    def test_construct_params_w_bind_clones_post(self):
+        """test that a BindParameter that has been cloned after the cache
+        key was generated still matches up when construct_params()
+        is called with an extracted parameter collection.
+
+        This case occurs now with the ORM, as ORM construction will
+        frequently run clause adaptation on elements of the statement within
+        compilation, after the cache key has been generated.  This adaptation
+        hits BindParameter objects, which will change their key, as they
+        will usually have unique=True.   So the construct_params() process,
+        when it links its internal bind_names to the cache key binds,
+        must do this based on bindparam._identifying_key, which does not
+        change across clones, rather than .key, which usually will.
+
+        """
+
+        stmt = select([table1.c.myid]).where(table1.c.myid == 5)
+
+        # get the original bindparam.
+        original_bind = stmt._where_criteria[0].right
+
+        # it's anonymous so unique=True
+        is_true(original_bind.unique)
+
+        # cache key against the original param
+        cache_key = stmt._generate_cache_key()
+
+        # now adapt the statement
+        stmt_adapted = sql_util.ClauseAdapter(table1).traverse(stmt)
+
+        # new bind parameter has a different key but same
+        # identifying key
+        new_bind = stmt_adapted._where_criteria[0].right
+        eq_(original_bind._identifying_key, new_bind._identifying_key)
+        ne_(original_bind.key, new_bind.key)
+
+        # compile the adapted statement but set the cache key to the one
+        # generated from the unadapted statement.  this mirrors what happens
+        # when the ORM runs clause adaptation inside of visit_select, after
+        # the cache key is generated but before the compiler is given the
+        # core select statement to actually render.
+        compiled = stmt_adapted.compile(cache_key=cache_key)
+
+        # params set up as 5
+        eq_(compiled.construct_params(params={},), {"myid_1": 5})
+
+        # also works w the original cache key
+        eq_(
+            compiled.construct_params(
+                params={}, extracted_parameters=cache_key[1]
+            ),
+            {"myid_1": 5},
+        )
+
+        # now make a totally new statement with the same cache key
+        new_stmt = select([table1.c.myid]).where(table1.c.myid == 10)
+        new_cache_key = new_stmt._generate_cache_key()
+
+        # cache keys match
+        eq_(cache_key.key, new_cache_key.key)
+
+        # ensure we get "10" from construct params.   if it matched
+        # based on .key and not ._identifying_key, it would not see that
+        # the bind parameter is part of the cache key.
+        eq_(
+            compiled.construct_params(
+                params={}, extracted_parameters=new_cache_key[1]
+            ),
+            {"myid_1": 10},
+        )
+
+    def test_construct_params_w_bind_clones_pre(self):
+        """test that a BindParameter that has been cloned before the cache
+        key was generated, and was doubled up just to make sure it has to
+        be unique, still matches up when construct_params()
+        is called with an extracted parameter collection.
+
+        Other ORM features like optimized_compare() end up doing something
+        like this, such as when there are multiple "has()" or "any()" calls
+        which would have cloned the join condition and changed the values of
+        bound parameters.
+
+        """
+
+        stmt = select([table1.c.myid]).where(table1.c.myid == 5)
+
+        original_bind = stmt._where_criteria[0].right
+        # it's anonymous so unique=True
+        is_true(original_bind.unique)
+
+        b1 = original_bind._clone()
+        b1.value = 10
+        b2 = original_bind._clone()
+        b2.value = 12
+
+        # make a new statement that uses the clones as distinct
+        # parameters
+        modified_stmt = select([table1.c.myid]).where(
+            or_(table1.c.myid == b1, table1.c.myid == b2)
+        )
+
+        cache_key = modified_stmt._generate_cache_key()
+        compiled = modified_stmt.compile(cache_key=cache_key)
+
+        eq_(
+            compiled.construct_params(params={}), {"myid_1": 10, "myid_2": 12},
+        )
+
+        # make a new statement doing the same thing and make sure
+        # the binds match up correctly
+        new_stmt = select([table1.c.myid]).where(table1.c.myid == 8)
+
+        new_original_bind = new_stmt._where_criteria[0].right
+        new_b1 = new_original_bind._clone()
+        new_b1.value = 20
+        new_b2 = new_original_bind._clone()
+        new_b2.value = 18
+        modified_new_stmt = select([table1.c.myid]).where(
+            or_(table1.c.myid == new_b1, table1.c.myid == new_b2)
+        )
+
+        new_cache_key = modified_new_stmt._generate_cache_key()
+
+        # cache keys match
+        eq_(cache_key.key, new_cache_key.key)
+
+        # ensure we get both values
+        eq_(
+            compiled.construct_params(
+                params={}, extracted_parameters=new_cache_key[1]
+            ),
+            {"myid_1": 20, "myid_2": 18},
+        )
+
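
Both docstrings above reduce to one invariant: cloning a unique BindParameter moves its rendered .key but not its _identifying_key, and construct_params() matches cache-key binds on the latter. A condensed sketch of that invariant, mirroring the adaptation step in the first test (private attributes shown for illustration only):

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.sql import util as sql_util

    m = MetaData()
    t = Table("t", m, Column("myid", Integer))

    stmt = select([t.c.myid]).where(t.c.myid == 5)
    bind = stmt._where_criteria[0].right

    adapted = sql_util.ClauseAdapter(t).traverse(stmt)
    cloned = adapted._where_criteria[0].right

    # the anonymous .key moves when the unique param is cloned, but the
    # identifying key survives, which is what construct_params() matches on
    assert bind._identifying_key == cloned._identifying_key
    assert bind.key != cloned.key
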
     def test_tuple_expanding_in_no_values(self):
         expr = tuple_(table1.c.myid, table1.c.name).in_(
             [(1, "foo"), (5, "bar")]
index af01d9a3d37b0b68feb12984606f8cf4df2e40b6..4c5c51b4a163a73085556828b33569e8e3885963 100644 (file)
@@ -113,11 +113,17 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
     def test_operate(self, operator, right):
         left = column("left")
 
+        if operators.is_comparison(operator):
+            type_ = sqltypes.BOOLEANTYPE
+        else:
+            type_ = sqltypes.NULLTYPE
+
         assert left.comparator.operate(operator, right).compare(
             BinaryExpression(
                 coercions.expect(roles.WhereHavingRole, left),
                 coercions.expect(roles.WhereHavingRole, right),
                 operator,
+                type_=type_,
             )
         )
 
@@ -129,6 +135,7 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
                 coercions.expect(roles.WhereHavingRole, right),
                 operator,
                 modifiers=modifiers,
+                type_=type_,
             )
         )
 
@@ -167,6 +174,7 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
                     "left", value=[1, 2, 3], unique=True, expanding=True
                 ),
                 operators.in_op,
+                type_=sqltypes.BOOLEANTYPE,
             )
         )
         self._loop_test(operators.in_op, [1, 2, 3])
@@ -180,6 +188,7 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
                     "left", value=[1, 2, 3], unique=True, expanding=True
                 ),
                 operators.notin_op,
+                type_=sqltypes.BOOLEANTYPE,
             )
         )
         self._loop_test(operators.notin_op, [1, 2, 3])
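
The type_ now threaded into the expected BinaryExpression reflects that comparison operators carry an explicit Boolean result type, while non-comparison operators fall back to NULLTYPE. The same split, observed through public expressions:

    from sqlalchemy import column
    from sqlalchemy.sql import operators, sqltypes

    assert operators.is_comparison(operators.eq)

    # a comparison yields a Boolean-typed expression...
    assert isinstance((column("a") == column("b")).type, sqltypes.Boolean)

    # ...while arithmetic on untyped columns stays at the null type
    assert isinstance((column("a") + column("b")).type, sqltypes.NullType)
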
index 22c4b17434836467a2c0e6201cc7c00dc4ec3dfd..664862dcb3139120c59772bb697f31de6d2fd2de 100644 (file)
@@ -111,10 +111,46 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
     def test_update_literal_binds(self):
         table1 = self.tables.mytable
 
+        stmt = (
+            table1.update().values(name="jack").where(table1.c.name == "jill")
+        )
+
+        self.assert_compile(
+            stmt,
+            "UPDATE mytable SET name='jack' WHERE mytable.name = 'jill'",
+            literal_binds=True,
+        )
+
+    def test_update_custom_key_thing(self):
         table1 = self.tables.mytable
 
+        class Thing(object):
+            def __clause_element__(self):
+                return table1.c.name
+
         stmt = (
-            table1.update().values(name="jack").where(table1.c.name == "jill")
+            table1.update()
+            .values({Thing(): "jack"})
+            .where(table1.c.name == "jill")
+        )
+
+        self.assert_compile(
+            stmt,
+            "UPDATE mytable SET name='jack' WHERE mytable.name = 'jill'",
+            literal_binds=True,
+        )
+
+    def test_update_ordered_custom_key_thing(self):
+        table1 = self.tables.mytable
+
+        class Thing(object):
+            def __clause_element__(self):
+                return table1.c.name
+
+        stmt = (
+            table1.update()
+            .ordered_values((Thing(), "jack"))
+            .where(table1.c.name == "jill")
         )
 
         self.assert_compile(
@@ -123,6 +159,34 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
             literal_binds=True,
         )
 
+    def test_update_broken_custom_key_thing(self):
+        table1 = self.tables.mytable
+
+        class Thing(object):
+            def __clause_element__(self):
+                return 5
+
+        assert_raises_message(
+            exc.ArgumentError,
+            "SET/VALUES column expression or string key expected, got .*Thing",
+            table1.update().values,
+            {Thing(): "jack"},
+        )
+
+    def test_update_ordered_broken_custom_key_thing(self):
+        table1 = self.tables.mytable
+
+        class Thing(object):
+            def __clause_element__(self):
+                return 5
+
+        assert_raises_message(
+            exc.ArgumentError,
+            "SET/VALUES column expression or string key expected, got .*Thing",
+            table1.update().ordered_values,
+            (Thing(), "jack"),
+        )
+
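
The four new tests exercise the coercion of update().values() and ordered_values() keys through __clause_element__(), including the error path when the hook returns a non-column. A compact sketch of the happy path (NameKey is a hypothetical stand-in for objects such as ORM-instrumented attributes):

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    mytable = Table(
        "mytable",
        metadata,
        Column("myid", Integer, primary_key=True),
        Column("name", String(30)),
    )

    class NameKey(object):
        # any object exposing __clause_element__() may act as a SET key
        def __clause_element__(self):
            return mytable.c.name

    stmt = mytable.update().values({NameKey(): "jack"})
    print(stmt)  # UPDATE mytable SET name=:name
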
     def test_correlated_update_one(self):
         table1 = self.tables.mytable