--- /dev/null
+.. change::
+ :tags: bug, sql
+ :tickets: 6184
+
+ Added a new flag to the :class:`_engine.Dialect` class called
+ :attr:`_engine.Dialect.supports_statement_cache`. This flag now needs to be present
+ directly on a dialect class in order for SQLAlchemy's
+ :ref:`query cache <sql_caching>` to take effect for that dialect. The
+    rationale is based on discovered issues such as :ticket:`6173` revealing
+    that dialects which hardcode literal values into the compiled SQL string,
+    often the numerical parameters used for LIMIT / OFFSET, will not be
+    compatible with caching until these dialects are revised to use only the
+    parameters present in the statement. For third party dialects where this
+    flag is not applied, the SQL logging will show the message "dialect
+    does not support caching", indicating that dialect authors should apply
+    this flag once they have verified that no per-statement literal values
+    are rendered within the compilation phase.
+
+ .. seealso::
+
+ :ref:`engine_thirdparty_caching`
\ No newline at end of file
with engine.connect().execution_options(compiled_cache=None) as conn:
conn.execute(table.select())
+.. _engine_thirdparty_caching:
+
+Caching for Third Party Dialects
+---------------------------------
+
+The caching feature requires that the dialect's compiler produces a SQL
+construct that is generically reusable given a particular cache key. This
+means that any literal values in a statement, such as the LIMIT/OFFSET values
+for a SELECT, cannot be hardcoded in the dialect's compilation scheme, as
+the compiled string would then not be reusable. SQLAlchemy supports rendering
+bound parameters as literals at execution time using the
+:meth:`_sql.BindParameter.render_literal_execute` method, which a custom
+compiler can apply to the existing ``Select._limit_clause`` and
+``Select._offset_clause`` attributes.
+
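+For example, a compiler's ``limit_clause`` hook might convert a simple
+integer LIMIT value into a "literal execute" parameter along the lines of
+the following sketch; the ``Select._simple_int_clause()`` helper used here
+is a SQLAlchemy internal, and the exact structure of a real dialect's
+compilation scheme will vary::
+
+    from sqlalchemy.sql import compiler
+
+    class MyCompiler(compiler.SQLCompiler):
+        def limit_clause(self, select, **kw):
+            text = ""
+            if select._limit_clause is not None:
+                limit_clause = select._limit_clause
+                if select._simple_int_clause(limit_clause):
+                    # render the LIMIT value as a "post compile" literal so
+                    # that the cached SQL string does not embed a fixed
+                    # integer value
+                    limit_clause = limit_clause.render_literal_execute()
+                text += "\n LIMIT " + self.process(limit_clause, **kw)
+            return text
+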
+As there are many third party dialects, many of which may be generating
+literal values within SQL statements without the benefit of the newer
+"literal execute" feature, SQLAlchemy as of version 1.4.5 has added a flag
+to dialects known as :attr:`_engine.Dialect.supports_statement_cache`. This
+flag is checked for directly on a dialect class, and not on any superclass,
+so that even a third party dialect that subclasses an existing cacheable
+SQLAlchemy dialect such as ``sqlalchemy.dialects.postgresql.PGDialect`` must
+still explicitly specify this flag, once the dialect has been altered as
+needed and tested for reusability of compiled SQL statements with differing
+parameters.
+
+For third party dialects that don't include this flag, the SQL logging for
+such a dialect will indicate ``dialect does not support caching``. Dialect
+authors can apply the flag as follows::
+
+ from sqlalchemy.engine.default import DefaultDialect
+
+ class MyDialect(DefaultDialect):
+ supports_statement_cache = True
+
+The flag needs to be applied to all subclasses of the dialect as well::
+
+ class MyDBAPIForMyDialect(MyDialect):
+ supports_statement_cache = True
+
+.. versionadded:: 1.4.5
+
+
.. _engine_lambda_caching:
Using Lambdas to add significant speed gains to statement production
"""Firebird dialect"""
name = "firebird"
+ supports_statement_cache = True
max_identifier_length = 31
class FBDialect_fdb(FBDialect_kinterbasdb):
+ supports_statement_cache = True
+
def __init__(self, enable_rowcount=True, retaining=False, **kwargs):
super(FBDialect_fdb, self).__init__(
enable_rowcount=enable_rowcount, retaining=retaining, **kwargs
class FBDialect_kinterbasdb(FBDialect):
driver = "kinterbasdb"
+ supports_statement_cache = True
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
execution_ctx_cls = FBExecutionContext_kinterbasdb
class MSDialect(default.DefaultDialect):
# will assume it's at least mssql2005
name = "mssql"
+ supports_statement_cache = True
supports_default_values = True
supports_empty_insert = False
execution_ctx_cls = MSExecutionContext
# this is only needed if "native ODBC" mode is used,
# which is now disabled by default.
# statement_compiler = MSSQLStrictCompiler
+ supports_statement_cache = True
execution_ctx_cls = MSExecutionContext_mxodbc
class MSDialect_pymssql(MSDialect):
+ supports_statement_cache = True
supports_native_decimal = True
driver = "pymssql"
class MSDialect_pyodbc(PyODBCConnector, MSDialect):
+ supports_statement_cache = True
# mssql still has problems with this on Linux
supports_sane_rowcount_returning = False
class MySQLDialect_aiomysql(MySQLDialect_pymysql):
driver = "aiomysql"
+ supports_statement_cache = True
supports_server_side_cursors = True
_sscursor = AsyncAdapt_aiomysql_ss_cursor
"""
name = "mysql"
+ supports_statement_cache = True
+
supports_alter = True
# MySQL has no true "boolean" type; we
class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = "cymysql"
+ supports_statement_cache = True
description_encoding = None
supports_sane_rowcount = True
class MariaDBDialect(MySQLDialect):
is_mariadb = True
+ supports_statement_cache = True
name = "mariadb"
MariaDBDialect,
driver_cls,
),
- {},
+ {"supports_statement_cache": True},
)
class MySQLDialect_mariadbconnector(MySQLDialect):
driver = "mariadbconnector"
+ supports_statement_cache = True
# set this to True at the module level to prevent the driver from running
# against a backend that server detects as MySQL. currently this appears to
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = "mysqlconnector"
+ supports_statement_cache = True
supports_unicode_binds = True
class MySQLDialect_mysqldb(MySQLDialect):
driver = "mysqldb"
+ supports_statement_cache = True
supports_unicode_statements = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
class MySQLDialect_oursql(MySQLDialect):
driver = "oursql"
+ supports_statement_cache = True
if util.py2k:
supports_unicode_binds = True
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = "pymysql"
+ supports_statement_cache = True
description_encoding = None
class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
+ supports_statement_cache = True
colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
supports_unicode_statements = True
execution_ctx_cls = MySQLExecutionContext_pyodbc
offset_clause = select._offset_clause
if select._simple_int_clause(limit_clause):
- limit_clause = limit_clause._render_literal_execute()
+ limit_clause = limit_clause.render_literal_execute()
if select._simple_int_clause(offset_clause):
- offset_clause = offset_clause._render_literal_execute()
+ offset_clause = offset_clause.render_literal_execute()
# currently using form at:
# https://blogs.oracle.com/oraclemagazine/\
class OracleDialect(default.DefaultDialect):
name = "oracle"
+ supports_statement_cache = True
supports_alter = True
supports_unicode_statements = False
supports_unicode_binds = False
class OracleDialect_cx_oracle(OracleDialect):
+ supports_statement_cache = True
execution_ctx_cls = OracleExecutionContext_cx_oracle
statement_compiler = OracleCompiler_cx_oracle
class PGDialect_asyncpg(PGDialect):
driver = "asyncpg"
+ supports_statement_cache = True
supports_unicode_statements = True
supports_server_side_cursors = True
class PGDialect(default.DefaultDialect):
name = "postgresql"
+ supports_statement_cache = True
supports_alter = True
max_identifier_length = 63
supports_sane_rowcount = True
class PGDialect_pg8000(PGDialect):
driver = "pg8000"
+ supports_statement_cache = True
supports_unicode_statements = True
class PGDialect_psycopg2(PGDialect):
driver = "psycopg2"
+
+ supports_statement_cache = True
+
if util.py2k:
# turn off supports_unicode_statements for Python 2. psycopg2 supports
# unicode statements in Py2K. But! it does not support unicode *bound
class PGDialect_psycopg2cffi(PGDialect_psycopg2):
driver = "psycopg2cffi"
supports_unicode_statements = True
+ supports_statement_cache = True
# psycopg2cffi's first release is 2.5.0, but reports
# __version__ as 2.4.4. Subsequent releases seem to have
class PGDialect_pygresql(PGDialect):
driver = "pygresql"
+ supports_statement_cache = True
statement_compiler = _PGCompiler
preparer = _PGIdentifierPreparer
class PGDialect_pypostgresql(PGDialect):
driver = "pypostgresql"
+ supports_statement_cache = True
supports_unicode_statements = True
supports_unicode_binds = True
description_encoding = None
class SQLiteDialect_aiosqlite(SQLiteDialect_pysqlite):
driver = "aiosqlite"
+ supports_statement_cache = True
is_async = True
supports_cast = True
supports_multivalues_insert = True
tuple_in_values = True
+ supports_statement_cache = True
default_paramstyle = "qmark"
execution_ctx_cls = SQLiteExecutionContext
class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
driver = "pysqlcipher"
+ supports_statement_cache = True
pragmas = ("kdf_iter", "cipher", "cipher_page_size", "cipher_use_hmac")
class SQLiteDialect_pysqlite(SQLiteDialect):
default_paramstyle = "qmark"
+ supports_statement_cache = True
colspecs = util.update_copy(
SQLiteDialect.colspecs,
supports_unicode_statements = False
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
+ supports_statement_cache = True
supports_native_boolean = False
supports_unicode_binds = False
class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
execution_ctx_cls = SybaseExecutionContext_mxodbc
+ supports_statement_cache = True
dialect = SybaseDialect_mxodbc
class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
execution_ctx_cls = SybaseExecutionContext_pyodbc
+ supports_statement_cache = True
colspecs = {sqltypes.Numeric: _SybNumeric_pyodbc}
execution_ctx_cls = SybaseExecutionContext_pysybase
statement_compiler = SybaseSQLCompiler_pysybase
+ supports_statement_cache = True
+
colspecs = {sqltypes.Numeric: _SybNumeric, sqltypes.Float: sqltypes.Float}
@classmethod
CACHE_MISS = util.symbol("CACHE_MISS")
CACHING_DISABLED = util.symbol("CACHING_DISABLED")
NO_CACHE_KEY = util.symbol("NO_CACHE_KEY")
+NO_DIALECT_SUPPORT = util.symbol("NO_DIALECT_SUPPORT")
class DefaultDialect(interfaces.Dialect):
supports_comments = False
inline_comments = False
use_setinputsizes = False
+ supports_statement_cache = True
# the first value we'd get for an autoincrement
# column.
CACHE_MISS = CACHE_MISS
CACHING_DISABLED = CACHING_DISABLED
NO_CACHE_KEY = NO_CACHE_KEY
+ NO_DIALECT_SUPPORT = NO_DIALECT_SUPPORT
@util.deprecated_params(
convert_unicode=(
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
+ @util.memoized_property
+ def _supports_statement_cache(self):
+ return (
+ self.__class__.__dict__.get("supports_statement_cache", False)
+ is True
+ )
+
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
type_compiler = compiler.StrSQLTypeCompiler
preparer = compiler.IdentifierPreparer
+ supports_statement_cache = True
+
supports_sequences = True
sequences_optional = True
preexecute_autoincrement_sequences = False
return "generated in %.5fs" % (now - self.compiled._gen_time,)
elif ch is CACHING_DISABLED:
return "caching disabled %.5fs" % (now - self.compiled._gen_time,)
+ elif ch is NO_DIALECT_SUPPORT:
+ return "dialect %s+%s does not support caching %.5fs" % (
+ self.dialect.name,
+ self.dialect.driver,
+ now - self.compiled._gen_time,
+ )
else:
return "unknown"
_has_events = False
+ supports_statement_cache = True
+ """indicates if this dialect supports caching.
+
+ All dialects that are compatible with statement caching should set this
+ flag to True directly on each dialect class and subclass that supports
+ it. SQLAlchemy tests that this flag is locally present on each dialect
+ subclass before it will use statement caching. This is to provide
+ safety for legacy or new dialects that are not yet fully tested to be
+ compliant with SQL statement caching.
+
+ .. versionadded:: 1.4.5
+
+ .. seealso::
+
+ :ref:`engine_thirdparty_caching`
+
+ """
+
def create_connect_args(self, url):
"""Build DB-API compatible connection arguments.
schema_translate_map=None,
**kw
):
- if compiled_cache is not None:
+ if compiled_cache is not None and dialect._supports_statement_cache:
elem_cache_key = self._generate_cache_key()
else:
elem_cache_key = None
schema_translate_map=schema_translate_map,
**kw
)
- cache_hit = (
- dialect.CACHING_DISABLED
- if compiled_cache is None
- else dialect.NO_CACHE_KEY
- )
+
+ if not dialect._supports_statement_cache:
+ cache_hit = dialect.NO_DIALECT_SUPPORT
+ elif compiled_cache is None:
+ cache_hit = dialect.CACHING_DISABLED
+ else:
+ cache_hit = dialect.NO_CACHE_KEY
return compiled_sql, extracted_params, cache_hit
else:
return self.value
+ def render_literal_execute(self):
+ """Produce a copy of this bound parameter that will enable the
+ :paramref:`_sql.BindParameter.literal_execute` flag.
+
+        The :paramref:`_sql.BindParameter.literal_execute` flag has the
+        effect that the parameter is rendered in the compiled SQL string
+        using the special ``[POSTCOMPILE]`` form, which is converted into a
+        rendering of the parameter's literal value at SQL execution time.
+        The rationale is to support caching of SQL statement strings while
+        still embedding per-statement literal values, such as LIMIT and
+        OFFSET parameters, in the final SQL string that is passed to the
+        DBAPI. Dialects in particular may want to use this method within
+        custom compilation schemes.
+
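+        E.g., a dialect's compiler may apply this to a SELECT's LIMIT
+        expression in a manner such as::
+
+            if select._simple_int_clause(select._limit_clause):
+                # replace the bound LIMIT value with a copy that renders
+                # its literal value when the statement is executed
+                limit_clause = select._limit_clause.render_literal_execute()
+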
+ .. versionadded:: 1.4.5
+
+ .. seealso::
+
+ :ref:`engine_thirdparty_caching`
+
+ """
+ return self.__class__(
+ self.key,
+ self.value,
+ type_=self.type,
+ literal_execute=True,
+ )
+
def _with_binary_element_type(self, type_):
c = ClauseElement._clone(self)
c.type = type_
def _limit_offset_value(self):
return self.effective_value
- def _render_literal_execute(self):
- return _OffsetLimitParam(
- self.key,
- self.value,
- type_=self.type,
- literal_execute=True,
- )
-
@util.deprecated(
"1.4",
)
],
)
+
+
+class DialectDoesntSupportCachingTest(fixtures.TestBase):
+ """test the opt-in caching flag added in :ticket:`6184`."""
+
+ __only_on__ = "sqlite+pysqlite"
+
+ __requires__ = ("sqlite_memory",)
+
+ @testing.fixture()
+ def sqlite_no_cache_dialect(self, testing_engine):
+ from sqlalchemy.dialects.sqlite.pysqlite import SQLiteDialect_pysqlite
+ from sqlalchemy.dialects.sqlite.base import SQLiteCompiler
+ from sqlalchemy.sql import visitors
+
+ class MyCompiler(SQLiteCompiler):
+ def translate_select_structure(self, select_stmt, **kwargs):
+ select = select_stmt
+
+ if not getattr(select, "_mydialect_visit", None):
+ select = visitors.cloned_traverse(select_stmt, {}, {})
+ if select._limit_clause is not None:
+ # create a bindparam with a fixed name and hardcode
+ # it to the given limit. this breaks caching.
+ select._limit_clause = bindparam(
+ "limit", value=select._limit, literal_execute=True
+ )
+
+ select._mydialect_visit = True
+
+ return select
+
+ class MyDialect(SQLiteDialect_pysqlite):
+ statement_compiler = MyCompiler
+
+ from sqlalchemy.dialects import registry
+
+ def go(name):
+ return MyDialect
+
+ with mock.patch.object(registry, "load", go):
+ eng = testing_engine()
+ yield eng
+
+ @testing.fixture
+ def data_fixture(self, sqlite_no_cache_dialect):
+ m = MetaData()
+ t = Table("t1", m, Column("x", Integer))
+ with sqlite_no_cache_dialect.begin() as conn:
+ t.create(conn)
+ conn.execute(t.insert(), [{"x": 1}, {"x": 2}, {"x": 3}, {"x": 4}])
+
+ return t
+
+ def test_no_cache(self, sqlite_no_cache_dialect, data_fixture):
+ eng = sqlite_no_cache_dialect
+
+ def go(lim):
+ with eng.connect() as conn:
+ result = conn.execute(
+ select(data_fixture).order_by(data_fixture.c.x).limit(lim)
+ )
+ return result
+
+ r1 = go(2)
+ r2 = go(3)
+
+ eq_(r1.all(), [(1,), (2,)])
+ eq_(r2.all(), [(1,), (2,), (3,)])
+
+ def test_it_caches(self, sqlite_no_cache_dialect, data_fixture):
+ eng = sqlite_no_cache_dialect
+ eng.dialect.__class__.supports_statement_cache = True
+ del eng.dialect.__dict__["_supports_statement_cache"]
+
+ def go(lim):
+ with eng.connect() as conn:
+ result = conn.execute(
+ select(data_fixture).order_by(data_fixture.c.x).limit(lim)
+ )
+ return result
+
+ r1 = go(2)
+ r2 = go(3)
+
+ eq_(r1.all(), [(1,), (2,)])
+
+        # wrong answer: the hardcoded LIMIT of 2 from the first statement
+        # was cached and reused for the second statement
+ eq_(
+ r2.all(),
+ [
+ (1,),
+ (2,),
+ ],
+ )
except exc.DBAPIError:
return False
+ @property
+ def sqlite_memory(self):
+ return only_on(self._sqlite_memory_db)
+
@property
def reflects_json_type(self):
return only_on(