author    Mike Bayer <mike_mp@zzzcomputing.com>
          Wed, 7 Apr 2010 17:59:18 +0000 (13:59 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Wed, 7 Apr 2010 17:59:18 +0000 (13:59 -0400)

- Added new 'compiled_cache' execution option.  A dictionary
  where Compiled objects will be cached when the Connection
  compiles a clause expression into a dialect- and parameter-
  specific Compiled object.  It is the user's responsibility to
  manage the size of this dictionary, which will have keys
  corresponding to the dialect, clause element, the column
  names within the VALUES or SET clause of an INSERT or UPDATE,
  as well as the "batch" mode for an INSERT or UPDATE statement.
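A minimal usage sketch (not part of the commit), mirroring the new test added below; the sqlite URL and the `users` table are illustrative assumptions only:

```python
# Sketch: reuse one dictionary as the compiled cache across executions
# of the same statement.  The sqlite URL and 'users' table are assumptions
# for illustration; any configured engine/table behaves the same way.
from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine

engine = create_engine('sqlite://')
metadata = MetaData()
users = Table('users', metadata,
              Column('user_id', Integer, primary_key=True),
              Column('user_name', String(20)))
metadata.create_all(engine)

cache = {}  # the caller owns this dict and is responsible for its size
conn = engine.connect().execution_options(compiled_cache=cache)

ins = users.insert()
conn.execute(ins, {'user_name': 'u1'})  # first call compiles and stores an entry
conn.execute(ins, {'user_name': 'u2'})  # later calls with the same key reuse it
assert len(cache) == 1
```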

CHANGES
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/sql/expression.py
test/engine/test_execute.py

diff --git a/CHANGES b/CHANGES
index 3ecac29edc4c4a5db32174121dd37d4d5ac081bd..550428a44491f2d5b902214394bc5e29f8976c8a 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -73,6 +73,15 @@ CHANGES
   - Fixed bug in execution_options() feature whereby the existing
     Transaction and other state information from the parent 
     connection would not be propagated to the sub-connection.
+
+  - Added new 'compiled_cache' execution option.  A dictionary 
+    where Compiled objects will be cached when the Connection
+    compiles a clause expression into a dialect- and parameter- 
+    specific Compiled object.  It is the user's responsibility to
+    manage the size of this dictionary, which will have keys
+    corresponding to the dialect, clause element, the column
+    names within the VALUES or SET clause of an INSERT or UPDATE, 
+    as well as the "batch" mode for an INSERT or UPDATE statement.
     
 - ext
   - the compiler extension now allows @compiles decorators
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f040ec92012cde1c4a66e8231c8090d5a30cb404..4c5a6a82b6e7d9babd4fee9178abfee57536ca9f 100644 (file)
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1150,10 +1150,22 @@ class Connection(Connectable):
         else:
             keys = []
 
+        if 'compiled_cache' in self._execution_options:
+            key = self.dialect, elem, tuple(keys), len(params) > 1
+            if key in self._execution_options['compiled_cache']:
+                compiled_sql = self._execution_options['compiled_cache'][key]
+            else:
+                compiled_sql = elem.compile(
+                                dialect=self.dialect, column_keys=keys, 
+                                inline=len(params) > 1)
+                self._execution_options['compiled_cache'][key] = compiled_sql
+        else:
+            compiled_sql = elem.compile(
+                            dialect=self.dialect, column_keys=keys, 
+                            inline=len(params) > 1)
+
         context = self.__create_execution_context(
-                        compiled_sql=elem.compile(
-                                        dialect=self.dialect, column_keys=keys, 
-                                        inline=len(params) > 1),
+                        compiled_sql=compiled_sql,
                         parameters=params
                     )
         return self.__execute_context(context)
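To make the key construction in the hunk above concrete, here is a hedged sketch, reusing the hypothetical engine and `users` table from the earlier example: the key is the dialect, the clause element, the tuple of VALUES/SET column names, and the "batch" flag, so different column sets produce separate cache entries.

```python
# Sketch under the same assumptions as the earlier example (sqlite engine,
# 'users' table already defined).  Per the hunk above, the cache key is
# (dialect, clause element, tuple of column names, len(params) > 1),
# so the same insert() construct executed with different VALUES columns
# creates distinct Compiled entries.
cache = {}
conn = engine.connect().execution_options(compiled_cache=cache)

ins = users.insert()
conn.execute(ins, {'user_name': 'u1'})                 # column names: ('user_name',)
conn.execute(ins, {'user_id': 10, 'user_name': 'u2'})  # ('user_id', 'user_name')
assert len(cache) == 2

for dialect, elem, column_names, batch in cache:
    print(column_names, batch)   # batch is False for single-row executions
```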
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 5958a0bc40c780f18672bbea11a73853bcdbed03..1222a144fecf892ac753e4548f866aa0013a83db 100644 (file)
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -2276,6 +2276,23 @@ class Executable(_Generative):
           of many DBAPIs.  The flag is currently understood only by the
           psycopg2 dialect.
 
+        * compiled_cache - a dictionary where :class:`Compiled` objects
+          will be cached when the :class:`Connection` compiles a clause 
+          expression into a dialect- and parameter-specific 
+          :class:`Compiled` object.   It is the user's responsibility to
+          manage the size of this dictionary, which will have keys
+          corresponding to the dialect, clause element, the column
+          names within the VALUES or SET clause of an INSERT or UPDATE, 
+          as well as the "batch" mode for an INSERT or UPDATE statement.
+          The format of this dictionary is not guaranteed to stay the
+          same in future releases.
+          
+          This option is usually more appropriate
+          to use via the 
+          :meth:`sqlalchemy.engine.base.Connection.execution_options()`
+          method of :class:`Connection`, rather than upon individual 
+          statement objects, though the effect is the same.
+          
         See also:
         
             :meth:`sqlalchemy.engine.base.Connection.execution_options()`
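As the docstring notes, the option can also be attached to an individual statement via its own execution_options() with the same effect; a short hedged sketch, again reusing the hypothetical engine, `users` table, and cache dict from the earlier examples:

```python
# Sketch, same assumptions as before (engine, users, cache).  Either form
# populates the same user-supplied cache dictionary.

# Connection-level (the usually preferred form):
conn = engine.connect().execution_options(compiled_cache=cache)
conn.execute(users.select())

# Statement-level (same effect, per the docstring above):
stmt = users.select().execution_options(compiled_cache=cache)
engine.connect().execute(stmt)
```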
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 8fd5e7eb60c4b4d71a8b3f74928992ad104eff2d..e83166c9ac04d37ab88875079059c63e056d609c 100644 (file)
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -111,6 +111,37 @@ class ExecuteTest(TestBase):
             (1, None)
         ])
 
+class CompiledCacheTest(TestBase):
+    @classmethod
+    def setup_class(cls):
+        global users, metadata
+        metadata = MetaData(testing.db)
+        users = Table('users', metadata,
+            Column('user_id', INT, primary_key = True),
+            Column('user_name', VARCHAR(20)),
+        )
+        metadata.create_all()
+
+    @engines.close_first
+    def teardown(self):
+        testing.db.connect().execute(users.delete())
+        
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+    
+    def test_cache(self):
+        conn = testing.db.connect()
+        cache = {}
+        cached_conn = conn.execution_options(compiled_cache=cache)
+        
+        ins = users.insert()
+        cached_conn.execute(ins, {'user_name':'u1'})
+        cached_conn.execute(ins, {'user_name':'u2'})
+        cached_conn.execute(ins, {'user_name':'u3'})
+        assert len(cache) == 1
+        eq_(conn.execute("select count(1) from users").scalar(), 3)
+    
 class LogTest(TestBase):
     def _test_logger(self, eng, eng_name, pool_name):
         buf = logging.handlers.BufferingHandler(100)