Don't double-process ResultMetaData for BufferedColumnResultProxy
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Wed, 27 Apr 2016 16:37:58 +0000 (11:37 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Wed, 27 Apr 2016 16:37:58 +0000 (11:37 -0500)
Fixed a bug in the result proxy used mainly by Oracle when binary and
other LOB types are in play, such that when query / statement caching
was used, the type-level result processors, notably the one required by
the binary type itself but also any other processor, would be lost
after the first run of the statement due to their removal from the
cached result metadata.

Change-Id: I751940866cffb4f48de46edc8137482eab59790c
Fixes: #3699
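
For context, the regression can be reproduced along the following lines.  This
is a minimal sketch only, assuming an Oracle connection (the URL is a
placeholder) where the dialect selects BufferedColumnResultProxy, and using the
1.0-era select() calling style; the test added below exercises the same
scenario against each result proxy class directly.

    from sqlalchemy import create_engine, select, literal, String, TypeDecorator

    class MyType(TypeDecorator):
        impl = String

        def process_result_value(self, value, dialect):
            # prepend a marker so it is obvious whether the processor ran
            return "HI " + value

    engine = create_engine("oracle://scott:tiger@localhost/xe")  # placeholder URL
    cache = {}

    with engine.connect() as conn:
        conn = conn.execution_options(compiled_cache=cache)
        stmt = select([literal("THERE", type_=MyType())])

        # first run: the ResultMetaData is built and cached; the buffered-column
        # proxy moves the type processors aside on that shared metadata object
        print(conn.execute(stmt).scalar())   # "HI THERE"

        # second run: the cached metadata is reused; before this fix,
        # _orig_processors was overwritten with the already-nulled processors,
        # so the value came back unprocessed ("THERE" instead of "HI THERE")
        print(conn.execute(stmt).scalar())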
doc/build/changelog/changelog_10.rst
lib/sqlalchemy/engine/result.py
test/sql/test_resultset.py

diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index a30e3c567890ed5b2305673a7df4931f3af17fa8..352f00c8da6ab1a0726c3e2e337b5bab6d7221b5 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
 .. changelog::
     :version: 1.0.13
 
+    .. change::
+        :tags: bug, oracle
+        :tickets: 3699
+
+        Fixed a bug in the result proxy used mainly by Oracle when binary and
+        other LOB types are in play, such that when query / statement caching
+        was used, the type-level result processors, notably the one required by
+        the binary type itself but also any other processor, would be lost
+        after the first run of the statement due to their removal from the
+        cached result metadata.
+
     .. change::
         :tags: bug, examples
         :tickets: 3698
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 773022ed2b34e8a93d77c5a576a3250f0faa7722..c9eb53eb1c3e1c8ef69e3dba69d910bdbe237827 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -198,6 +198,7 @@ class ResultMetaData(object):
         dialect = context.dialect
         self.case_sensitive = dialect.case_sensitive
         self.matched_on_name = False
+        self._orig_processors = None
 
         if context.result_column_struct:
             result_columns, cols_are_ordered, textual_ordered = \
@@ -1394,16 +1395,21 @@ class BufferedColumnResultProxy(ResultProxy):
 
     def _init_metadata(self):
         super(BufferedColumnResultProxy, self)._init_metadata()
+
         metadata = self._metadata
-        # orig_processors will be used to preprocess each row when they are
-        # constructed.
-        metadata._orig_processors = metadata._processors
-        # replace the all type processors by None processors.
-        metadata._processors = [None for _ in range(len(metadata.keys))]
-        keymap = {}
-        for k, (func, obj, index) in metadata._keymap.items():
-            keymap[k] = (None, obj, index)
-        self._metadata._keymap = keymap
+
+        # don't double-replace the processors, in the case
+        # of a cached ResultMetaData
+        if metadata._orig_processors is None:
+            # orig_processors will be used to preprocess each row as it
+            # is constructed.
+            metadata._orig_processors = metadata._processors
+            # replace all of the type processors with None processors.
+            metadata._processors = [None for _ in range(len(metadata.keys))]
+            keymap = {}
+            for k, (func, obj, index) in metadata._keymap.items():
+                keymap[k] = (None, obj, index)
+            metadata._keymap = keymap
 
     def fetchall(self):
         # can't call cursor.fetchall(), since rows must be
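
The shape of the fix is a guard that makes _init_metadata() idempotent with
respect to a ResultMetaData object that may be shared via the compiled cache:
the processors are moved aside only once, and a sentinel records that this has
already happened.  Below is an illustrative sketch of that pattern using
made-up names, not SQLAlchemy internals.

    # stand-in for a metadata object that may be reused across executions
    class CachedMetadata(object):
        def __init__(self, processors):
            self._processors = processors
            self._orig_processors = None   # sentinel: not yet moved aside

    def init_buffered(metadata):
        # only move the processors aside the first time this (possibly cached)
        # metadata object is seen; a repeated call is a no-op
        if metadata._orig_processors is None:
            metadata._orig_processors = metadata._processors
            metadata._processors = [None] * len(metadata._processors)

    meta = CachedMetadata([str.upper, int])
    init_buffered(meta)
    init_buffered(meta)   # second call leaves the originals intact
    assert meta._orig_processors == [str.upper, int]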
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
index f74e51f620897f2efd34d93c1637008f0b5df277..f52412b6831af95173ddf51a51d8026a3795fb29 100644
--- a/test/sql/test_resultset.py
+++ b/test/sql/test_resultset.py
@@ -6,7 +6,7 @@ from sqlalchemy import util
 from sqlalchemy import (
     exc, sql, func, select, String, Integer, MetaData, ForeignKey,
     VARCHAR, INT, CHAR, text, type_coerce, literal_column,
-    TypeDecorator, table, column)
+    TypeDecorator, table, column, literal)
 from sqlalchemy.engine import result as _result
 from sqlalchemy.testing.schema import Table, Column
 import operator
@@ -1620,18 +1620,60 @@ class AlternateResultProxyTest(fixtures.TablesTest):
             r.fetchall
         )
 
-    def test_plain(self):
+    def test_basic_plain(self):
         self._test_proxy(_result.ResultProxy)
 
-    def test_buffered_row_result_proxy(self):
+    def test_basic_buffered_row_result_proxy(self):
         self._test_proxy(_result.BufferedRowResultProxy)
 
-    def test_fully_buffered_result_proxy(self):
+    def test_basic_fully_buffered_result_proxy(self):
         self._test_proxy(_result.FullyBufferedResultProxy)
 
-    def test_buffered_column_result_proxy(self):
+    def test_basic_buffered_column_result_proxy(self):
         self._test_proxy(_result.BufferedColumnResultProxy)
 
+    def test_resultprocessor_plain(self):
+        self._test_result_processor(_result.ResultProxy, False)
+
+    def test_resultprocessor_plain_cached(self):
+        self._test_result_processor(_result.ResultProxy, True)
+
+    def test_resultprocessor_buffered_column(self):
+        self._test_result_processor(_result.BufferedColumnResultProxy, False)
+
+    def test_resultprocessor_buffered_column_cached(self):
+        self._test_result_processor(_result.BufferedColumnResultProxy, True)
+
+    def test_resultprocessor_buffered_row(self):
+        self._test_result_processor(_result.BufferedRowResultProxy, False)
+
+    def test_resultprocessor_buffered_row_cached(self):
+        self._test_result_processor(_result.BufferedRowResultProxy, True)
+
+    def test_resultprocessor_fully_buffered(self):
+        self._test_result_processor(_result.FullyBufferedResultProxy, False)
+
+    def test_resultprocessor_fully_buffered_cached(self):
+        self._test_result_processor(_result.FullyBufferedResultProxy, True)
+
+    def _test_result_processor(self, cls, use_cache):
+        class MyType(TypeDecorator):
+            impl = String()
+
+            def process_result_value(self, value, dialect):
+                return "HI " + value
+
+        with self._proxy_fixture(cls):
+            with self.engine.connect() as conn:
+                if use_cache:
+                    cache = {}
+                    conn = conn.execution_options(compiled_cache=cache)
+
+                stmt = select([literal("THERE", type_=MyType())])
+                for i in range(2):
+                    r = conn.execute(stmt)
+                    eq_(r.scalar(), "HI THERE")
+
     def test_buffered_row_growth(self):
         with self._proxy_fixture(_result.BufferedRowResultProxy):
             with self.engine.connect() as conn: