Don't double-process ResultMetaData for BufferedColumnResultProxy
author     Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 27 Apr 2016 16:37:58 +0000 (11:37 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 27 Apr 2016 16:47:07 +0000 (11:47 -0500)
Fixed a bug in the result proxy used mainly by Oracle when binary and
other LOB types are in play, such that when query / statement caching
was used, the type-level result processors, most notably the one
required by the binary type itself but also any other processor, would
be lost after the first run of the statement due to being removed from
the cached result metadata.

Change-Id: I751940866cffb4f48de46edc8137482eab59790c
Fixes: #3699
(cherry picked from commit f3bc60bdd809235cbeb3f414717ac0e273269cf9)
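
As background, a minimal sketch of the scenario the fix targets, roughly
mirroring the new test added below in test/sql/test_resultset.py.  The Upper
type and the SQLite URL are illustrative only: SQLite exercises the
compiled_cache code path, but the actual regression needs the
BufferedColumnResultProxy that the cx_Oracle dialect uses for LOB columns.

    from sqlalchemy import create_engine, select, literal, String, TypeDecorator

    class Upper(TypeDecorator):
        # hypothetical type with a result processor, analogous to MyType
        # in the new test below
        impl = String

        def process_result_value(self, value, dialect):
            return value.upper()

    engine = create_engine("sqlite://")
    cache = {}
    with engine.connect() as conn:
        conn = conn.execution_options(compiled_cache=cache)
        stmt = select([literal("there", type_=Upper())])
        for _ in range(2):
            # before the fix, the second run against a BufferedColumnResultProxy
            # would lose the type-level processor stored in the cached
            # ResultMetaData and hand back the raw value
            print(conn.execute(stmt).scalar())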

doc/build/changelog/changelog_10.rst
lib/sqlalchemy/engine/result.py
test/sql/test_resultset.py

diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index a30e3c567890ed5b2305673a7df4931f3af17fa8..352f00c8da6ab1a0726c3e2e337b5bab6d7221b5 100644
 .. changelog::
     :version: 1.0.13
 
+    .. change::
+        :tags: bug, oracle
+        :tickets: 3699
+
+        Fixed a bug in the result proxy used mainly by Oracle when binary and
+        other LOB types are in play, such that when query / statement caching
+        was used, the type-level result processors, most notably the one
+        required by the binary type itself but also any other processor, would
+        be lost after the first run of the statement due to being removed from
+        the cached result metadata.
+
     .. change::
         :tags: bug, examples
         :tickets: 3698
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index a4bfa02be6cfc08bc42ee44a25bab2c1326232be..480501594845153dcc9b0cd4985fd8dd99c6a45e 100644
@@ -192,6 +192,7 @@ class ResultMetaData(object):
         typemap = dialect.dbapi_type_map
         translate_colname = context._translate_colname
         self.case_sensitive = case_sensitive = dialect.case_sensitive
+        self._orig_processors = None
 
         if context.result_column_struct:
             result_columns, cols_are_ordered = context.result_column_struct
@@ -1238,16 +1239,21 @@ class BufferedColumnResultProxy(ResultProxy):
 
     def _init_metadata(self):
         super(BufferedColumnResultProxy, self)._init_metadata()
+
         metadata = self._metadata
-        # orig_processors will be used to preprocess each row when they are
-        # constructed.
-        metadata._orig_processors = metadata._processors
-        # replace the all type processors by None processors.
-        metadata._processors = [None for _ in range(len(metadata.keys))]
-        keymap = {}
-        for k, (func, obj, index) in metadata._keymap.items():
-            keymap[k] = (None, obj, index)
-        self._metadata._keymap = keymap
+
+        # don't double-replace the processors, in the case
+        # of a cached ResultMetaData
+        if metadata._orig_processors is None:
+            # orig_processors will be used to preprocess each row when
+            # they are constructed.
+            metadata._orig_processors = metadata._processors
+            # replace the all type processors by None processors.
+            metadata._processors = [None for _ in range(len(metadata.keys))]
+            keymap = {}
+            for k, (func, obj, index) in metadata._keymap.items():
+                keymap[k] = (None, obj, index)
+            metadata._keymap = keymap
 
     def fetchall(self):
         # can't call cursor.fetchall(), since rows must be
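
The second hunk above guards the processor swap so that a ResultMetaData
instance shared through the compiled cache is only mutated on its first use.
A distilled sketch of that guard, using hypothetical names rather than the
SQLAlchemy API:

    class CachedMetadata(object):
        # stand-in for ResultMetaData; _orig_processors starts as None,
        # matching the attribute initialized in the first hunk above
        def __init__(self, processors):
            self._processors = processors
            self._orig_processors = None

    def init_buffered(metadata):
        # move the type-level processors aside exactly once; they are applied
        # later, as each buffered row is constructed (per the comment in the
        # hunk above), so a second execution reusing the cached metadata must
        # not wipe them out by swapping again
        if metadata._orig_processors is None:
            metadata._orig_processors = metadata._processors
            metadata._processors = [None] * len(metadata._processors)

    md = CachedMetadata([str.upper, None])
    init_buffered(md)
    init_buffered(md)                                # no-op on reuse
    assert md._orig_processors == [str.upper, None]  # originals survive
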
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
index f79318236ad83bcdd8f8de1faeac9254a94c4861..67815a93093b35644e7f725ff75ac95bedef36e6 100644
@@ -6,7 +6,7 @@ from sqlalchemy import util
 from sqlalchemy import (
     exc, sql, func, select, String, Integer, MetaData, ForeignKey,
     VARCHAR, INT, CHAR, text, type_coerce, literal_column,
-    TypeDecorator, table, column)
+    TypeDecorator, table, column, literal)
 from sqlalchemy.engine import result as _result
 from sqlalchemy.testing.schema import Table, Column
 import operator
@@ -1433,18 +1433,60 @@ class AlternateResultProxyTest(fixtures.TablesTest):
             r.fetchall
         )
 
-    def test_plain(self):
+    def test_basic_plain(self):
         self._test_proxy(_result.ResultProxy)
 
-    def test_buffered_row_result_proxy(self):
+    def test_basic_buffered_row_result_proxy(self):
         self._test_proxy(_result.BufferedRowResultProxy)
 
-    def test_fully_buffered_result_proxy(self):
+    def test_basic_fully_buffered_result_proxy(self):
         self._test_proxy(_result.FullyBufferedResultProxy)
 
-    def test_buffered_column_result_proxy(self):
+    def test_basic_buffered_column_result_proxy(self):
         self._test_proxy(_result.BufferedColumnResultProxy)
 
+    def test_resultprocessor_plain(self):
+        self._test_result_processor(_result.ResultProxy, False)
+
+    def test_resultprocessor_plain_cached(self):
+        self._test_result_processor(_result.ResultProxy, True)
+
+    def test_resultprocessor_buffered_column(self):
+        self._test_result_processor(_result.BufferedColumnResultProxy, False)
+
+    def test_resultprocessor_buffered_column_cached(self):
+        self._test_result_processor(_result.BufferedColumnResultProxy, True)
+
+    def test_resultprocessor_buffered_row(self):
+        self._test_result_processor(_result.BufferedRowResultProxy, False)
+
+    def test_resultprocessor_buffered_row_cached(self):
+        self._test_result_processor(_result.BufferedRowResultProxy, True)
+
+    def test_resultprocessor_fully_buffered(self):
+        self._test_result_processor(_result.FullyBufferedResultProxy, False)
+
+    def test_resultprocessor_fully_buffered_cached(self):
+        self._test_result_processor(_result.FullyBufferedResultProxy, True)
+
+    def _test_result_processor(self, cls, use_cache):
+        class MyType(TypeDecorator):
+            impl = String()
+
+            def process_result_value(self, value, dialect):
+                return "HI " + value
+
+        with self._proxy_fixture(cls):
+            with self.engine.connect() as conn:
+                if use_cache:
+                    cache = {}
+                    conn = conn.execution_options(compiled_cache=cache)
+
+                stmt = select([literal("THERE", type_=MyType())])
+                for i in range(2):
+                    r = conn.execute(stmt)
+                    eq_(r.scalar(), "HI THERE")
+
     def test_buffered_row_growth(self):
         with self._proxy_fixture(_result.BufferedRowResultProxy):
             with self.engine.connect() as conn:
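
The cached variants of the new tests are the ones that would have failed
before this change: with compiled_cache in effect, the second
conn.execute(stmt) reuses the result metadata built on the first run instead
of constructing a new ResultMetaData, which is what exposed the
double-processing.  A quick way to see the cache being reused, as a sketch
against a plain SQLite engine (the structure of the cache keys is an
implementation detail):

    from sqlalchemy import create_engine, select, literal

    engine = create_engine("sqlite://")
    cache = {}
    with engine.connect() as conn:
        conn = conn.execution_options(compiled_cache=cache)
        stmt = select([literal("x")])
        conn.execute(stmt).scalar()
        assert len(cache) == 1   # first execution populates the cache
        conn.execute(stmt).scalar()
        assert len(cache) == 1   # second execution reuses the cached entry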