git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
author    Mike Bayer <mike_mp@zzzcomputing.com>
          Tue, 6 Feb 2007 01:31:50 +0000 (01:31 +0000)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Tue, 6 Feb 2007 01:31:50 +0000 (01:31 +0000)

- added PrefetchingResultProxy support to pre-fetch LOB columns when they are
  known to be present, fixes [ticket:435]

CHANGES
lib/sqlalchemy/databases/oracle.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py

diff --git a/CHANGES b/CHANGES
index e11af50b757e556fac304e8daab6c13a7b24a117..891b88bd380fee0130f4dfcbc604b40badf0f552 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -32,6 +32,8 @@
   (other places that rowid is invalid) still a TODO.  allows polymorphic mappings
   to function, [ticket:436]
   - sequences on a non-pk column will properly fire off on INSERT
+  - added PrefetchingResultProxy support to pre-fetch LOB columns when they are 
+  known to be present, fixes [ticket:435]
 - mysql:
   - fix to reflection on older DB's that might return array() type for 
   "show variables like" statements
diff --git a/lib/sqlalchemy/databases/oracle.py b/lib/sqlalchemy/databases/oracle.py
index 2db90959243d09f7d696301b7a89ad79c0e4e9c8..0717331622cd58c6bd7c904e7d0e78d5f8907dfa 100644 (file)
--- a/lib/sqlalchemy/databases/oracle.py
+++ b/lib/sqlalchemy/databases/oracle.py
@@ -318,6 +318,17 @@ class OracleDialect(ansisql.ANSIDialect):
         if context is not None:
             context._rowcount = rowcount
 
+    def create_result_proxy_args(self, connection, cursor):
+        args = super(OracleDialect, self).create_result_proxy_args(connection, cursor)
+        if cursor and cursor.description:
+            for column in cursor.description:
+                type_code = column[1]
+                if type_code in (cx_Oracle.BFILE, cx_Oracle.CLOB, cx_Oracle.NCLOB,
+                                 cx_Oracle.BLOB, cx_Oracle.LONG_BINARY, cx_Oracle.LONG_STRING):
+                    args['should_prefetch'] = True
+                    break
+        return args
+
 class OracleCompiler(ansisql.ANSICompiler):
     """oracle compiler modifies the lexical structure of Select statements to work under 
     non-ANSI configured Oracle databases, if the use_ansi flag is False."""
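
The oracle.py hunk above keys prefetching off the DB-API cursor.description, whose entries are seven-item sequences with the type code in position 1. Below is a self-contained sketch of the same kind of check, reusing the type codes named in the diff; it is an illustration of the idea, not the shipped method.

    # Sketch only: a standalone version of the cursor.description scan.
    import cx_Oracle

    LOB_TYPE_CODES = (cx_Oracle.BFILE, cx_Oracle.CLOB, cx_Oracle.NCLOB,
                      cx_Oracle.BLOB, cx_Oracle.LONG_BINARY, cx_Oracle.LONG_STRING)

    def needs_prefetch(cursor):
        """return True if any result column reports a LOB-ish type code"""
        if not cursor or not cursor.description:
            return False
        for column in cursor.description:
            if column[1] in LOB_TYPE_CODES:
                return True
        return False
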
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 8871cac1daf1d8c7405d3e6d5621a98fb86f2427..3ec3e028f77cd5db67dfa30f510d70345b93508e 100644 (file)
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -121,6 +121,9 @@ class Dialect(sql.AbstractDialect):
     def create_cursor(self, connection):
         """return a new cursor generated from the given connection"""
         raise NotImplementedError()
+    def create_result_proxy_args(self, connection, cursor):
+        """returns a dictionary of arguments that should be passed to ResultProxy()."""
+        raise NotImplementedError()
     def compile(self, clauseelement, parameters=None):
         """compile the given ClauseElement using this Dialect.
         
@@ -265,7 +268,8 @@ class Connection(Connectable):
         return default.accept_schema_visitor(self.__engine.dialect.defaultrunner(self.__engine, self.proxy, **kwargs))
     def execute_text(self, statement, parameters=None):
         cursor = self._execute_raw(statement, parameters)
-        return ResultProxy(self.__engine, self, cursor)
+        rpargs = self.__engine.dialect.create_result_proxy_args(self, cursor)
+        return ResultProxy(self.__engine, self, cursor, **rpargs)
     def _params_to_listofdicts(self, *multiparams, **params):
         if len(multiparams) == 0:
             return [params]
@@ -304,7 +308,8 @@ class Connection(Connectable):
         context.pre_exec(self.__engine, proxy, compiled, parameters)
         proxy(str(compiled), parameters)
         context.post_exec(self.__engine, proxy, compiled, parameters)
-        return ResultProxy(self.__engine, self, cursor, context, typemap=compiled.typemap, columns=compiled.columns)
+        rpargs = self.__engine.dialect.create_result_proxy_args(self, cursor)
+        return ResultProxy(self.__engine, self, cursor, context, typemap=compiled.typemap, columns=compiled.columns, **rpargs)
         
     # poor man's multimethod/generic function thingy
     executors = {
@@ -551,8 +556,14 @@ class ResultProxy(object):
             return self
         def convert_result_value(self, arg, engine):
             raise exceptions.InvalidRequestError("Ambiguous column name '%s' in result set! try 'use_labels' option on select statement." % (self.key))
+        
+    def __new__(cls, *args, **kwargs):
+        if cls is ResultProxy and kwargs.has_key('should_prefetch') and kwargs['should_prefetch']:
+            return PrefetchingResultProxy(*args, **kwargs)
+        else:
+            return object.__new__(cls, *args, **kwargs)
     
-    def __init__(self, engine, connection, cursor, executioncontext=None, typemap=None, columns=None):
+    def __init__(self, engine, connection, cursor, executioncontext=None, typemap=None, columns=None, should_prefetch=None):
         """ResultProxy objects are constructed via the execute() method on SQLEngine."""
         self.connection = connection
         self.dialect = engine.dialect
@@ -612,6 +623,7 @@ class ResultProxy(object):
         try:
             return self.__key_cache[key]
         except KeyError:
+            # TODO: use has_key on these, too many potential KeyErrors being raised
             if isinstance(key, sql.ColumnElement):
                 try:
                     rec = self.props[key._label.lower()]
@@ -714,9 +726,6 @@ class ResultProxy(object):
         if row is not None:
             return RowProxy(self, row)
         else:
-            # controversy!  can we auto-close the cursor after results are consumed ?
-            # what if the returned rows are still hanging around, and are "live" objects 
-            # and not just plain tuples ?
             self.close()
             return None
 
@@ -730,7 +739,48 @@ class ResultProxy(object):
                 return None
         finally:
             self.close()
+            
+class PrefetchingResultProxy(ResultProxy):
+    """ResultProxy that loads all columns into memory each time fetchone() is
+    called.  If fetchmany() or fetchall() are called, the full grid of results
+    is fetched.
+    """
+    def _get_col(self, row, key):
+        rec = self._convert_key(key)
+        return row[rec[1]]
     
+    def fetchall(self):
+        l = []
+        while True:
+            row = self.fetchone()
+            if row is not None:
+                l.append(row)
+            else:
+                break
+        return l
+            
+    def fetchmany(self, size=None):
+        if size is None:
+            return self.fetchall()
+        l = []
+        for i in xrange(size):
+            row = self.fetchone()
+            if row is not None:
+                l.append(row)
+            else:
+                break
+        return l
+        
+    def fetchone(self):
+        sup = super(PrefetchingResultProxy, self)
+        row = self.cursor.fetchone()
+        if row is not None:
+            row = [sup._get_col(row, i) for i in xrange(len(row))]
+            return RowProxy(self, row)
+        else:
+            self.close()
+            return None
+        
 class RowProxy(object):
     """proxies a single cursor row for a parent ResultProxy.  Mostly follows 
     "ordered dictionary" behavior, mapping result values to the string-based column name,
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 2f6283b5d367d7c068445b263c2b2d0d233cc9dc..40210e88f6cf781fb02d78260f5aeac4ac0253e1 100644 (file)
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -65,6 +65,8 @@ class DefaultDialect(base.Dialect):
         return base.DefaultRunner(engine, proxy)
     def create_cursor(self, connection):
         return connection.cursor()
+    def create_result_proxy_args(self, connection, cursor):
+        return dict(should_prefetch=False)
         
     def _set_paramstyle(self, style):
         self._paramstyle = style
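
Putting the three files together: Connection asks its dialect for extra ResultProxy keyword arguments and forwards them, the default dialect answers should_prefetch=False, and the Oracle dialect flips the flag when LOB columns are present. Below is a rough, self-contained sketch of that flow; every name is a stand-in rather than SQLAlchemy API.

    # Sketch only; all names are stand-ins for the real classes.
    class SketchDefaultDialect(object):
        def create_result_proxy_args(self, connection, cursor):
            return dict(should_prefetch=False)

    class SketchOracleDialect(SketchDefaultDialect):
        def create_result_proxy_args(self, connection, cursor):
            args = SketchDefaultDialect.create_result_proxy_args(self, connection, cursor)
            # stand-in for the cursor.description type-code scan in oracle.py
            if getattr(cursor, 'has_lob_columns', False):
                args['should_prefetch'] = True
            return args

    class SketchResultProxy(object):
        def __init__(self, cursor, should_prefetch=False):
            self.cursor = cursor
            self.should_prefetch = should_prefetch

    class SketchCursor(object):
        has_lob_columns = True

    def execute(dialect, cursor):
        # mirrors Connection.execute_text: fetch the kwargs, pass them through
        rpargs = dialect.create_result_proxy_args(None, cursor)
        return SketchResultProxy(cursor, **rpargs)

    assert execute(SketchOracleDialect(), SketchCursor()).should_prefetch is True
    assert execute(SketchDefaultDialect(), SketchCursor()).should_prefetch is False
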