git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
ResultProxy pre-caches the dialect_impl()s for each TypeEngine, saving the
method calls later.  knocks 5000 function calls off the masseagerload.py test.

author    Mike Bayer <mike_mp@zzzcomputing.com>
          Thu, 31 May 2007 16:47:48 +0000 (16:47 +0000)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Thu, 31 May 2007 16:47:48 +0000 (16:47 +0000)
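
For context, a minimal sketch of the pre-caching pattern this commit introduces, under simplified assumptions: the Dialect and StringType classes below are illustrative stand-ins, not SQLAlchemy classes; only the dialect_impl()/convert_result_value() names and the rec tuple layout come from the diff itself.

    # Hypothetical stand-ins; only the method names mirror the diff.
    class Dialect(object):
        """Stand-in dialect object, passed through to the type impl."""

    class StringType(object):
        def dialect_impl(self, dialect):
            # The real TypeEngine resolves a dialect-specific implementation;
            # returning self keeps this sketch self-contained.
            return self

        def convert_result_value(self, value, dialect):
            return value

    def build_props(description, typemap, dialect):
        # Mirrors the patched ResultProxy.__init__(): dialect_impl() is
        # called once per column and cached in the rec tuple as rec[1].
        props = {}
        for i, item in enumerate(description):
            colname = item[0].split('.')[-1].lower()
            coltype = typemap.get(colname, StringType())
            props[colname] = (coltype, coltype.dialect_impl(dialect), i)
        return props

    def get_col(props, row, key, dialect):
        # Mirrors the patched _get_col(): reuse the cached impl instead of
        # calling dialect_impl() again for every row/column access.
        coltype, impl, idx = props[key]
        return impl.convert_result_value(row[idx], dialect)

    dialect = Dialect()
    description = [("users.id", None), ("users.name", None)]
    props = build_props(description,
                        {"id": StringType(), "name": StringType()}, dialect)
    print(get_col(props, (5, "ed"), "name", dialect))  # -> 'ed'

The saving is per row fetch: dialect_impl() now runs once per column when the result metadata is built, rather than once per column for every row accessed.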

lib/sqlalchemy/engine/base.py
lib/sqlalchemy/orm/mapper.py

diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f4cb6bb36b6ce7117f5b54494528a46956b54278..2999bad927a8a1eadc1bc51e7531290ca386fd2b 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -873,14 +873,17 @@ class ResultProxy(object):
         self.__props = {}
         self.__keys = []
         metadata = self.cursor.description
+
         if metadata is not None:
             for i, item in enumerate(metadata):
                 # sqlite possibly prepending table name to colnames so strip
                 colname = item[0].split('.')[-1]
                 if self.context.typemap is not None:
-                    rec = (self.context.typemap.get(colname.lower(), types.NULLTYPE), i)
+                    type = self.context.typemap.get(colname.lower(), types.NULLTYPE)
                 else:
-                    rec = (types.NULLTYPE, i)
+                    type = types.NULLTYPE
+                rec = (type, type.dialect_impl(self.dialect), i)
+
                 if rec[0] is None:
                     raise DBAPIError("None for metadata " + colname)
                 if self.__props.setdefault(colname.lower(), rec) is not rec:
@@ -992,7 +995,7 @@ class ResultProxy(object):
 
     def _get_col(self, row, key):
         rec = self._convert_key(key)
-        return rec[0].dialect_impl(self.dialect).convert_result_value(row[rec[1]], self.dialect)
+        return rec[1].convert_result_value(row[rec[2]], self.dialect)
     
     def _fetchone_impl(self):
         return self.cursor.fetchone()
@@ -1104,7 +1107,7 @@ class BufferedColumnResultProxy(ResultProxy):
     """
     def _get_col(self, row, key):
         rec = self._convert_key(key)
-        return row[rec[1]]
+        return row[rec[2]]
     
     def _process_row(self, row):
         sup = super(BufferedColumnResultProxy, self)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 223c270b0f322039ffe84415cf63f4c2f2758d4c..aff5705050db18200fb8c5fcbfd301480662c5cd 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1446,8 +1446,8 @@ class Mapper(object):
         # including modifying any of its related items lists, as its already
         # been exposed to being modified by the application.
 
-        populate_existing = context.populate_existing or self.always_refresh
         identitykey = self.identity_key_from_row(row)
+        populate_existing = context.populate_existing or self.always_refresh
         if context.session.has_key(identitykey):
             instance = context.session._get(identitykey)
             if self.__should_log_debug: