some lists to iterators
author    Mike Bayer <mike_mp@zzzcomputing.com>
Fri, 30 Jan 2009 21:22:19 +0000 (21:22 +0000)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Fri, 30 Jan 2009 21:22:19 +0000 (21:22 +0000)
lib/sqlalchemy/dialects/informix/informixdb.py
lib/sqlalchemy/dialects/sybase/base.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/orm/identity.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/scoping.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/types.py

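The changes below replace the list-building dict methods .keys(), .values() and .items() with their Python 2 iterator counterparts .iterkeys(), .itervalues() and .iteritems(), so the dictionaries are walked lazily instead of being copied into temporary lists first. A minimal Python 2 sketch of the difference (illustrative only, not part of the commit):

    d = {'a': 1, 'b': 2, 'c': 3}

    pairs = d.items()            # builds a complete list of (key, value) tuples
    lazy = d.iteritems()         # returns an iterator; no intermediate list

    for key, value in lazy:      # each pair is produced on demand
        print key, value

    # Caveats: an iter* iterator can only be consumed once, and the dict
    # must not change size while it is being consumed.
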
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
index ddfd597065839bcd02916aa9f07941d92ebaf83f..68c4feebbee56eca1fdfc6f3ab35828a1c0fcfdb 100644 (file)
@@ -53,8 +53,7 @@ class Informix_informixdb(InformixDialect):
     
     @classmethod
     def dbapi(cls):
-        import informixdb
-        return informixdb
+        return __import__('informixdb')
 
     def create_connect_args(self, url):
         if url.host:
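
The informixdb change above folds the import-and-return into a single expression. For a top-level module name, the __import__ builtin imports the module (if not already loaded) and returns the module object, so the two forms are equivalent. A small illustration using the stdlib os module rather than informixdb:

    mod = __import__('os')
    import os
    assert mod is os     # __import__ returns the same cached module object
    # Note: for dotted names such as 'a.b', __import__ returns the
    # top-level package 'a' unless a non-empty fromlist is passed.
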
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 300edebf905bf2daeb062543015b1709a5191633..16b739ae967886ecd3473670ab884c1f301e4e67 100644 (file)
@@ -360,7 +360,6 @@ class SybaseDialect(default.DefaultDialect):
 
         c = connection.execute(s)
         row = c.fetchone()
-        print "has_table: " + tablename + ": " + str(bool(row is not None))
         return row is not None
 
     def reflecttable(self, connection, table, include_columns):
@@ -444,7 +443,7 @@ class SybaseDialect(default.DefaultDialect):
             else:
                 foreignKeys[primary_table][0].append('%s'%(foreign_column))
                 foreignKeys[primary_table][1].append('%s.%s'%(primary_table, primary_column))
-        for primary_table in foreignKeys.keys():
+        for primary_table in foreignKeys.iterkeys():
             #table.append_constraint(schema.ForeignKeyConstraint(['%s.%s'%(foreign_table, foreign_column)], ['%s.%s'%(primary_table,primary_column)]))
             table.append_constraint(schema.ForeignKeyConstraint(foreignKeys[primary_table][0], foreignKeys[primary_table][1], link_to_name=True))
 
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index b136622704d3421e34ae26f4fcdbaec70d748f00..b719219a5df794c2c20b8af18d4c0bb5a047d7b7 100644 (file)
@@ -71,7 +71,8 @@ class DefaultDialect(base.Dialect):
         self.type_compiler = self.type_compiler(self)
         
         if label_length and label_length > self.max_identifier_length:
-            raise exc.ArgumentError("Label length of %d is greater than this dialect's maximum identifier length of %d" % (label_length, self.max_identifier_length))
+            raise exc.ArgumentError("Label length of %d is greater than this dialect's"
+                    " maximum identifier length of %d" % (label_length, self.max_identifier_length))
         self.label_length = label_length
         
         if not hasattr(self, 'description_encoding'):
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 43642728b8103c5566c428061501b88cba27dd8a..0f8b31d0fd4ec075cafa81dec048639c67d78e7d 100644 (file)
@@ -161,7 +161,7 @@ class WeakInstanceDict(IdentityMap):
         
 class StrongInstanceDict(IdentityMap):
     def all_states(self):
-        return [attributes.instance_state(o) for o in self.values()]
+        return [attributes.instance_state(o) for o in self.itervalues()]
     
     def contains_state(self, state):
         return state.key in self and attributes.instance_state(self[state.key]) is state
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index e85ecaa58c7e52ddb62f9d727fbc1752e2dab4c3..de7f66882ba7b66cdfdebd9a56243b2e45df2cc3 100644 (file)
@@ -1352,7 +1352,7 @@ class Query(object):
 
             session._finalize_loaded(context.progress)
 
-            for ii, attrs in context.partials.items():
+            for ii, attrs in context.partials.iteritems():
                 ii.commit(attrs)
 
             for row in rows:
@@ -1648,7 +1648,7 @@ class Query(object):
                 eval_condition = evaluator_compiler.process(self.whereclause or expression._Null)
 
                 value_evaluators = {}
-                for key,value in values.items():
+                for key,value in values.iteritems():
                     key = expression._column_as_key(key)
                     value_evaluators[key] = evaluator_compiler.process(expression._literal_as_binds(value))
             except evaluator.UnevaluatableError:
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 5559784c76c1f3e1bc0b7a9fc9492f619c055285..c8d90a5c1b223f6c0fdf156ae225ab8bd5b01048 100644 (file)
@@ -169,7 +169,7 @@ class _ScopedExt(MapperExtension):
 
     def _default__init__(ext, mapper):
         def __init__(self, **kwargs):
-            for key, value in kwargs.items():
+            for key, value in kwargs.iteritems():
                 if ext.validate:
                     if not mapper.get_property(key, resolve_synonyms=False,
                                                raiseerr=False):
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 2d4e223c45f8fe966b1b25cb680e198ceb58a35d..aca4663774d990adfc9462b5d133aef13149b3b7 100644 (file)
@@ -51,7 +51,7 @@ def clear_managers():
     All pools and connections are disposed.
     """
 
-    for manager in proxies.values():
+    for manager in proxies.itervalues():
         manager.close()
     proxies.clear()
 
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index c2fb5ee5a6b7bbc82e0850e4c9d314fcdabdc38a..363b433d5ce6f7ef0ad5dd79a44db6acb8a5b847 100644 (file)
@@ -1315,7 +1315,7 @@ class PrimaryKeyConstraint(Constraint):
         if kwargs:
             raise exc.ArgumentError(
                 'Unknown PrimaryKeyConstraint argument(s): %s' %
-                ', '.join(repr(x) for x in kwargs.keys()))
+                ', '.join(repr(x) for x in kwargs.iterkeys()))
 
         super(PrimaryKeyConstraint, self).__init__(**constraint_args)
         self.__colnames = list(columns)
@@ -1382,7 +1382,7 @@ class UniqueConstraint(Constraint):
         if kwargs:
             raise exc.ArgumentError(
                 'Unknown UniqueConstraint argument(s): %s' %
-                ', '.join(repr(x) for x in kwargs.keys()))
+                ', '.join(repr(x) for x in kwargs.iterkeys()))
 
         super(UniqueConstraint, self).__init__(**constraint_args)
         self.__colnames = list(columns)
@@ -1622,9 +1622,9 @@ class MetaData(SchemaItem):
         
         from sqlalchemy.sql.util import sort_tables
         if tables is None:
-            tables = self.tables.values()
+            tables = self.tables.itervalues()
         else:
-            tables = set(tables).intersection(self.tables.values())
+            tables = set(tables).intersection(self.tables.itervalues())
         ret = sort_tables(tables)
         if reverse:
             ret = reversed(ret)
@@ -1636,7 +1636,7 @@ class MetaData(SchemaItem):
         dependency.
         """
         from sqlalchemy.sql.util import sort_tables
-        return sort_tables(self.tables.values())
+        return sort_tables(self.tables.itervalues())
         
     def reflect(self, bind=None, schema=None, only=None):
         """Load all available table definitions from the database.
@@ -1682,7 +1682,7 @@ class MetaData(SchemaItem):
 
         available = util.OrderedSet(bind.engine.table_names(schema,
                                                             connection=conn))
-        current = set(self.tables.keys())
+        current = set(self.tables.iterkeys())
 
         if only is None:
             load = [name for name in available if name not in current]
@@ -1882,7 +1882,7 @@ class ThreadLocalMetaData(MetaData):
     def dispose(self):
         """Dispose all bound engines, in all thread contexts."""
 
-        for e in self.__engines.values():
+        for e in self.__engines.itervalues():
             if hasattr(e, 'dispose'):
                 e.dispose()
 
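In the MetaData changes above, self.tables.itervalues() is handed straight to set().intersection() and to sort_tables(); both appear to need only a single pass over the values, so a one-shot iterator suffices, but unlike a list it cannot be traversed twice. A Python 2 sketch of that caveat (illustrative only):

    tables = {'a': object(), 'b': object()}

    values = tables.itervalues()
    picked = set([tables['a']]).intersection(values)   # consumes the iterator
    assert len(picked) == 1
    assert list(values) == []                          # already exhausted
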
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index aaea145fd3d9c5f43474223c3f91dd7b9db0f697..3f738245346ed027841f9dd0630c1306fda83218 100644 (file)
@@ -1008,7 +1008,9 @@ NULLTYPE = NullType()
 # type which usually resolves to TEXT/CLOB
 type_map = {
     str : VARCHAR,
+    # Py2K
     unicode : NCHAR,
+    # end Py2K
     int : Integer,
     float : Numeric,
     bool: Boolean,
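
The # Py2K / # end Py2K comments added around the unicode entry mark code that exists only under Python 2 (the unicode builtin is gone in Python 3); in SQLAlchemy of this era such markers are consumed by the project's source-conversion step when producing a Python 3 build. A hypothetical sketch of how a marker-aware preprocessor could drop those lines (not SQLAlchemy's actual tool):

    def strip_py2k_blocks(lines):
        # Drop everything between '# Py2K' and '# end Py2K' markers.
        out, skipping = [], False
        for line in lines:
            marker = line.strip()
            if marker == '# Py2K':
                skipping = True
            elif marker == '# end Py2K':
                skipping = False
            elif not skipping:
                out.append(line)
        return out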