git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
a pass where we try to squash down as many list()/keys() combinations as possible

author    Mike Bayer <mike_mp@zzzcomputing.com>
          Sun, 26 May 2013 23:06:13 +0000 (19:06 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Sun, 26 May 2013 23:06:13 +0000 (19:06 -0400)

22 files changed:
lib/sqlalchemy/__init__.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/event.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/declarative/api.py
lib/sqlalchemy/ext/declarative/base.py
lib/sqlalchemy/ext/mutable.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/testing/engines.py
lib/sqlalchemy/testing/entities.py
lib/sqlalchemy/testing/schema.py
lib/sqlalchemy/util/_collections.py
test/aaa_profiling/test_resultset.py
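
The common thread of the changes below: once the codebase runs natively on Python 3 rather than through a 2to3-style conversion, dict.keys(), dict.values() and dict.items() return lightweight views that can be iterated directly, so the list() wrapper that the conversion sprinkled around them is dropped wherever the loop only reads the dictionary. The snapshot is kept only where the dictionary is mutated during iteration or where an actual list is required. A minimal sketch of the distinction (illustrative only, not part of the patch):

    d = {'a': 1, 'b': 2}

    # a view can be iterated directly; no intermediate list is built
    for key, value in d.items():
        print(key, value)

    # a snapshot is still required if the dict changes size mid-loop
    for key in list(d):
        if d[key] == 1:
            del d[key]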

diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 5b7123b6eb1e35de5a7d7efd8e52f31ee0014016..21e06f5483d4caed094e814e93a5044fc501eacf 100644
@@ -117,7 +117,7 @@ from .inspection import inspect
 from .engine import create_engine, engine_from_config
 
 
-__all__ = sorted(name for name, obj in list(locals().items())
+__all__ = sorted(name for name, obj in locals().items()
                  if not (name.startswith('_') or _inspect.ismodule(obj)))
 
 __version__ = '0.8.2'
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index e9645f36391ebb289fcbb14d974e9b5a613b588c..85d11ff36794379829f5a5ae9f4c9317b246bc22 100644
@@ -734,7 +734,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                                 e, None, None, None, self)
         else:
             inputsizes = {}
-            for key in list(self.compiled.bind_names.values()):
+            for key in self.compiled.bind_names.values():
                 typeengine = types[key]
                 dbtype = typeengine.dialect_impl(self.dialect).\
                                 get_dbapi_type(self.dialect.dbapi)
diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py
index 33626cf2ad1836e4c0451417a05df40eb2274c87..d8c44dd05118d57e6a8b99f99a227bdff93ac5f7 100644
@@ -300,7 +300,7 @@ class _DispatchDescriptor(object):
     def clear(self):
         """Clear all class level listeners"""
 
-        for dispatcher in list(self._clslevel.values()):
+        for dispatcher in self._clslevel.values():
             dispatcher[:] = []
 
     def for_modify(self, obj):
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 39f8a7cf65cc9d5868909fa6808a6f37a21d5771..08875c3a0dbcbaf2adabf824243fb37b056c4d5b 100644
@@ -778,7 +778,7 @@ class _AssociationDict(_AssociationCollection):
         iteritems = _iteritems
 
         def values(self):
-            return [self._get(member) for member in list(self.col.values())]
+            return [self._get(member) for member in self.col.values()]
 
         def items(self):
             return [(k, self._get(self.col[k])) for k in self]
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index bc0c5a38c6ea4302b19280329403b8d32cb55d9f..2f222f6829b127ea5f6027efb81a50d0778c9816 100644
@@ -424,7 +424,7 @@ class DeferredReflection(object):
     def prepare(cls, engine):
         """Reflect all :class:`.Table` objects for all current
         :class:`.DeferredReflection` subclasses"""
-        to_map = [m for m in list(_MapperConfig.configs.values())
+        to_map = [m for m in _MapperConfig.configs.values()
                     if issubclass(m.cls, cls)]
         for thingy in to_map:
             cls._sa_decl_prepare(thingy.local_table, engine)
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 2099f9eb0e2ce7fb0ec8e85e027525882c6440a7..9187ed7f89c87611e385f0608d052734ef93e778 100644
@@ -57,7 +57,7 @@ def _as_declarative(cls, classname, dict_):
 
         class_mapped = _declared_mapping_info(base) is not None
 
-        for name, obj in list(vars(base).items()):
+        for name, obj in vars(base).items():
             if name == '__mapper_args__':
                 if not mapper_args_fn and (
                                         not class_mapped or
@@ -129,7 +129,7 @@ def _as_declarative(cls, classname, dict_):
                         ret.doc = obj.__doc__
 
     # apply inherited columns as we should
-    for k, v in list(potential_columns.items()):
+    for k, v in potential_columns.items():
         dict_[k] = v
 
     if inherited_table_args and not tablename:
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index 08c0bdf134952222eef3047b9dd43b14e21d8878..b1b851f7231c2229c1d1bfe333d511fd62e0cd05 100644
@@ -485,7 +485,7 @@ class Mutable(MutableBase):
     def changed(self):
         """Subclasses should call this method whenever change events occur."""
 
-        for parent, key in list(self._parents.items()):
+        for parent, key in self._parents.items():
             flag_modified(parent, key)
 
     @classmethod
@@ -579,7 +579,7 @@ class MutableComposite(MutableBase):
     def changed(self):
         """Subclasses should call this method whenever change events occur."""
 
-        for parent, key in list(self._parents.items()):
+        for parent, key in self._parents.items():
 
             prop = object_mapper(parent).get_property(key)
             for value, attr_name in zip(
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 05724704761d8f1b5096dedde4005f15a93fce53..bfba695b843098663d255e944af78d3de5f838d1 100644
@@ -866,7 +866,7 @@ class CollectionAttributeImpl(AttributeImpl):
         self.collection_factory = typecallable
 
     def __copy(self, item):
-        return [y for y in list(collections.collection_adapter(item))]
+        return [y for y in collections.collection_adapter(item)]
 
     def get_history(self, state, dict_, passive=PASSIVE_OFF):
         current = self.get(state, dict_, passive=passive)
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index bb7882ee9ae5813fb47550aef47247ae4c1184dc..03917d1128132742297fb6963edcf0ea1c844299 100644
@@ -883,7 +883,7 @@ def _instrument_class(cls):
     # search for _sa_instrument_role-decorated methods in
     # method resolution order, assign to roles
     for supercls in cls.__mro__:
-        for name, method in list(vars(supercls).items()):
+        for name, method in vars(supercls).items():
             if not util.callable(method):
                 continue
 
@@ -917,11 +917,11 @@ def _instrument_class(cls):
     collection_type = util.duck_type_collection(cls)
     if collection_type in __interfaces:
         canned_roles, decorators = __interfaces[collection_type]
-        for role, name in list(canned_roles.items()):
+        for role, name in canned_roles.items():
             roles.setdefault(role, name)
 
         # apply ABC auto-decoration to methods that need it
-        for method, decorator in list(decorators.items()):
+        for method, decorator in decorators.items():
             fn = getattr(cls, method, None)
             if (fn and method not in methods and
                 not hasattr(fn, '_sa_instrumented')):
@@ -952,12 +952,12 @@ def _instrument_class(cls):
 
     # apply ad-hoc instrumentation from decorators, class-level defaults
     # and implicit role declarations
-    for method_name, (before, argument, after) in list(methods.items()):
+    for method_name, (before, argument, after) in methods.items():
         setattr(cls, method_name,
                 _instrument_membership_mutator(getattr(cls, method_name),
                                                before, argument, after))
     # intern the role map
-    for role, method_name in list(roles.items()):
+    for role, method_name in roles.items():
         setattr(cls, '_sa_%s' % role, getattr(cls, method_name))
 
     setattr(cls, '_sa_instrumented', id(cls))
@@ -1250,7 +1250,7 @@ def _dict_decorators():
         def update(self, __other=Unspecified, **kw):
             if __other is not Unspecified:
                 if hasattr(__other, 'keys'):
-                    for key in list(__other.keys()):
+                    for key in list(__other):
                         if (key not in self or
                             self[key] is not __other[key]):
                             self[key] = __other[key]
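
In the dict-decorator update() above, list(__other.keys()) becomes list(__other): iterating a mapping yields its keys on both Python 2 and 3, so the explicit .keys() call is redundant, and the list() snapshot itself appears to be kept defensively so the loop is unaffected if __other aliases the collection being updated. For illustration:

    other = {'x': 1, 'y': 2}
    assert list(other) == list(other.keys())   # holds on Python 2 and 3
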
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 0eedea793ff3c3e54a631ddf68bfdcfd31f1a3f1..8ad1dbe9bdf7d4aaba4e9f619296423fa32459b1 100644
@@ -803,7 +803,7 @@ class BulkUD(object):
             raise sa_exc.ArgumentError(
                             "Valid strategies for session synchronization "
                             "are %s" % (", ".join(sorted(repr(x)
-                                for x in list(lookup.keys())))))
+                                for x in lookup))))
         else:
             return klass(*arg)
 
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index cbb508cf7f166689265fffc89c80d2a9d735746a..5a4486eef5bc71b2de966e8f1b9440d68fc0f78b 100644
@@ -35,7 +35,7 @@ class _SessionClassMethods(object):
     def close_all(cls):
         """Close *all* sessions in memory."""
 
-        for sess in list(_sessions.values()):
+        for sess in _sessions.values():
             sess.close()
 
     @classmethod
@@ -250,7 +250,7 @@ class SessionTransaction(object):
             if s.key:
                 del s.key
 
-        for s, (oldkey, newkey) in list(self._key_switches.items()):
+        for s, (oldkey, newkey) in self._key_switches.items():
             self.session.identity_map.discard(s)
             s.key = oldkey
             self.session.identity_map.replace(s)
@@ -2237,7 +2237,7 @@ class sessionmaker(_SessionClassMethods):
             session = Session()  # invokes sessionmaker.__call__()
 
         """
-        for k, v in list(self.kw.items()):
+        for k, v in self.kw.items():
             local_kw.setdefault(k, v)
         return self.class_(**local_kw)
 
@@ -2256,7 +2256,7 @@ class sessionmaker(_SessionClassMethods):
         return "%s(class_=%r%s)" % (
                     self.__class__.__name__,
                     self.class_.__name__,
-                    ", ".join("%s=%r" % (k, v) for k, v in list(self.kw.items()))
+                    ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
                 )
 
 _sessions = weakref.WeakValueDictionary()
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 8fe37e41cee9a885b99d2c8e87093a710303506a..6ade91b3e078a629dd3448353e0215394b8f3555 100644
@@ -417,7 +417,7 @@ class InstanceState(interfaces._InspectionAttr):
            against this set when a refresh operation occurs.
 
         """
-        return set([k for k, v in list(self.callables.items()) if v is self])
+        return set([k for k, v in self.callables.items() if v is self])
 
     def _instance_dict(self):
         return None
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index e80745c79cdae0ad9a04820f02df37cbd438360e..4651c71b793336b51d31b81870b95111bbf6824e 100644
@@ -359,7 +359,7 @@ class LazyLoader(AbstractRelationshipLoader):
                         )
 
         if self.use_get:
-            for col in list(self._equated_columns.keys()):
+            for col in list(self._equated_columns):
                 if col in self.mapper._equivalent_columns:
                     for c in self.mapper._equivalent_columns[col]:
                         self._equated_columns[c] = self._equated_columns[col]
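
Here the list() wrapper on self._equated_columns is deliberately retained: the loop body inserts new keys into the same dictionary. On Python 2 the old .keys() call already returned a list, so the loop was implicitly iterating a snapshot; on Python 3 the keys view tracks the dict, and changing its size mid-iteration raises RuntimeError, so an explicit list() preserves the old behaviour (the same reason list() survives in pool._DBProxy.close() further down, where keys are deleted). A small illustration of the pattern, using made-up names:

    equated = {'col_a': 'expr_a'}

    # Unsafe on a live view -- RuntimeError: dictionary changed size during iteration
    #     for col in equated:
    #         equated[col + '_alias'] = equated[col]

    # Safe: iterate a snapshot of the keys, then mutate freely
    for col in list(equated):
        equated[col + '_alias'] = equated[col]
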
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 54b0ebf72c7eb57f196d3675cf2b126ac221e147..aa5f7836c0c723b792a7e516f1d8b27fcd9c7999 100644
@@ -342,7 +342,7 @@ class UOWTransaction(object):
                     for dep in convert[edge[1]]:
                         self.dependencies.add((edge[0], dep))
 
-        return set([a for a in list(self.postsort_actions.values())
+        return set([a for a in self.postsort_actions.values()
                     if not a.disabled
                     ]
                 ).difference(cycles)
@@ -461,7 +461,7 @@ class PostSortRec(object):
     def __repr__(self):
         return "%s(%s)" % (
             self.__class__.__name__,
-            ",".join(str(x) for x in list(self.__dict__.values()))
+            ",".join(str(x) for x in self.__dict__.values())
         )
 
 
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 562a76163638915eb40022e2bda4ca36f75e706c..0470e9e485249ea903c93d4cbe58b5ddcd9c5a05 100644
@@ -1004,7 +1004,7 @@ class _DBProxy(object):
         self._create_pool_mutex = threading.Lock()
 
     def close(self):
-        for key in list(self.pools.keys()):
+        for key in list(self.pools):
             del self.pools[key]
 
     def __del__(self):
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 8e5c94abaca422546b39d3ada2b249ac79666e07..3a74cbd59dc221013b707038e4a0bcd406aab68c 100644
@@ -1044,7 +1044,7 @@ class Column(SchemaItem, expression.ColumnClause):
         if self.key in table._columns:
             col = table._columns.get(self.key)
             if col is not self:
-                for fk in list(col.foreign_keys):
+                for fk in col.foreign_keys:
                     table.foreign_keys.remove(fk)
                     if fk.constraint in table.constraints:
                         # this might have been removed
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index c3aea159a2bb56ca8333922621a09054c6ee1a95..b2c4a94c07a45ab1d8d55a20423932e02b4f5f67 100644
@@ -2073,11 +2073,11 @@ class DDLCompiler(engine.Compiled):
         remote_table = list(constraint._elements.values())[0].column.table
         text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
             ', '.join(preparer.quote(f.parent.name, f.parent.quote)
-                      for f in list(constraint._elements.values())),
+                      for f in constraint._elements.values()),
             self.define_constraint_remote_table(
                             constraint, remote_table, preparer),
             ', '.join(preparer.quote(f.column.name, f.column.quote)
-                      for f in list(constraint._elements.values()))
+                      for f in constraint._elements.values())
         )
         text += self.define_constraint_match(constraint)
         text += self.define_constraint_cascades(constraint)
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index cfaacc4907bbbbacd6d2b5003cc1339f88240aa0..efc0103f219ddf16bdbf51889066354b5817d54a 100644
@@ -8,7 +8,7 @@ from .util import decorator
 from .. import event, pool
 import re
 import warnings
-
+from .. import util
 
 class ConnectionKiller(object):
 
@@ -37,12 +37,12 @@ class ConnectionKiller(object):
                     "rollback/close connection: %s" % e)
 
     def rollback_all(self):
-        for rec in list(self.proxy_refs.keys()):
+        for rec in list(self.proxy_refs):
             if rec is not None and rec.is_valid:
                 self._safe(rec.rollback)
 
     def close_all(self):
-        for rec in list(self.proxy_refs.keys()):
+        for rec in list(self.proxy_refs):
             if rec is not None:
                 self._safe(rec._close)
 
@@ -66,7 +66,7 @@ class ConnectionKiller(object):
 
         self.conns = set()
 
-        for rec in list(self.testing_engines.keys()):
+        for rec in list(self.testing_engines):
             if rec is not config.db:
                 rec.dispose()
 
@@ -75,7 +75,7 @@ class ConnectionKiller(object):
         for conn in self.conns:
             self._safe(conn.close)
         self.conns = set()
-        for rec in list(self.testing_engines.keys()):
+        for rec in list(self.testing_engines):
             rec.dispose()
 
     def assert_all_closed(self):
@@ -353,24 +353,22 @@ class ReplayableSession(object):
     Callable = object()
     NoAttribute = object()
 
-# start Py3K
-    Natives = set([getattr(types, t)
-                   for t in dir(types) if not t.startswith('_')]). \
-                   union([type(t) if not isinstance(t, type)
-                            else t for t in list(__builtins__.values())]).\
+    if util.py2k:
+        Natives = set([getattr(types, t)
+                   for t in dir(types) if not t.startswith('_')]).\
                    difference([getattr(types, t)
-                            for t in ('FunctionType', 'BuiltinFunctionType',
-                                      'MethodType', 'BuiltinMethodType',
-                                      'LambdaType', )])
-# end Py3K
-# start Py2K
-#    Natives = set([getattr(types, t)
-#                   for t in dir(types) if not t.startswith('_')]). \
-#                   difference([getattr(types, t)
-#                           for t in ('FunctionType', 'BuiltinFunctionType',
-#                                     'MethodType', 'BuiltinMethodType',
-#                                     'LambdaType', 'UnboundMethodType',)])
-# end Py2K
+                           for t in ('FunctionType', 'BuiltinFunctionType',
+                                     'MethodType', 'BuiltinMethodType',
+                                     'LambdaType', 'UnboundMethodType',)])
+    else:
+        Natives = set([getattr(types, t)
+                       for t in dir(types) if not t.startswith('_')]).\
+                       union([type(t) if not isinstance(t, type)
+                                else t for t in __builtins__.values()]).\
+                       difference([getattr(types, t)
+                                for t in ('FunctionType', 'BuiltinFunctionType',
+                                          'MethodType', 'BuiltinMethodType',
+                                          'LambdaType', )])
 
     def __init__(self):
         self.buffer = deque()
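
The testing/engines.py hunk above also replaces the old "# start Py2K"/"# end Py2K" comment blocks, previously switched by a source-preprocessing step, with an ordinary runtime check on a py2k flag pulled in by the new "from .. import util" at the top of the file. Roughly, such a flag amounts to a module-level boolean (a sketch of the idea, not the library's actual compat module):

    import sys

    py2k = sys.version_info < (3, 0)

    if py2k:
        # names that exist only on Python 2, e.g. types.UnboundMethodType,
        # can be referenced in this branch without breaking Python 3
        extra_types = ('UnboundMethodType',)
    else:
        extra_types = ()
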
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index 221c23c56b74d02080ccc98d5ab11db1b19f5aec..c0dd58650bc836ae328e550bcf2441d3a535e6ac 100644
@@ -67,7 +67,7 @@ class ComparableEntity(BasicEntity):
                 a = self
                 b = other
 
-            for attr in list(a.__dict__.keys()):
+            for attr in list(a.__dict__):
                 if attr.startswith('_'):
                     continue
                 value = getattr(a, attr)
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 6f3e87cc91d3565a09893b82296589ce9e27b194..025bbaabeb1becc96cbe46e7e712c2d5ec42855b 100644
@@ -11,7 +11,7 @@ table_options = {}
 def Table(*args, **kw):
     """A schema.Table wrapper/hook for dialect-specific tweaks."""
 
-    test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
+    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
                       if k.startswith('test_')])
 
     kw.update(table_options)
@@ -58,7 +58,7 @@ def Table(*args, **kw):
 def Column(*args, **kw):
     """A schema.Column wrapper/hook for dialect-specific tweaks."""
 
-    test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
+    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
                       if k.startswith('test_')])
 
     if not config.requirements.foreign_key_ddl.enabled:
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index fddedf4f7f74bde202579d966203747caf11a858..c10d19ea121d198b9954816b4588b22002abf9d7 100644
@@ -94,7 +94,7 @@ class KeyedTuple(tuple):
         .. versionadded:: 0.8
 
         """
-        return dict((key, self.__dict__[key]) for key in list(self.keys()))
+        return dict((key, self.__dict__[key]) for key in self.keys())
 
 
 class ImmutableContainer(object):
@@ -242,7 +242,7 @@ class OrderedDict(dict):
     def update(self, ____sequence=None, **kwargs):
         if ____sequence is not None:
             if hasattr(____sequence, 'keys'):
-                for key in list(____sequence.keys()):
+                for key in ____sequence.keys():
                     self.__setitem__(key, ____sequence[key])
             else:
                 for key, value in ____sequence:
@@ -273,7 +273,7 @@ class OrderedDict(dict):
         return iter(list(self.keys()))
 
     def items(self):
-        return [(key, self[key]) for key in list(self.keys())]
+        return [(key, self[key]) for key in self.keys()]
 
     def iteritems(self):
         return iter(list(self.items()))
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index 95a1fa3ae6345689bc4720e40159b9c7c6c8eb03..27e60410da1187e0a8cedc8f75be863fbe8c4047 100644
@@ -2,6 +2,7 @@ from sqlalchemy import *
 from sqlalchemy.testing import fixtures, AssertsExecutionResults, profiling
 from sqlalchemy import testing
 from sqlalchemy.testing import eq_
+from sqlalchemy.util import u
 NUM_FIELDS = 10
 NUM_RECORDS = 1000
 
@@ -19,10 +20,10 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     def setup(self):
         metadata.create_all()
-        t.insert().execute([dict(('field%d' % fnum, 'value%d' % fnum)
+        t.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
                            for fnum in range(NUM_FIELDS)) for r_num in
                            range(NUM_RECORDS)])
-        t2.insert().execute([dict(('field%d' % fnum, 'value%d' % fnum)
+        t2.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
                             for fnum in range(NUM_FIELDS)) for r_num in
                             range(NUM_RECORDS)])
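
The profiling fixture now routes its string values through u(), imported from sqlalchemy.util above, so the inserted data is a text (unicode) string on Python 2 as well as on Python 3 and the benchmark's result rows stay comparable across interpreters. A sketch of what such a helper does (an assumption about its behaviour, not the library's exact implementation):

    import sys

    def u(value):
        # return a unicode string on Python 2, a plain str on Python 3
        if sys.version_info < (3, 0):
            return value.decode('utf-8') if isinstance(value, bytes) else value
        return value

    u('value0')   # u'value0' on Python 2, 'value0' on Python 3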