git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff

- Removed 2.3 set emulations/enhancements.
  (sets.Set-based collections & DB-API returns still work.)

author     Jason Kirtland <jek@discorporate.us>
           Tue, 15 Jul 2008 19:23:52 +0000 (19:23 +0000)
committer  Jason Kirtland <jek@discorporate.us>
           Tue, 15 Jul 2008 19:23:52 +0000 (19:23 +0000)

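The net effect: every internal util.Set / util.FrozenSet reference becomes the builtin set / frozenset, while sets.Set values arriving from older DB-API drivers or user code are still recognized and normalized. A condensed sketch of the compatibility pattern that remains in lib/sqlalchemy/util.py after this commit (names are taken from the diff below; duck_type_collection is reduced here to its set-related branch purely for illustration):

    import sets  # still imported so 2.3-style sets.Set values from DB-API modules are recognized

    # the builtin set is now canonical; sets.Set is accepted as input only
    set_types = set, sets.Set

    EMPTY_SET = frozenset()

    def duck_type_collection(specimen, default=None):
        """Normalize a set-like specimen to the builtin set type (set branch only)."""
        if isinstance(specimen, set_types):
            return set
        return default

    # both spellings canonicalize to the same collection class
    assert duck_type_collection(set(['x'])) is set
    assert duck_type_collection(sets.Set(['x'])) is set
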
48 files changed:
lib/sqlalchemy/databases/firebird.py
lib/sqlalchemy/databases/maxdb.py
lib/sqlalchemy/databases/mssql.py
lib/sqlalchemy/databases/mysql.py
lib/sqlalchemy/databases/sqlite.py
lib/sqlalchemy/databases/sybase.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/dependency.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/expression.py
lib/sqlalchemy/sql/operators.py
lib/sqlalchemy/sql/util.py
lib/sqlalchemy/sql/visitors.py
lib/sqlalchemy/topological.py
lib/sqlalchemy/util.py
test/base/dependency.py
test/base/utils.py
test/engine/reflection.py
test/ext/declarative.py
test/orm/_base.py
test/orm/_fixtures.py
test/orm/collection.py
test/orm/generative.py
test/orm/mapper.py
test/orm/naturalpks.py
test/orm/relationships.py
test/orm/session.py
test/orm/unitofwork.py
test/sql/defaults.py
test/sql/generative.py
test/testlib/__init__.py
test/testlib/compat.py
test/testlib/engines.py
test/testlib/fixtures.py
test/testlib/orm.py
test/testlib/profiling.py
test/testlib/testing.py
test/zblog/blog.py

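For ORM users the visible contract is unchanged: the builtin set is instrumented through InstrumentedSet (see the collections.py hunk below), and, per the commit message, collections based on sets.Set keep working via duck typing. A minimal, hypothetical mapping sketch against the 0.5-era Table/mapper/relation API showing a set-valued relation; the table and class names are illustrative only:

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relation

    metadata = MetaData()
    users = Table('users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    addresses = Table('addresses', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('users.id')),
        Column('email', String(50)))

    class User(object):
        pass

    class Address(object):
        pass

    mapper(Address, addresses)
    mapper(User, users, properties={
        # the builtin set is instrumented via InstrumentedSet; a sets.Set-derived
        # collection class should still be accepted through duck_type_collection()
        'addresses': relation(Address, collection_class=set),
    })
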
index c7b64a6b26f73ef298e77e61a653df145bead259..aba834f64a76aab80ada8eb15f869eb1dcd0dde9 100644 (file)
@@ -747,7 +747,7 @@ class FBDefaultRunner(base.DefaultRunner):
             self.dialect.identifier_preparer.format_sequence(seq))
 
 
-RESERVED_WORDS = util.Set(
+RESERVED_WORDS = set(
     ["action", "active", "add", "admin", "after", "all", "alter", "and", "any",
      "as", "asc", "ascending", "at", "auto", "autoddl", "avg", "based", "basename",
      "base_name", "before", "begin", "between", "bigint", "blob", "blobedit", "buffer",
index b3f232a17c1392250d0deabaae0569610fa96c04..c9ea2b57923a7f54b4b708003911a80b2222c668 100644 (file)
@@ -602,7 +602,7 @@ class MaxDBDialect(default.DefaultDialect):
         if not rows:
             raise exc.NoSuchTableError(table.fullname)
 
-        include_columns = util.Set(include_columns or [])
+        include_columns = set(include_columns or [])
 
         for row in rows:
             (name, mode, col_type, encoding, length, scale,
@@ -666,7 +666,7 @@ class MaxDBDialect(default.DefaultDialect):
         for fkeyname, fkey in fk_sets:
             fkey = list(fkey)
             if include_columns:
-                key_cols = util.Set([r.COLUMNNAME for r in fkey])
+                key_cols = set([r.COLUMNNAME for r in fkey])
                 if key_cols != include_columns:
                     continue
 
@@ -732,7 +732,7 @@ class MaxDBCompiler(compiler.DefaultCompiler):
 
     # These functions must be written without parens when called with no
     # parameters.  e.g. 'SELECT DATE FROM DUAL' not 'SELECT DATE() FROM DUAL'
-    bare_functions = util.Set([
+    bare_functions = set([
         'CURRENT_SCHEMA', 'DATE', 'FALSE', 'SYSDBA', 'TIME', 'TIMESTAMP',
         'TIMEZONE', 'TRANSACTION', 'TRUE', 'USER', 'UID', 'USERGROUP',
         'UTCDATE', 'UTCDIFF'])
@@ -893,7 +893,7 @@ class MaxDBDefaultRunner(engine_base.DefaultRunner):
 
 
 class MaxDBIdentifierPreparer(compiler.IdentifierPreparer):
-    reserved_words = util.Set([
+    reserved_words = set([
         'abs', 'absolute', 'acos', 'adddate', 'addtime', 'all', 'alpha',
         'alter', 'any', 'ascii', 'asin', 'atan', 'atan2', 'avg', 'binary',
         'bit', 'boolean', 'byte', 'case', 'ceil', 'ceiling', 'char',
index ce9ca441e6b79076f01809bbbef4f490a7f87b3e..1aea72a9640463bb5432355d302f6ddde189c5cf 100644 (file)
@@ -47,7 +47,7 @@ from sqlalchemy import types as sqltypes
 from sqlalchemy.util import Decimal as _python_Decimal
 
 
-MSSQL_RESERVED_WORDS = util.Set(['function'])
+MSSQL_RESERVED_WORDS = set(['function'])
 
 class MSNumeric(sqltypes.Numeric):
     def result_processor(self, dialect):
index f40fa71c166606756d732ebb1b7652973b6460b7..704abbe6d48db8ab467795be8e8c963e03cfc9f2 100644 (file)
@@ -175,7 +175,7 @@ __all__ = (
     'MSTinyText', 'MSVarBinary', 'MSYear' )
 
 
-RESERVED_WORDS = util.Set(
+RESERVED_WORDS = set(
     ['accessible', 'add', 'all', 'alter', 'analyze','and', 'as', 'asc',
      'asensitive', 'before', 'between', 'bigint', 'binary', 'blob', 'both',
      'by', 'call', 'cascade', 'case', 'change', 'char', 'character', 'check',
@@ -1265,12 +1265,12 @@ class MSSet(MSString):
                 if not value:
                     value.add('')
                 # ..some return sets.Set, even for pythons that have __builtin__.set
-                if not isinstance(value, util.Set):
-                    value = util.Set(value)
+                if not isinstance(value, set):
+                    value = set(value)
                 return value
             # ...and some versions return strings
             if value is not None:
-                return util.Set(value.split(','))
+                return set(value.split(','))
             else:
                 return value
         return process
@@ -1282,7 +1282,7 @@ class MSSet(MSString):
                 pass
             else:
                 if None in value:
-                    value = util.Set(value)
+                    value = set(value)
                     value.remove(None)
                     value.add('')
                 value = ','.join(value)
@@ -1984,7 +1984,7 @@ class MySQLCompiler(compiler.DefaultCompiler):
             return ' \n LIMIT %s' % (limit,)
 
     def visit_update(self, update_stmt):
-        self.stack.append({'from':util.Set([update_stmt.table])})
+        self.stack.append({'from': set([update_stmt.table])})
 
         self.isupdate = True
         colparams = self._get_colparams(update_stmt)
@@ -2104,7 +2104,7 @@ class MySQLSchemaReflector(object):
         keys, constraints = [], []
 
         if only:
-            only = util.Set(only)
+            only = set(only)
 
         for line in re.split(r'\r?\n', show_create):
             if line.startswith('  ' + self.preparer.initial_quote):
@@ -2244,7 +2244,7 @@ class MySQLSchemaReflector(object):
             flavor = spec['type']
             col_names = [s[0] for s in spec['columns']]
 
-            if only and not util.Set(col_names).issubset(only):
+            if only and not set(col_names).issubset(only):
                 if flavor is None:
                     flavor = 'index'
                 self.logger.info(
@@ -2280,7 +2280,7 @@ class MySQLSchemaReflector(object):
             ref_schema = len(spec['table']) > 1 and spec['table'][-2] or None
 
             loc_names = spec['local']
-            if only and not util.Set(loc_names).issubset(only):
+            if only and not set(loc_names).issubset(only):
                 self.logger.info(
                     "Omitting FOREIGN KEY for (%s), key covers ommitted "
                     "columns." % (', '.join(loc_names)))
@@ -2295,8 +2295,8 @@ class MySQLSchemaReflector(object):
                     autoload=True, autoload_with=connection)
 
             ref_names = spec['foreign']
-            if not util.Set(ref_names).issubset(
-                util.Set([c.name for c in ref_table.c])):
+            if not set(ref_names).issubset(
+                set(c.name for c in ref_table.c)):
                 raise exc.InvalidRequestError(
                     "Foreign key columns (%s) are not present on "
                     "foreign table %s" %
index 26f1ecb911615d3c78365d5314800146e8b9c0ca..b6e362ef2a02484da7a6fee75353162f03e1d9e6 100644 (file)
@@ -475,7 +475,7 @@ class SQLiteSchemaDropper(compiler.SchemaDropper):
     pass
 
 class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
-    reserved_words = util.Set([
+    reserved_words = set([
         'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
         'attach', 'autoincrement', 'before', 'begin', 'between', 'by',
         'cascade', 'case', 'cast', 'check', 'collate', 'column', 'commit',
index fd5b875a24d3009c5d84cd19dae6e273318b3891..aea77f8bfe647bed85561afa7e1e16b1810a41e3 100644 (file)
@@ -46,7 +46,7 @@ __all__ = [
     ]
 
 
-RESERVED_WORDS = util.Set([
+RESERVED_WORDS = set([
     "add", "all", "alter", "and",
     "any", "as", "asc", "backup",
     "begin", "between", "bigint", "binary",
index b16004511bbf5703b9f313b522b9eceaf6fcd37a..5415bf98857c076902c9371274678d7ccdb77574 100644 (file)
@@ -229,7 +229,7 @@ class AssociationProxy(object):
             return _AssociationList(lazy_collection, creator, getter, setter)
         elif self.collection_class is dict:
             return _AssociationDict(lazy_collection, creator, getter, setter)
-        elif self.collection_class is util.Set:
+        elif self.collection_class is set:
             return _AssociationSet(lazy_collection, creator, getter, setter)
         else:
             raise exceptions.ArgumentError(
@@ -245,7 +245,7 @@ class AssociationProxy(object):
             proxy.extend(values)
         elif self.collection_class is dict:
             proxy.update(values)
-        elif self.collection_class is util.Set:
+        elif self.collection_class is set:
             proxy.update(values)
         else:
             raise exceptions.ArgumentError(
@@ -760,15 +760,15 @@ class _AssociationSet(object):
         return self
 
     def _set(self):
-        return util.Set(iter(self))
+        return set(iter(self))
 
     def union(self, other):
-        return util.Set(self).union(other)
+        return set(self).union(other)
 
     __or__ = union
 
     def difference(self, other):
-        return util.Set(self).difference(other)
+        return set(self).difference(other)
 
     __sub__ = difference
 
@@ -784,12 +784,12 @@ class _AssociationSet(object):
         return self
 
     def intersection(self, other):
-        return util.Set(self).intersection(other)
+        return set(self).intersection(other)
 
     __and__ = intersection
 
     def intersection_update(self, other):
-        want, have = self.intersection(other), util.Set(self)
+        want, have = self.intersection(other), set(self)
 
         remove, add = have - want, want - have
 
@@ -801,7 +801,7 @@ class _AssociationSet(object):
     def __iand__(self, other):
         if not collections._set_binops_check_strict(self, other):
             return NotImplemented
-        want, have = self.intersection(other), util.Set(self)
+        want, have = self.intersection(other), set(self)
 
         remove, add = have - want, want - have
 
@@ -812,12 +812,12 @@ class _AssociationSet(object):
         return self
 
     def symmetric_difference(self, other):
-        return util.Set(self).symmetric_difference(other)
+        return set(self).symmetric_difference(other)
 
     __xor__ = symmetric_difference
 
     def symmetric_difference_update(self, other):
-        want, have = self.symmetric_difference(other), util.Set(self)
+        want, have = self.symmetric_difference(other), set(self)
 
         remove, add = have - want, want - have
 
@@ -829,7 +829,7 @@ class _AssociationSet(object):
     def __ixor__(self, other):
         if not collections._set_binops_check_strict(self, other):
             return NotImplemented
-        want, have = self.symmetric_difference(other), util.Set(self)
+        want, have = self.symmetric_difference(other), set(self)
 
         remove, add = have - want, want - have
 
@@ -840,43 +840,43 @@ class _AssociationSet(object):
         return self
 
     def issubset(self, other):
-        return util.Set(self).issubset(other)
+        return set(self).issubset(other)
 
     def issuperset(self, other):
-        return util.Set(self).issuperset(other)
+        return set(self).issuperset(other)
 
     def clear(self):
         self.col.clear()
 
     def copy(self):
-        return util.Set(self)
+        return set(self)
 
     def __eq__(self, other):
-        return util.Set(self) == other
+        return set(self) == other
 
     def __ne__(self, other):
-        return util.Set(self) != other
+        return set(self) != other
 
     def __lt__(self, other):
-        return util.Set(self) < other
+        return set(self) < other
 
     def __le__(self, other):
-        return util.Set(self) <= other
+        return set(self) <= other
 
     def __gt__(self, other):
-        return util.Set(self) > other
+        return set(self) > other
 
     def __ge__(self, other):
-        return util.Set(self) >= other
+        return set(self) >= other
 
     def __repr__(self):
-        return repr(util.Set(self))
+        return repr(set(self))
 
     def __hash__(self):
         raise TypeError("%s objects are unhashable" % type(self).__name__)
 
     for func_name, func in locals().items():
         if (callable(func) and func.func_name == func_name and
-            not func.__doc__ and hasattr(util.Set, func_name)):
-            func.__doc__ = getattr(util.Set, func_name).__doc__
+            not func.__doc__ and hasattr(set, func_name)):
+            func.__doc__ = getattr(set, func_name).__doc__
     del func_name, func
index 9009e4c4f6384c587aa208054408cdf509e51cc1..85a842c08a1c0ea804a35bf27dee92139c7be0a3 100644 (file)
@@ -877,7 +877,7 @@ class InstanceState(object):
     def unmodified(self):
         """a set of keys which have no uncommitted changes"""
 
-        return util.Set(
+        return set(
             key for key in self.manager.keys()
             if (key not in self.committed_state or
                 (key in self.manager.mutable_attributes and
@@ -892,14 +892,14 @@ class InstanceState(object):
         was never populated or modified.
         
         """
-        return util.Set(
+        return set(
             key for key in self.manager.keys()
             if key not in self.committed_state and key not in self.dict)
 
     unloaded = property(unloaded)
 
     def expire_attributes(self, attribute_names):
-        self.expired_attributes = util.Set(self.expired_attributes)
+        self.expired_attributes = set(self.expired_attributes)
 
         if attribute_names is None:
             attribute_names = self.manager.keys()
@@ -1031,7 +1031,7 @@ class ClassManager(dict):
         self.factory = None  # where we came from, for inheritance bookkeeping
         self.info = {}
         self.mappers = {}
-        self.mutable_attributes = util.Set()
+        self.mutable_attributes = set()
         self.local_attrs = {}
         self.originals = {}
         for base in class_.__mro__[-2:0:-1]:   # reverse, skipping 1st and last
@@ -1594,7 +1594,7 @@ def collect_management_factories_for(cls):
 
     """
     hierarchy = util.class_hierarchy(cls)
-    factories = util.Set()
+    factories = set()
     for member in hierarchy:
         manager = manager_of_class(member)
         if manager is not None:
index aed206957ce742de0a61085e092d449e59a19896..98ce70c204b2c190512d09d883f440a76261cb09 100644 (file)
@@ -105,7 +105,7 @@ import weakref
 import sqlalchemy.exceptions as sa_exc
 from sqlalchemy import schema
 import sqlalchemy.util as sautil
-from sqlalchemy.util import attrgetter, Set
+from sqlalchemy.util import attrgetter
 
 
 __all__ = ['collection', 'collection_adapter',
@@ -1127,10 +1127,7 @@ def _dict_decorators():
     return l
 
 
-try:
-    _set_binop_bases = (set, frozenset, sets.BaseSet)
-except NameError:
-    _set_binop_bases = (sets.BaseSet,)
+_set_binop_bases = (set, frozenset, sets.BaseSet)
 
 def _set_binops_check_strict(self, obj):
     """Allow only set, frozenset and self.__class__-derived objects in binops."""
@@ -1139,7 +1136,7 @@ def _set_binops_check_strict(self, obj):
 def _set_binops_check_loose(self, obj):
     """Allow anything set-like to participate in set binops."""
     return (isinstance(obj, _set_binop_bases + (self.__class__,)) or
-            sautil.duck_type_collection(obj) == sautil.Set)
+            sautil.duck_type_collection(obj) == set)
 
 
 def _set_decorators():
@@ -1147,7 +1144,7 @@ def _set_decorators():
 
     def _tidy(fn):
         setattr(fn, '_sa_instrumented', True)
-        fn.__doc__ = getattr(getattr(Set, fn.__name__), '__doc__')
+        fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
 
     Unspecified = sautil.symbol('Unspecified')
 
@@ -1240,7 +1237,7 @@ def _set_decorators():
 
     def intersection_update(fn):
         def intersection_update(self, other):
-            want, have = self.intersection(other), Set(self)
+            want, have = self.intersection(other), set(self)
             remove, add = have - want, want - have
 
             for item in remove:
@@ -1254,7 +1251,7 @@ def _set_decorators():
         def __iand__(self, other):
             if not _set_binops_check_strict(self, other):
                 return NotImplemented
-            want, have = self.intersection(other), Set(self)
+            want, have = self.intersection(other), set(self)
             remove, add = have - want, want - have
 
             for item in remove:
@@ -1267,7 +1264,7 @@ def _set_decorators():
 
     def symmetric_difference_update(fn):
         def symmetric_difference_update(self, other):
-            want, have = self.symmetric_difference(other), Set(self)
+            want, have = self.symmetric_difference(other), set(self)
             remove, add = have - want, want - have
 
             for item in remove:
@@ -1281,7 +1278,7 @@ def _set_decorators():
         def __ixor__(self, other):
             if not _set_binops_check_strict(self, other):
                 return NotImplemented
-            want, have = self.symmetric_difference(other), Set(self)
+            want, have = self.symmetric_difference(other), set(self)
             remove, add = have - want, want - have
 
             for item in remove:
@@ -1306,8 +1303,8 @@ class InstrumentedList(list):
        'remover': 'remove',
        'iterator': '__iter__', }
 
-class InstrumentedSet(Set):
-    """An instrumented version of the built-in set (or Set)."""
+class InstrumentedSet(set):
+    """An instrumented version of the built-in set."""
 
     __instrumentation__ = {
        'appender': 'add',
@@ -1322,7 +1319,7 @@ class InstrumentedDict(dict):
 
 __canned_instrumentation = {
     list: InstrumentedList,
-    Set: InstrumentedSet,
+    set: InstrumentedSet,
     dict: InstrumentedDict,
     }
 
@@ -1331,7 +1328,7 @@ __interfaces = {
             'remover':  'remove',
             'iterator': '__iter__',
             '_decorators': _list_decorators(), },
-    Set: { 'appender': 'add',
+    set: { 'appender': 'add',
            'remover': 'remove',
            'iterator': '__iter__',
            '_decorators': _set_decorators(), },
index 54208484b53112e59cf246309d691efd89e9961e..b14dd991ae2a321441a2dcf1a739c764b03700f9 100644 (file)
@@ -279,7 +279,7 @@ class DetectKeySwitch(DependencyProcessor):
             self._process_key_switches(deplist, uowcommit)
 
     def _process_key_switches(self, deplist, uowcommit):
-        switchers = util.Set(s for s in deplist if self._pks_changed(uowcommit, s))
+        switchers = set(s for s in deplist if self._pks_changed(uowcommit, s))
         if switchers:
             # yes, we're doing a linear search right now through the UOW.  only
             # takes effect when primary key values have actually changed.
index 2fbf3eac44da7d10db6df5756477c428f27e13d4..03955afa8a50dbce9718c59f28aa9936e5703e98 100644 (file)
@@ -1,17 +1,17 @@
+import operator
 from sqlalchemy.sql import operators, functions
 from sqlalchemy.sql import expression as sql
-from sqlalchemy.util import Set
-import operator
+
 
 class UnevaluatableError(Exception):
     pass
 
-_straight_ops = Set(getattr(operators, op)
+_straight_ops = set(getattr(operators, op)
                     for op in ('add', 'mul', 'sub', 'div', 'mod', 'truediv',
                                'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
 
 
-_notimplemented_ops = Set(getattr(operators, op)
+_notimplemented_ops = set(getattr(operators, op)
                           for op in ('like_op', 'notlike_op', 'ilike_op',
                                      'notilike_op', 'between_op', 'in_op',
                                      'notin_op', 'endswith_op', 'concat_op'))
index 97bf6644f047594012d9d5f1bb2bc3e90850d131..aec4e1dff53d3eaeae7ef9ea285a0a397058a268 100644 (file)
@@ -185,7 +185,7 @@ class Mapper(object):
         self.exclude_properties = exclude_properties
 
         # a set of all mappers which inherit from this one.
-        self._inheriting_mappers = util.Set()
+        self._inheriting_mappers = set()
 
         self.compiled = False
 
@@ -258,7 +258,7 @@ class Mapper(object):
             mappers = []
 
         if selectable:
-            tables = util.Set(sqlutil.find_tables(selectable, include_aliases=True))
+            tables = set(sqlutil.find_tables(selectable, include_aliases=True))
             mappers = [m for m in mappers if m.local_table in tables]
 
         return mappers
@@ -487,7 +487,7 @@ class Mapper(object):
                         # TODO: this exception not covered
                         raise sa_exc.ArgumentError("Mapper '%s' specifies a polymorphic_identity of '%s', but no mapper in it's hierarchy specifies the 'polymorphic_on' column argument" % (str(self), self.polymorphic_identity))
         else:
-            self._all_tables = util.Set()
+            self._all_tables = set()
             self.base_mapper = self
             self.mapped_table = self.local_table
             if self.polymorphic_identity:
@@ -509,11 +509,11 @@ class Mapper(object):
         self._pks_by_table = {}
         self._cols_by_table = {}
 
-        all_cols = util.Set(chain(*[col.proxy_set for col in self._columntoproperty]))
-        pk_cols = util.Set(c for c in all_cols if c.primary_key)
+        all_cols = set(chain(*[col.proxy_set for col in self._columntoproperty]))
+        pk_cols = set(c for c in all_cols if c.primary_key)
 
         # identify primary key columns which are also mapped by this mapper.
-        for t in util.Set(self.tables + [self.mapped_table]):
+        for t in set(self.tables + [self.mapped_table]):
             self._all_tables.add(t)
             if t.primary_key and pk_cols.issuperset(t.primary_key):
                 # ordering is important since it determines the ordering of mapper.primary_key (and therefore query.get())
@@ -522,7 +522,7 @@ class Mapper(object):
 
         # determine cols that aren't expressed within our tables; mark these
         # as "read only" properties which are refreshed upon INSERT/UPDATE
-        self._readonly_props = util.Set(
+        self._readonly_props = set(
             self._columntoproperty[col]
             for col in self._columntoproperty
             if not hasattr(col, 'table') or col.table not in self._cols_by_table)
@@ -593,11 +593,11 @@ class Mapper(object):
                 if binary.left in result:
                     result[binary.left].add(binary.right)
                 else:
-                    result[binary.left] = util.Set([binary.right])
+                    result[binary.left] = set((binary.right,))
                 if binary.right in result:
                     result[binary.right].add(binary.left)
                 else:
-                    result[binary.right] = util.Set([binary.left])
+                    result[binary.right] = set((binary.left,))
         for mapper in self.base_mapper.polymorphic_iterator():
             if mapper.inherit_condition:
                 visitors.traverse(mapper.inherit_condition, {}, {'binary':visit_binary})
@@ -920,7 +920,7 @@ class Mapper(object):
         if adapter:
             pk_cols = [adapter.columns[c] for c in pk_cols]
 
-        return (self._identity_class, tuple([row[column] for column in pk_cols]), self.entity_name)
+        return (self._identity_class, tuple(row[column] for column in pk_cols), self.entity_name)
 
     def identity_key_from_primary_key(self, primary_key):
         """Return an identity-map key for use in storing/retrieving an
@@ -1034,8 +1034,8 @@ class Mapper(object):
                     self.__log_debug("detected row switch for identity %s.  will update %s, remove %s from transaction" % (instance_key, state_str(state), state_str(existing)))
                 uowtransaction.set_row_switch(existing)
 
-        inserted_objects = util.Set()
-        updated_objects = util.Set()
+        inserted_objects = set()
+        updated_objects = set()
 
         table_to_mapper = {}
         for mapper in self.base_mapper.polymorphic_iterator():
@@ -1186,9 +1186,9 @@ class Mapper(object):
             for state, mapper, connection, has_identity in tups:
                 
                 # expire readonly attributes
-                readonly = state.unmodified.intersection([
+                readonly = state.unmodified.intersection(
                     p.key for p in mapper._readonly_props
-                ])
+                )
                 
                 if readonly:
                     _expire_state(state, readonly)
@@ -1370,7 +1370,7 @@ class Mapper(object):
 
         identity_class, entity_name = self._identity_class, self.entity_name
         def identity_key(row):
-            return (identity_class, tuple([row[column] for column in pk_cols]), entity_name)
+            return (identity_class, tuple(row[column] for column in pk_cols), entity_name)
 
         new_populators = []
         existing_populators = []
@@ -1544,7 +1544,7 @@ class Mapper(object):
 
     def _optimized_get_statement(self, state, attribute_names):
         props = self.__props
-        tables = util.Set([props[key].parent.local_table for key in attribute_names])
+        tables = set(props[key].parent.local_table for key in attribute_names)
         if self.base_mapper.local_table in tables:
             return None
 
index 76bcde0dc94e99cca251892491c827c61f1a0a34..46c95109fb844a7acaf91ea653bd15f9ed729ce6 100644 (file)
@@ -638,7 +638,7 @@ class PropertyLoader(StrategizedProperty):
         else:
             self.secondary_synchronize_pairs = None
 
-        self._foreign_keys = util.Set(r for l, r in self.synchronize_pairs)
+        self._foreign_keys = set(r for l, r in self.synchronize_pairs)
         if self.secondary_synchronize_pairs:
             self._foreign_keys.update(r for l, r in self.secondary_synchronize_pairs)
 
index 6bc1ce7feada18a94e0ab87049b8607bb82ff6e5..f1bd9eb83b7431810015b0a1f68b1922e06ec0de 100644 (file)
@@ -72,7 +72,7 @@ class Query(object):
         self._params = {}
         self._yield_per = None
         self._criterion = None
-        self._correlate = util.Set()
+        self._correlate = set()
         self._joinpoint = None
         self._with_labels = False
         self.__joinable_tables = None
@@ -89,7 +89,7 @@ class Query(object):
         self._polymorphic_adapters = {}
         self._filter_aliases = None
         self._from_obj_alias = None
-        self.__currenttables = util.Set()
+        self.__currenttables = set()
 
         for ent in util.to_list(entities):
             _QueryEntity(self, ent, entity_name=entity_name)
@@ -304,7 +304,7 @@ class Query(object):
         if refresh_state:
             self._refresh_state = refresh_state
         if only_load_props:
-            self._only_load_props = util.Set(only_load_props)
+            self._only_load_props = set(only_load_props)
         return self
 
     def _clone(self):
@@ -757,7 +757,7 @@ class Query(object):
     outerjoin = util.array_as_starargs_decorator(outerjoin)
 
     def __join(self, keys, outerjoin, create_aliases, from_joinpoint):
-        self.__currenttables = util.Set(self.__currenttables)
+        self.__currenttables = set(self.__currenttables)
         self._polymorphic_adapters = self._polymorphic_adapters.copy()
 
         if not from_joinpoint:
@@ -1085,7 +1085,7 @@ class Query(object):
             rowtuple.keys = labels.keys
         
         while True:
-            context.progress = util.Set()
+            context.progress = set()
             context.partials = {}
 
             if self._yield_per:
@@ -1316,7 +1316,7 @@ class Query(object):
                     state.commit(list(to_evaluate))
                     
                     # expire attributes with pending changes (there was no autoflush, so they are overwritten)
-                    state.expire_attributes(util.Set(evaluated_keys).difference(to_evaluate))
+                    state.expire_attributes(set(evaluated_keys).difference(to_evaluate))
                     
         elif synchronize_session == 'expire':
             target_mapper = self._mapper_zero()
@@ -1629,7 +1629,7 @@ class _ColumnEntity(_QueryEntity):
 
         self.column = column
         self.entity_name = None
-        self.froms = util.Set()
+        self.froms = set()
         self.entities = util.OrderedSet(
             elem._annotations['parententity']
             for elem in visitors.iterate(column, {})
index 0325b5eaeb75df11b602d5c3a06217e1927b9e5d..1f4e44e1507d1af86f8cb630cd1a3f046d654c7d 100644 (file)
@@ -278,12 +278,12 @@ class SessionTransaction(object):
     def _restore_snapshot(self):
         assert self._is_transaction_boundary
 
-        for s in util.Set(self._deleted).union(self.session._deleted):
+        for s in set(self._deleted).union(self.session._deleted):
             self.session._update_impl(s)
 
         assert not self.session._deleted
 
-        for s in util.Set(self._new).union(self.session._new):
+        for s in set(self._new).union(self.session._new):
             self.session._expunge_state(s)
 
         for s in self.session.identity_map.all_states():
@@ -351,7 +351,7 @@ class SessionTransaction(object):
 
         if self._parent is None and self.session.twophase:
             try:
-                for t in util.Set(self._connections.values()):
+                for t in set(self._connections.values()):
                     t[1].prepare()
             except:
                 self.rollback()
@@ -366,7 +366,7 @@ class SessionTransaction(object):
             self._prepare_impl()
 
         if self._parent is None or self.nested:
-            for t in util.Set(self._connections.values()):
+            for t in set(self._connections.values()):
                 t[1].commit()
 
             if self.session.extension is not None:
@@ -398,7 +398,7 @@ class SessionTransaction(object):
         return self._parent
 
     def _rollback_impl(self):
-        for t in util.Set(self._connections.values()):
+        for t in set(self._connections.values()):
             t[1].rollback()
 
         self._restore_snapshot()
@@ -412,7 +412,7 @@ class SessionTransaction(object):
     def close(self):
         self.session.transaction = self._parent
         if self._parent is None:
-            for connection, transaction, autoclose in util.Set(self._connections.values()):
+            for connection, transaction, autoclose in set(self._connections.values()):
                 if autoclose:
                     connection.close()
                 else:
@@ -1339,10 +1339,10 @@ class Session(object):
             self.identity_map.modified = False
             return
 
-        deleted = util.Set(self._deleted)
-        new = util.Set(self._new)
+        deleted = set(self._deleted)
+        new = set(self._new)
 
-        dirty = util.Set(dirty).difference(deleted)
+        dirty = set(dirty).difference(deleted)
 
         flush_context = UOWTransaction(self)
 
@@ -1352,7 +1352,7 @@ class Session(object):
         # create the set of all objects we want to operate upon
         if objects:
             # specific list passed in
-            objset = util.Set()
+            objset = set()
             for o in objects:
                 try:
                     state = attributes.instance_state(o)
@@ -1361,10 +1361,10 @@ class Session(object):
                 objset.add(state)
         else:
             # or just everything
-            objset = util.Set(self.identity_map.all_states()).union(new)
+            objset = set(self.identity_map.all_states()).union(new)
 
         # store objects whose fate has been decided
-        processed = util.Set()
+        processed = set()
 
         # put all saves/updates into the flush context.  detect top-level
         # orphans and throw them into deleted.
index ec3275f0673017491558b90606c10a36e3307018..9a008e5cb1b80c3f2ed18adfd209279620725bd2 100644 (file)
@@ -106,7 +106,7 @@ class UOWTransaction(object):
         
         # stores tuples of mapper/dependent mapper pairs,
         # representing a partial ordering fed into topological sort
-        self.dependencies = util.Set()
+        self.dependencies = set()
         
         # dictionary of mappers to UOWTasks
         self.tasks = {}
@@ -344,8 +344,8 @@ class UOWTask(object):
         self._objects = {} 
 
         self.dependent_tasks = []
-        self.dependencies = util.Set()
-        self.cyclical_dependencies = util.Set()
+        self.dependencies = set()
+        self.cyclical_dependencies = set()
 
     def polymorphic_tasks(self):
         """return an iterator of UOWTask objects corresponding to the inheritance sequence
@@ -403,7 +403,7 @@ class UOWTask(object):
         # postupdates are UPDATED immeditely (for now)
         # convert post_update_cols list to a Set so that __hash__() is used to compare columns
         # instead of __eq__()
-        self.mapper._save_obj([state], self.uowtransaction, postupdate=True, post_update_cols=util.Set(post_update_cols))
+        self.mapper._save_obj([state], self.uowtransaction, postupdate=True, post_update_cols=set(post_update_cols))
 
     def __contains__(self, state):
         """return True if the given object is contained within this UOWTask or inheriting tasks."""
@@ -483,7 +483,7 @@ class UOWTask(object):
             allobjects += [e.state for e in task.polymorphic_elements]
         tuples = []
 
-        cycles = util.Set(cycles)
+        cycles = set(cycles)
 
         extradeplist = []
         dependencies = {}
@@ -572,7 +572,7 @@ class UOWTask(object):
 
         head = topological.sort_as_tree(tuples, allobjects)
         
-        used_tasks = util.Set()
+        used_tasks = set()
         def make_task_tree(node, parenttask, nexttasks):
             (state, cycles, children) = node
             originating_task = object_to_original_task[state]
index e600694e0588bdc6d4e0e05898d9788d0e7e4cb1..09990615ab15f25df4114b2c2175f30b70efc61f 100644 (file)
@@ -12,9 +12,9 @@ from sqlalchemy.sql import expression, util as sql_util, operators
 from sqlalchemy.orm.interfaces import MapperExtension, EXT_CONTINUE, PropComparator, MapperProperty
 from sqlalchemy.orm import attributes, exc
 
-all_cascades = util.FrozenSet(("delete", "delete-orphan", "all", "merge",
-                               "expunge", "save-update", "refresh-expire",
-                               "none"))
+all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
+                          "expunge", "save-update", "refresh-expire",
+                          "none"))
 
 _INSTRUMENTOR = ('mapper', 'instrumentor')
 
@@ -22,7 +22,7 @@ class CascadeOptions(object):
     """Keeps track of the options sent to relation().cascade"""
 
     def __init__(self, arg=""):
-        values = util.Set(c.strip() for c in arg.split(','))
+        values = set(c.strip() for c in arg.split(','))
         self.delete_orphan = "delete-orphan" in values
         self.delete = "delete" in values or "all" in values
         self.save_update = "save-update" in values or "all" in values
@@ -49,7 +49,7 @@ def polymorphic_union(table_map, typecolname, aliasname='p_union'):
     this is used.
     """
 
-    colnames = util.Set()
+    colnames = set()
     colnamemaps = {}
     types = {}
     for key in table_map.keys():
@@ -161,8 +161,8 @@ class ExtensionCarrier(object):
 
     """
 
-    interface = util.Set(method for method in dir(MapperExtension)
-                         if not method.startswith('_'))
+    interface = set(method for method in dir(MapperExtension)
+                    if not method.startswith('_'))
 
     def __init__(self, extensions=None):
         self.methods = {}
index ac5c790097598cf5ae61f44a127cec19a1cf2aac..5742303d6da473e0ab372afd57c950db6bf92c7c 100644 (file)
@@ -205,8 +205,8 @@ class Table(SchemaItem, expression.TableClause):
         super(Table, self).__init__(name)
         self.metadata = metadata
         self.schema = kwargs.pop('schema', kwargs.pop('owner', None))
-        self.indexes = util.Set()
-        self.constraints = util.Set()
+        self.indexes = set()
+        self.constraints = set()
         self._columns = expression.ColumnCollection()
         self.primary_key = PrimaryKeyConstraint()
         self._foreign_keys = util.OrderedSet()
@@ -277,7 +277,7 @@ class Table(SchemaItem, expression.TableClause):
         True if any of them would be disallowed if sent to an existing
         Table singleton.
         """
-        return bool(args) or bool(util.Set(kwargs).difference(
+        return bool(args) or bool(set(kwargs).difference(
             ['autoload', 'autoload_with', 'schema', 'owner']))
 
     def __extra_kwargs(self, **kwargs):
@@ -569,7 +569,7 @@ class Column(SchemaItem, expression._ColumnClause):
         self.quote = kwargs.pop('quote', None)
         self.onupdate = kwargs.pop('onupdate', None)
         self.autoincrement = kwargs.pop('autoincrement', True)
-        self.constraints = util.Set()
+        self.constraints = set()
         self.foreign_keys = util.OrderedSet()
         util.set_creation_order(self)
 
@@ -1541,7 +1541,7 @@ class MetaData(SchemaItem):
         if tables is None:
             tables = self.tables.values()
         else:
-            tables = util.Set(tables).intersection(self.tables.values())
+            tables = set(tables).intersection(self.tables.values())
         return iter(sort_tables(tables, reverse=reverse))
 
     def reflect(self, bind=None, schema=None, only=None):
@@ -1588,7 +1588,7 @@ class MetaData(SchemaItem):
 
         available = util.OrderedSet(bind.engine.table_names(schema,
                                                             connection=conn))
-        current = util.Set(self.tables.keys())
+        current = set(self.tables.keys())
 
         if only is None:
             load = [name for name in available if name not in current]
index f21badf219a8c11655f186c8ff73f4e6ad1b1b28..044e5d5fe6d76d00fc3c55ede069a58a6417eab7 100644 (file)
@@ -23,7 +23,7 @@ from sqlalchemy import schema, engine, util, exc
 from sqlalchemy.sql import operators, functions
 from sqlalchemy.sql import expression as sql
 
-RESERVED_WORDS = util.Set([
+RESERVED_WORDS = set([
     'all', 'analyse', 'analyze', 'and', 'any', 'array',
     'as', 'asc', 'asymmetric', 'authorization', 'between',
     'binary', 'both', 'case', 'cast', 'check', 'collate',
@@ -491,7 +491,7 @@ class DefaultCompiler(engine.Compiled):
 
         froms = select._get_display_froms(existingfroms)
 
-        correlate_froms = util.Set(sql._from_objects(*froms))
+        correlate_froms = set(sql._from_objects(*froms))
 
         # TODO: might want to propigate existing froms for select(select(select))
         # where innermost select should correlate to outermost
@@ -610,7 +610,7 @@ class DefaultCompiler(engine.Compiled):
                  ', '.join(c[1] for c in colparams)))
 
     def visit_update(self, update_stmt):
-        self.stack.append({'from':util.Set([update_stmt.table])})
+        self.stack.append({'from': set([update_stmt.table])})
 
         self.isupdate = True
         colparams = self._get_colparams(update_stmt)
@@ -716,7 +716,7 @@ class DefaultCompiler(engine.Compiled):
         return values
 
     def visit_delete(self, delete_stmt):
-        self.stack.append({'from':util.Set([delete_stmt.table])})
+        self.stack.append({'from': set([delete_stmt.table])})
         self.isdelete = True
 
         text = "DELETE FROM " + self.preparer.format_table(delete_stmt.table)
@@ -770,7 +770,7 @@ class SchemaGenerator(DDLBase):
     def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
         super(SchemaGenerator, self).__init__(connection, **kwargs)
         self.checkfirst = checkfirst
-        self.tables = tables and util.Set(tables) or None
+        self.tables = tables and set(tables) or None
         self.preparer = dialect.identifier_preparer
         self.dialect = dialect
 
index 2237391010f2ed01208b64f943db233afefa7e91..fb989dee05e732edde59742876a92b4e2e3d2289 100644 (file)
@@ -867,7 +867,7 @@ def _cloned_intersection(a, b):
     The returned set is in terms of the enties present within 'a'.
     
     """
-    all_overlap = util.Set(_expand_cloned(a)).intersection(_expand_cloned(b))
+    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
     return a.intersection(
         [
             elem for elem in a if all_overlap.intersection(elem._cloned_set)
@@ -1501,14 +1501,14 @@ class ColumnElement(ClauseElement, _CompareMixin):
     
     def base_columns(self):
         if not hasattr(self, '_base_columns'):
-            self._base_columns = util.Set(c for c in self.proxy_set
-                                          if not hasattr(c, 'proxies'))
+            self._base_columns = set(c for c in self.proxy_set
+                                     if not hasattr(c, 'proxies'))
         return self._base_columns
     base_columns = property(base_columns)
 
     def proxy_set(self):
         if not hasattr(self, '_proxy_set'):
-            s = util.Set([self])
+            s = set([self])
             if hasattr(self, 'proxies'):
                 for c in self.proxies:
                     s.update(c.proxy_set)
@@ -1637,7 +1637,7 @@ class ColumnCollection(util.OrderedProperties):
         # have to use a Set here, because it will compare the identity
         # of the column, not just using "==" for comparison which will always return a
         # "True" value (i.e. a BinaryClause...)
-        return col in util.Set(self)
+        return col in set(self)
 
 class ColumnSet(util.OrderedSet):
     def contains_column(self, col):
@@ -1712,7 +1712,7 @@ class FromClause(Selectable):
 
         An example would be an Alias of a Table is derived from that Table.
         """
-        return fromclause in util.Set(self._cloned_set)
+        return fromclause in set(self._cloned_set)
 
     def replace_selectable(self, old, alias):
         """replace all occurences of FromClause 'old' with the given Alias object, returning a copy of this ``FromClause``."""
@@ -1805,7 +1805,7 @@ class FromClause(Selectable):
             return
         self._columns = ColumnCollection()
         self._primary_key = ColumnSet()
-        self._foreign_keys = util.Set()
+        self._foreign_keys = set()
         self._oid_column = None
         self._populate_column_collection()
 
@@ -2422,7 +2422,7 @@ class Alias(FromClause):
     description = property(description)
 
     def is_derived_from(self, fromclause):
-        if fromclause in util.Set(self._cloned_set):
+        if fromclause in set(self._cloned_set):
             return True
         return self.element.is_derived_from(fromclause)
 
@@ -2681,7 +2681,7 @@ class TableClause(_Immutable, FromClause):
         self._oid_column = _ColumnClause('oid', self, _is_oid=True)
         self._columns = ColumnCollection()
         self._primary_key = ColumnSet()
-        self._foreign_keys = util.Set()
+        self._foreign_keys = set()
         for c in columns:
             self.append_column(c)
         
@@ -2963,7 +2963,7 @@ class Select(_SelectBaseMixin, FromClause):
         self._should_correlate = correlate
         self._distinct = distinct
 
-        self._correlate = util.Set()
+        self._correlate = set()
         self._froms = util.OrderedSet()
 
         if columns:
@@ -3059,7 +3059,7 @@ class Select(_SelectBaseMixin, FromClause):
     inner_columns = property(inner_columns)
 
     def is_derived_from(self, fromclause):
-        if self in util.Set(fromclause._cloned_set):
+        if self in set(fromclause._cloned_set):
             return True
         
         for f in self.locate_all_froms():
@@ -3071,8 +3071,8 @@ class Select(_SelectBaseMixin, FromClause):
         self._reset_exported()
         from_cloned = dict((f, clone(f))
                            for f in self._froms.union(self._correlate))
-        self._froms = util.Set(from_cloned[f] for f in self._froms)
-        self._correlate = util.Set(from_cloned[f] for f in self._correlate)
+        self._froms = set(from_cloned[f] for f in self._froms)
+        self._correlate = set(from_cloned[f] for f in self._correlate)
         self._raw_columns = [clone(c) for c in self._raw_columns]
         for attr in ('_whereclause', '_having', '_order_by_clause', '_group_by_clause'):
             if getattr(self, attr) is not None:
@@ -3168,7 +3168,7 @@ class Select(_SelectBaseMixin, FromClause):
         s = self._generate()
         s._should_correlate = False
         if fromclauses == (None,):
-            s._correlate = util.Set()
+            s._correlate = set()
         else:
             s._correlate = s._correlate.union(fromclauses)
         return s
index 37070a451958c2750a25df292f75f000f366953f..cc96f2e64ee9f29f0d6e6db3477e6124d5bad0f0 100644 (file)
@@ -5,7 +5,7 @@
 
 from operator import and_, or_, inv, add, mul, sub, div, mod, truediv, \
      lt, le, ne, gt, ge, eq
-from sqlalchemy.util import Set, symbol
+from sqlalchemy.util import symbol
 
 def from_():
     raise NotImplementedError()
@@ -76,7 +76,7 @@ def desc_op(a):
 def asc_op(a):
     return a.asc()
 
-_commutative = Set([eq, ne, add, mul])
+_commutative = set([eq, ne, add, mul])
 def is_commutative(op):
     return op in _commutative
 
index d7a0d0502af116852f2f906372acd1318b3e3c14..c097443937a42fb59171e564d862836c258da056 100644 (file)
@@ -64,7 +64,7 @@ def find_tables(clause, check_columns=False, include_aliases=False, include_join
 def find_columns(clause):
     """locate Column objects within the given expression."""
     
-    cols = util.Set()
+    cols = set()
     def visit_column(col):
         cols.add(col)
     visitors.traverse(clause, {}, {'column':visit_column})
@@ -82,7 +82,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False):
     
     """
     crit = []
-    constraints = util.Set()
+    constraints = set()
     for fk in b.foreign_keys:
         try:
             col = fk.get_referent(a)
@@ -212,7 +212,7 @@ def reduce_columns(columns, *clauses):
 
     columns = util.OrderedSet(columns)
 
-    omit = util.Set()
+    omit = set()
     for col in columns:
         for fk in col.foreign_keys:
             for c in columns:
@@ -225,7 +225,7 @@ def reduce_columns(columns, *clauses):
     if clauses:
         def visit_binary(binary):
             if binary.operator == operators.eq:
-                cols = util.Set(chain(*[c.proxy_set for c in columns.difference(omit)]))
+                cols = set(chain(*[c.proxy_set for c in columns.difference(omit)]))
                 if binary.left in cols and binary.right in cols:
                     for c in columns:
                         if c.shares_lineage(binary.right):
@@ -279,7 +279,7 @@ def folded_equivalents(join, equivs=None):
 
     """
     if equivs is None:
-        equivs = util.Set()
+        equivs = set()
     def visit_binary(binary):
         if binary.operator == operators.eq and binary.left.name == binary.right.name:
             equivs.add(binary.right)
@@ -294,7 +294,7 @@ def folded_equivalents(join, equivs=None):
         right = folded_equivalents(join.right, equivs)
     else:
         right = list(join.right.columns)
-    used = util.Set()
+    used = set()
     for c in left + right:
         if c in equivs:
             if c.name not in used:
index 738dae9c7e1ee7f3b1b1204da598671467a401cd..2106522d2e8bd0345f833d88efafd5e48d1e2651 100644 (file)
@@ -154,7 +154,7 @@ def cloned_traverse(obj, opts, visitors):
 
 def replacement_traverse(obj, opts, replace):
     cloned = {}
-    stop_on = util.Set(opts.get('stop_on', []))
+    stop_on = set(opts.get('stop_on', []))
 
     def clone(element):
         newelem = replace(element)
index c4c610b354186b86b591df397ff5f39ff116402a..bfcfc9c0ad3e6f48d8995159135802085cbd746f 100644 (file)
@@ -64,7 +64,7 @@ class _Node(object):
 
     def __init__(self, item):
         self.item = item
-        self.dependencies = util.Set()
+        self.dependencies = set()
         self.children = []
         self.cycles = None
 
@@ -84,7 +84,7 @@ class _Node(object):
     def all_deps(self):
         """Return a set of dependencies for this node and all its cycles."""
 
-        deps = util.Set(self.dependencies)
+        deps = set(self.dependencies)
         if self.cycles is not None:
             for c in self.cycles:
                 deps.update(c.dependencies)
@@ -102,10 +102,10 @@ class _EdgeCollection(object):
 
         (parentnode, childnode) = edge
         if parentnode not in self.parent_to_children:
-            self.parent_to_children[parentnode] = util.Set()
+            self.parent_to_children[parentnode] = set()
         self.parent_to_children[parentnode].add(childnode)
         if childnode not in self.child_to_parents:
-            self.child_to_parents[childnode] = util.Set()
+            self.child_to_parents[childnode] = set()
         self.child_to_parents[childnode].add(parentnode)
         parentnode.dependencies.add(childnode)
 
@@ -176,7 +176,7 @@ def _sort(tuples, allitems, allow_cycles=False, ignore_self_cycles=False):
         if t[0] is t[1]:
             if allow_cycles:
                 n = nodes[t[0]]
-                n.cycles = util.Set([n])
+                n.cycles = set([n])
             elif not ignore_self_cycles:
                 raise CircularDependencyError("Self-referential dependency detected " + repr(t))
             continue
@@ -197,7 +197,7 @@ def _sort(tuples, allitems, allow_cycles=False, ignore_self_cycles=False):
             if allow_cycles:
                 for cycle in _find_cycles(edges):
                     lead = cycle[0][0]
-                    lead.cycles = util.Set()
+                    lead.cycles = set()
                     for edge in cycle:
                         n = edges.remove(edge)
                         lead.cycles.add(edge[0])
@@ -239,11 +239,11 @@ def _organize_as_tree(nodes):
     # in reverse topological order
     for node in util.reversed(nodes):
         # nodes subtree and cycles contain the node itself
-        subtree = util.Set([node])
+        subtree = set([node])
         if node.cycles is not None:
-            cycles = util.Set(node.cycles)
+            cycles = set(node.cycles)
         else:
-            cycles = util.Set()
+            cycles = set()
         # get a set of dependent nodes of node and its cycles
         nodealldeps = node.all_deps()
         if nodealldeps:
@@ -270,7 +270,7 @@ def _organize_as_tree(nodes):
     return (head.item, [n.item for n in head.cycles or []], head.children)
 
 def _find_cycles(edges):
-    involved_in_cycles = util.Set()
+    involved_in_cycles = set()
     cycles = {}
     def traverse(node, goal=None, cycle=None):
         if goal is None:
@@ -284,7 +284,7 @@ def _find_cycles(edges):
                 continue
             cycle.append(key)
             if traverse(key, goal, cycle):
-                cycset = util.Set(cycle)
+                cycset = set(cycle)
                 for x in cycle:
                     involved_in_cycles.add(x)
                     if x in cycles:
index 7a6b964aadb4cb1063252af54b5e2866c00f17f0..a100d931d3afd8ce8db1a53b45eee41534ca59eb 100644 (file)
@@ -16,62 +16,11 @@ except ImportError:
     import dummy_thread as thread
     import dummy_threading as threading
 
-try:
-    Set = set
-    FrozenSet = frozenset
-    set_types = set, sets.Set
-except NameError:
-    set_types = sets.Set,
-
-    def py24_style_ops():
-        """Layer some of __builtin__.set's binop behavior onto sets.Set."""
+# TODO: 2.6 will whine about importing `sets`, but I think we still need it to
+# around to support older DB-API modules that return the 2.3 style set.
+set_types = set, sets.Set
 
-        def _binary_sanity_check(self, other):
-            pass
-        def issubset(self, iterable):
-            other = type(self)(iterable)
-            return sets.Set.issubset(self, other)
-        def __le__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__le__(self, other)
-        def issuperset(self, iterable):
-            other = type(self)(iterable)
-            return sets.Set.issuperset(self, other)
-        def __ge__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__ge__(self, other)
-        # lt and gt still require a BaseSet
-        def __lt__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__lt__(self, other)
-        def __gt__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__gt__(self, other)
-
-        def __ior__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__ior__(self, other)
-        def __iand__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__iand__(self, other)
-        def __ixor__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__ixor__(self, other)
-        def __isub__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__isub__(self, other)
-        return locals()
-
-    py24_style_ops = py24_style_ops()
-    Set = type('Set', (sets.Set,), py24_style_ops)
-    FrozenSet = type('FrozenSet', (sets.ImmutableSet,), py24_style_ops)
-    del py24_style_ops
-
-EMPTY_SET = FrozenSet()
+EMPTY_SET = frozenset()
 
 try:
     import cPickle as pickle
@@ -233,9 +182,9 @@ def array_as_starargs_fn_decorator(fn):
 
 def to_set(x):
     if x is None:
-        return Set()
-    if not isinstance(x, Set):
-        return Set(to_list(x))
+        return set()
+    if not isinstance(x, set):
+        return set(to_list(x))
     else:
         return x
 
@@ -308,12 +257,12 @@ def get_cls_kwargs(cls):
 
     for c in cls.__mro__:
         if '__init__' in c.__dict__:
-            stack = Set([c])
+            stack = set([c])
             break
     else:
         return []
 
-    args = Set()
+    args = set()
     while stack:
         class_ = stack.pop()
         ctr = class_.__dict__.get('__init__', False)
@@ -437,7 +386,7 @@ def class_hierarchy(cls):
     class systemwide that derives from object.
 
     """
-    hier = Set([cls])
+    hier = set([cls])
     process = list(cls.__mro__)
     while process:
         c = process.pop()
@@ -482,10 +431,10 @@ def duck_type_collection(specimen, default=None):
     """
 
     if hasattr(specimen, '__emulates__'):
-        # canonicalize set vs sets.Set to a standard: util.Set
+        # canonicalize set vs sets.Set to a standard: the builtin set
         if (specimen.__emulates__ is not None and
             issubclass(specimen.__emulates__, set_types)):
-            return Set
+            return set
         else:
             return specimen.__emulates__
 
@@ -493,14 +442,14 @@ def duck_type_collection(specimen, default=None):
     if isa(specimen, list):
         return list
     elif isa(specimen, set_types):
-        return Set
+        return set
     elif isa(specimen, dict):
         return dict
 
     if hasattr(specimen, 'append'):
         return list
     elif hasattr(specimen, 'add'):
-        return Set
+        return set
     elif hasattr(specimen, 'set'):
         return dict
     else:
@@ -798,9 +747,9 @@ except ImportError:
             def __setattr__(self, key, value):
                 self._tdict[(thread.get_ident(), key)] = value
 
-class OrderedSet(Set):
+class OrderedSet(set):
     def __init__(self, d=None):
-        Set.__init__(self)
+        set.__init__(self)
         self._list = []
         if d is not None:
             self.update(d)
@@ -808,24 +757,24 @@ class OrderedSet(Set):
     def add(self, element):
         if element not in self:
             self._list.append(element)
-        Set.add(self, element)
+        set.add(self, element)
 
     def remove(self, element):
-        Set.remove(self, element)
+        set.remove(self, element)
         self._list.remove(element)
 
     def insert(self, pos, element):
         if element not in self:
             self._list.insert(pos, element)
-        Set.add(self, element)
+        set.add(self, element)
 
     def discard(self, element):
         if element in self:
             self._list.remove(element)
-            Set.remove(self, element)
+            set.remove(self, element)
 
     def clear(self):
-        Set.clear(self)
+        set.clear(self)
         self._list = []
 
     def __getitem__(self, key):
@@ -855,13 +804,13 @@ class OrderedSet(Set):
     __or__ = union
 
     def intersection(self, other):
-        other = Set(other)
+        other = set(other)
         return self.__class__(a for a in self if a in other)
 
     __and__ = intersection
 
     def symmetric_difference(self, other):
-        other = Set(other)
+        other = set(other)
         result = self.__class__(a for a in self if a not in other)
         result.update(a for a in other if a not in self)
         return result
@@ -869,21 +818,21 @@ class OrderedSet(Set):
     __xor__ = symmetric_difference
 
     def difference(self, other):
-        other = Set(other)
+        other = set(other)
         return self.__class__(a for a in self if a not in other)
 
     __sub__ = difference
 
     def intersection_update(self, other):
-        other = Set(other)
-        Set.intersection_update(self, other)
+        other = set(other)
+        set.intersection_update(self, other)
         self._list = [ a for a in self._list if a in other]
         return self
 
     __iand__ = intersection_update
 
     def symmetric_difference_update(self, other):
-        Set.symmetric_difference_update(self, other)
+        set.symmetric_difference_update(self, other)
         self._list =  [ a for a in self._list if a in self]
         self._list += [ a for a in other._list if a in self]
         return self
@@ -891,20 +840,12 @@ class OrderedSet(Set):
     __ixor__ = symmetric_difference_update
 
     def difference_update(self, other):
-        Set.difference_update(self, other)
+        set.difference_update(self, other)
         self._list = [ a for a in self._list if a in self]
         return self
 
     __isub__ = difference_update
 
-    if hasattr(Set, '__getstate__'):
-        def __getstate__(self):
-            base = Set.__getstate__(self)
-            return base, self._list
-
-        def __setstate__(self, state):
-            Set.__setstate__(self, state[0])
-            self._list = state[1]
 
 class IdentitySet(object):
     """A set that considers only object id() for uniqueness.
@@ -913,7 +854,7 @@ class IdentitySet(object):
     two 'foo' strings in one of these sets, for example.  Use sparingly.
     """
 
-    _working_set = Set
+    _working_set = set
 
     def __init__(self, iterable=None):
         self._members = _IterableUpdatableDict()
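
With the emulation layer gone, `OrderedSet` subclasses the builtin set directly and `IdentitySet` keeps a plain set as its working type. A brief hedged sketch of what distinguishes the two from an ordinary set:

    from sqlalchemy import util

    # OrderedSet: set semantics plus stable insertion order on iteration.
    o = util.OrderedSet([3, 1, 2, 1])
    assert list(o) == [3, 1, 2]

    # IdentitySet: membership is keyed on id(), so two equal but distinct
    # objects are both retained.
    a, b = [1, 2], [1, 2]
    i = util.IdentitySet([a, b])
    assert a == b and len(i) == 2
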
@@ -1211,7 +1152,7 @@ class WeakCompositeKey(object):
     until any one of its members is garbage collected.
 
     """
-    keys = Set()
+    keys = set()
 
     def __init__(self, *args):
         self.args = [self.__ref(arg) for arg in args]
@@ -1312,17 +1253,17 @@ def as_interface(obj, cls=None, methods=None, required=None):
     if isinstance(cls, type) and isinstance(obj, cls):
         return obj
 
-    interface = Set(methods or [m for m in dir(cls) if not m.startswith('_')])
-    implemented = Set(dir(obj))
+    interface = set(methods or [m for m in dir(cls) if not m.startswith('_')])
+    implemented = set(dir(obj))
 
     complies = operator.ge
     if isinstance(required, type):
         required = interface
     elif not required:
-        required = Set()
+        required = set()
         complies = operator.gt
     else:
-        required = Set(required)
+        required = set(required)
 
     if complies(implemented.intersection(interface), required):
         return obj
@@ -1338,7 +1279,7 @@ def as_interface(obj, cls=None, methods=None, required=None):
 
     if cls:
         AnonymousInterface.__name__ = 'Anonymous' + cls.__name__
-    found = Set()
+    found = set()
 
     for method, impl in dictlike_iteritems(obj):
         if method not in interface:
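
A hedged usage sketch for `as_interface` -- the `Events` and `MyListener` names are illustrative only. An object that already implements the interface passes straight through, while a dict of callables is wrapped in an anonymous implementation:

    from sqlalchemy import util

    class Events(object):
        def on_insert(self):
            pass
        def on_delete(self):
            pass

    class MyListener(object):
        def on_insert(self):
            pass
        def on_delete(self):
            pass

    # Implements every public method of Events, so it is returned unchanged.
    listener = util.as_interface(MyListener(), cls=Events)
    assert isinstance(listener, MyListener)

    # A dict of callables gets wrapped into an anonymous implementation.
    wrapped = util.as_interface({'on_insert': lambda: None},
                                cls=Events, required=['on_insert'])
    wrapped.on_insert()
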
index f891bc92e583af060fe6ab1d069e9c8a42b683b7..1318ef528a5e91aa9344596fb00c67ec16159537 100644 (file)
@@ -1,6 +1,5 @@
 import testenv; testenv.configure_for_tests()
 import sqlalchemy.topological as topological
-from sqlalchemy import util
 from testlib import TestBase
 
 
@@ -26,7 +25,7 @@ class DependencySortTest(TestBase):
 
         if collection is None:
             collection = []
-        items = util.Set()
+        items = set()
         def assert_unique(node):
             for item in [i for i in node[1] or [node[0]]]:
                 assert item not in items
index 8b44de84e102938b19effc65c518b269f454a317..3ce956a16d609bb3af7e9aea8c3f53c422dfc0f6 100644 (file)
@@ -3,7 +3,7 @@ import threading, unittest
 from sqlalchemy import util, sql, exc
 from testlib import TestBase
 from testlib.testing import eq_, is_, ne_
-from testlib.compat import frozenset, set, sorted
+
 
 class OrderedDictTest(TestBase):
     def test_odict(self):
@@ -351,16 +351,14 @@ class DuckTypeCollectionTest(TestBase):
 
         for type_ in (set,
                       sets.Set,
-                      util.Set,
                       SetLike,
                       ForcedSet):
-            eq_(util.duck_type_collection(type_), util.Set)
+            eq_(util.duck_type_collection(type_), set)
             instance = type_()
-            eq_(util.duck_type_collection(instance), util.Set)
+            eq_(util.duck_type_collection(instance), set)
 
         for type_ in (frozenset,
-                      sets.ImmutableSet,
-                      util.FrozenSet):
+                      sets.ImmutableSet):
             is_(util.duck_type_collection(type_), None)
             instance = type_()
             is_(util.duck_type_collection(instance), None)
index 259733027e110c76210b878ddc064ec0a2c25459..3f8d4fff7495729ac5dec6a555a835bad4e3e2b0 100644 (file)
@@ -3,7 +3,6 @@ import StringIO, unicodedata
 import sqlalchemy as sa
 from testlib.sa import MetaData, Table, Column
 from testlib import TestBase, ComparesTables, testing, engines, sa as tsa
-from testlib.compat import set
 
 
 metadata, users = None, None
@@ -588,12 +587,11 @@ class CreateDropTest(TestBase):
         self.assertEqual( testing.db.has_table('items'), False )
 
     def test_tablenames(self):
-        from sqlalchemy.util import Set
         metadata.create_all(bind=testing.db)
         # we only check to see if all the explicitly created tables are there, rather than
         # assertEqual -- the test db could have "extra" tables if there is a misconfigured
         # template.  (*cough* tsearch2 w/ the pg windows installer.)
-        self.assert_(not Set(metadata.tables) - Set(testing.db.table_names()))
+        self.assert_(not set(metadata.tables) - set(testing.db.table_names()))
         metadata.drop_all(bind=testing.db)
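
The subset check above deliberately tolerates extra tables in the target database; it only fails when one of the explicitly created tables is missing. The same idiom in isolation (table names illustrative):

    expected = set(['users', 'items'])
    actual = set(['users', 'items', 'tsearch2_leftover'])   # extras are fine
    assert not expected - actual    # empty difference: everything expected exists
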
 
 class SchemaManipulationTest(TestBase):
index 21947b1e654a39b7158fe0b895e2b768e97d38db..8ea3abd61a1f6e5e0ed6167504bdecbf372dad9d 100644 (file)
@@ -5,7 +5,6 @@ from testlib import sa, testing
 from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey, ForeignKeyConstraint
 from testlib.sa.orm import relation, create_session
 from testlib.testing import eq_
-from testlib.compat import set
 from orm._base import ComparableEntity
 
 
index de3bde3b4ad33456e12f64251a0f08a91d475103..4523a322332db4b6cfca6be4bc6e5a0934f2e7f4 100644 (file)
@@ -4,7 +4,7 @@ import sys
 import types
 from testlib import config, sa, testing
 from testlib.testing import resolve_artifact_names, adict
-from testlib.compat import set, sorted, _function_named
+from testlib.compat import _function_named
 
 
 _repr_stack = set()
index 6be6c7bd036b2552951dd9ac1c6ce1b9a9c7a267..77dd510b263875bd0ca13cb2e702eaa6e5fbd465 100644 (file)
@@ -1,6 +1,5 @@
 from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey
 from testlib.sa.orm import attributes
-from testlib.compat import set
 from testlib.testing import fixture
 from orm import _base
 
index a6303d1fc2e629058a4a7961d4941322006a25a0..fd0f3890923393c58591cbe03ade5d796c784ab8 100644 (file)
@@ -10,17 +10,9 @@ from testlib.sa import Table, Column, Integer, String, ForeignKey
 from testlib.sa import util, exc as sa_exc
 from testlib.sa.orm import create_session, mapper, relation, \
     attributes
-from testlib.compat import set, frozenset
 from orm import _base
 
 
-try:
-    py_set = set
-except NameError:
-    import sets
-    py_set = sets.Set
-
-
 class Canary(sa.orm.interfaces.AttributeExtension):
     def __init__(self):
         self.data = set()
@@ -749,7 +741,7 @@ class CollectionsTest(_base.ORMTest):
 
     def test_set_emulates(self):
         class SetIsh(object):
-            __emulates__ = py_set
+            __emulates__ = set
             def __init__(self):
                 self.data = set()
             def add(self, item):
index dd27092029fb4d0cd1b728f21aa51067ea2a0caf..6b3bf8c847bc9e7660aa967bdb4e2634c9b2128d 100644 (file)
@@ -3,7 +3,6 @@ from testlib import testing, sa
 from testlib.sa import Table, Column, Integer, String, ForeignKey, MetaData, func
 from sqlalchemy.orm import mapper, relation, create_session
 from testlib.testing import eq_
-from testlib.compat import set
 from orm import _base, _fixtures
 
 
index 61ff8f250feac2f278a17c0b6fa3165434064941..cc994acefe301ed9a398e600511a4b8bae789409 100644 (file)
@@ -6,7 +6,6 @@ from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey
 from testlib.sa.orm import mapper, relation, backref, create_session
 from testlib.sa.orm import defer, deferred, synonym, attributes
 from testlib.testing import eq_
-from testlib.compat import set
 import pickleable
 from orm import _base, _fixtures
 
index d3fd5db29adcec8ccea7752d104e1608379d5396..c9a5a71768a0167789a65c9689be1f80498b8700 100644 (file)
@@ -7,7 +7,6 @@ from testlib import sa, testing
 from testlib.sa import Table, Column, Integer, String, ForeignKey
 from testlib.sa.orm import mapper, relation, create_session
 from testlib.testing import eq_
-from testlib.compat import sorted
 from orm import _base
 
 class NaturalPKTest(_base.MappedTest):
index 54b6975c23c0e6729aa76be1fc7dc53a325ed411..87f674b9f695d6b410fc347dc0845c224f1cc315 100644 (file)
@@ -4,7 +4,6 @@ from testlib import sa, testing
 from testlib.sa import Table, Column, Integer, String, ForeignKey
 from testlib.sa.orm import mapper, relation, backref, create_session
 from testlib.testing import eq_, startswith_
-from testlib.compat import set
 from orm import _base
 
 
index 1e2b3c9dd6cd0acea6b35f5963fd4f2b6b81aa5f..df9536650b5e99dd58c1ca191d996e8168b94bfe 100644 (file)
@@ -7,7 +7,6 @@ from testlib import engines, sa, testing, config
 from testlib.sa import Table, Column, Integer, String
 from testlib.sa.orm import mapper, relation, backref
 from testlib.testing import eq_
-from testlib.compat import set
 from engine import _base as engine_base
 from orm import _base, _fixtures
 
index 371335b0792034a4a00af6b136f26fdd03a42186..90134d1428752959b469a2052635fe475411a64c 100644 (file)
@@ -10,7 +10,6 @@ from testlib import engines, sa, testing
 from testlib.sa import Table, Column, Integer, String, ForeignKey, literal_column
 from testlib.sa.orm import mapper, relation, create_session, column_property
 from testlib.testing import eq_, ne_
-from testlib.compat import set
 from orm import _base, _fixtures
 from engine import _base as engine_base
 import pickleable
index fbea5888eb4a63a0119bb4016bcab300cdc24337..dfd626b72ba518cf9841c7575cb0ab71585e73f1 100644 (file)
@@ -4,7 +4,6 @@ from sqlalchemy import Sequence, Column, func
 from testlib import sa, testing
 from testlib.sa import MetaData, Table, Integer, String, ForeignKey
 from testlib.testing import eq_
-from testlib.compat import set
 from sql import _base
 
 
index cb5406b754a2e7769862b36ac9896781feb6ff5f..f6b849e8a35b9384dd5af28b0836f704eb240a8f 100644 (file)
@@ -197,7 +197,7 @@ class ClauseTest(TestBase, AssertsCompiledSQL):
         assert c1 == str(clause)
         assert str(clause2) == c1 + " SOME MODIFIER=:lala"
         assert clause.bindparams.keys() == ['bar']
-        assert util.Set(clause2.bindparams.keys()) == util.Set(['bar', 'lala'])
+        assert set(clause2.bindparams.keys()) == set(['bar', 'lala'])
 
     def test_select(self):
         s2 = select([t1])
index d0850cf423086fe6bd70e3297743bec2c3de31d5..5dea60322b953d65714b35c469b8f9361f0dfdcb 100644 (file)
@@ -18,7 +18,7 @@ from testlib.orm import mapper
 import testlib.profiling as profiling
 import testlib.engines as engines
 import testlib.requires as requires
-from testlib.compat import set, frozenset, sorted, _function_named
+from testlib.compat import _function_named
 
 
 __all__ = ('testing',
@@ -28,7 +28,7 @@ __all__ = ('testing',
            'TestBase', 'AssertsExecutionResults', 'ORMTest',
            'AssertsCompiledSQL', 'ComparesTables',
            'profiling', 'engines',
-           'set', 'frozenset', 'sorted', '_function_named')
+           '_function_named')
 
 
 testing.requires = requires
index fcb7fa1e9256af355f03b2c4676f97d42e4e50ff..0b157e64a06d59b96ec9ff4906c68ae3bec8092b 100644 (file)
@@ -1,94 +1,7 @@
 import new
 
-__all__ = 'set', 'frozenset', 'sorted', '_function_named', 'deque', 'reversed'
+__all__ = '_function_named',
 
-try:
-    set = set
-except NameError:
-    import sets
-
-    # keep this in sync with sqlalchemy.util.Set
-    # can't just import it in testlib because of coverage, load order, etc.
-    class set(sets.Set):
-        def _binary_sanity_check(self, other):
-            pass
-
-        def issubset(self, iterable):
-            other = type(self)(iterable)
-            return sets.Set.issubset(self, other)
-        def __le__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__le__(self, other)
-        def issuperset(self, iterable):
-            other = type(self)(iterable)
-            return sets.Set.issuperset(self, other)
-        def __ge__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__ge__(self, other)
-
-        # lt and gt still require a BaseSet
-        def __lt__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__lt__(self, other)
-        def __gt__(self, other):
-            sets.Set._binary_sanity_check(self, other)
-            return sets.Set.__gt__(self, other)
-
-        def __ior__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__ior__(self, other)
-        def __iand__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__iand__(self, other)
-        def __ixor__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__ixor__(self, other)
-        def __isub__(self, other):
-            if not isinstance(other, sets.BaseSet):
-                return NotImplemented
-            return sets.Set.__isub__(self, other)
-
-try:
-    frozenset = frozenset
-except NameError:
-    import sets
-    from sets import ImmutableSet as frozenset
-
-try:
-    sorted = sorted
-except NameError:
-    def sorted(iterable, cmp=None):
-        l = list(iterable)
-        if cmp:
-            l.sort(cmp)
-        else:
-            l.sort()
-        return l
-
-try:
-    reversed = reversed
-except NameError:
-    def reversed(seq):
-        i = len(seq) - 1
-        while  i >= 0:
-            yield seq[i]
-            i -= 1
-        raise StopIteration()
-
-try:
-    from collections import deque
-except ImportError:
-    class deque(list):
-        def appendleft(self, x):
-            self.insert(0, x)
-        def popleft(self):
-            return self.pop(0)
-        def extendleft(self, iterable):
-            for x in reversed(list(iterable)):
-                self.insert(0, x)
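
The block deleted above is the standard Python 2.3 compatibility idiom: probe for a builtin and substitute a shim only when the probe fails. A minimal sketch of that pattern, redundant now that 2.4 is the floor:

    # On Python 2.4+ both probes succeed, so the fallbacks never run.
    try:
        set = set
    except NameError:                  # Python 2.3: no builtin set type
        import sets
        set = sets.Set                 # simplified; the removed shim also
                                       # relaxed the comparison operators

    try:
        from collections import deque
    except ImportError:                # Python 2.3: no collections.deque
        class deque(list):
            def popleft(self):
                return self.pop(0)
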
 
 def _function_named(fn, newname):
     try:
index 69f5f1865158c3dca0698a382c46b7cf454728b5..73ac80632e5b1174f6431866189ad99d5a0fb02d 100644 (file)
@@ -1,6 +1,7 @@
 import sys, types, weakref
+from collections import deque
 from testlib import config
-from testlib.compat import set, _function_named, deque
+from testlib.compat import _function_named
 
 class ConnectionKiller(object):
     def __init__(self):
index baaac4660b7ffbc8dcb936dc24387f652bbf4206..854cd7c5aef9de7735d58c1eb9b0b6a4f0d0d329 100644 (file)
@@ -1,7 +1,6 @@
 from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey
 from testlib.sa.orm import attributes
 from testlib.testing import ORMTest
-from testlib.compat import set
 
 
 __all__ = ['keywords', 'addresses', 'Base', 'Keyword', 'FixtureTest',
index d9664f52f2cfe105f13d4f66f83aec85b3bdc41f..b460102a615b6a4c16500da2a01fe294ca3d8f5e 100644 (file)
@@ -1,6 +1,5 @@
 import inspect, re
 from testlib import config, testing
-from testlib.compat import sorted
 
 sa = None
 orm = None
index e423b9904257bbdab69adca19ba47c3c438fc604..5a406a75687c4ab43b144b18abeee44e03d9ab17 100644 (file)
@@ -1,7 +1,7 @@
 """Profiling support for unit and performance tests."""
 
 import os, sys
-from testlib.compat import set, _function_named
+from testlib.compat import _function_named
 import testlib.config
 
 __all__ = 'profiled', 'function_call_count', 'conditional_call_count'
index e09899e0c4b01d756dd9b817840ebf0b7b965370..1c3b0f0bc839bff8cf6de97fca3c12e872c41b0d 100644 (file)
@@ -12,7 +12,7 @@ import warnings
 from cStringIO import StringIO
 
 import testlib.config as config
-from testlib.compat import set, _function_named, reversed
+from testlib.compat import _function_named
 
 # Delayed imports
 MetaData = None
index 9e48a202f032e7380db8700244a9da9f7a29c8bb..4c7635430d1e6bb70c539e6ab37830aca9c201c7 100644 (file)
@@ -1,7 +1,7 @@
-__all__ = ['Blog', 'Post', 'Topic', 'TopicAssociation', 'Comment']
-
 import datetime
-from testlib.compat import *
+
+
+__all__ = ['Blog', 'Post', 'Topic', 'TopicAssociation', 'Comment']
 
 class Blog(object):
     def __init__(self, owner=None):