git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
whitespace bonanza, contd
author Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 28 Jul 2012 21:05:50 +0000 (17:05 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 28 Jul 2012 21:05:50 +0000 (17:05 -0400)
156 files changed:
doc/build/builder/builders.py
doc/build/conf.py
doc/build/testdocs.py
examples/association/__init__.py
examples/association/basic_association.py
examples/association/dict_of_sets_with_default.py
examples/beaker_caching/__init__.py
examples/beaker_caching/advanced.py
examples/beaker_caching/caching_query.py
examples/beaker_caching/environment.py
examples/beaker_caching/fixture_data.py
examples/beaker_caching/helloworld.py
examples/beaker_caching/local_session_caching.py
examples/beaker_caching/model.py
examples/beaker_caching/relation_caching.py
examples/custom_attributes/custom_management.py
examples/dynamic_dict/__init__.py
examples/elementtree/__init__.py
examples/elementtree/optimized_al.py
examples/elementtree/pickle.py
examples/generic_associations/__init__.py
examples/generic_associations/discriminator_on_association.py
examples/generic_associations/table_per_association.py
examples/generic_associations/table_per_related.py
examples/graphs/directed_graph.py
examples/inheritance/concrete.py
examples/inheritance/joined.py
examples/inheritance/single.py
examples/nested_sets/nested_sets.py
examples/postgis/postgis.py
examples/sharding/__init__.py
examples/versioning/_lib.py
examples/versioning/history_meta.py
examples/versioning/test_versioning.py
ez_setup.py
lib/sqlalchemy/connectors/mxodbc.py
lib/sqlalchemy/connectors/pyodbc.py
lib/sqlalchemy/connectors/zxJDBC.py
lib/sqlalchemy/dialects/__init__.py
lib/sqlalchemy/dialects/access/base.py
lib/sqlalchemy/dialects/firebird/__init__.py
lib/sqlalchemy/dialects/informix/base.py
lib/sqlalchemy/dialects/maxdb/base.py
lib/sqlalchemy/dialects/mssql/__init__.py
lib/sqlalchemy/dialects/mssql/adodbapi.py
lib/sqlalchemy/dialects/mssql/pymssql.py
lib/sqlalchemy/dialects/mssql/pyodbc.py
lib/sqlalchemy/dialects/mssql/zxjdbc.py
lib/sqlalchemy/dialects/mysql/base.py
lib/sqlalchemy/dialects/mysql/mysqldb.py
lib/sqlalchemy/dialects/mysql/oursql.py
lib/sqlalchemy/dialects/mysql/pyodbc.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/postgresql/__init__.py
lib/sqlalchemy/dialects/sqlite/base.py
lib/sqlalchemy/dialects/sqlite/pysqlite.py
lib/sqlalchemy/dialects/sybase/pyodbc.py
lib/sqlalchemy/dialects/sybase/pysybase.py
lib/sqlalchemy/engine/__init__.py
lib/sqlalchemy/engine/ddl.py
lib/sqlalchemy/engine/strategies.py
lib/sqlalchemy/engine/threadlocal.py
lib/sqlalchemy/event.py
lib/sqlalchemy/log.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/exc.py
lib/sqlalchemy/orm/identity.py
lib/sqlalchemy/orm/scoping.py
lib/sqlalchemy/orm/sync.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/processors.py
lib/sqlalchemy/sql/functions.py
lib/sqlalchemy/sql/visitors.py
lib/sqlalchemy/util/queue.py
lib/sqlalchemy/util/topological.py
test/aaa_profiling/test_memusage.py
test/aaa_profiling/test_zoomark.py
test/base/test_dependency.py
test/base/test_events.py
test/base/test_except.py
test/base/test_utils.py
test/bootstrap/noseplugin.py
test/dialect/test_firebird.py
test/dialect/test_maxdb.py
test/dialect/test_mysql.py
test/dialect/test_postgresql.py
test/dialect/test_pyodbc.py
test/dialect/test_sqlite.py
test/engine/test_ddlevents.py
test/engine/test_parseconnect.py
test/engine/test_reconnect.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/ext/test_associationproxy.py
test/ext/test_compiler.py
test/ext/test_declarative.py
test/ext/test_declarative_mixin.py
test/ext/test_declarative_reflection.py
test/lib/__init__.py
test/lib/fixtures.py
test/lib/profiling.py
test/lib/requires.py
test/lib/testing.py
test/orm/_fixtures.py
test/orm/inheritance/test_abc_inheritance.py
test/orm/inheritance/test_assorted_poly.py
test/orm/inheritance/test_magazine.py
test/orm/inheritance/test_poly_persistence.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_relationship.py
test/orm/inheritance/test_single.py
test/orm/inheritance/test_with_poly.py
test/orm/test_assorted_eager.py
test/orm/test_backref_mutations.py
test/orm/test_cascade.py
test/orm/test_collection.py
test/orm/test_compile.py
test/orm/test_cycles.py
test/orm/test_default_strategies.py
test/orm/test_defaults.py
test/orm/test_descriptor.py
test/orm/test_dynamic.py
test/orm/test_events.py
test/orm/test_expire.py
test/orm/test_hasparent.py
test/orm/test_joins.py
test/orm/test_lazy_relations.py
test/orm/test_load_on_fks.py
test/orm/test_lockmode.py
test/orm/test_manytomany.py
test/orm/test_merge.py
test/orm/test_of_type.py
test/orm/test_pickled.py
test/orm/test_rel_fn.py
test/orm/test_relationships.py
test/orm/test_selectable.py
test/orm/test_session.py
test/orm/test_subquery_relations.py
test/orm/test_transaction.py
test/orm/test_unitofworkv2.py
test/orm/test_update_delete.py
test/orm/test_versioning.py
test/perf/stress_all.py
test/sql/test_constraints.py
test/sql/test_defaults.py
test/sql/test_functions.py
test/sql/test_generative.py
test/sql/test_labels.py
test/sql/test_metadata.py
test/sql/test_query.py
test/sql/test_quote.py
test/sql/test_rowcount.py
test/sql/test_types.py
test/sql/test_unicode.py
test/sql/test_update.py

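Every hunk below makes the same mechanical edit: trailing whitespace is stripped from lines that are otherwise unchanged. As a hedged sketch only (the tooling actually used for this commit is not recorded here), a sweep like this can be scripted in a few lines of Python; the directory roots and the .py-only filter are illustrative assumptions, not part of the commit:

    import os

    # Walk the tree and rewrite any .py file that contains trailing
    # spaces or tabs.  Note: rstrip() strips the newline too, so it is
    # re-appended; a missing final newline gets normalized as a side
    # effect.
    for root in ("doc", "examples", "lib", "test"):
        for dirpath, _dirnames, filenames in os.walk(root):
            for fname in filenames:
                if not fname.endswith(".py"):
                    continue
                path = os.path.join(dirpath, fname)
                with open(path) as f:
                    lines = f.readlines()
                cleaned = [line.rstrip() + "\n" for line in lines]
                if cleaned != lines:
                    with open(path, "w") as f:
                        f.writelines(cleaned)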
diff --git a/doc/build/builder/builders.py b/doc/build/builder/builders.py
index 66ccf8dd190f3f91c54a421653d2a94735799552..be684f03942a45e99b9d43f2944dfbb2ca54dc98 100644
@@ -25,7 +25,7 @@ class MakoBridge(TemplateBridge):
         builder.config.html_context['site_base'] = builder.config['site_base']
 
         self.lookup = TemplateLookup(directories=builder.config.templates_path,
-            #format_exceptions=True, 
+            #format_exceptions=True,
             imports=[
                 "from builder import util"
             ]
@@ -46,7 +46,7 @@ class MakoBridge(TemplateBridge):
 
         # RTD layout
         if rtd:
-            # add variables if not present, such 
+            # add variables if not present, such
             # as if local test of READTHEDOCS variable
             if 'MEDIA_URL' not in context:
                 context['MEDIA_URL'] = "http://media.readthedocs.org/"
@@ -107,14 +107,14 @@ class PyConWithSQLLexer(RegexLexer):
             'sqlpopup':[
                 (
                     r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
-                    bygroups(using(PythonConsoleLexer), Token.Sql.Popup), 
+                    bygroups(using(PythonConsoleLexer), Token.Sql.Popup),
                     "#pop"
                 )
             ],
             'opensqlpopup':[
                 (
                     r'.*?(?:{stop}\n*|$)',
-                    Token.Sql, 
+                    Token.Sql,
                     "#pop"
                 )
             ]
@@ -136,14 +136,14 @@ class PythonWithSQLLexer(RegexLexer):
             'sqlpopup':[
                 (
                     r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
-                    bygroups(using(PythonLexer), Token.Sql.Popup), 
+                    bygroups(using(PythonLexer), Token.Sql.Popup),
                     "#pop"
                 )
             ],
             'opensqlpopup':[
                 (
                     r'.*?(?:{stop}\n*|$)',
-                    Token.Sql, 
+                    Token.Sql,
                     "#pop"
                 )
             ]
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 1175028c9102df662d3142a4498f800e8f3c6fd8..9079232d512ac92ac2b1e796ce9d5ced37242461 100644
@@ -70,7 +70,7 @@ release_date = "unreleased"
 
 site_base = "http://www.sqlalchemy.org"
 
-# arbitrary number recognized by builders.py, incrementing this 
+# arbitrary number recognized by builders.py, incrementing this
 # will force a rebuild
 build_number = 3
 
diff --git a/doc/build/testdocs.py b/doc/build/testdocs.py
index a07bcd77abf9dd22745d30098d8fe3a6c56e8782..815aa86694ba58dee859365426cc2d58d64549c0 100644
@@ -20,8 +20,8 @@ handler.setFormatter(logging.Formatter('%(message)s'))
 rootlogger.addHandler(handler)
 
 
-def teststring(s, name, globs=None, verbose=None, report=True, 
-               optionflags=0, extraglobs=None, raise_on_error=False, 
+def teststring(s, name, globs=None, verbose=None, report=True,
+               optionflags=0, extraglobs=None, raise_on_error=False,
                parser=doctest.DocTestParser()):
 
     from doctest import DebugRunner, DocTestRunner, master
diff --git a/examples/association/__init__.py b/examples/association/__init__.py
index 12d2ea6975c29d71f79b26d7616ee5415e1f70b1..df736f4fbd262fc68827c08a83fcae795bedef83 100644
@@ -6,7 +6,7 @@ classes that are associated in a many-to-many pattern.
 
 This directory includes the following examples:
 
-* basic_association.py - illustrate a many-to-many relationship between an 
+* basic_association.py - illustrate a many-to-many relationship between an
   "Order" and a collection of "Item" objects, associating a purchase price
   with each via an association object called "OrderItem"
 * proxied_association.py - same example as basic_association, adding in
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index cd86aa504fafdb3651b7fed625d4286fac616068..29a473fced33a35c455f01b8dfa24fb460386759 100644
@@ -3,7 +3,7 @@
 The association object pattern is a form of many-to-many which
 associates additional data with each association between parent/child.
 
-The example illustrates an "order", referencing a collection 
+The example illustrates an "order", referencing a collection
 of "items", with a particular price paid associated with each "item".
 
 """
@@ -83,7 +83,7 @@ if __name__ == '__main__':
 
     # query the order, print items
     order = session.query(Order).filter_by(customer_name='john smith').one()
-    print [(order_item.item.description, order_item.price) 
+    print [(order_item.item.description, order_item.price)
            for order_item in order.order_items]
 
     # print customers who bought 'MySQL Crowbar' on sale
diff --git a/examples/association/dict_of_sets_with_default.py b/examples/association/dict_of_sets_with_default.py
index 0720fdab9987b67496e1f38e2ffd941cca7bc0b4..63c0f45318fc3ecb0aa78c21d6dce68e1c5846c6 100644
@@ -44,7 +44,7 @@ class B(Base):
     key = Column(String)
 
     values = association_proxy("elements", "value")
-    """Bridge the association from 'elements' over to the 
+    """Bridge the association from 'elements' over to the
     'value' element of C."""
 
     def __init__(self, key, values=None):
diff --git a/examples/beaker_caching/__init__.py b/examples/beaker_caching/__init__.py
index cc9f71d8b8c0a76c4cb2c7e9b53510660acf157c..7e7b6279174829d1ea54a8e5b275535de0cd8396 100644
@@ -1,18 +1,18 @@
 """
 Illustrates how to embed Beaker cache functionality within
 the Query object, allowing full cache control as well as the
-ability to pull "lazy loaded" attributes from long term cache 
+ability to pull "lazy loaded" attributes from long term cache
 as well.
 
 In this demo, the following techniques are illustrated:
 
 * Using custom subclasses of Query
-* Basic technique of circumventing Query to pull from a 
+* Basic technique of circumventing Query to pull from a
   custom cache source instead of the database.
 * Rudimental caching with Beaker, using "regions" which allow
   global control over a fixed set of configurations.
-* Using custom MapperOption objects to configure options on 
-  a Query, including the ability to invoke the options 
+* Using custom MapperOption objects to configure options on
+  a Query, including the ability to invoke the options
   deep within an object graph when lazy loads occur.
 
 E.g.::
@@ -49,10 +49,10 @@ The demo scripts themselves, in order of complexity, are run as follows::
 Listing of files:
 
     environment.py - Establish the Session, the Beaker cache
-    manager, data / cache file paths, and configurations, 
+    manager, data / cache file paths, and configurations,
     bootstrap fixture data if necessary.
 
-    caching_query.py - Represent functions and classes 
+    caching_query.py - Represent functions and classes
     which allow the usage of Beaker caching with SQLAlchemy.
     Introduces a query option called FromCache.
 
diff --git a/examples/beaker_caching/advanced.py b/examples/beaker_caching/advanced.py
index c16e02f33a0300cc08e0ab310792713a1d5f2795..31beeff6f9833fb296965face7c37a3a50d972ab 100644
@@ -1,6 +1,6 @@
 """advanced.py
 
-Illustrate usage of Query combined with the FromCache option, 
+Illustrate usage of Query combined with the FromCache option,
 including front-end loading, cache invalidation, namespace techniques
 and collection caching.
 
@@ -17,12 +17,12 @@ def load_name_range(start, end, invalidate=False):
     start/end are integers, range is then
     "person <start>" - "person <end>".
 
-    The cache option we set up is called "name_range", indicating 
+    The cache option we set up is called "name_range", indicating
     a range of names for the Person class.
 
     The `Person.addresses` collections are also cached.  Its basically
     another level of tuning here, as that particular cache option
-    can be transparently replaced with joinedload(Person.addresses). 
+    can be transparently replaced with joinedload(Person.addresses).
     The effect is that each Person and his/her Address collection
     is cached either together or separately, affecting the kind of
     SQL that emits for unloaded Person objects as well as the distribution
@@ -63,13 +63,13 @@ print ", ".join([p.name for p in load_name_range(2, 12)])
 print "\ntwenty five through forty, invalidate first:\n"
 print ", ".join([p.name for p in load_name_range(25, 40, True)])
 
-# illustrate the address loading from either cache/already 
+# illustrate the address loading from either cache/already
 # on the Person
 print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache"
 for p in load_name_range(2, 12):
     print p.format_full()
 
-# illustrate the address loading from either cache/already 
+# illustrate the address loading from either cache/already
 # on the Person
 print "\n\nPeople plus addresses, two through twelve, addresses from cache"
 for p in load_name_range(2, 12):
diff --git a/examples/beaker_caching/caching_query.py b/examples/beaker_caching/caching_query.py
index a6a1261113a974d83f45ecc78c89e42c8c5e0fa1..ae0c9c903d600f45baf28c4d9403ce32509c3052 100644
@@ -11,7 +11,7 @@ The three new concepts introduced here are:
    parameters on a Query
  * RelationshipCache - a variant of FromCache which is specific
    to a query invoked during a lazy load.
- * _params_from_query - extracts value parameters from 
+ * _params_from_query - extracts value parameters from
    a Query.
 
 The rest of what's here are standard SQLAlchemy and
@@ -23,30 +23,30 @@ from sqlalchemy.orm.query import Query
 from sqlalchemy.sql import visitors
 
 class CachingQuery(Query):
-    """A Query subclass which optionally loads full results from a Beaker 
+    """A Query subclass which optionally loads full results from a Beaker
     cache region.
 
     The CachingQuery stores additional state that allows it to consult
     a Beaker cache before accessing the database:
 
-    * A "region", which is a cache region argument passed to a 
+    * A "region", which is a cache region argument passed to a
       Beaker CacheManager, specifies a particular cache configuration
       (including backend implementation, expiration times, etc.)
     * A "namespace", which is a qualifying name that identifies a
-      group of keys within the cache.  A query that filters on a name 
-      might use the name "by_name", a query that filters on a date range 
+      group of keys within the cache.  A query that filters on a name
+      might use the name "by_name", a query that filters on a date range
       to a joined table might use the name "related_date_range".
 
     When the above state is present, a Beaker cache is retrieved.
 
-    The "namespace" name is first concatenated with 
-    a string composed of the individual entities and columns the Query 
+    The "namespace" name is first concatenated with
+    a string composed of the individual entities and columns the Query
     requests, i.e. such as ``Query(User.id, User.name)``.
 
     The Beaker cache is then loaded from the cache manager based
     on the region and composed namespace.  The key within the cache
     itself is then constructed against the bind parameters specified
-    by this query, which are usually literals defined in the 
+    by this query, which are usually literals defined in the
     WHERE clause.
 
     The FromCache and RelationshipCache mapper options below represent
@@ -137,7 +137,7 @@ def _get_cache_parameters(query):
     return cache, cache_key
 
 def _namespace_from_query(namespace, query):
-    # cache namespace - the token handed in by the 
+    # cache namespace - the token handed in by the
     # option + class we're querying against
     namespace = " ".join([namespace] + [str(x) for x in query._entities])
 
@@ -151,7 +151,7 @@ def _set_cache_parameters(query, region, namespace, cache_key):
     if hasattr(query, '_cache_parameters'):
         region, namespace, cache_key = query._cache_parameters
         raise ValueError("This query is already configured "
-                        "for region %r namespace %r" % 
+                        "for region %r namespace %r" %
                         (region, namespace)
                     )
     query._cache_parameters = region, namespace, cache_key
@@ -171,10 +171,10 @@ class FromCache(MapperOption):
         be a name uniquely describing the target Query's
         lexical structure.
 
-        :param cache_key: optional.  A string cache key 
+        :param cache_key: optional.  A string cache key
         that will serve as the key to the query.   Use this
         if your query has a huge amount of parameters (such
-        as when using in_()) which correspond more simply to 
+        as when using in_()) which correspond more simply to
         some other identifier.
 
         """
@@ -188,7 +188,7 @@ class FromCache(MapperOption):
         _set_cache_parameters(query, self.region, self.namespace, self.cache_key)
 
 class RelationshipCache(MapperOption):
-    """Specifies that a Query as called within a "lazy load" 
+    """Specifies that a Query as called within a "lazy load"
        should load results from a cache."""
 
     propagate_to_loaders = True
@@ -228,9 +228,9 @@ class RelationshipCache(MapperOption):
                 if (cls, key) in self._relationship_options:
                     relationship_option = self._relationship_options[(cls, key)]
                     _set_cache_parameters(
-                            query, 
-                            relationship_option.region, 
-                            relationship_option.namespace, 
+                            query,
+                            relationship_option.region,
+                            relationship_option.namespace,
                             None)
 
     def and_(self, option):
diff --git a/examples/beaker_caching/environment.py b/examples/beaker_caching/environment.py
index 740c5977acd28bfbe147168bbfbe526cc4ec5949..ccc625117d94179afae436e1109a57a93a1588a5 100644
@@ -1,6 +1,6 @@
 """environment.py
 
-Establish data / cache file paths, and configurations, 
+Establish data / cache file paths, and configurations,
 bootstrap fixture data if necessary.
 
 """
diff --git a/examples/beaker_caching/fixture_data.py b/examples/beaker_caching/fixture_data.py
index 09f020cea8be0b241ade7f6534482047f787175c..b77bbcb954927e7e9660fc118a25e9da99167036 100644
@@ -37,7 +37,7 @@ def install():
         person = Person(
                     "person %.2d" % i,
                     Address(
-                        street="street %.2d" % i, 
+                        street="street %.2d" % i,
                         postal_code=all_post_codes[random.randint(0, len(all_post_codes) - 1)]
                     )
                 )
diff --git a/examples/beaker_caching/helloworld.py b/examples/beaker_caching/helloworld.py
index f64fcdd2e4a4ee3b7103322f24cf0233eea7dda8..6f696c502b5090f16188fee9309c0f3b368bb339 100644
@@ -15,12 +15,12 @@ people = Session.query(Person).options(FromCache("default", "all_people")).all()
 # remove the Session.  next query starts from scratch.
 Session.remove()
 
-# load again, using the same FromCache option. now they're cached 
+# load again, using the same FromCache option. now they're cached
 # under "all_people", no SQL is emitted.
 print "loading people....again!"
 people = Session.query(Person).options(FromCache("default", "all_people")).all()
 
-# want to load on some different kind of query ?  change the namespace 
+# want to load on some different kind of query ?  change the namespace
 # you send to FromCache
 print "loading people two through twelve"
 people_two_through_twelve = Session.query(Person).\
@@ -30,7 +30,7 @@ people_two_through_twelve = Session.query(Person).\
 
 # the data is cached under the "namespace" you send to FromCache, *plus*
 # the bind parameters of the query.    So this query, having
-# different literal parameters under "Person.name.between()" than the 
+# different literal parameters under "Person.name.between()" than the
 # previous one, issues new SQL...
 print "loading people five through fifteen"
 people_five_through_fifteen = Session.query(Person).\
@@ -48,8 +48,8 @@ people_two_through_twelve = Session.query(Person).\
 
 
 # invalidate the cache for the three queries we've done.  Recreate
-# each Query, which includes at the very least the same FromCache, 
-# same list of objects to be loaded, and the same parameters in the 
+# each Query, which includes at the very least the same FromCache,
+# same list of objects to be loaded, and the same parameters in the
 # same order, then call invalidate().
 print "invalidating everything"
 Session.query(Person).options(FromCache("default", "all_people")).invalidate()
diff --git a/examples/beaker_caching/local_session_caching.py b/examples/beaker_caching/local_session_caching.py
index b63858362666547a069067f8cad0ff98a76bb1cd..2d803557863a5803ef75e21eaf166dc164b17347 100644
@@ -12,7 +12,7 @@ from beaker import cache, container
 import collections
 
 class ScopedSessionNamespace(container.MemoryNamespaceManager):
-    """A Beaker cache type which will cache objects locally on 
+    """A Beaker cache type which will cache objects locally on
     the current session.
 
     When used with the query_cache system, the effect is that the objects
@@ -86,10 +86,10 @@ if __name__ == '__main__':
 
     # identity is preserved - person10 is the *same* object that's
     # ultimately inside the cache.   So it is safe to manipulate
-    # the not-queried-for attributes of objects when using such a 
-    # cache without the need to invalidate - however, any change 
-    # that would change the results of a cached query, such as 
-    # inserts, deletes, or modification to attributes that are 
+    # the not-queried-for attributes of objects when using such a
+    # cache without the need to invalidate - however, any change
+    # that would change the results of a cached query, such as
+    # inserts, deletes, or modification to attributes that are
     # part of query criterion, still require careful invalidation.
     from caching_query import _get_cache_parameters
     cache, key = _get_cache_parameters(q)
diff --git a/examples/beaker_caching/model.py b/examples/beaker_caching/model.py
index 629b263a795702deb22dd9ba7a7486c980d57d95..a6733962db1a91b5535e645035159e4dee6837b4 100644
@@ -1,5 +1,5 @@
 """Model.   We are modeling Person objects with a collection
-of Address objects.  Each Address has a PostalCode, which 
+of Address objects.  Each Address has a PostalCode, which
 in turn references a City and then a Country:
 
 Person --(1..n)--> Address
@@ -70,7 +70,7 @@ class Address(Base):
     def __str__(self):
         return "%s\t"\
               "%s, %s\t"\
-              "%s" % (self.street, self.city.name, 
+              "%s" % (self.street, self.city.name,
                 self.postal_code.code, self.country.name)
 
 class Person(Base):
diff --git a/examples/beaker_caching/relation_caching.py b/examples/beaker_caching/relation_caching.py
index 1691b071b64ff5dbcfc3b1f7ed908225840753ea..f1e5c7886f16f0f0401b5704846331e7497ab4f8 100644
@@ -1,7 +1,7 @@
 """relationship_caching.py
 
-Load a set of Person and Address objects, specifying that 
-related PostalCode, City, Country objects should be pulled from long 
+Load a set of Person and Address objects, specifying that
+related PostalCode, City, Country objects should be pulled from long
 term cache.
 
 """
diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py
index 5ab2236e733f6cec3ec96c9c154fe5f01661aae6..12b745db68b98d42e9e38f61d196f76501030a8d 100644
@@ -1,10 +1,10 @@
 """Illustrates customized class instrumentation, using
 the :mod:`sqlalchemy.ext.instrumentation` extension package.
 
-In this example, mapped classes are modified to 
+In this example, mapped classes are modified to
 store their state in a dictionary attached to an attribute
 named "_goofy_dict", instead of using __dict__.
-this example illustrates how to replace SQLAlchemy's class 
+this example illustrates how to replace SQLAlchemy's class
 descriptors with a user-defined system.
 
 
@@ -66,12 +66,12 @@ class MyClass(object):
 if __name__ == '__main__':
     meta = MetaData(create_engine('sqlite://'))
 
-    table1 = Table('table1', meta, 
-                    Column('id', Integer, primary_key=True), 
+    table1 = Table('table1', meta,
+                    Column('id', Integer, primary_key=True),
                     Column('name', Text))
-    table2 = Table('table2', meta, 
-                    Column('id', Integer, primary_key=True), 
-                    Column('name', Text), 
+    table2 = Table('table2', meta,
+                    Column('id', Integer, primary_key=True),
+                    Column('name', Text),
                     Column('t1id', Integer, ForeignKey('table1.id')))
     meta.create_all()
 
diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py
index 69ac409522a8f33a57545ff2a2e1dccb324b59cd..3df907cc540663d857d0f932d63cfb811405eb05 100644
@@ -1,5 +1,5 @@
 """Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so
-that dictionary operations (assuming simple string keys) can operate upon a large 
+that dictionary operations (assuming simple string keys) can operate upon a large
 collection without loading the full collection at once.
 
 """
\ No newline at end of file
diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py
index 8d47f4aceae28cab2f6341cdfa2c10e1ee91dbe6..ee1e9e193a3204ab85217b53a66ca42d81b50b0c 100644
@@ -15,8 +15,8 @@ In order of complexity:
   represented in a separate table.  The nodes are associated in a hierarchy using an adjacency list
   structure.  A query function is introduced which can search for nodes along any path with a given
   structure of attributes, basically a (very narrow) subset of xpath.
-* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each 
-  DOM row with its owning document row, so that a full document of DOM nodes can be 
+* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each
+  DOM row with its owning document row, so that a full document of DOM nodes can be
   loaded using O(1) queries - the construction of the "hierarchy" is performed after
   the load in a non-recursive fashion and is much more efficient.
 
@@ -27,7 +27,7 @@ E.g.::
     session.add(Document(file, doc))
     session.commit()
 
-    # locate documents with a certain path/attribute structure 
+    # locate documents with a certain path/attribute structure
     for document in find_document('/somefile/header/field2[@attr=foo]'):
         # dump the XML
         print document
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 102f6c37391b601093e9630048b80b25e4ba0dce..1cec6136619ca464a9a66b93fc0ed24438174d67 100644
@@ -1,6 +1,6 @@
 """This script duplicates adjacency_list.py, but optimizes the loading
-of XML nodes to be based on a "flattened" datamodel. Any number of XML documents, 
-each of arbitrary complexity, can be loaded in their entirety via a single query 
+of XML nodes to be based on a "flattened" datamodel. Any number of XML documents,
+each of arbitrary complexity, can be loaded in their entirety via a single query
 which joins on only three tables.
 
 """
@@ -25,7 +25,7 @@ documents = Table('documents', meta,
     Column('filename', String(30), unique=True),
 )
 
-# stores XML nodes in an adjacency list model.  This corresponds to 
+# stores XML nodes in an adjacency list model.  This corresponds to
 # Element and SubElement objects.
 elements = Table('elements', meta,
     Column('element_id', Integer, primary_key=True),
@@ -61,15 +61,15 @@ class Document(object):
 
 ########################## PART IV - Persistence Mapping #####################
 
-# Node class.  a non-public class which will represent 
+# Node class.  a non-public class which will represent
 # the DB-persisted Element/SubElement object.  We cannot create mappers for
-# ElementTree elements directly because they are at the very least not new-style 
+# ElementTree elements directly because they are at the very least not new-style
 # classes, and also may be backed by native implementations.
 # so here we construct an adapter.
 class _Node(object):
     pass
 
-# Attribute class.  also internal, this will represent the key/value attributes stored for 
+# Attribute class.  also internal, this will represent the key/value attributes stored for
 # a particular Node.
 class _Attribute(object):
     def __init__(self, name, value):
diff --git a/examples/elementtree/pickle.py b/examples/elementtree/pickle.py
index 28bee4672a6237f03df3ad29b3c5451fd113510c..d40af275bdecaf3ac0586ce7ccda8b22bc770579 100644
@@ -1,6 +1,6 @@
 """illustrates a quick and dirty way to persist an XML document expressed using ElementTree and pickle.
 
-This is a trivial example using PickleType to marshal/unmarshal the ElementTree 
+This is a trivial example using PickleType to marshal/unmarshal the ElementTree
 document into a binary column.  Compare to explicit.py which stores the individual components of the ElementTree
 structure in distinct rows using two additional mapped entities.  Note that the usage of both
 styles of persistence are identical, as is the structure of the main Document class.
diff --git a/examples/generic_associations/__init__.py b/examples/generic_associations/__init__.py
index b166d9161073087b20f52c67d4ed29daeb725d56..36d50266e2431ef532e6b585d17cfcc0f4d0b1d4 100644
@@ -1,8 +1,8 @@
 """
-Illustrates various methods of associating multiple types of 
+Illustrates various methods of associating multiple types of
 parents with a particular child object.
 
-The examples all use the declarative extension along with 
+The examples all use the declarative extension along with
 declarative mixins.   Each one presents the identical use
 case at the end - two classes, ``Customer`` and ``Supplier``, both
 subclassing the ``HasAddresses`` mixin, which ensures that the
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index a73b4df1dcde22b53748fcece05becf94f672bf4..3c170d5c889cddb25de3c27d3f5109911f59e104 100644
@@ -12,8 +12,8 @@ that refers to a particular table is present, the extra association
 table is used so that traditional foreign key constraints may be used.
 
 This configuration has the advantage that a fixed set of tables
-are used, with no extra-table-per-parent needed.   The individual 
-Address record can also locate its parent with no need to scan 
+are used, with no extra-table-per-parent needed.   The individual
+Address record can also locate its parent with no need to scan
 amongst many tables.
 
 """
@@ -26,7 +26,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
 class Base(object):
     """Base class which provides automated table name
     and surrogate primary key column.
-    
+
     """
     @declared_attr
     def __tablename__(cls):
@@ -37,17 +37,17 @@ Base = declarative_base(cls=Base)
 class AddressAssociation(Base):
     """Associates a collection of Address objects
     with a particular parent.
-    
+
     """
     __tablename__ = "address_association"
 
     @classmethod
     def creator(cls, discriminator):
-        """Provide a 'creator' function to use with 
+        """Provide a 'creator' function to use with
         the association proxy."""
 
         return lambda addresses:AddressAssociation(
-                                addresses=addresses, 
+                                addresses=addresses,
                                 discriminator=discriminator)
 
     discriminator = Column(String)
@@ -59,37 +59,37 @@ class AddressAssociation(Base):
         return getattr(self, "%s_parent" % self.discriminator)
 
 class Address(Base):
-    """The Address class.   
-    
-    This represents all address records in a 
+    """The Address class.
+
+    This represents all address records in a
     single table.
-    
+
     """
-    association_id = Column(Integer, 
+    association_id = Column(Integer,
                         ForeignKey("address_association.id")
                     )
     street = Column(String)
     city = Column(String)
     zip = Column(String)
     association = relationship(
-                    "AddressAssociation", 
+                    "AddressAssociation",
                     backref="addresses")
 
     parent = association_proxy("association", "parent")
 
     def __repr__(self):
         return "%s(street=%r, city=%r, zip=%r)" % \
-            (self.__class__.__name__, self.street, 
+            (self.__class__.__name__, self.street,
             self.city, self.zip)
 
 class HasAddresses(object):
     """HasAddresses mixin, creates a relationship to
     the address_association table for each parent.
-    
+
     """
     @declared_attr
     def address_association_id(cls):
-        return Column(Integer, 
+        return Column(Integer,
                                 ForeignKey("address_association.id"))
 
     @declared_attr
@@ -99,8 +99,8 @@ class HasAddresses(object):
                     "address_association", "addresses",
                     creator=AddressAssociation.creator(discriminator)
                 )
-        return relationship("AddressAssociation", 
-                    backref=backref("%s_parent" % discriminator, 
+        return relationship("AddressAssociation",
+                    backref=backref("%s_parent" % discriminator,
                                         uselist=False))
 
 
@@ -117,7 +117,7 @@ session = Session(engine)
 
 session.add_all([
     Customer(
-        name='customer 1', 
+        name='customer 1',
         addresses=[
             Address(
                     street='123 anywhere street',
diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py
index 86ee212dc66427091e8175b1baf73d60cb1b5f5a..e1ff2be5bb73fd565f48c1d2e271bf88011fdd03 100644
@@ -6,7 +6,7 @@ for all parents.
 
 This configuration has the advantage that all Address
 rows are in one table, so that the definition of "Address"
-can be maintained in one place.   The association table 
+can be maintained in one place.   The association table
 contains the foreign key to Address so that Address
 has no dependency on the system.
 
@@ -20,7 +20,7 @@ from sqlalchemy.orm import Session, relationship
 class Base(object):
     """Base class which provides automated table name
     and surrogate primary key column.
-    
+
     """
     @declared_attr
     def __tablename__(cls):
@@ -29,11 +29,11 @@ class Base(object):
 Base = declarative_base(cls=Base)
 
 class Address(Base):
-    """The Address class.   
-    
-    This represents all address records in a 
+    """The Address class.
+
+    This represents all address records in a
     single table.
-    
+
     """
     street = Column(String)
     city = Column(String)
@@ -41,23 +41,23 @@ class Address(Base):
 
     def __repr__(self):
         return "%s(street=%r, city=%r, zip=%r)" % \
-            (self.__class__.__name__, self.street, 
+            (self.__class__.__name__, self.street,
             self.city, self.zip)
 
 class HasAddresses(object):
     """HasAddresses mixin, creates a new address_association
     table for each parent.
-    
+
     """
     @declared_attr
     def addresses(cls):
         address_association = Table(
             "%s_addresses" % cls.__tablename__,
             cls.metadata,
-            Column("address_id", ForeignKey("address.id"), 
+            Column("address_id", ForeignKey("address.id"),
                                 primary_key=True),
-            Column("%s_id" % cls.__tablename__, 
-                                ForeignKey("%s.id" % cls.__tablename__), 
+            Column("%s_id" % cls.__tablename__,
+                                ForeignKey("%s.id" % cls.__tablename__),
                                 primary_key=True),
         )
         return relationship(Address, secondary=address_association)
@@ -75,7 +75,7 @@ session = Session(engine)
 
 session.add_all([
     Customer(
-        name='customer 1', 
+        name='customer 1',
         addresses=[
             Address(
                     street='123 anywhere street',
diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py
index 3130960b0060fc273a6edffbf30a2dac8955c47d..693908189f9efd71967effcaea3eeb7b7fa71d42 100644
@@ -17,7 +17,7 @@ from sqlalchemy.orm import Session, relationship
 class Base(object):
     """Base class which provides automated table name
     and surrogate primary key column.
-    
+
     """
     @declared_attr
     def __tablename__(cls):
@@ -26,13 +26,13 @@ class Base(object):
 Base = declarative_base(cls=Base)
 
 class Address(object):
-    """Define columns that will be present in each 
+    """Define columns that will be present in each
     'Address' table.
-    
+
     This is a declarative mixin, so additional mapped
     attributes beyond simple columns specified here
     should be set up using @declared_attr.
-    
+
     """
     street = Column(String)
     city = Column(String)
@@ -40,13 +40,13 @@ class Address(object):
 
     def __repr__(self):
         return "%s(street=%r, city=%r, zip=%r)" % \
-            (self.__class__.__name__, self.street, 
+            (self.__class__.__name__, self.street,
             self.city, self.zip)
 
 class HasAddresses(object):
     """HasAddresses mixin, creates a new Address class
     for each parent.
-    
+
     """
     @declared_attr
     def addresses(cls):
@@ -54,9 +54,9 @@ class HasAddresses(object):
             "%sAddress" % cls.__name__,
             (Address, Base,),
             dict(
-                __tablename__ = "%s_address" % 
+                __tablename__ = "%s_address" %
                             cls.__tablename__,
-                parent_id = Column(Integer, 
+                parent_id = Column(Integer,
                     ForeignKey("%s.id" % cls.__tablename__)),
                 parent = relationship(cls)
             )
@@ -76,7 +76,7 @@ session = Session(engine)
 
 session.add_all([
     Customer(
-        name='customer 1', 
+        name='customer 1',
         addresses=[
             Customer.Address(
                     street='123 anywhere street',
diff --git a/examples/graphs/directed_graph.py b/examples/graphs/directed_graph.py
index 3ba602f00202d2ef1903ca451a5a2c1c2177a1c3..b822cda89f9ce2285c806fb895d8383c0f59039b 100644
@@ -29,19 +29,19 @@ class Node(Base):
 class Edge(Base):
     __tablename__ = 'edge'
 
-    lower_id = Column(Integer, 
-                        ForeignKey('node.node_id'), 
+    lower_id = Column(Integer,
+                        ForeignKey('node.node_id'),
                         primary_key=True)
 
-    higher_id = Column(Integer, 
-                        ForeignKey('node.node_id'), 
+    higher_id = Column(Integer,
+                        ForeignKey('node.node_id'),
                         primary_key=True)
 
     lower_node = relationship(Node,
-                                primaryjoin=lower_id==Node.node_id, 
+                                primaryjoin=lower_id==Node.node_id,
                                 backref='lower_edges')
     higher_node = relationship(Node,
-                                primaryjoin=higher_id==Node.node_id, 
+                                primaryjoin=higher_id==Node.node_id,
                                 backref='higher_edges')
 
     # here we have lower.node_id <= higher.node_id
diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py
index 84fc79cd5edd1da72b3871bdeffad11838228eba..75741df6d5a99208827af4da90a62bd9bc839a9f 100644
@@ -4,13 +4,13 @@ from sqlalchemy.orm import mapper, sessionmaker, polymorphic_union
 
 metadata = MetaData()
 
-managers_table = Table('managers', metadata, 
+managers_table = Table('managers', metadata,
     Column('employee_id', Integer, primary_key=True),
     Column('name', String(50)),
     Column('manager_data', String(40))
 )
 
-engineers_table = Table('engineers', metadata, 
+engineers_table = Table('engineers', metadata,
     Column('employee_id', Integer, primary_key=True),
     Column('name', String(50)),
     Column('engineer_info', String(40))
diff --git a/examples/inheritance/joined.py b/examples/inheritance/joined.py
index aa4c185189dee0ec6d562a701f0662c056a89927..4d3dc08d04ed6984dde54ded209689d042f59aa2 100644
@@ -12,7 +12,7 @@ class Company(Base):
     id = Column(Integer, primary_key=True)
     name = Column(String(50))
 
-    employees = relationship("Person", 
+    employees = relationship("Person",
                     backref='company',
                     cascade='all, delete-orphan')
 
@@ -46,7 +46,7 @@ class Engineer(Person):
     def __repr__(self):
         return "Engineer %s, status %s, engineer_name %s, "\
                 "primary_language %s" % \
-                    (self.name, self.status, 
+                    (self.name, self.status,
                         self.engineer_name, self.primary_language)
 
 class Manager(Person):
@@ -70,19 +70,19 @@ session = Session(engine)
 
 c = Company(name='company1', employees=[
     Manager(
-        name='pointy haired boss', 
+        name='pointy haired boss',
         status='AAB',
         manager_name='manager1'),
-    Engineer(name='dilbert', 
+    Engineer(name='dilbert',
         status='BBA',
-        engineer_name='engineer1', 
+        engineer_name='engineer1',
         primary_language='java'),
     Person(name='joesmith'),
-    Engineer(name='wally', 
+    Engineer(name='wally',
             status='CGG',
-            engineer_name='engineer2', 
+            engineer_name='engineer2',
             primary_language='python'),
-    Manager(name='jsmith', 
+    Manager(name='jsmith',
                 status='ABA',
                 manager_name='manager2')
 ])
@@ -109,7 +109,7 @@ c = session.query(Company).get(1)
 for e in c.employees:
     print e
 
-# query using with_polymorphic. 
+# query using with_polymorphic.
 eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
 print session.query(eng_manager).\
             filter(
@@ -127,7 +127,7 @@ print session.query(Company).\
         eng_manager,
         Company.employees
     ).filter(
-        or_(eng_manager.Engineer.engineer_name=='engineer1', 
+        or_(eng_manager.Engineer.engineer_name=='engineer1',
             eng_manager.Manager.manager_name=='manager2')
     ).all()
 
diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py
index a7883fcbff5f1cbea6e21fb319e0b9a108dfbb9d..b2f934120854b633a030565287b0e6732da46df5 100644
@@ -5,11 +5,11 @@ from sqlalchemy.orm import mapper, relationship, sessionmaker
 metadata = MetaData()
 
 # a table to store companies
-companies = Table('companies', metadata, 
+companies = Table('companies', metadata,
    Column('company_id', Integer, primary_key=True),
    Column('name', String(50)))
 
-employees_table = Table('employees', metadata, 
+employees_table = Table('employees', metadata,
     Column('employee_id', Integer, primary_key=True),
     Column('company_id', Integer, ForeignKey('companies.company_id')),
     Column('name', String(50)),
@@ -31,7 +31,7 @@ class Engineer(Person):
     def __repr__(self):
         return "Engineer %s, status %s, engineer_name %s, "\
                     "primary_language %s" % \
-                        (self.name, self.status, 
+                        (self.name, self.status,
                         self.engineer_name, self.primary_language)
 class Manager(Person):
     def __repr__(self):
diff --git a/examples/nested_sets/nested_sets.py b/examples/nested_sets/nested_sets.py
index 55d734d4ed0fc71baa55ad49dec11976cd0614bd..e35ea61c3719c84cae459f9e688adaedd1f2a051 100644
@@ -45,7 +45,7 @@ class NestedSetExtension(MapperExtension):
 class Employee(Base):
     __tablename__ = 'personnel'
     __mapper_args__ = {
-        'extension':NestedSetExtension(), 
+        'extension':NestedSetExtension(),
         'batch':False  # allows extension to fire for each instance before going to the next.
     }
 
diff --git a/examples/postgis/postgis.py b/examples/postgis/postgis.py
index a1a93c732a7da940edc851957f634f458a24887d..247265e17c403c274cbd917739991a8dcb1eb726 100644
@@ -32,7 +32,7 @@ class PersistentGisElement(GisElement):
 class TextualGisElement(GisElement, expression.Function):
     """Represents a Geometry value as expressed within application code; i.e. in wkt format.
 
-    Extends expression.Function so that the value is interpreted as 
+    Extends expression.Function so that the value is interpreted as
     GeomFromText(value) in a SQL expression context.
 
     """
@@ -74,7 +74,7 @@ class Geometry(TypeEngine):
                 return value
         return process
 
-# other datatypes can be added as needed, which 
+# other datatypes can be added as needed, which
 # currently only affect DDL statements.
 
 class Point(Geometry):
@@ -92,7 +92,7 @@ class LineString(Curve):
 # DDL integration
 
 class GISDDL(object):
-    """A DDL extension which integrates SQLAlchemy table create/drop 
+    """A DDL extension which integrates SQLAlchemy table create/drop
     methods with PostGis' AddGeometryColumn/DropGeometryColumn functions.
 
     Usage::
@@ -162,7 +162,7 @@ def _to_postgis(value):
 
 
 class GisAttribute(AttributeExtension):
-    """Intercepts 'set' events on a mapped instance attribute and 
+    """Intercepts 'set' events on a mapped instance attribute and
     converts the incoming value to a GIS expression.
 
     """
@@ -198,8 +198,8 @@ def GISColumn(*args, **kw):
 
     """
     return column_property(
-                Column(*args, **kw), 
-                extension=GisAttribute(), 
+                Column(*args, **kw),
+                extension=GisAttribute(),
                 comparator_factory=GisComparator
             )
 
diff --git a/examples/sharding/__init__.py b/examples/sharding/__init__.py
index d4b4639494e906c00e7b3e2c93dc24b62e23818d..dacc815f9b7b6cd447eb8e1dba5bead68152919b 100644
@@ -10,8 +10,8 @@ The basic components of a "sharded" mapping are:
 * a function which can return a list of shard ids which apply to a particular
   instance identifier; this is called "id_chooser".  If it returns all shard ids,
   all shards will be searched.
-* a function which can return a list of shard ids to try, given a particular 
-  Query ("query_chooser").  If it returns all shard ids, all shards will be 
+* a function which can return a list of shard ids to try, given a particular
+  Query ("query_chooser").  If it returns all shard ids, all shards will be
   queried and the results joined together.
 
 In this example, four sqlite databases will store information about weather
@@ -22,9 +22,9 @@ single shard being requested.
 
 The construction of generic sharding routines is an ambitious approach
 to the issue of organizing instances among multiple databases.   For a
-more plain-spoken alternative, the "distinct entity" approach 
+more plain-spoken alternative, the "distinct entity" approach
 is a simple method of assigning objects to different tables (and potentially
-database nodes) in an explicit way - described on the wiki at 
+database nodes) in an explicit way - described on the wiki at
 `EntityName <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
 
 """
diff --git a/examples/versioning/_lib.py b/examples/versioning/_lib.py
index d5f2cb0b75581d27c42507c73838dd3cd58ddc3d..ec0da4709045f4f2f031063687f874775f8ae505 100644
@@ -1,7 +1,7 @@
 """copy of ComparableEntity and eq_() from test.lib.
 
 This is just to support running the example outside of
-the SQLA testing environment which is no longer part of 
+the SQLA testing environment which is no longer part of
 SQLAlchemy as of 0.7.
 
 """
diff --git a/examples/versioning/history_meta.py b/examples/versioning/history_meta.py
index 1226a8f62e6f96589273077ad336a806b7a29e5b..53359939419d52db775d5326c4f74906b4e02807 100644
@@ -71,9 +71,9 @@ def _history_mapper(local_mapper):
     versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
 
     m = mapper(
-            versioned_cls, 
-            table, 
-            inherits=super_history_mapper, 
+            versioned_cls,
+            table,
+            inherits=super_history_mapper,
             polymorphic_on=polymorphic_on,
             polymorphic_identity=local_mapper.polymorphic_identity
             )
@@ -129,9 +129,9 @@ def create_version(obj, session, deleted = False):
             try:
                 prop = obj_mapper.get_property_by_column(obj_col)
             except UnmappedColumnError:
-                # in the case of single table inheritance, there may be 
+                # in the case of single table inheritance, there may be
                 # columns on the mapped table intended for the subclass only.
-                # the "unmapped" status of the subclass column on the 
+                # the "unmapped" status of the subclass column on the
                 # base class is a feature of the declarative module as of sqla 0.5.2.
                 continue
 
diff --git a/examples/versioning/test_versioning.py b/examples/versioning/test_versioning.py
index 389dba91876d83dbccd7ada4572f0ada4b98d099..9781fdc5d3864e3b8b55f2e4382c3cfdf450c918 100644
@@ -185,8 +185,8 @@ class TestVersioning(TestCase):
         eq_(
             sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
             [
-                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
-                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
                 SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)
             ]
         )
@@ -196,9 +196,9 @@ class TestVersioning(TestCase):
         eq_(
             sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
             [
-                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
-                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
-                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), 
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
                 SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
             ]
         )
@@ -207,10 +207,10 @@ class TestVersioning(TestCase):
         eq_(
             sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
             [
-                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
-                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
-                BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2), 
-                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), 
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+                BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
                 SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
             ]
         )
diff --git a/ez_setup.py b/ez_setup.py
index b74adc0654d5fd429fdbe2ae4e6bef7dd5a88118..a8c797c3b3a0493dd918fd5e13e2f629da8ea61e 100644
@@ -100,7 +100,7 @@ def use_setuptools(
     try:
         import pkg_resources
     except ImportError:
-        return do_download()       
+        return do_download()
     try:
         pkg_resources.require("setuptools>="+version); return
     except pkg_resources.VersionConflict, e:
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index f3ce924d164b12260cbc152b45bc1416071dd181..4456f351f4c21760619edc4d843698f3b66db4ca 100644
@@ -117,7 +117,7 @@ class MxODBCConnector(Connector):
             return False
 
     def _get_server_version_info(self, connection):
-        # eGenix suggests using conn.dbms_version instead 
+        # eGenix suggests using conn.dbms_version instead
         # of what we're doing here
         dbapi_con = connection.connection
         version = []
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index f190329f74d82fab28bc70b7c67ce146315ae642..7ef0922cf7a92b265967ff5a74a002d6ec3e3654 100644
@@ -70,7 +70,7 @@ class PyODBCConnector(Connector):
                 if 'port' in keys and not 'port' in query:
                     port = ',%d' % int(keys.pop('port'))
 
-                connectors = ["DRIVER={%s}" % 
+                connectors = ["DRIVER={%s}" %
                                 keys.pop('driver', self.pyodbc_driver_name),
                               'Server=%s%s' % (keys.pop('host', ''), port),
                               'Database=%s' % keys.pop('database', '') ]
@@ -83,9 +83,9 @@ class PyODBCConnector(Connector):
                 connectors.append("Trusted_Connection=Yes")
 
             # if set to 'Yes', the ODBC layer will try to automagically
-            # convert textual data from your database encoding to your 
-            # client encoding.  This should obviously be set to 'No' if 
-            # you query a cp1253 encoded database from a latin1 client... 
+            # convert textual data from your database encoding to your
+            # client encoding.  This should obviously be set to 'No' if
+            # you query a cp1253 encoded database from a latin1 client...
             if 'odbc_autotranslate' in keys:
                 connectors.append("AutoTranslate=%s" %
                                     keys.pop("odbc_autotranslate"))
@@ -126,7 +126,7 @@ class PyODBCConnector(Connector):
         if self._user_supports_unicode_binds is not None:
             self.supports_unicode_binds = self._user_supports_unicode_binds
         else:
-            self.supports_unicode_binds = (not self.freetds or 
+            self.supports_unicode_binds = (not self.freetds or
                                             self.freetds_driver_version >= '0.91'
                                             ) and not self.easysoft
         # end Py2K
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index 46ab7efa550b70c1e1ad6864e8b89d4f1e63dfcf..1db7a619dc6f59026b5348afeefc1bde7636e1f3 100644
@@ -33,7 +33,7 @@ class ZxJDBCConnector(Connector):
     def _create_jdbc_url(self, url):
         """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
         return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
-                                      url.port is not None 
+                                      url.port is not None
                                         and ':%s' % url.port or '',
                                       url.database)
 
@@ -41,8 +41,8 @@ class ZxJDBCConnector(Connector):
         opts = self._driver_kwargs()
         opts.update(url.query)
         return [
-                [self._create_jdbc_url(url), 
-                url.username, url.password, 
+                [self._create_jdbc_url(url),
+                url.username, url.password,
                 self.jdbc_driver_name],
                 opts]
 
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index 4b34da82e75748619d6d7cc78cb20544191bb121..69212cd6e84648e904ce0ba1a7a3e5d9e025b928 100644
@@ -22,10 +22,10 @@ from .. import util
 
 def _auto_fn(name):
     """default dialect importer.
-    
+
     plugs into the :class:`.PluginLoader`
     as a first-hit system.
-    
+
     """
     if "." in name:
         dialect, driver = name.split(".")
diff --git a/lib/sqlalchemy/dialects/access/base.py b/lib/sqlalchemy/dialects/access/base.py
index 29f10c5608f2aec5d640faea901a41c7cbd3a8e4..f107c9c8c103a2920bb2535c693bad2f37a73a40 100644
@@ -11,7 +11,7 @@ Support for the Microsoft Access database.
 
 .. note::
 
-    The Access dialect is **non-functional as of SQLAlchemy 0.6**, 
+    The Access dialect is **non-functional as of SQLAlchemy 0.6**,
     pending development efforts to bring it up-to-date.
 
 
@@ -125,7 +125,7 @@ class AccessExecutionContext(default.DefaultExecutionContext):
                 # self._last_inserted_ids[0] is None:
                 self.cursor.execute("SELECT @@identity AS lastrowid")
                 row = self.cursor.fetchone()
-                self._last_inserted_ids = [int(row[0])] 
+                self._last_inserted_ids = [int(row[0])]
                 #+ self._last_inserted_ids[1:]
                 # print "LAST ROW ID", self._last_inserted_ids
 
@@ -260,7 +260,7 @@ class AccessDialect(default.DefaultDialect):
 
                 colargs = \
                 {
-                    'nullable': not(col.Required or 
+                    'nullable': not(col.Required or
                                     col.Attributes & const.dbAutoIncrField),
                 }
                 default = col.DefaultValue
@@ -287,7 +287,7 @@ class AccessDialect(default.DefaultDialect):
                         if isinstance(thecol.type, AcInteger) and \
                                 not (thecol.default and
                                 isinstance(
-                                        thecol.default.arg, 
+                                        thecol.default.arg,
                                         schema.Sequence
                                 )):
                             thecol.autoincrement = False
@@ -322,7 +322,7 @@ class AccessDialect(default.DefaultDialect):
         # This is necessary, so we get the latest updates
         dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
 
-        names = [t.Name for t in dtbs.TableDefs 
+        names = [t.Name for t in dtbs.TableDefs
                 if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"]
         dtbs.Close()
         return names
@@ -373,7 +373,7 @@ class AccessCompiler(compiler.SQLCompiler):
                           'length':             'len',
                           }
     def visit_function(self, func):
-        """Access function names differ from the ANSI SQL names; 
+        """Access function names differ from the ANSI SQL names;
         rewrite common ones"""
         func.name = self.function_rewrites.get(func.name, func.name)
         return super(AccessCompiler, self).visit_function(func)
index 665e32267f3cd10661970f1e32524da1cff337c7..f79588d24d8292f148b3cef4f4046ca022143a6c 100644 (file)
@@ -14,7 +14,7 @@ from sqlalchemy.dialects.firebird.base import \
     dialect
 
 __all__ = (
-    'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', 
+    'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME',
     'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
     'dialect'
 )
index ff096acc6e630900f480423381fe2c2d4a93940f..d1c5933f4ca395e633cfbc2b9dda3c529b2dac28 100644 (file)
@@ -10,7 +10,7 @@
 .. note::
 
     The Informix dialect functions on current SQLAlchemy versions
-    but is not regularly tested, and may have many issues and 
+    but is not regularly tested, and may have many issues and
     caveats not currently handled.
 
 """
@@ -467,7 +467,7 @@ class InformixDialect(default.DefaultDialect):
         c = connection.execute(
         """select t1.constrname as cons_name,
                  t4.colname as local_column, t7.tabname as remote_table,
-                 t6.colname as remote_column, t7.owner as remote_owner 
+                 t6.colname as remote_column, t7.owner as remote_owner
             from sysconstraints as t1 , systables as t2 ,
                  sysindexes as t3 , syscolumns as t4 ,
                  sysreferences as t5 , syscolumns as t6 , systables as t7 ,
@@ -476,7 +476,7 @@ class InformixDialect(default.DefaultDialect):
              and t3.tabid = t2.tabid and t3.idxname = t1.idxname
              and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
              t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
-             t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) 
+             t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
              and t5.constrid = t1.constrid and t8.constrid = t5.primary
              and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
              t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
@@ -523,7 +523,7 @@ class InformixDialect(default.DefaultDialect):
 
         # Select the column positions from sysindexes for sysconstraints
         data = connection.execute(
-            """select t2.* 
+            """select t2.*
             from systables as t1, sysindexes as t2, sysconstraints as t3
             where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
             and t2.idxname=t3.idxname and t3.constrtype='P'""",
@@ -545,7 +545,7 @@ class InformixDialect(default.DefaultDialect):
         c = connection.execute(
             """select t1.colname
             from syscolumns as t1, systables as t2
-            where t2.tabname=? and t1.tabid = t2.tabid and 
+            where t2.tabname=? and t1.tabid = t2.tabid and
             t1.colno in (%s)""" % place_holder,
             table_name, *colpositions
         ).fetchall()
@@ -570,7 +570,7 @@ class InformixDialect(default.DefaultDialect):
             c = connection.execute(
                 """select t1.colname
                 from syscolumns as t1, systables as t2
-                where t2.tabname=? and t1.tabid = t2.tabid and 
+                where t2.tabname=? and t1.tabid = t2.tabid and
                 t1.colno in (%s)""" % place_holder,
                 table_name, *colnames
             ).fetchall()
index 76adf97ff91af02f6be8b75508a2b665f8e9772f..f52fc4fa6fdade09f52f5dee8451e22f457441ca 100644 (file)
@@ -8,7 +8,7 @@
 
 .. note::
 
-    The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**, 
+    The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**,
     pending development efforts to bring it up-to-date.
 
 Overview
@@ -255,7 +255,7 @@ class MaxTimestamp(sqltypes.DateTime):
                                     value[20:])])
         else:
             raise exc.InvalidRequestError(
-                "datetimeformat '%s' is not supported." % 
+                "datetimeformat '%s' is not supported." %
                 dialect.datetimeformat)
         return process
 
@@ -283,18 +283,18 @@ class MaxDate(sqltypes.Date):
                 if value is None:
                     return None
                 else:
-                    return datetime.date(int(value[0:4]), int(value[4:6]), 
+                    return datetime.date(int(value[0:4]), int(value[4:6]),
                                          int(value[6:8]))
         elif dialect.datetimeformat == 'iso':
             def process(value):
                 if value is None:
                     return None
                 else:
-                    return datetime.date(int(value[0:4]), int(value[5:7]), 
+                    return datetime.date(int(value[0:4]), int(value[5:7]),
                                          int(value[8:10]))
         else:
             raise exc.InvalidRequestError(
-                "datetimeformat '%s' is not supported." % 
+                "datetimeformat '%s' is not supported." %
                 dialect.datetimeformat)
         return process
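
For orientation, the two ``datetimeformat`` renderings parsed by the slices
above, shown on a sample date (the values are illustrative)::

    import datetime

    internal, iso = "20120728", "2012-07-28"

    # 'internal': all digits, positional slices
    assert datetime.date(int(internal[0:4]), int(internal[4:6]),
                         int(internal[6:8])) == datetime.date(2012, 7, 28)

    # 'iso': dash-separated, shifted slice offsets
    assert datetime.date(int(iso[0:4]), int(iso[5:7]),
                         int(iso[8:10])) == datetime.date(2012, 7, 28)
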
 
@@ -322,7 +322,7 @@ class MaxTime(sqltypes.Time):
                 if value is None:
                     return None
                 else:
-                    return datetime.time(int(value[0:4]), int(value[4:6]), 
+                    return datetime.time(int(value[0:4]), int(value[4:6]),
                                          int(value[6:8]))
         elif dialect.datetimeformat == 'iso':
             def process(value):
@@ -333,7 +333,7 @@ class MaxTime(sqltypes.Time):
                                          int(value[8:10]))
         else:
             raise exc.InvalidRequestError(
-                "datetimeformat '%s' is not supported." % 
+                "datetimeformat '%s' is not supported." %
                 dialect.datetimeformat)
         return process
 
index 8a2101c51f3be8736929bea9cf09c3a550afa94b..e262d208b720821d009cc8bfe5f69097e27377da 100644 (file)
@@ -18,9 +18,9 @@ from sqlalchemy.dialects.mssql.base import \
 
 
 __all__ = (
-    'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', 
+    'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
     'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME',
-    'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', 
+    'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
     'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP',
     'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect'
 )
\ No newline at end of file
index 21e63288044377d0581660be5e52cf9df070714b..5b23282692ff4742dbec2bfd2f6e24fe99e6bc6c 100644 (file)
@@ -16,7 +16,7 @@ import sys
 class MSDateTime_adodbapi(MSDateTime):
     def result_processor(self, dialect, coltype):
         def process(value):
-            # adodbapi will return datetimes with empty time 
+            # adodbapi will return datetimes with empty time
             # values as datetime.date() objects.
             # Promote them back to full datetime.datetime()
             if type(value) is datetime.date:
@@ -49,7 +49,7 @@ class MSDialect_adodbapi(MSDialect):
 
         connectors = ["Provider=SQLOLEDB"]
         if 'port' in keys:
-            connectors.append ("Data Source=%s, %s" % 
+            connectors.append ("Data Source=%s, %s" %
                                 (keys.get("host"), keys.get("port")))
         else:
             connectors.append ("Data Source=%s" % keys.get("host"))
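
For orientation, the "Data Source" fragments produced by the branch above
(host and port values are made up)::

    keys = {"host": "myhost", "port": 1433}
    fragment = ("Data Source=%s, %s" % (keys["host"], keys["port"])
                if "port" in keys else "Data Source=%s" % keys["host"])
    # fragment == "Data Source=myhost, 1433"
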
index 9cc42c093b33a51e1c050e05dfe380a426a50d62..f9f2e7a48e9396f2b1f469a90a945a5a4838fa59 100644 (file)
@@ -21,8 +21,8 @@ Sample connect string::
     mssql+pymssql://<username>:<password>@<freetds_name>
 
 Adding "?charset=utf8" or similar will cause pymssql to return
-strings as Python unicode objects.   This can potentially improve 
-performance in some scenarios as decoding of strings is 
+strings as Python unicode objects.   This can potentially improve
+performance in some scenarios as decoding of strings is
 handled natively.
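
A minimal sketch of the ``charset`` usage described above (the credentials
and FreeTDS name are placeholders)::

    from sqlalchemy import create_engine

    # strings now come back as Python unicode objects
    engine = create_engine("mssql+pymssql://user:pass@freetds_name?charset=utf8")
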
 
 Limitations
index 17dcbfecd784428538507bdbf5b39265c373c0e3..b3b1641e0f861b66b12cfa43c1a03b065590f440 100644 (file)
@@ -35,14 +35,14 @@ Examples of pyodbc connection string URLs:
 
     dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
 
-* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection 
+* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection
   that would appear like::
 
     DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
 
 * ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
   string which includes the port
-  information using the comma syntax. This will create the following 
+  information using the comma syntax. This will create the following
   connection string::
 
     DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
@@ -83,9 +83,9 @@ the python shell. For example::
 Unicode Binds
 ^^^^^^^^^^^^^
 
-The current state of PyODBC on a unix backend with FreeTDS and/or 
+The current state of PyODBC on a unix backend with FreeTDS and/or
 EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC
-versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically 
+versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically
 alter how strings are received.  The PyODBC dialect attempts to use all the information
 it knows to determine whether or not a Python unicode literal can be
 passed directly to the PyODBC driver or not; while SQLAlchemy can encode
@@ -93,13 +93,13 @@ these to bytestrings first, some users have reported that PyODBC mis-handles
 bytestrings for certain encodings and requires a Python unicode object,
 while the author has observed widespread cases where a Python unicode
 is completely misinterpreted by PyODBC, particularly when dealing with
-the information schema tables used in table reflection, and the value 
+the information schema tables used in table reflection, and the value
 must first be encoded to a bytestring.
 
 It is for this reason that whether or not unicode literals for bound
-parameters be sent to PyODBC can be controlled using the 
-``supports_unicode_binds`` parameter to ``create_engine()``.  When 
-left at its default of ``None``, the PyODBC dialect will use its 
+parameters be sent to PyODBC can be controlled using the
+``supports_unicode_binds`` parameter to ``create_engine()``.  When
+left at its default of ``None``, the PyODBC dialect will use its
 best guess as to whether or not the driver deals with unicode literals
 well.  When ``False``, unicode literals will be encoded first, and when
 ``True`` unicode literals will be passed straight through.  This is an interim
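
A hedged sketch of overriding that guess via the parameter described above
(the DSN is hypothetical)::

    from sqlalchemy import create_engine

    # force unicode literals to be encoded to bytestrings before
    # they reach the PyODBC driver
    engine = create_engine("mssql+pyodbc://user:pass@mydsn",
                           supports_unicode_binds=False)
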
@@ -199,7 +199,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
 
         super(MSExecutionContext_pyodbc, self).pre_exec()
 
-        # don't embed the scope_identity select into an 
+        # don't embed the scope_identity select into an
         # "INSERT .. DEFAULT VALUES"
         if self._select_lastrowid and \
                 self.dialect.use_scope_identity and \
@@ -211,11 +211,11 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
     def post_exec(self):
         if self._embedded_scope_identity:
             # Fetch the last inserted id from the manipulated statement
-            # We may have to skip over a number of result sets with 
+            # We may have to skip over a number of result sets with
             # no data (due to triggers, etc.)
             while True:
                 try:
-                    # fetchall() ensures the cursor is consumed 
+                    # fetchall() ensures the cursor is consumed
                     # without closing it (FreeTDS particularly)
                     row = self.cursor.fetchall()[0]
                     break
index bfa358c0c35d9ea04e16b449246fc7f550ef0b82..4bbd82c07e2e8b379c9618aa46d123daab5bfdbe 100644 (file)
@@ -68,7 +68,7 @@ class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
 
     def _get_server_version_info(self, connection):
         return tuple(
-                    int(x) 
+                    int(x)
                     for x in connection.connection.dbversion.split('.')
                 )
 
index 5a020f416103c7c5c524ce985ca2dbdaaf285502..ff1cf625c323ba238ae90de3012952750b8b6837 100644 (file)
@@ -44,7 +44,7 @@ Connection Timeouts
 -------------------
 
 MySQL features an automatic connection close behavior, for connections that have
-been idle for eight hours or more.   To circumvent having this issue, use the 
+been idle for eight hours or more.   To circumvent having this issue, use the
 ``pool_recycle`` option which controls the maximum age of any connection::
 
     engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
@@ -87,15 +87,15 @@ to be used.
 Transaction Isolation Level
 ---------------------------
 
-:func:`.create_engine` accepts an ``isolation_level`` 
-parameter which results in the command ``SET SESSION 
-TRANSACTION ISOLATION LEVEL <level>`` being invoked for 
+:func:`.create_engine` accepts an ``isolation_level``
+parameter which results in the command ``SET SESSION
+TRANSACTION ISOLATION LEVEL <level>`` being invoked for
 every new connection. Valid values for this parameter are
-``READ COMMITTED``, ``READ UNCOMMITTED``, 
+``READ COMMITTED``, ``READ UNCOMMITTED``,
 ``REPEATABLE READ``, and ``SERIALIZABLE``::
 
     engine = create_engine(
-                    "mysql://scott:tiger@localhost/test", 
+                    "mysql://scott:tiger@localhost/test",
                     isolation_level="READ UNCOMMITTED"
                 )
 
@@ -193,7 +193,7 @@ usual definition of "number of rows matched by an UPDATE or DELETE" statement.
 This is in contradiction to the default setting on most MySQL DBAPI drivers,
 which is "number of rows actually modified/deleted".  For this reason, the
 SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag,
-or whatever is equivalent for the DBAPI in use, on connect, unless the flag value 
+or whatever is equivalent for the DBAPI in use, on connect, unless the flag value
 is overridden using DBAPI-specific options
 (such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the
 OurSQL driver).
@@ -260,7 +260,7 @@ Index Types
 ~~~~~~~~~~~~~
 
 Some MySQL storage engines permit you to specify an index type when creating
-an index or primary key constraint. SQLAlchemy provides this feature via the 
+an index or primary key constraint. SQLAlchemy provides this feature via the
 ``mysql_using`` parameter on :class:`.Index`::
 
     Index('my_index', my_table.c.data, mysql_using='hash')
@@ -270,7 +270,7 @@ As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`::
     PrimaryKeyConstraint("data", mysql_using='hash')
 
 The value passed to the keyword argument will be simply passed through to the
-underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index 
+underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index
 type for your MySQL storage engine.
 
 More information can be found at:
@@ -1307,13 +1307,13 @@ class MySQLCompiler(compiler.SQLCompiler):
 
     def get_select_precolumns(self, select):
         """Add special MySQL keywords in place of DISTINCT.
-        
-        .. note:: 
-        
+
+        .. note::
+
           this usage is deprecated.  :meth:`.Select.prefix_with`
           should be used for special keywords at the start
           of a SELECT.
-          
+
         """
         if isinstance(select._distinct, basestring):
             return select._distinct.upper() + " "
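
Per the deprecation note above, a sketch of the suggested
:meth:`.Select.prefix_with` replacement (the table is made up)::

    from sqlalchemy import MetaData, Table, Column, Integer, select

    mytable = Table('mytable', MetaData(),
                    Column('id', Integer, primary_key=True))

    # renders: SELECT DISTINCTROW mytable.id FROM mytable
    stmt = select([mytable]).prefix_with("DISTINCTROW")
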
@@ -1361,16 +1361,16 @@ class MySQLCompiler(compiler.SQLCompiler):
             if limit is None:
                 # hardwire the upper limit.  Currently
                 # needed by OurSQL with Python 3
-                # (https://bugs.launchpad.net/oursql/+bug/686232), 
+                # (https://bugs.launchpad.net/oursql/+bug/686232),
                 # but also is consistent with the usage of the upper
                 # bound as part of MySQL's "syntax" for OFFSET with
                 # no LIMIT
                 return ' \n LIMIT %s, %s' % (
-                                self.process(sql.literal(offset)), 
+                                self.process(sql.literal(offset)),
                                 "18446744073709551615")
             else:
                 return ' \n LIMIT %s, %s' % (
-                                self.process(sql.literal(offset)), 
+                                self.process(sql.literal(offset)),
                                 self.process(sql.literal(limit)))
         else:
             # No offset provided, so just use the limit
@@ -1384,10 +1384,10 @@ class MySQLCompiler(compiler.SQLCompiler):
             return None
 
     def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
-        return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) 
+        return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw)
                     for t in [from_table] + list(extra_froms))
 
-    def update_from_clause(self, update_stmt, from_table, 
+    def update_from_clause(self, update_stmt, from_table,
                                 extra_froms, from_hints, **kw):
         return None
 
@@ -1416,7 +1416,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
             constraint_string += "KEY %s (%s)" % (
                         self.preparer.quote(
                             "idx_autoinc_%s" % auto_inc_column.name, None
-                        ), 
+                        ),
                         self.preparer.format_column(auto_inc_column)
                     )
 
@@ -1453,7 +1453,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
 
         opts = dict(
             (
-                k[len(self.dialect.name)+1:].upper(), 
+                k[len(self.dialect.name)+1:].upper(),
                 v
             )
             for k, v in table.kwargs.items()
@@ -1469,7 +1469,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
                 arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''")
 
             if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY',
-                       'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', 
+                       'DEFAULT_CHARACTER_SET', 'CHARACTER_SET',
                        'DEFAULT_CHARSET',
                        'DEFAULT_COLLATE'):
                 opt = opt.replace('_', ' ')
@@ -1489,7 +1489,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
         table = preparer.format_table(index.table)
         columns = [preparer.quote(c.name, c.quote) for c in index.columns]
         name = preparer.quote(
-                    self._index_identifier(index.name), 
+                    self._index_identifier(index.name),
                     index.quote)
 
         text = "CREATE "
@@ -1598,24 +1598,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
         if type_.precision is None:
             return self._extend_numeric(type_, "NUMERIC")
         elif type_.scale is None:
-            return self._extend_numeric(type_, 
-                            "NUMERIC(%(precision)s)" % 
+            return self._extend_numeric(type_,
+                            "NUMERIC(%(precision)s)" %
                             {'precision': type_.precision})
         else:
-            return self._extend_numeric(type_, 
-                            "NUMERIC(%(precision)s, %(scale)s)" % 
+            return self._extend_numeric(type_,
+                            "NUMERIC(%(precision)s, %(scale)s)" %
                             {'precision': type_.precision, 'scale' : type_.scale})
 
     def visit_DECIMAL(self, type_):
         if type_.precision is None:
             return self._extend_numeric(type_, "DECIMAL")
         elif type_.scale is None:
-            return self._extend_numeric(type_, 
-                            "DECIMAL(%(precision)s)" % 
+            return self._extend_numeric(type_,
+                            "DECIMAL(%(precision)s)" %
                             {'precision': type_.precision})
         else:
-            return self._extend_numeric(type_, 
-                            "DECIMAL(%(precision)s, %(scale)s)" % 
+            return self._extend_numeric(type_,
+                            "DECIMAL(%(precision)s, %(scale)s)" %
                             {'precision': type_.precision, 'scale' : type_.scale})
 
     def visit_DOUBLE(self, type_):
@@ -1638,7 +1638,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
         if self._mysql_type(type_) and \
             type_.scale is not None and \
             type_.precision is not None:
-            return self._extend_numeric(type_, 
+            return self._extend_numeric(type_,
                             "FLOAT(%s, %s)" % (type_.precision, type_.scale))
         elif type_.precision is not None:
             return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,))
@@ -1647,24 +1647,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
 
     def visit_INTEGER(self, type_):
         if self._mysql_type(type_) and type_.display_width is not None:
-            return self._extend_numeric(type_, 
-                        "INTEGER(%(display_width)s)" % 
+            return self._extend_numeric(type_,
+                        "INTEGER(%(display_width)s)" %
                         {'display_width': type_.display_width})
         else:
             return self._extend_numeric(type_, "INTEGER")
 
     def visit_BIGINT(self, type_):
         if self._mysql_type(type_) and type_.display_width is not None:
-            return self._extend_numeric(type_, 
-                        "BIGINT(%(display_width)s)" % 
+            return self._extend_numeric(type_,
+                        "BIGINT(%(display_width)s)" %
                         {'display_width': type_.display_width})
         else:
             return self._extend_numeric(type_, "BIGINT")
 
     def visit_MEDIUMINT(self, type_):
         if self._mysql_type(type_) and type_.display_width is not None:
-            return self._extend_numeric(type_, 
-                        "MEDIUMINT(%(display_width)s)" % 
+            return self._extend_numeric(type_,
+                        "MEDIUMINT(%(display_width)s)" %
                         {'display_width': type_.display_width})
         else:
             return self._extend_numeric(type_, "MEDIUMINT")
@@ -1677,8 +1677,8 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
 
     def visit_SMALLINT(self, type_):
         if self._mysql_type(type_) and type_.display_width is not None:
-            return self._extend_numeric(type_, 
-                        "SMALLINT(%(display_width)s)" % 
+            return self._extend_numeric(type_,
+                        "SMALLINT(%(display_width)s)" %
                         {'display_width': type_.display_width}
                     )
         else:
@@ -1728,7 +1728,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
             return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
         else:
             raise exc.CompileError(
-                    "VARCHAR requires a length on dialect %s" % 
+                    "VARCHAR requires a length on dialect %s" %
                     self.dialect.name)
 
     def visit_CHAR(self, type_):
@@ -1744,7 +1744,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
             return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length})
         else:
             raise exc.CompileError(
-                    "NVARCHAR requires a length on dialect %s" % 
+                    "NVARCHAR requires a length on dialect %s" %
                     self.dialect.name)
 
     def visit_NCHAR(self, type_):
@@ -1805,8 +1805,8 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
             quote = '"'
 
         super(MySQLIdentifierPreparer, self).__init__(
-                                                dialect, 
-                                                initial_quote=quote, 
+                                                dialect,
+                                                initial_quote=quote,
                                                 escape_quote=quote)
 
     def _quote_free_identifiers(self, *ids):
@@ -1839,7 +1839,7 @@ class MySQLDialect(default.DefaultDialect):
     preparer = MySQLIdentifierPreparer
 
     # default SQL compilation settings -
-    # these are modified upon initialize(), 
+    # these are modified upon initialize(),
     # i.e. first connect
     _backslash_escapes = True
     _server_ansiquotes = False
@@ -1856,7 +1856,7 @@ class MySQLDialect(default.DefaultDialect):
         else:
             return None
 
-    _isolation_lookup = set(['SERIALIZABLE', 
+    _isolation_lookup = set(['SERIALIZABLE',
                 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
 
     def set_isolation_level(self, connection, level):
@@ -1864,7 +1864,7 @@ class MySQLDialect(default.DefaultDialect):
         if level not in self._isolation_lookup:
             raise exc.ArgumentError(
                 "Invalid value '%s' for isolation_level. "
-                "Valid isolation levels for %s are %s" % 
+                "Valid isolation levels for %s are %s" %
                 (level, self.name, ", ".join(self._isolation_lookup))
                 )
         cursor = connection.cursor()
@@ -1936,7 +1936,7 @@ class MySQLDialect(default.DefaultDialect):
             return self._extract_error_code(e) in \
                         (2006, 2013, 2014, 2045, 2055)
         elif isinstance(e, self.dbapi.InterfaceError):
-            # if underlying connection is closed, 
+            # if underlying connection is closed,
             # this is the error you get
             return "(0, '')" in str(e)
         else:
@@ -2148,9 +2148,9 @@ class MySQLDialect(default.DefaultDialect):
 
     def _parsed_state_or_create(self, connection, table_name, schema=None, **kw):
         return self._setup_parser(
-                        connection, 
-                        table_name, 
-                        schema, 
+                        connection,
+                        table_name,
+                        schema,
                         info_cache=kw.get('info_cache', None)
                     )
 
@@ -2158,7 +2158,7 @@ class MySQLDialect(default.DefaultDialect):
     def _tabledef_parser(self):
         """return the MySQLTableDefinitionParser, generate if needed.
 
-        The deferred creation ensures that the dialect has 
+        The deferred creation ensures that the dialect has
         retrieved server version information first.
 
         """
index 656e105a70c94615223cd75dc1eb1b4e070c1430..240f30251bd6f10d02720fa27e2652eae4f07591 100644 (file)
@@ -26,20 +26,20 @@ MySQLdb will accommodate Python ``unicode`` objects if the
 ``use_unicode=1`` parameter, or the ``charset`` parameter,
 is passed as a connection argument.
 
-Without this setting, many MySQL server installations default to 
+Without this setting, many MySQL server installations default to
 a ``latin1`` encoding for client connections, which has the effect
-of all data being converted into ``latin1``, even if you have ``utf8`` 
+of all data being converted into ``latin1``, even if you have ``utf8``
 or another character set configured on your tables
 and columns.  With versions 4.1 and higher, you can change the connection
 character set either through server configuration or by including the
 ``charset`` parameter.  The ``charset``
-parameter as received by MySQL-Python also has the side-effect of 
+parameter as received by MySQL-Python also has the side-effect of
 enabling ``use_unicode=1``::
 
     # set client encoding to utf8; all strings come back as unicode
     create_engine('mysql+mysqldb:///mydb?charset=utf8')
 
-Manually configuring ``use_unicode=0`` will cause MySQL-python to 
+Manually configuring ``use_unicode=0`` will cause MySQL-python to
 return encoded strings::
 
     # set client encoding to utf8; all strings come back as utf8 str
@@ -57,9 +57,9 @@ It is strongly advised to use the latest version of MySQL-Python.
 from sqlalchemy.dialects.mysql.base import (MySQLDialect, MySQLExecutionContext,
                                             MySQLCompiler, MySQLIdentifierPreparer)
 from sqlalchemy.connectors.mysqldb import (
-                        MySQLDBExecutionContext, 
-                        MySQLDBCompiler, 
-                        MySQLDBIdentifierPreparer, 
+                        MySQLDBExecutionContext,
+                        MySQLDBCompiler,
+                        MySQLDBIdentifierPreparer,
                         MySQLDBConnector
                     )
 
index 2a3c6b09c7a5f101df4f490eac7ed9e34d3c4649..8f7bebe9c5f2a9318e79963b7dc6ba0f3d5e165c 100644 (file)
@@ -108,9 +108,9 @@ class MySQLDialect_oursql(MySQLDialect):
         arg = "'%s'" % arg
         connection.execution_options(_oursql_plain_query=True).execute(query % arg)
 
-    # Because mysql is bad, these methods have to be 
+    # Because mysql is bad, these methods have to be
     # reimplemented to use _PlainQuery. Basically, some queries
-    # refuse to return any data if they're run through 
+    # refuse to return any data if they're run through
     # the parameterized query API, or refuse to be parameterized
     # in the first place.
     def do_begin_twophase(self, connection, xid):
@@ -135,7 +135,7 @@ class MySQLDialect_oursql(MySQLDialect):
     # Q: why didn't we need all these "plain_query" overrides earlier ?
     # am i on a newer/older version of OurSQL ?
     def has_table(self, connection, table_name, schema=None):
-        return MySQLDialect.has_table(self, 
+        return MySQLDialect.has_table(self,
                                         connection.connect().\
                                             execution_options(_oursql_plain_query=True),
                                         table_name, schema)
@@ -183,7 +183,7 @@ class MySQLDialect_oursql(MySQLDialect):
 
     def initialize(self, connection):
         return MySQLDialect.initialize(
-                            self, 
+                            self,
                             connection.execution_options(_oursql_plain_query=True)
                             )
 
@@ -222,7 +222,7 @@ class MySQLDialect_oursql(MySQLDialect):
         opts.setdefault('found_rows', True)
 
         ssl = {}
-        for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 
+        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                         'ssl_capath', 'ssl_cipher']:
             if key in opts:
                 ssl[key[4:]] = opts[key]
index 20a16988aa723fcc79e947edc5b85a4a8a74f976..6271286f92cea184e2ce41ae2ca71149774a7ed7 100644 (file)
@@ -20,7 +20,7 @@ Connect string::
 Limitations
 -----------
 
-The mysql-pyodbc dialect is subject to unresolved character encoding issues 
+The mysql-pyodbc dialect is subject to unresolved character encoding issues
 which exist within the current ODBC drivers available.
 (see http://code.google.com/p/pyodbc/issues/detail?id=25).   Consider usage
 of OurSQL, MySQLdb, or MySQL-connector/Python.
index 6e2bc2760fec7bd1602a2bd589630a42a6017bdc..6f0569c30adbf68f3465c387f60f0b08a3627713 100644 (file)
@@ -9,19 +9,19 @@
 Driver
 ------
 
-The Oracle dialect uses the cx_oracle driver, available at 
-http://cx-oracle.sourceforge.net/ .   The dialect has several behaviors 
+The Oracle dialect uses the cx_oracle driver, available at
+http://cx-oracle.sourceforge.net/ .   The dialect has several behaviors
 which are specifically tailored towards compatibility with this module.
 Version 5.0 or greater is **strongly** recommended, as SQLAlchemy makes
-extensive use of the cx_oracle output converters for numeric and 
+extensive use of the cx_oracle output converters for numeric and
 string conversions.
 
 Connecting
 ----------
 
-Connecting with create_engine() uses the standard URL approach of 
-``oracle://user:pass@host:port/dbname[?key=value&key=value...]``.  If dbname is present, the 
-host, port, and dbname tokens are converted to a TNS name using the cx_oracle 
+Connecting with create_engine() uses the standard URL approach of
+``oracle://user:pass@host:port/dbname[?key=value&key=value...]``.  If dbname is present, the
+host, port, and dbname tokens are converted to a TNS name using the cx_oracle
 :func:`makedsn()` function.  Otherwise, the host token is taken directly as a TNS name.
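
By way of illustration, the two URL forms described above (host, credentials,
and TNS entries are made up)::

    from sqlalchemy import create_engine

    # host/port/dbname present: converted to a TNS descriptor via makedsn()
    engine = create_engine("oracle+cx_oracle://scott:tiger@dbhost:1521/orcl")

    # no dbname: the host token is taken directly as a TNS name
    engine = create_engine("oracle+cx_oracle://scott:tiger@tnsname")
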
 
 Additional arguments which may be specified either as query string arguments on the
@@ -53,7 +53,7 @@ handler so that all string based result values are returned as unicode as well.
 Generally, the ``NLS_LANG`` environment variable determines the nature
 of the encoding to be used.
 
-Note that this behavior is disabled when Oracle 8 is detected, as it has been 
+Note that this behavior is disabled when Oracle 8 is detected, as it has been
 observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
 
 LOB Objects
@@ -71,7 +71,7 @@ To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_en
 Two Phase Transaction Support
 -----------------------------
 
-Two Phase transactions are implemented using XA transactions.  Success has been reported 
+Two Phase transactions are implemented using XA transactions.  Success has been reported
 with this feature but it should be regarded as experimental.
 
 Precision Numerics
@@ -95,14 +95,14 @@ If precision numerics aren't required, the decimal handling
 can be disabled by passing the flag ``coerce_to_decimal=False``
 to :func:`.create_engine`::
 
-    engine = create_engine("oracle+cx_oracle://dsn", 
+    engine = create_engine("oracle+cx_oracle://dsn",
                         coerce_to_decimal=False)
 
 .. versionadded:: 0.7.6
     Add the ``coerce_to_decimal`` flag.
 
-Another alternative to performance is to use the 
-`cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library; 
+Another alternative to performance is to use the
+`cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library;
 see :class:`.Numeric` for additional notes.
 
 The handler attempts to use the "precision" and "scale"
@@ -160,7 +160,7 @@ class _OracleNumeric(sqltypes.Numeric):
     def result_processor(self, dialect, coltype):
         # we apply a cx_oracle type handler to all connections
         # that converts floating point strings to Decimal().
-        # However, in some subquery situations, Oracle doesn't 
+        # However, in some subquery situations, Oracle doesn't
         # give us enough information to determine int or Decimal.
         # It could even be int/Decimal differently on each row,
         # regardless of the scale given for the originating type.
@@ -190,7 +190,7 @@ class _OracleNumeric(sqltypes.Numeric):
                 else:
                     return None
         else:
-            # cx_oracle 4 behavior, will assume 
+            # cx_oracle 4 behavior, will assume
             # floats
             return super(_OracleNumeric, self).\
                             result_processor(dialect, coltype)
@@ -237,7 +237,7 @@ class _NativeUnicodeMixin(object):
     # end Py2K
 
     # we apply a connection output handler that returns
-    # unicode in all cases, so the "native_unicode" flag 
+    # unicode in all cases, so the "native_unicode" flag
     # will be set for the default String.result_processor.
 
 class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR):
@@ -317,15 +317,15 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
             getattr(self.compiled, '_quoted_bind_names', None)
         if quoted_bind_names:
             if not self.dialect.supports_unicode_statements:
-                # if DBAPI doesn't accept unicode statements, 
+                # if DBAPI doesn't accept unicode statements,
                 # keys in self.parameters would have been encoded
                 # here.  so convert names in quoted_bind_names
                 # to encoded as well.
                 quoted_bind_names = \
                                 dict(
-                                    (fromname.encode(self.dialect.encoding), 
-                                    toname.encode(self.dialect.encoding)) 
-                                    for fromname, toname in 
+                                    (fromname.encode(self.dialect.encoding),
+                                    toname.encode(self.dialect.encoding))
+                                    for fromname, toname in
                                     quoted_bind_names.items()
                                 )
             for param in self.parameters:
@@ -334,10 +334,10 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
                     del param[fromname]
 
         if self.dialect.auto_setinputsizes:
-            # cx_oracle really has issues when you setinputsizes 
+            # cx_oracle really has issues when you setinputsizes
             # on String, including that outparams/RETURNING
             # breaks for varchars
-            self.set_input_sizes(quoted_bind_names, 
+            self.set_input_sizes(quoted_bind_names,
                                  exclude_types=self.dialect._cx_oracle_string_types
                                 )
 
@@ -370,7 +370,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
     def get_result_proxy(self):
         if hasattr(self, 'out_parameters') and self.compiled.returning:
             returning_params = dict(
-                                    (k, v.getvalue()) 
+                                    (k, v.getvalue())
                                     for k, v in self.out_parameters.items()
                                 )
             return ReturningResultProxy(self, returning_params)
@@ -396,7 +396,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
                         impl_type = type.dialect_impl(self.dialect)
                         dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
                         result_processor = impl_type.\
-                                                    result_processor(self.dialect, 
+                                                    result_processor(self.dialect,
                                                     dbapi_type)
                         if result_processor is not None:
                             out_parameters[name] = \
@@ -405,7 +405,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
                             out_parameters[name] = self.out_parameters[name].getvalue()
             else:
                 result.out_parameters = dict(
-                                            (k, v.getvalue()) 
+                                            (k, v.getvalue())
                                             for k, v in self.out_parameters.items()
                                         )
 
@@ -414,13 +414,13 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
 class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
     """Support WITH_UNICODE in Python 2.xx.
 
-    WITH_UNICODE allows cx_Oracle's Python 3 unicode handling 
-    behavior under Python 2.x. This mode in some cases disallows 
-    and in other cases silently passes corrupted data when 
-    non-Python-unicode strings (a.k.a. plain old Python strings) 
-    are passed as arguments to connect(), the statement sent to execute(), 
+    WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
+    behavior under Python 2.x. This mode in some cases disallows
+    and in other cases silently passes corrupted data when
+    non-Python-unicode strings (a.k.a. plain old Python strings)
+    are passed as arguments to connect(), the statement sent to execute(),
     or any of the bind parameter keys or values sent to execute().
-    This optional context therefore ensures that all statements are 
+    This optional context therefore ensures that all statements are
     passed as Python unicode objects.
 
     """
@@ -451,7 +451,7 @@ class ReturningResultProxy(base.FullyBufferedResultProxy):
         return ret
 
     def _buffer_rows(self):
-        return collections.deque([tuple(self._returning_params["ret_%d" % i] 
+        return collections.deque([tuple(self._returning_params["ret_%d" % i]
                     for i, c in enumerate(self._returning_params))])
 
 class OracleDialect_cx_oracle(OracleDialect):
@@ -483,11 +483,11 @@ class OracleDialect_cx_oracle(OracleDialect):
 
     execute_sequence_format = list
 
-    def __init__(self, 
-                auto_setinputsizes=True, 
-                auto_convert_lobs=True, 
-                threaded=True, 
-                allow_twophase=True, 
+    def __init__(self,
+                auto_setinputsizes=True,
+                auto_convert_lobs=True,
+                threaded=True,
+                allow_twophase=True,
                 coerce_to_decimal=True,
                 arraysize=50, **kwargs):
         OracleDialect.__init__(self, **kwargs)
@@ -510,11 +510,11 @@ class OracleDialect_cx_oracle(OracleDialect):
 
         self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB")
         self._cx_oracle_unicode_types = types("UNICODE", "NCLOB")
-        self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") 
+        self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
         self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0)
 
         self.supports_native_decimal = (
-                                        self.cx_oracle_ver >= (5, 0) and 
+                                        self.cx_oracle_ver >= (5, 0) and
                                         coerce_to_decimal
                                     )
 
@@ -572,12 +572,12 @@ class OracleDialect_cx_oracle(OracleDialect):
         self._detect_decimal_char(connection)
 
     def _detect_decimal_char(self, connection):
-        """detect if the decimal separator character is not '.', as 
+        """detect if the decimal separator character is not '.', as
         is the case with european locale settings for NLS_LANG.
 
         cx_oracle itself uses similar logic when it formats Python
-        Decimal objects to strings on the bind side (as of 5.0.3), 
-        as Oracle sends/receives string numerics only in the 
+        Decimal objects to strings on the bind side (as of 5.0.3),
+        as Oracle sends/receives string numerics only in the
         current locale.
 
         """
@@ -588,14 +588,14 @@ class OracleDialect_cx_oracle(OracleDialect):
         cx_Oracle = self.dbapi
         conn = connection.connection
 
-        # override the output_type_handler that's 
-        # on the cx_oracle connection with a plain 
+        # override the output_type_handler that's
+        # on the cx_oracle connection with a plain
         # one on the cursor
 
-        def output_type_handler(cursor, name, defaultType, 
+        def output_type_handler(cursor, name, defaultType,
                                 size, precision, scale):
             return cursor.var(
-                        cx_Oracle.STRING, 
+                        cx_Oracle.STRING,
                         255, arraysize=cursor.arraysize)
 
         cursor = conn.cursor()
@@ -625,7 +625,7 @@ class OracleDialect_cx_oracle(OracleDialect):
             return
 
         cx_Oracle = self.dbapi
-        def output_type_handler(cursor, name, defaultType, 
+        def output_type_handler(cursor, name, defaultType,
                                     size, precision, scale):
             # convert all NUMBER with precision + positive scale to Decimal
             # this almost allows "native decimal" mode.
@@ -633,22 +633,22 @@ class OracleDialect_cx_oracle(OracleDialect):
                     defaultType == cx_Oracle.NUMBER and \
                     precision and scale > 0:
                 return cursor.var(
-                            cx_Oracle.STRING, 
-                            255, 
-                            outconverter=self._to_decimal, 
+                            cx_Oracle.STRING,
+                            255,
+                            outconverter=self._to_decimal,
                             arraysize=cursor.arraysize)
             # if NUMBER with zero precision and 0 or neg scale, this appears
-            # to indicate "ambiguous".  Use a slower converter that will 
-            # make a decision based on each value received - the type 
+            # to indicate "ambiguous".  Use a slower converter that will
+            # make a decision based on each value received - the type
             # may change from row to row (!).   This kills
             # off "native decimal" mode, handlers still needed.
             elif self.supports_native_decimal and \
                     defaultType == cx_Oracle.NUMBER \
                     and not precision and scale <= 0:
                 return cursor.var(
-                            cx_Oracle.STRING, 
-                            255, 
-                            outconverter=self._detect_decimal, 
+                            cx_Oracle.STRING,
+                            255,
+                            outconverter=self._detect_decimal,
                             arraysize=cursor.arraysize)
             # allow all strings to come back natively as Unicode
             elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
@@ -712,7 +712,7 @@ class OracleDialect_cx_oracle(OracleDialect):
 
     def _get_server_version_info(self, connection):
         return tuple(
-                        int(x) 
+                        int(x)
                         for x in connection.connection.version.split('.')
                     )
 
index bc0c31275a5ed6c0bb3a2c51343a524f7a54bacc..339634020fc61fe08a35b03db675286964f542b8 100644 (file)
@@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql.base import \
     DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect
 
 __all__ = (
-'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET', 
+'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET',
 'CIDR', 'UUID', 'BIT', 'MACADDR', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME',
 'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect'
 )
index b5cb3b782055e2aa853840210df25c0d386bbee0..717d6b49a0709bacf0808510c6f7fb29c8b5c347 100644 (file)
@@ -12,7 +12,7 @@ section regarding that driver.
 Date and Time Types
 -------------------
 
-SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide 
+SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide
 out of the box functionality for translating values between Python `datetime` objects
 and a SQLite-supported format.  SQLAlchemy's own :class:`~sqlalchemy.types.DateTime`
 and related types provide date formatting and parsing functionality when SQlite is used.
@@ -36,19 +36,19 @@ Two things to note:
   This is regardless of the AUTOINCREMENT keyword being present or not.
 
 To specifically render the AUTOINCREMENT keyword on the primary key
-column when rendering DDL, add the flag ``sqlite_autoincrement=True`` 
+column when rendering DDL, add the flag ``sqlite_autoincrement=True``
 to the Table construct::
 
     Table('sometable', metadata,
-            Column('id', Integer, primary_key=True), 
+            Column('id', Integer, primary_key=True),
             sqlite_autoincrement=True)
 
 Transaction Isolation Level
 ---------------------------
 
-:func:`.create_engine` accepts an ``isolation_level`` parameter which results in 
-the command ``PRAGMA read_uncommitted <level>`` being invoked for every new 
-connection.   Valid values for this parameter are ``SERIALIZABLE`` and 
+:func:`.create_engine` accepts an ``isolation_level`` parameter which results in
+the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
+connection.   Valid values for this parameter are ``SERIALIZABLE`` and
 ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
 See the section :ref:`pysqlite_serializable` for an important workaround
 when using serializable isolation with Pysqlite.
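
A minimal sketch of the parameter described above (the database path is
hypothetical)::

    from sqlalchemy import create_engine

    # each new connection emits: PRAGMA read_uncommitted = 1
    engine = create_engine("sqlite:///some.db",
                           isolation_level="READ UNCOMMITTED")
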
@@ -57,31 +57,31 @@ Database Locking Behavior / Concurrency
 ---------------------------------------
 
 Note that SQLite is not designed for a high level of concurrency.   The database
-itself, being a file, is locked completely during write operations and within 
+itself, being a file, is locked completely during write operations and within
 transactions, meaning exactly one connection has exclusive access to the database
 during this period - all other connections will be blocked during this time.
 
 The Python DBAPI specification also calls for a connection model that is always
 in a transaction; there is no BEGIN method, only commit and rollback.  This implies
-that a SQLite DBAPI driver would technically allow only serialized access to a 
+that a SQLite DBAPI driver would technically allow only serialized access to a
 particular database file at all times.   The pysqlite driver attempts to ameliorate this by
 deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or
 DELETE) is received within a transaction.  While this breaks serializable isolation,
 it at least delays the exclusive locking inherent in SQLite's design.
 
-SQLAlchemy's default mode of usage with the ORM is known 
-as "autocommit=False", which means the moment the :class:`.Session` begins to be 
+SQLAlchemy's default mode of usage with the ORM is known
+as "autocommit=False", which means the moment the :class:`.Session` begins to be
 used, a transaction is begun.   As the :class:`.Session` is used, the autoflush
-feature, also on by default, will flush out pending changes to the database 
+feature, also on by default, will flush out pending changes to the database
 before each query.  The effect of this is that a :class:`.Session` used in its
 default mode will often emit DML early on, long before the transaction is actually
-committed.  This again will have the effect of serializing access to the SQLite 
+committed.  This again will have the effect of serializing access to the SQLite
 database.   If highly concurrent reads are desired against the SQLite database,
 it is advised that the autoflush feature be disabled, and potentially even
 that autocommit be re-enabled, which has the effect of each SQL statement and
 flush committing changes immediately.
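
Following that advice, a hedged configuration sketch (whether it suits a
given application depends on its transactional needs)::

    from sqlalchemy.orm import sessionmaker

    # no autoflush before queries; each statement/flush commits immediately
    Session = sessionmaker(autoflush=False, autocommit=True)
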
 
-For more information on SQLite's lack of concurrency by design, please 
+For more information on SQLite's lack of concurrency by design, please
 see `Situations Where Another RDBMS May Work Better - High Concurrency <http://www.sqlite.org/whentouse.html>`_
 near the bottom of the page.
 
@@ -112,30 +112,30 @@ class _DateTimeMixin(object):
 
 class DATETIME(_DateTimeMixin, sqltypes.DateTime):
     """Represent a Python datetime object in SQLite using a string.
-    
+
     The default string storage format is::
-    
+
         "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(second)02d.%(microsecond)06d"
-    
+
     e.g.::
-    
+
         2011-03-15 12:05:57.10558
-    
-    The storage format can be customized to some degree using the 
+
+    The storage format can be customized to some degree using the
     ``storage_format`` and ``regexp`` parameters, such as::
-        
+
         import re
         from sqlalchemy.dialects.sqlite import DATETIME
-        
+
         dt = DATETIME(
                 storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
                 regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)")
             )
-    
-    :param storage_format: format string which will be applied to the 
+
+    :param storage_format: format string which will be applied to the
      dict with keys year, month, day, hour, minute, second, and microsecond.
-    
-    :param regexp: regular expression which will be applied to 
+
+    :param regexp: regular expression which will be applied to
      incoming result rows. If the regexp contains named groups, the
      resulting match dict is applied to the Python datetime() constructor
      as keyword arguments. Otherwise, if positional groups are used, the
@@ -204,16 +204,16 @@ class DATE(_DateTimeMixin, sqltypes.Date):
     """Represent a Python date object in SQLite using a string.
 
     The default string storage format is::
-    
+
         "%(year)04d-%(month)02d-%(day)02d"
-    
+
     e.g.::
-    
+
         2011-03-15
-    
-    The storage format can be customized to some degree using the 
+
+    The storage format can be customized to some degree using the
     ``storage_format`` and ``regexp`` parameters, such as::
-    
+
         import re
         from sqlalchemy.dialects.sqlite import DATE
 
@@ -221,11 +221,11 @@ class DATE(_DateTimeMixin, sqltypes.Date):
                 storage_format="%(month)02d/%(day)02d/%(year)04d",
                 regexp=re.compile("(?P<month>\d+)/(?P<day>\d+)/(?P<year>\d+)")
             )
-    
-    :param storage_format: format string which will be applied to the 
+
+    :param storage_format: format string which will be applied to the
      dict with keys year, month, and day.
-    
-    :param regexp: regular expression which will be applied to 
+
+    :param regexp: regular expression which will be applied to
      incoming result rows. If the regexp contains named groups, the
      resulting match dict is applied to the Python date() constructor
      as keyword arguments. Otherwise, if positional groups are used, the
@@ -261,18 +261,18 @@ class DATE(_DateTimeMixin, sqltypes.Date):
 
 class TIME(_DateTimeMixin, sqltypes.Time):
     """Represent a Python time object in SQLite using a string.
-    
+
     The default string storage format is::
-    
+
         "%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d"
-    
+
     e.g.::
-    
+
         12:05:57.10558
-    
-    The storage format can be customized to some degree using the 
+
+    The storage format can be customized to some degree using the
     ``storage_format`` and ``regexp`` parameters, such as::
-    
+
         import re
         from sqlalchemy.dialects.sqlite import TIME
 
@@ -280,11 +280,11 @@ class TIME(_DateTimeMixin, sqltypes.Time):
                 storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d",
                 regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
             )
-    
-    :param storage_format: format string which will be applied to the 
+
+    :param storage_format: format string which will be applied to the
      dict with keys hour, minute, second, and microsecond.
-    
-    :param regexp: regular expression which will be applied to 
+
+    :param regexp: regular expression which will be applied to
      incoming result rows. If the regexp contains named groups, the
      resulting match dict is applied to the Python time() constructor
      as keyword arguments. Otherwise, if positional groups are used, the
@@ -447,7 +447,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
                 issubclass(c.type._type_affinity, sqltypes.Integer) and \
                 not c.foreign_keys:
                 return None
-
+
         return super(SQLiteDDLCompiler, self).\
                     visit_primary_key_constraint(constraint)
 
@@ -522,7 +522,7 @@ class SQLiteExecutionContext(default.DefaultExecutionContext):
 
     def _translate_colname(self, colname):
         # adjust for dotted column names.  SQLite
-        # in the case of UNION may store col names as 
+        # in the case of UNION may store col names as
         # "tablename.colname"
         # in cursor.description
         if not self._preserve_raw_colnames  and "." in colname:
@@ -559,7 +559,7 @@ class SQLiteDialect(default.DefaultDialect):
 
         # this flag used by pysqlite dialect, and perhaps others in the
         # future, to indicate the driver is handling date/timestamp
-        # conversions (and perhaps datetime/time as well on some 
+        # conversions (and perhaps datetime/time as well on some
         # hypothetical driver ?)
         self.native_datetime = native_datetime
 
@@ -579,9 +579,9 @@ class SQLiteDialect(default.DefaultDialect):
         except KeyError:
             raise exc.ArgumentError(
                 "Invalid value '%s' for isolation_level. "
-                "Valid isolation levels for %s are %s" % 
+                "Valid isolation levels for %s are %s" %
                 (level, self.name, ", ".join(self._isolation_lookup))
-                ) 
+                )
         cursor = connection.cursor()
         cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level)
         cursor.close()
@@ -592,11 +592,11 @@ class SQLiteDialect(default.DefaultDialect):
         res = cursor.fetchone()
         if res:
             value = res[0]
-        else: 
+        else:
             # http://www.sqlite.org/changes.html#version_3_3_3
-            # "Optional READ UNCOMMITTED isolation (instead of the 
-            # default isolation level of SERIALIZABLE) and 
-            # table level locking when database connections 
+            # "Optional READ UNCOMMITTED isolation (instead of the
+            # default isolation level of SERIALIZABLE) and
+            # table level locking when database connections
             # share a common cache.""
             # pre-SQLite 3.3.0 default to 0
             value = 0
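
Rather than calling these dialect methods directly, the level is normally
selected up front; a sketch, using one of the two values this lookup table
supports:

    from sqlalchemy import create_engine

    engine = create_engine('sqlite:///some.db',
                        isolation_level='READ UNCOMMITTED')
    # each pooled connection now has PRAGMA read_uncommitted = 1 applied
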
@@ -712,7 +712,7 @@ class SQLiteDialect(default.DefaultDialect):
             pragma = "PRAGMA "
         qtable = quote(table_name)
         c = _pragma_cursor(
-                    connection.execute("%stable_info(%s)" % 
+                    connection.execute("%stable_info(%s)" %
                     (pragma, qtable)))
         found_table = False
         columns = []
@@ -721,7 +721,7 @@ class SQLiteDialect(default.DefaultDialect):
             if row is None:
                 break
             (name, type_, nullable, default, has_default, primary_key) = \
-                (row[1], row[2].upper(), not row[3], 
+                (row[1], row[2].upper(), not row[3],
                 row[4], row[4] is not None, row[5])
             name = re.sub(r'^\"|\"$', '', name)
             match = re.match(r'(\w+)(\(.*?\))?', type_)
@@ -838,7 +838,7 @@ class SQLiteDialect(default.DefaultDialect):
 
 
 def _pragma_cursor(cursor):
-    """work around SQLite issue whereby cursor.description 
+    """work around SQLite issue whereby cursor.description
     is blank when PRAGMA returns no rows."""
 
     if cursor.closed:
index 71f91aa3644a838dfadcf9f6fad6c3c03ee4dccf..c18fd302f2e6cde8b68bb405ef0eb0fc3480067d 100644 (file)
@@ -12,15 +12,15 @@ module included with the Python distribution.
 Driver
 ------
 
-When using Python 2.5 and above, the built in ``sqlite3`` driver is 
+When using Python 2.5 and above, the built-in ``sqlite3`` driver is
 already installed and no additional installation is needed.  Otherwise,
 the ``pysqlite2`` driver needs to be present.  This is the same driver as
 ``sqlite3``, just with a different name.
 
 The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3``
 is loaded.  This allows an explicitly installed pysqlite driver to take
-precedence over the built in one.   As with all dialects, a specific 
-DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control 
+precedence over the built-in one.   As with all dialects, a specific
+DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
 this explicitly::
 
     from sqlite3 import dbapi2 as sqlite
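
A sketch of passing that module through, assuming a file-based database:

    from sqlite3 import dbapi2 as sqlite
    from sqlalchemy import create_engine

    engine = create_engine('sqlite+pysqlite:///file.db', module=sqlite)
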
@@ -64,25 +64,25 @@ The sqlite ``:memory:`` identifier is the default if no filepath is present.  Sp
 Compatibility with sqlite3 "native" date and datetime types
 -----------------------------------------------------------
 
-The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and 
+The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
 sqlite3.PARSE_COLNAMES options, which have the effect that any column
 or expression explicitly cast as "date" or "timestamp" will be converted
-to a Python date or datetime object.  The date and datetime types provided 
-with the pysqlite dialect are not currently compatible with these options, 
-since they render the ISO date/datetime including microseconds, which 
+to a Python date or datetime object.  The date and datetime types provided
+with the pysqlite dialect are not currently compatible with these options,
+since they render the ISO date/datetime including microseconds, which
 pysqlite's driver does not.   Additionally, SQLAlchemy does not at
-this time automatically render the "cast" syntax required for the 
+this time automatically render the "cast" syntax required for the
 freestanding functions "current_timestamp" and "current_date" to return
-datetime/date types natively.   Unfortunately, pysqlite 
+datetime/date types natively.   Unfortunately, pysqlite
 does not provide the standard DBAPI types in ``cursor.description``,
-leaving SQLAlchemy with no way to detect these types on the fly 
+leaving SQLAlchemy with no way to detect these types on the fly
 without expensive per-row type checks.
 
 Keeping in mind that pysqlite's parsing option is not recommended,
-nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES 
+nor should it be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
 can be forced if one configures "native_datetime=True" on create_engine()::
 
-    engine = create_engine('sqlite://', 
+    engine = create_engine('sqlite://',
                     connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
                     native_datetime=True
                     )
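
The driver behavior being enabled here is the standard library's own type
detection; a standalone sketch of what PARSE_DECLTYPES does at the sqlite3
level, independently of SQLAlchemy:

    import datetime
    import sqlite3

    conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
    conn.execute("CREATE TABLE t (d date)")
    conn.execute("INSERT INTO t (d) VALUES (?)", (datetime.date(2012, 7, 28),))
    row = conn.execute("SELECT d FROM t").fetchone()
    # the declared column type "date" causes the driver itself to hand
    # back a datetime.date; no SQLAlchemy result processing is involved
    assert row[0] == datetime.date(2012, 7, 28)
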
@@ -99,7 +99,7 @@ Threading/Pooling Behavior
 Pysqlite's default behavior is to prohibit the usage of a single connection
 in more than one thread.   This is controlled by the ``check_same_thread``
 Pysqlite flag.   This default is intended to work with older versions
-of SQLite that did not support multithreaded operation under 
+of SQLite that did not support multithreaded operation under
 various circumstances.  In particular, older SQLite versions
 did not allow a ``:memory:`` database to be used in multiple threads
 under any circumstances.
@@ -109,9 +109,9 @@ SQLAlchemy sets up pooling to work with Pysqlite's default behavior:
 * When a ``:memory:`` SQLite database is specified, the dialect by default will use
   :class:`.SingletonThreadPool`. This pool maintains a single connection per
   thread, so that all access to the engine within the current thread uses the
-  same ``:memory:`` database - other threads would access a different 
+  same ``:memory:`` database - other threads would access a different
   ``:memory:`` database.
-* When a file-based database is specified, the dialect will use :class:`.NullPool` 
+* When a file-based database is specified, the dialect will use :class:`.NullPool`
   as the source of connections. This pool closes and discards connections
   which are returned to the pool immediately. SQLite file-based connections
   have extremely low overhead, so pooling is not necessary. The scheme also
@@ -141,7 +141,7 @@ can be passed to Pysqlite as ``False``::
                         connect_args={'check_same_thread':False},
                         poolclass=StaticPool)
 
-Note that using a ``:memory:`` database in multiple threads requires a recent 
+Note that using a ``:memory:`` database in multiple threads requires a recent
 version of SQLite.
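
Putting those options together, the complete recipe reads, as a sketch:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import StaticPool

    engine = create_engine('sqlite://',
                        connect_args={'check_same_thread': False},
                        poolclass=StaticPool)
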
 
 Using Temporary Tables with SQLite
@@ -175,8 +175,8 @@ Unicode
 
 The pysqlite driver only returns Python ``unicode`` objects in result sets, never
 plain strings, and accommodates ``unicode`` objects within bound parameter
-values in all cases.   Regardless of the SQLAlchemy string type in use, 
-string-based result values will by Python ``unicode`` in Python 2.  
+values in all cases.   Regardless of the SQLAlchemy string type in use,
+string-based result values will be Python ``unicode`` in Python 2.
 The :class:`.Unicode` type should still be used to indicate those columns that
 require unicode, however, so that non-``unicode`` values passed inadvertently
 will emit a warning.  Pysqlite will emit an error if a non-``unicode`` string
@@ -191,7 +191,7 @@ The pysqlite DBAPI driver has a long-standing bug in which transactional
 state is not begun until the first DML statement, that is INSERT, UPDATE
 or DELETE, is emitted.  A SELECT statement will not cause transactional
 state to begin.   While this mode of usage is fine for typical situations
-and has the advantage that the SQLite database file is not prematurely 
+and has the advantage that the SQLite database file is not prematurely
 locked, it breaks serializable transaction isolation, which requires
 that the database file be locked upon any SQL being emitted.
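
A sketch of one possible workaround, assuming the event API: disable
pysqlite's own transactional logic on connect, then emit BEGIN explicitly
when SQLAlchemy begins a transaction:

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite:///file.db')

    @event.listens_for(engine, 'connect')
    def do_connect(dbapi_connection, connection_record):
        # stop pysqlite from deferring BEGIN until the first DML
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, 'begin')
    def do_begin(conn):
        # lock the database file as soon as the transaction begins
        conn.execute('BEGIN')
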
 
index 35d8d15420ac48f85f42b3bd39b12ab25282ffd2..70bdd71a26f1133e3b23aedebdbb2a35fd709b66 100644 (file)
@@ -17,7 +17,7 @@ Connect strings are of the form::
 Unicode Support
 ---------------
 
-The pyodbc driver currently supports usage of these Sybase types with 
+The pyodbc driver currently supports usage of these Sybase types with
 Unicode or multibyte strings::
 
     CHAR
@@ -43,7 +43,7 @@ from sqlalchemy.util.compat import decimal
 class _SybNumeric_pyodbc(sqltypes.Numeric):
     """Turns Decimals with adjusted() < -6 into floats.
 
-    It's not yet known how to get decimals with many 
+    It's not yet known how to get decimals with many
     significant digits or very large adjusted() into Sybase
     via pyodbc.
 
index e3bfae06cd6bbe07bac1ff27fb55d084402e2e6d..bf8c2096b8cfb8c373c552558bb80cdfe99e64e2 100644 (file)
@@ -38,7 +38,7 @@ class SybaseExecutionContext_pysybase(SybaseExecutionContext):
     def set_ddl_autocommit(self, dbapi_connection, value):
         if value:
             # call commit() on the Sybase connection directly,
-            # to avoid any side effects of calling a Connection 
+            # to avoid any side effects of calling a Connection
             # transactional method inside of pre_exec()
             dbapi_connection.commit()
 
@@ -83,7 +83,7 @@ class SybaseDialect_pysybase(SybaseDialect):
 
     def _get_server_version_info(self, connection):
         vers = connection.scalar("select @@version_number")
-        # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), 
+        # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
         # (12, 5, 0, 0)
         return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
 
index 0f8c098504addad0c8f4ed0fc637097df54b58a8..500dd0dba537b17c18d1c99128c543b9dd7d1cc7 100644 (file)
@@ -101,8 +101,8 @@ default_strategy = 'plain'
 def create_engine(*args, **kwargs):
     """Create a new :class:`.Engine` instance.
 
-    The standard calling form is to send the URL as the 
-    first positional argument, usually a string 
+    The standard calling form is to send the URL as the
+    first positional argument, usually a string
     that indicates database dialect and connection arguments.
     Additional keyword arguments may then follow it which
     establish various options on the resulting :class:`.Engine`
@@ -111,14 +111,14 @@ def create_engine(*args, **kwargs):
 
     The string form of the URL is
     ``dialect+driver://user:password@host/dbname[?key=value..]``, where
-    ``dialect`` is a database name such as ``mysql``, ``oracle``, 
-    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as 
-    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively, 
+    ``dialect`` is a database name such as ``mysql``, ``oracle``,
+    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
+    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively,
     the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
 
-    ``**kwargs`` takes a wide variety of options which are routed 
-    towards their appropriate components.  Arguments may be 
-    specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the 
+    ``**kwargs`` takes a wide variety of options which are routed
+    towards their appropriate components.  Arguments may be
+    specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
     :class:`.Pool`.  Specific dialects also accept keyword arguments that
     are unique to that dialect.   Here, we describe the parameters
     that are common to most :func:`.create_engine()` usage.
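
A representative call, combining a URL with engine- and pool-level keyword
arguments:

    from sqlalchemy import create_engine

    engine = create_engine(
        'postgresql+psycopg2://scott:tiger@localhost/test',
        echo=True,      # routed to the Engine's logging setup
        pool_size=10    # routed to the underlying connection pool
    )
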
@@ -136,11 +136,11 @@ def create_engine(*args, **kwargs):
     :ref:`engines_toplevel`
 
     :ref:`connections_toplevel`
-    
+
     :param assert_unicode:  Deprecated.  This flag
         sets an engine-wide default value for
-        the ``assert_unicode`` flag on the 
-        :class:`.String` type - see that 
+        the ``assert_unicode`` flag on the
+        :class:`.String` type - see that
         type for further details.
 
     :param case_sensitive=True: if False, result column names
@@ -159,16 +159,16 @@ def create_engine(*args, **kwargs):
     :param convert_unicode=False: if set to True, sets
         the default behavior of ``convert_unicode`` on the
         :class:`.String` type to ``True``, regardless
-        of a setting of ``False`` on an individual 
+        of a setting of ``False`` on an individual
         :class:`.String` type, thus causing all :class:`.String`-based
         columns to accommodate Python ``unicode`` objects.  This flag
-        is useful as an engine-wide setting when using a 
+        is useful as an engine-wide setting when using a
         DBAPI that does not natively support Python
         ``unicode`` objects and raises an error when
         one is received (such as pyodbc with FreeTDS).
-        
-        See :class:`.String` for further details on 
+
+        See :class:`.String` for further details on
         what this flag indicates.
 
     :param creator: a callable which returns a DBAPI connection.
@@ -192,43 +192,43 @@ def create_engine(*args, **kwargs):
         :ref:`dbengine_logging` for information on how to configure logging
         directly.
 
-    :param encoding: Defaults to ``utf-8``.  This is the string 
-        encoding used by SQLAlchemy for string encode/decode 
-        operations which occur within SQLAlchemy, **outside of 
-        the DBAPI.**  Most modern DBAPIs feature some degree of 
+    :param encoding: Defaults to ``utf-8``.  This is the string
+        encoding used by SQLAlchemy for string encode/decode
+        operations which occur within SQLAlchemy, **outside of
+        the DBAPI.**  Most modern DBAPIs feature some degree of
         direct support for Python ``unicode`` objects,
         what you see in Python 2 as a string of the form
-        ``u'some string'``.  For those scenarios where the 
+        ``u'some string'``.  For those scenarios where the
         DBAPI is detected as not supporting a Python ``unicode``
-        object, this encoding is used to determine the 
+        object, this encoding is used to determine the
         source/destination encoding.  It is **not used**
         for those cases where the DBAPI handles unicode
         directly.
-        
+
         To properly configure a system to accommodate Python
-        ``unicode`` objects, the DBAPI should be 
+        ``unicode`` objects, the DBAPI should be
         configured to handle unicode to the greatest
         degree as is appropriate - see
         the notes on unicode pertaining to the specific
-        target database in use at :ref:`dialect_toplevel`. 
-        
-        Areas where string encoding may need to be accommodated 
-        outside of the DBAPI include zero or more of: 
-        
-        * the values passed to bound parameters, corresponding to 
+        target database in use at :ref:`dialect_toplevel`.
+
+        Areas where string encoding may need to be accommodated
+        outside of the DBAPI include zero or more of:
+
+        * the values passed to bound parameters, corresponding to
           the :class:`.Unicode` type or the :class:`.String` type
           when ``convert_unicode`` is ``True``;
-        * the values returned in result set columns corresponding 
-          to the :class:`.Unicode` type or the :class:`.String` 
+        * the values returned in result set columns corresponding
+          to the :class:`.Unicode` type or the :class:`.String`
           type when ``convert_unicode`` is ``True``;
-        * the string SQL statement passed to the DBAPI's 
-          ``cursor.execute()`` method; 
-        * the string names of the keys in the bound parameter 
-          dictionary passed to the DBAPI's ``cursor.execute()`` 
+        * the string SQL statement passed to the DBAPI's
+          ``cursor.execute()`` method;
+        * the string names of the keys in the bound parameter
+          dictionary passed to the DBAPI's ``cursor.execute()``
           as well as ``cursor.setinputsizes()`` methods;
-        * the string column names retrieved from the DBAPI's 
+        * the string column names retrieved from the DBAPI's
           ``cursor.description`` attribute.
-          
+
         When using Python 3, the DBAPI is required to support
         *all* of the above values as Python ``unicode`` objects,
         which in Python 3 are just known as ``str``.  In Python 2,
@@ -244,9 +244,9 @@ def create_engine(*args, **kwargs):
     :param implicit_returning=True: When ``True``, a RETURNING-
         compatible construct, if available, will be used to
         fetch newly generated primary key values when a single row
-        INSERT statement is emitted with no existing returning() 
-        clause.  This applies to those backends which support RETURNING 
-        or a compatible construct, including Postgresql, Firebird, Oracle, 
+        INSERT statement is emitted with no existing returning()
+        clause.  This applies to those backends which support RETURNING
+        or a compatible construct, including Postgresql, Firebird, Oracle,
         Microsoft SQL Server.   Set this to ``False`` to disable
         the automatic usage of RETURNING.
 
@@ -256,13 +256,13 @@ def create_engine(*args, **kwargs):
         "_(counter)". If ``None``, the value of
         ``dialect.max_identifier_length`` is used instead.
 
-    :param listeners: A list of one or more 
-        :class:`~sqlalchemy.interfaces.PoolListener` objects which will 
+    :param listeners: A list of one or more
+        :class:`~sqlalchemy.interfaces.PoolListener` objects which will
         receive connection pool events.
 
     :param logging_name:  String identifier which will be used within
         the "name" field of logging records generated within the
-        "sqlalchemy.engine" logger. Defaults to a hexstring of the 
+        "sqlalchemy.engine" logger. Defaults to a hexstring of the
         object's id.
 
     :param max_overflow=10: the number of connections to allow in
@@ -294,8 +294,8 @@ def create_engine(*args, **kwargs):
         of pool to be used.
 
     :param pool_logging_name:  String identifier which will be used within
-       the "name" field of logging records generated within the 
-       "sqlalchemy.pool" logger. Defaults to a hexstring of the object's 
+       the "name" field of logging records generated within the
+       "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
        id.
 
     :param pool_size=5: the number of connections to keep open
@@ -315,7 +315,7 @@ def create_engine(*args, **kwargs):
         server configuration as well).
 
     :param pool_reset_on_return='rollback': set the "reset on return"
-        behavior of the pool, which is whether ``rollback()``, 
+        behavior of the pool, which is whether ``rollback()``,
         ``commit()``, or nothing is called upon connections
         being returned to the pool.  See the docstring for
         ``reset_on_return`` at :class:`.Pool`.
index d6fdaee2ee7e45c8f7d0890d51c0748fdbddd278..c3b32505eb752b346fed91c158ef4e1ec7849ba2 100644 (file)
@@ -28,7 +28,7 @@ class SchemaGenerator(DDLBase):
         if table.schema:
             self.dialect.validate_identifier(table.schema)
         return not self.checkfirst or \
-                not self.dialect.has_table(self.connection, 
+                not self.dialect.has_table(self.connection,
                                     table.name, schema=table.schema)
 
     def _can_create_sequence(self, sequence):
@@ -39,8 +39,8 @@ class SchemaGenerator(DDLBase):
                  (
                  not self.checkfirst or
                  not self.dialect.has_sequence(
-                            self.connection, 
-                            sequence.name, 
+                            self.connection,
+                            sequence.name,
                             schema=sequence.schema)
                  )
             )
@@ -50,9 +50,9 @@ class SchemaGenerator(DDLBase):
             tables = self.tables
         else:
             tables = metadata.tables.values()
-        collection = [t for t in sql_util.sort_tables(tables) 
+        collection = [t for t in sql_util.sort_tables(tables)
                         if self._can_create_table(t)]
-        seq_coll = [s for s in metadata._sequences.values() 
+        seq_coll = [s for s in metadata._sequences.values()
                         if s.column is None and self._can_create_sequence(s)]
 
         metadata.dispatch.before_create(metadata, self.connection,
@@ -95,7 +95,7 @@ class SchemaGenerator(DDLBase):
 
     def visit_sequence(self, sequence, create_ok=False):
         if not create_ok and not self._can_create_sequence(sequence):
-            return 
+            return
         self.connection.execute(schema.CreateSequence(sequence))
 
     def visit_index(self, index):
@@ -116,9 +116,9 @@ class SchemaDropper(DDLBase):
             tables = self.tables
         else:
             tables = metadata.tables.values()
-        collection = [t for t in reversed(sql_util.sort_tables(tables)) 
+        collection = [t for t in reversed(sql_util.sort_tables(tables))
                                 if self._can_drop_table(t)]
-        seq_coll = [s for s in metadata._sequences.values() 
+        seq_coll = [s for s in metadata._sequences.values()
                                 if s.column is None and self._can_drop_sequence(s)]
 
         metadata.dispatch.before_drop(metadata, self.connection,
@@ -141,7 +141,7 @@ class SchemaDropper(DDLBase):
         self.dialect.validate_identifier(table.name)
         if table.schema:
             self.dialect.validate_identifier(table.schema)
-        return not self.checkfirst or self.dialect.has_table(self.connection, 
+        return not self.checkfirst or self.dialect.has_table(self.connection,
                                             table.name, schema=table.schema)
 
     def _can_drop_sequence(self, sequence):
@@ -150,8 +150,8 @@ class SchemaDropper(DDLBase):
                  not sequence.optional) and
                 (not self.checkfirst or
                  self.dialect.has_sequence(
-                                self.connection, 
-                                sequence.name, 
+                                self.connection,
+                                sequence.name,
                                 schema=sequence.schema))
             )
 
index a781cb451c0e66556f28556eba4695c8ba25cfc1..1e321603ed92f75ede7bd89fc0ff062404165713 100644 (file)
@@ -80,7 +80,7 @@ class DefaultEngineStrategy(EngineStrategy):
                     return dialect.connect(*cargs, **cparams)
                 except Exception, e:
                     # Py3K
-                    #raise exc.DBAPIError.instance(None, None, 
+                    #raise exc.DBAPIError.instance(None, None,
                     #                   e, dialect.dbapi.Error,
                     #                   connection_invalidated=
                     #                       dialect.is_disconnect(e, None, None)
@@ -245,8 +245,8 @@ class MockEngineStrategy(EngineStrategy):
             from sqlalchemy.engine import ddl
             ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity)
 
-        def _run_visitor(self, visitorcallable, element, 
-                                        connection=None, 
+        def _run_visitor(self, visitorcallable, element,
+                                        connection=None,
                                         **kwargs):
             kwargs['checkfirst'] = False
             visitorcallable(self.dialect, self,
index f0d6803dcf10c701bd795233e7869aa490c99eee..7def7dd9b9e4e5a4743541347517b2c32e36fbf6 100644 (file)
@@ -7,7 +7,7 @@
 """Provides a thread-local transactional wrapper around the root Engine class.
 
 The ``threadlocal`` module is invoked when using the ``strategy="threadlocal"`` flag
-with :func:`~sqlalchemy.engine.create_engine`.  This module is semi-private and is 
+with :func:`~sqlalchemy.engine.create_engine`.  This module is semi-private and is
 invoked automatically when the threadlocal engine strategy is used.
 """
 
index 03018f5756b4539e0fd3dbc62540b11ef8dab27c..8e9064cfcbf3e50408dbe8159e9821ce1125fc8a 100644 (file)
@@ -25,8 +25,8 @@ def listen(target, identifier, fn, *args, **kw):
                 list(const.columns)[0].name
             )
         event.listen(
-                UniqueConstraint, 
-                "after_parent_attach", 
+                UniqueConstraint,
+                "after_parent_attach",
                 unique_constraint_name)
 
     """
@@ -90,12 +90,12 @@ class _UnpickleDispatch(object):
             raise AttributeError("No class with a 'dispatch' member present.")
 
 class _Dispatch(object):
-    """Mirror the event listening definitions of an Events class with 
+    """Mirror the event listening definitions of an Events class with
     listener collections.
 
-    Classes which define a "dispatch" member will return a 
-    non-instantiated :class:`._Dispatch` subclass when the member 
-    is accessed at the class level.  When the "dispatch" member is 
+    Classes which define a "dispatch" member will return a
+    non-instantiated :class:`._Dispatch` subclass when the member
+    is accessed at the class level.  When the "dispatch" member is
     accessed at the instance level of its owner, an instance
     of the :class:`._Dispatch` class is returned.
 
@@ -103,7 +103,7 @@ class _Dispatch(object):
     class defined, by the :func:`._create_dispatcher_class` function.
     The original :class:`.Events` classes remain untouched.
     This decouples the construction of :class:`.Events` subclasses from
-    the implementation used by the event internals, and allows 
+    the implementation used by the event internals, and allows
     inspecting tools like Sphinx to work in an unsurprising
     way against the public API.
 
@@ -127,7 +127,7 @@ def _event_descriptors(target):
     return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
 
 class _EventMeta(type):
-    """Intercept new Event subclasses and create 
+    """Intercept new Event subclasses and create
     associated _Dispatch classes."""
 
     def __init__(cls, classname, bases, dict_):
@@ -135,14 +135,14 @@ class _EventMeta(type):
         return type.__init__(cls, classname, bases, dict_)
 
 def _create_dispatcher_class(cls, classname, bases, dict_):
-    """Create a :class:`._Dispatch` class corresponding to an 
+    """Create a :class:`._Dispatch` class corresponding to an
     :class:`.Events` class."""
 
     # there's all kinds of ways to do this,
     # i.e. make a Dispatch class that shares the '_listen' method
     # of the Event class, this is the straight monkeypatch.
     dispatch_base = getattr(cls, 'dispatch', _Dispatch)
-    cls.dispatch = dispatch_cls = type("%sDispatch" % classname, 
+    cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
                                         (dispatch_base, ), {})
     dispatch_cls._listen = cls._listen
     dispatch_cls._clear = cls._clear
@@ -236,8 +236,8 @@ class _DispatchDescriptor(object):
         for cls in target.__mro__[1:]:
             if cls in self._clslevel:
                 clslevel.extend([
-                    fn for fn 
-                    in self._clslevel[cls] 
+                    fn for fn
+                    in self._clslevel[cls]
                     if fn not in clslevel
                 ])
 
@@ -278,7 +278,7 @@ class _DispatchDescriptor(object):
 
 class _EmptyListener(object):
     """Serves as a class-level interface to the events
-    served by a _DispatchDescriptor, when there are no 
+    served by a _DispatchDescriptor, when there are no
     instance-level events present.
 
     Is replaced by _ListenerCollection when instance-level
@@ -298,7 +298,7 @@ class _EmptyListener(object):
         """Return an event collection which can be modified.
 
         For _EmptyListener at the instance level of
-        a dispatcher, this generates a new 
+        a dispatcher, this generates a new
         _ListenerCollection, applies it to the instance,
         and returns it.
 
@@ -379,7 +379,7 @@ class _ListenerCollection(object):
     # I'm not entirely thrilled about the overhead here,
     # but this allows class-level listeners to be added
     # at any point.
-    # 
+    #
     # In the absence of instance-level listeners,
     # we stay with the _EmptyListener object when called
     # at the instance level.
@@ -403,8 +403,8 @@ class _ListenerCollection(object):
         existing_listeners = self.listeners
         existing_listener_set = set(existing_listeners)
         self.propagate.update(other.propagate)
-        existing_listeners.extend([l for l 
-                                in other.listeners 
+        existing_listeners.extend([l for l
+                                in other.listeners
                                 if l not in existing_listener_set
                                 and (not only_propagate or l in self.propagate)
                                 ])
@@ -431,7 +431,7 @@ class _ListenerCollection(object):
         self.propagate.clear()
 
 class dispatcher(object):
-    """Descriptor used by target classes to 
+    """Descriptor used by target classes to
     deliver the _Dispatch class at the class level
     and produce new _Dispatch instances for target
     instances.
index d82cae634326a673dcbc7aef6746b8de36bfbef0..5370c64313dfdd80cb0be59b8b9eaae11f1ce14b 100644 (file)
@@ -12,7 +12,7 @@ module.  The regular dotted module namespace is used, starting at
 'sqlalchemy'.  For class-level logging, the class name is appended.
 
 The "echo" keyword parameter, available on SQLA :class:`.Engine`
-and :class:`.Pool` objects, corresponds to a logger specific to that 
+and :class:`.Pool` objects, corresponds to a logger specific to that
 instance only.
 
 """
@@ -60,7 +60,7 @@ class InstanceLogger(object):
     """A logger adapter (wrapper) for :class:`.Identified` subclasses.
 
     This allows multiple instances (e.g. Engine or Pool instances)
-    to share a logger, but have its verbosity controlled on a 
+    to share a logger, but have its verbosity controlled on a
     per-instance basis.
 
     The basic functionality is to return a logging level
@@ -185,7 +185,7 @@ def instance_logger(instance, echoflag=None):
         logger = logging.getLogger(name)
     else:
         # if a specified echo flag, return an EchoLogger,
-        # which checks the flag, overrides normal log 
+        # which checks the flag, overrides normal log
         # levels by calling logger._log()
         logger = InstanceLogger(echoflag, name)
 
index c2f4aff02604c97a9b221c7e46819bb86747fbad..d2cb0ab05b082ae397c1134783f72355e16a746e 100644 (file)
@@ -14,7 +14,7 @@ basic add/delete mutation.
 from .. import log, util
 from ..sql import operators
 from . import (
-    attributes, object_session, util as orm_util, strategies, 
+    attributes, object_session, util as orm_util, strategies,
     object_mapper, exc as orm_exc, collections
     )
 from .query import Query
@@ -60,7 +60,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         else:
             return self.query_class(self, state)
 
-    def get_collection(self, state, dict_, user_data=None, 
+    def get_collection(self, state, dict_, user_data=None,
                             passive=attributes.PASSIVE_NO_INITIALIZE):
         if not passive & attributes.SQL_OK:
             return self._get_collection_history(state,
@@ -94,7 +94,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         if self.key not in state.committed_state:
             state.committed_state[self.key] = CollectionHistory(self, state)
 
-        state._modified_event(dict_, 
+        state._modified_event(dict_,
                                 self,
                                 attributes.NEVER_SET)
 
@@ -104,7 +104,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         return state.committed_state[self.key]
 
     def set(self, state, dict_, value, initiator,
-                        passive=attributes.PASSIVE_OFF, 
+                        passive=attributes.PASSIVE_OFF,
                         check_old=None, pop=False):
         if initiator and initiator.parent_token is self.parent_token:
             return
@@ -141,8 +141,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
     def get_all_pending(self, state, dict_):
         c = self._get_collection_history(state, attributes.PASSIVE_NO_INITIALIZE)
         return [
-                (attributes.instance_state(x), x) 
-                for x in 
+                (attributes.instance_state(x), x)
+                for x in
                 c.added_items + c.unchanged_items + c.deleted_items
             ]
 
@@ -159,12 +159,12 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         else:
             return c
 
-    def append(self, state, dict_, value, initiator, 
+    def append(self, state, dict_, value, initiator,
                             passive=attributes.PASSIVE_OFF):
         if initiator is not self:
             self.fire_append_event(state, dict_, value, initiator)
 
-    def remove(self, state, dict_, value, initiator, 
+    def remove(self, state, dict_, value, initiator,
                             passive=attributes.PASSIVE_OFF):
         if initiator is not self:
             self.fire_remove_event(state, dict_, value, initiator)
@@ -203,9 +203,9 @@ class AppenderMixin(object):
         mapper = object_mapper(instance)
         prop = mapper._props[self.attr.key]
         self._criterion = prop.compare(
-                            operators.eq, 
-                            instance, 
-                            value_is_parent=True, 
+                            operators.eq,
+                            instance,
+                            value_is_parent=True,
                             alias_secondary=False)
 
         if self.attr.order_by:
@@ -279,12 +279,12 @@ class AppenderMixin(object):
 
     def append(self, item):
         self.attr.append(
-            attributes.instance_state(self.instance), 
+            attributes.instance_state(self.instance),
             attributes.instance_dict(self.instance), item, None)
 
     def remove(self, item):
         self.attr.remove(
-            attributes.instance_state(self.instance), 
+            attributes.instance_state(self.instance),
             attributes.instance_dict(self.instance), item, None)
 
 
index 5de514da8fe36316a91c8fba04349eb66da80ca5..0bc635db657384ac4fb821b7a0d43f4b10a44ea6 100644 (file)
@@ -11,10 +11,10 @@ class UnevaluatableError(Exception):
     pass
 
 _straight_ops = set(getattr(operators, op)
-                    for op in ('add', 'mul', 'sub', 
+                    for op in ('add', 'mul', 'sub',
                                 # Py2K
                                 'div',
-                                # end Py2K 
+                                # end Py2K
                                 'mod', 'truediv',
                                'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
 
@@ -71,13 +71,13 @@ class EvaluatorCompiler(object):
                 return True
         else:
             raise UnevaluatableError(
-                "Cannot evaluate clauselist with operator %s" % 
+                "Cannot evaluate clauselist with operator %s" %
                 clause.operator)
 
         return evaluate
 
     def visit_binary(self, clause):
-        eval_left,eval_right = map(self.process, 
+        eval_left, eval_right = map(self.process,
                                 [clause.left, clause.right])
         operator = clause.operator
         if operator is operators.is_:
@@ -95,7 +95,7 @@ class EvaluatorCompiler(object):
                 return operator(eval_left(obj), eval_right(obj))
         else:
             raise UnevaluatableError(
-                    "Cannot evaluate %s with operator %s" % 
+                    "Cannot evaluate %s with operator %s" %
                     (type(clause).__name__, clause.operator))
         return evaluate
 
@@ -109,7 +109,7 @@ class EvaluatorCompiler(object):
                 return not value
             return evaluate
         raise UnevaluatableError(
-                    "Cannot evaluate %s with operator %s" % 
+                    "Cannot evaluate %s with operator %s" %
                     (type(clause).__name__, clause.operator))
 
     def visit_bindparam(self, clause):
index d42dd42a77db8788bd8b74d475d85259bccc92df..783434504997c523fef756408d406f2b3a526018 100644 (file)
@@ -18,13 +18,13 @@ class StaleDataError(sa_exc.SQLAlchemyError):
     Conditions which cause this to happen include:
 
     * A flush may have attempted to update or delete rows
-      and an unexpected number of rows were matched during 
-      the UPDATE or DELETE statement.   Note that when 
+      and an unexpected number of rows were matched during
+      the UPDATE or DELETE statement.   Note that when
       version_id_col is used, rows in UPDATE or DELETE statements
       are also matched against the current known version
       identifier.
 
-    * A mapped object with version_id_col was refreshed, 
+    * A mapped object with version_id_col was refreshed,
       and the version number coming back from the database does
       not match that of the object itself.
 
@@ -52,7 +52,7 @@ class ObjectDereferencedError(sa_exc.SQLAlchemyError):
     """An operation cannot complete due to an object being garbage collected."""
 
 class DetachedInstanceError(sa_exc.SQLAlchemyError):
-    """An attempt to access unloaded attributes on a 
+    """An attempt to access unloaded attributes on a
     mapped instance that is detached."""
 
 class UnmappedInstanceError(UnmappedError):
@@ -91,21 +91,21 @@ class UnmappedClassError(UnmappedError):
 class ObjectDeletedError(sa_exc.InvalidRequestError):
     """A refresh operation failed to retrieve the database
     row corresponding to an object's known primary key identity.
-    
-    A refresh operation proceeds when an expired attribute is 
+
+    A refresh operation proceeds when an expired attribute is
     accessed on an object, or when :meth:`.Query.get` is
     used to retrieve an object which is, upon retrieval, detected
     as expired.   A SELECT is emitted for the target row
     based on primary key; if no row is returned, this
     exception is raised.
-    
-    The true meaning of this exception is simply that 
+
+    The true meaning of this exception is simply that
     no row exists for the primary key identifier associated
-    with a persistent object.   The row may have been 
+    with a persistent object.   The row may have been
     deleted, or in some cases the primary key updated
     to a new value, outside of the ORM's management of the target
-    object.   
-    
+    object.
+
     """
     def __init__(self, state, msg=None):
         if not msg:
index 4ba54b2f0fd3feae621a4e25472ade0bc5701a27..6fd184350e68e6057b4a46940fb823273bf99018 100644 (file)
@@ -117,7 +117,7 @@ class WeakInstanceDict(IdentityMap):
                         o = existing_state._is_really_none()
                     if o is not None:
                         raise AssertionError("A conflicting state is already "
-                                        "present in the identity map for key %r" 
+                                        "present in the identity map for key %r"
                                         % (key, ))
                 else:
                     return
index 940ae1db9ffc1ca140dece4cfec995f32748584c..649ab7b8783f8bde7858b9da2212ebb45e223e14 100644 (file)
@@ -15,10 +15,10 @@ __all__ = ['ScopedSession']
 
 class ScopedSession(object):
     """Provides thread-local management of Sessions.
-    
+
     Typical invocation is via the :func:`.scoped_session`
     function::
-    
+
       Session = scoped_session(sessionmaker())
 
     The internal registry is accessible,
@@ -70,7 +70,7 @@ class ScopedSession(object):
         self.session_factory.configure(**kwargs)
 
     def query_property(self, query_cls=None):
-        """return a class property which produces a `Query` object 
+        """return a class property which produces a `Query` object
         against the class when called.
 
         e.g.::
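
(a sketch of such a property in use, with illustrative names; ``Session``
is the scoped session from the earlier example)

    class User(object):
        query = Session.query_property()

    # roughly equivalent to Session.query(User).filter_by(name='ed'):
    users = User.query.filter_by(name='ed').all()
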
@@ -121,7 +121,7 @@ def makeprop(name):
     def get(self):
         return getattr(self.registry(), name)
     return property(get, set)
-for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', 
+for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
                 'is_active', 'autoflush', 'no_autoflush'):
     setattr(ScopedSession, prop, makeprop(prop))
 
index 6f1e6c166d3752b7f34cc4b7192bcf3b7b66fa8f..2b2f6d0922c97759b069f4d64ae8ed6e5f21504f 100644 (file)
@@ -4,14 +4,14 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-"""private module containing functions used for copying data 
+"""private module containing functions used for copying data
 between instances based on join conditions.
 
 """
 
 from . import exc, util as orm_util, attributes
 
-def populate(source, source_mapper, dest, dest_mapper, 
+def populate(source, source_mapper, dest, dest_mapper,
                         synchronize_pairs, uowcommit, flag_cascaded_pks):
     source_dict = source.dict
     dest_dict = dest.dict
@@ -20,7 +20,7 @@ def populate(source, source_mapper, dest, dest_mapper,
         try:
             # inline of source_mapper._get_state_attr_by_column
             prop = source_mapper._columntoproperty[l]
-            value = source.manager[prop.key].impl.get(source, source_dict, 
+            value = source.manager[prop.key].impl.get(source, source_dict,
                                                     attributes.PASSIVE_OFF)
         except exc.UnmappedColumnError:
             _raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
@@ -47,7 +47,7 @@ def clear(dest, dest_mapper, synchronize_pairs):
         if r.primary_key:
             raise AssertionError(
                                 "Dependency rule tried to blank-out primary key "
-                                "column '%s' on instance '%s'" % 
+                                "column '%s' on instance '%s'" %
                                 (r, orm_util.state_str(dest))
                             )
         try:
@@ -75,7 +75,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs):
         dict_[r.key] = value
 
 def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
-    """return true if the source object has changes from an old to a 
+    """return true if the source object has changes from an old to a
     new value on the given synchronize pairs.
 
     """
@@ -84,7 +84,7 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
             prop = source_mapper._columntoproperty[l]
         except exc.UnmappedColumnError:
             _raise_col_to_prop(False, source_mapper, l, None, r)
-        history = uowcommit.get_attribute_history(source, prop.key, 
+        history = uowcommit.get_attribute_history(source, prop.key,
                                         attributes.PASSIVE_NO_INITIALIZE)
         return bool(history.deleted)
     else:
index 4f06bd5d9fd07761532faeadfd8b49e606517684..0b3d7d0ebe2ee15841cc8becdc4659049e36797a 100644 (file)
@@ -25,7 +25,7 @@ from .util import threading, memoized_property, \
 proxies = {}
 
 def manage(module, **params):
-    """Return a proxy for a DB-API module that automatically 
+    """Return a proxy for a DB-API module that automatically
     pools connections.
 
     Given a DB-API 2.0 module and pool management parameters, returns
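
A sketch of the proxying behavior, using the stdlib driver; keyword
arguments are passed along to the underlying pool implementation:

    import sqlite3

    from sqlalchemy import pool

    sqlite3_proxy = pool.manage(sqlite3, pool_size=5)

    # connect() mirrors sqlite3.connect(); identical arguments draw
    # from the same underlying pool
    conn = sqlite3_proxy.connect('file.db')
    conn.close()    # checked back in, not actually closed
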
@@ -64,11 +64,11 @@ reset_none = util.symbol('reset_none')
 class Pool(log.Identified):
     """Abstract base class for connection pools."""
 
-    def __init__(self, 
-                    creator, recycle=-1, echo=None, 
+    def __init__(self,
+                    creator, recycle=-1, echo=None,
                     use_threadlocal=False,
                     logging_name=None,
-                    reset_on_return=True, 
+                    reset_on_return=True,
                     listeners=None,
                     events=None,
                     _dispatch=None):
@@ -85,8 +85,8 @@ class Pool(log.Identified):
           replaced with a newly opened connection. Defaults to -1.
 
         :param logging_name:  String identifier which will be used within
-          the "name" field of logging records generated within the 
-          "sqlalchemy.pool" logger. Defaults to a hexstring of the object's 
+          the "name" field of logging records generated within the
+          "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
           id.
 
         :param echo: If True, connections being pulled and retrieved
@@ -119,7 +119,7 @@ class Pool(log.Identified):
           :class:`~sqlalchemy.interfaces.PoolListener`-like objects or
           dictionaries of callables that receive events when DB-API
           connections are created, checked out and checked in to the
-          pool.  This has been superseded by 
+          pool.  This has been superseded by
           :func:`~sqlalchemy.event.listen`.
 
         """
@@ -141,7 +141,7 @@ class Pool(log.Identified):
             self._reset_on_return = reset_commit
         else:
             raise exc.ArgumentError(
-                        "Invalid value for 'reset_on_return': %r" 
+                        "Invalid value for 'reset_on_return': %r"
                                     % reset_on_return)
 
         self.echo = echo
@@ -190,8 +190,8 @@ class Pool(log.Identified):
         """Return a new :class:`.Pool`, of the same class as this one
         and configured with identical creation arguments.
 
-        This method is used in conjunection with :meth:`dispose` 
-        to close out an entire :class:`.Pool` and create a new one in 
+        This method is used in conjunction with :meth:`dispose`
+        to close out an entire :class:`.Pool` and create a new one in
         its place.
 
         """
@@ -204,7 +204,7 @@ class Pool(log.Identified):
         This method leaves the possibility of checked-out connections
         remaining open, as it only affects connections that are
         idle in the pool.
-        
+
         See also the :meth:`Pool.recreate` method.
 
         """
@@ -213,11 +213,11 @@ class Pool(log.Identified):
 
     def _replace(self):
         """Dispose + recreate this pool.
-        
-        Subclasses may employ special logic to 
+
+        Subclasses may employ special logic to
         move threads waiting on this pool to the
         new one.
-        
+
         """
         self.dispose()
         return self.recreate()
@@ -225,8 +225,8 @@ class Pool(log.Identified):
     def connect(self):
         """Return a DBAPI connection from the pool.
 
-        The connection is instrumented such that when its 
-        ``close()`` method is called, the connection will be returned to 
+        The connection is instrumented such that when its
+        ``close()`` method is called, the connection will be returned to
         the pool.
 
         """
@@ -372,11 +372,11 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo):
     if connection_record is not None:
         connection_record.fairy = None
         if echo:
-            pool.logger.debug("Connection %r being returned to pool", 
+            pool.logger.debug("Connection %r being returned to pool",
                                     connection)
         if connection_record.finalize_callback:
             connection_record.finalize_callback(connection)
-            del connection_record.finalize_callback 
+            del connection_record.finalize_callback
         if pool.dispatch.checkin:
             pool.dispatch.checkin(connection, connection_record)
         pool._return_conn(connection_record)
@@ -399,13 +399,13 @@ class _ConnectionFairy(object):
             rec = self._connection_record = pool._do_get()
             conn = self.connection = self._connection_record.get_connection()
             rec.fairy = weakref.ref(
-                            self, 
+                            self,
                             lambda ref:_finalize_fairy and _finalize_fairy(conn, rec, pool, ref, _echo)
                         )
             _refs.add(rec)
         except:
             # helps with endless __getattr__ loops later on
-            self.connection = None 
+            self.connection = None
             self._connection_record = None
             raise
         if self._echo:
@@ -467,7 +467,7 @@ class _ConnectionFairy(object):
         attempts = 2
         while attempts > 0:
             try:
-                self._pool.dispatch.checkout(self.connection, 
+                self._pool.dispatch.checkout(self.connection,
                                             self._connection_record,
                                             self)
                 return self
@@ -510,7 +510,7 @@ class _ConnectionFairy(object):
             self._close()
 
     def _close(self):
-        _finalize_fairy(self.connection, self._connection_record, 
+        _finalize_fairy(self.connection, self._connection_record,
                             self._pool, None, self._echo)
         self.connection = None
         self._connection_record = None
@@ -523,7 +523,7 @@ class SingletonThreadPool(Pool):
 
     Options are the same as those of :class:`.Pool`, as well as:
 
-    :param pool_size: The number of threads in which to maintain connections 
+    :param pool_size: The number of threads in which to maintain connections
         at once.  Defaults to five.
 
     :class:`.SingletonThreadPool` is used by the SQLite dialect
@@ -541,12 +541,12 @@ class SingletonThreadPool(Pool):
 
     def recreate(self):
         self.logger.info("Pool recreating")
-        return self.__class__(self._creator, 
-            pool_size=self.size, 
-            recycle=self._recycle, 
-            echo=self.echo, 
+        return self.__class__(self._creator,
+            pool_size=self.size,
+            recycle=self._recycle,
+            echo=self.echo,
             logging_name=self._orig_logging_name,
-            use_threadlocal=self._use_threadlocal, 
+            use_threadlocal=self._use_threadlocal,
             _dispatch=self.dispatch)
 
     def dispose(self):
@@ -599,7 +599,7 @@ class DummyLock(object):
 class QueuePool(Pool):
     """A :class:`.Pool` that imposes a limit on the number of open connections.
 
-    :class:`.QueuePool` is the default pooling implementation used for 
+    :class:`.QueuePool` is the default pooling implementation used for
     all :class:`.Engine` objects, unless the SQLite dialect is in use.
 
     """
@@ -658,18 +658,18 @@ class QueuePool(Pool):
           :meth:`unique_connection` method is provided to bypass the
           threadlocal behavior installed into :meth:`connect`.
 
-        :param reset_on_return: Determine steps to take on 
-          connections as they are returned to the pool.   
+        :param reset_on_return: Determine steps to take on
+          connections as they are returned to the pool.
           reset_on_return can have any of these values:
 
           * 'rollback' - call rollback() on the connection,
             to release locks and transaction resources.
             This is the default value.  The vast majority
             of use cases should leave this value set.
-          * True - same as 'rollback', this is here for 
+          * True - same as 'rollback', this is here for
             backwards compatibility.
           * 'commit' - call commit() on the connection,
-            to release locks and transaction resources. 
+            to release locks and transaction resources.
             A commit here may be desirable for databases that
             cache query plans if a commit is emitted,
             such as Microsoft SQL Server.  However, this
@@ -681,7 +681,7 @@ class QueuePool(Pool):
             that has no transaction support at all,
             namely MySQL MyISAM.   By not doing anything,
             performance can be improved.   This
-            setting should **never be selected** for a 
+            setting should **never be selected** for a
             database that supports transactions,
             as it will lead to deadlocks and stale
             state.
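
At the :func:`.create_engine` level the same setting is spelled
``pool_reset_on_return``; a sketch:

    from sqlalchemy import create_engine

    # 'rollback' is the default; 'commit' and None select the
    # alternate reset steps described above
    engine = create_engine('mssql+pyodbc://scott:tiger@mydsn',
                        pool_reset_on_return='commit')
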
@@ -732,7 +732,7 @@ class QueuePool(Pool):
                 else:
                     raise exc.TimeoutError(
                             "QueuePool limit of size %d overflow %d reached, "
-                            "connection timed out, timeout %d" % 
+                            "connection timed out, timeout %d" %
                             (self.size(), self.overflow(), self._timeout))
 
             self._overflow_lock.acquire()
@@ -749,10 +749,10 @@ class QueuePool(Pool):
 
     def recreate(self):
         self.logger.info("Pool recreating")
-        return self.__class__(self._creator, pool_size=self._pool.maxsize, 
+        return self.__class__(self._creator, pool_size=self._pool.maxsize,
                           max_overflow=self._max_overflow,
-                          timeout=self._timeout, 
-                          recycle=self._recycle, echo=self.echo, 
+                          timeout=self._timeout,
+                          recycle=self._recycle, echo=self.echo,
                           logging_name=self._orig_logging_name,
                           use_threadlocal=self._use_threadlocal,
                           _dispatch=self.dispatch)
@@ -777,9 +777,9 @@ class QueuePool(Pool):
     def status(self):
         return "Pool size: %d  Connections in pool: %d "\
                 "Current Overflow: %d Current Checked out "\
-                "connections: %d" % (self.size(), 
-                                    self.checkedin(), 
-                                    self.overflow(), 
+                "connections: %d" % (self.size(),
+                                    self.checkedin(),
+                                    self.overflow(),
                                     self.checkedout())
 
     def size(self):
@@ -822,11 +822,11 @@ class NullPool(Pool):
     def recreate(self):
         self.logger.info("Pool recreating")
 
-        return self.__class__(self._creator, 
-            recycle=self._recycle, 
-            echo=self.echo, 
+        return self.__class__(self._creator,
+            recycle=self._recycle,
+            echo=self.echo,
             logging_name=self._orig_logging_name,
-            use_threadlocal=self._use_threadlocal, 
+            use_threadlocal=self._use_threadlocal,
             _dispatch=self.dispatch)
 
     def dispose(self):
@@ -915,7 +915,7 @@ class AssertionPool(Pool):
 
     def recreate(self):
         self.logger.info("Pool recreating")
-        return self.__class__(self._creator, echo=self.echo, 
+        return self.__class__(self._creator, echo=self.echo,
                             logging_name=self._orig_logging_name,
                             _dispatch=self.dispatch)
 
@@ -982,7 +982,7 @@ class _DBProxy(object):
             try:
                 if key not in self.pools:
                     kw.pop('sa_pool_key', None)
-                    pool = self.poolclass(lambda: 
+                    pool = self.poolclass(lambda:
                                 self.module.connect(*args, **kw), **self.kw)
                     self.pools[key] = pool
                     return pool
@@ -1021,6 +1021,6 @@ class _DBProxy(object):
             return kw['sa_pool_key']
 
         return tuple(
-            list(args) + 
+            list(args) +
             [(k, kw[k]) for k in sorted(kw)]
         )
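
The QueuePool hunks above touch the checkout-timeout, recreate() and status() paths; a minimal sketch of how the limit in that TimeoutError message is reached (sizes and timeout are illustrative):

    import sqlite3
    from sqlalchemy.pool import QueuePool

    pool = QueuePool(lambda: sqlite3.connect(':memory:'),
                     pool_size=2, max_overflow=1, timeout=0.5)
    held = [pool.connect() for _ in range(3)]   # 2 pooled + 1 overflow
    print(pool.status())   # formatted by status() as shown above
    # a fourth pool.connect() would now wait ~0.5 seconds, then raise
    # exc.TimeoutError with the "QueuePool limit of size ..." message
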
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index a3adbe29338ae60bee67cbbfcd22139e4d7be253..240263febf8b818a34ef606a28b9d69d8181938c 100644 (file)
@@ -5,7 +5,7 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-"""defines generic type conversion functions, as used in bind and result 
+"""defines generic type conversion functions, as used in bind and result
 processors.
 
 They all share one common characteristic: None is passed through unchanged.
@@ -116,9 +116,9 @@ try:
 
     def to_decimal_processor_factory(target_class, scale=10):
         # Note that the scale argument is not taken into account for integer
-        # values in the C implementation while it is in the Python one. 
-        # For example, the Python implementation might return 
-        # Decimal('5.00000') whereas the C implementation will 
+        # values in the C implementation while it is in the Python one.
+        # For example, the Python implementation might return
+        # Decimal('5.00000') whereas the C implementation will
         # return Decimal('5'). These are equivalent of course.
         return DecimalResultProcessor(target_class, "%%.%df" % scale).process
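
The scale discrepancy noted in this comment is easy to reproduce with plain decimal; a small sketch of the formatting path the pure-Python implementation takes:

    from decimal import Decimal

    scale = 5
    fstring = "%%.%df" % scale          # -> "%.5f", as built above
    print(Decimal(fstring % 5))         # Decimal('5.00000'), Python path
    print(Decimal(fstring % 5) == Decimal('5'))   # True: numerically equal
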
 
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 22f127fcc18d0bb4ff7ab31a55cdbb0f9198c03e..5a480f0c3c9ac4d92e9986c1e74674c0cca576b8 100644 (file)
@@ -33,11 +33,11 @@ class GenericFunction(Function):
 class next_value(Function):
     """Represent the 'next value', given a :class:`.Sequence`
     as its single argument.
-    
+
     Compiles into the appropriate function on each backend,
     or will raise NotImplementedError if used on a backend
     that does not provide support for sequences.
-    
+
     """
     type = sqltypes.Integer()
     name = "next_value"
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index a79168e75ccd8a5e227ce896e3cb451c5e4723b4..6f2c8299210ef2f705a1a39036fa922c23c10f94 100644 (file)
@@ -8,15 +8,15 @@
 
 SQLAlchemy schema and expression constructs rely on a Python-centric
 version of the classic "visitor" pattern as the primary way in which
-they apply functionality.  The most common use of this pattern 
-is statement compilation, where individual expression classes match 
-up to rendering methods that produce a string result.   Beyond this, 
-the visitor system is also used to inspect expressions for various 
-information and patterns, as well as for usage in 
+they apply functionality.  The most common use of this pattern
+is statement compilation, where individual expression classes match
+up to rendering methods that produce a string result.   Beyond this,
+the visitor system is also used to inspect expressions for various
+information and patterns, as well as for usage in
 some kinds of expression transformation.  Other kinds of transformation
 use a non-visitor traversal system.
 
-For many examples of how the visit system is used, see the 
+For many examples of how the visit system is used, see the
 sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules.
 For an introduction to clause adaptation, see
 http://techspot.zzzeek.org/2008/01/23/expression-transformations/
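
As a concrete illustration of the traversal API this docstring describes, a sketch that collects every Column referenced in an expression (table and columns illustrative):

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.sql import visitors

    t = Table('t', MetaData(),
              Column('x', Integer), Column('y', Integer))
    expr = select([t.c.x]).where(t.c.y > 5)

    found = []
    # the visitor dict is keyed by each element's __visit_name__
    visitors.traverse(expr, {}, {'column': found.append})
    print(found)        # both t.c.x and t.c.y, in traversal order
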
@@ -28,18 +28,18 @@ import re
 from .. import util
 import operator
 
-__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', 
-    'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', 
+__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
+    'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
     'iterate_depthfirst', 'traverse_using', 'traverse',
     'cloned_traverse', 'replacement_traverse']
 
 class VisitableType(type):
     """Metaclass which assigns a `_compiler_dispatch` method to classes
     having a `__visit_name__` attribute.
-    
+
     The _compiler_dispatch attribute becomes an instance method which
     looks approximately like the following::
-    
+
         def _compiler_dispatch(self, visitor, **kw):
             '''Look for an attribute named "visit_" + self.__visit_name__
             on the visitor, and call it with the same kw params.'''
@@ -92,7 +92,7 @@ class Visitable(object):
     __metaclass__ = VisitableType
 
 class ClauseVisitor(object):
-    """Base class for visitor objects which can traverse using 
+    """Base class for visitor objects which can traverse using
     the traverse() function.
 
     """
@@ -144,7 +144,7 @@ class ClauseVisitor(object):
         return self
 
 class CloningVisitor(ClauseVisitor):
-    """Base class for visitor objects which can traverse using 
+    """Base class for visitor objects which can traverse using
     the cloned_traverse() function.
 
     """
@@ -160,7 +160,7 @@ class CloningVisitor(ClauseVisitor):
         return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict)
 
 class ReplacingCloningVisitor(CloningVisitor):
-    """Base class for visitor objects which can traverse using 
+    """Base class for visitor objects which can traverse using
     the replacement_traverse() function.
 
     """
@@ -168,8 +168,8 @@ class ReplacingCloningVisitor(CloningVisitor):
     def replace(self, elem):
         """receive pre-copied elements during a cloning traversal.
 
-        If the method returns a new element, the element is used 
-        instead of creating a simple copy of the element.  Traversal 
+        If the method returns a new element, the element is used
+        instead of creating a simple copy of the element.  Traversal
         will halt on the newly returned element if it is re-encountered.
         """
         return None
@@ -232,7 +232,7 @@ def traverse_depthfirst(obj, opts, visitors):
     return traverse_using(iterate_depthfirst(obj, opts), obj, visitors)
 
 def cloned_traverse(obj, opts, visitors):
-    """clone the given expression structure, allowing 
+    """clone the given expression structure, allowing
     modifications by visitors."""
 
     cloned = util.column_dict()
@@ -256,7 +256,7 @@ def cloned_traverse(obj, opts, visitors):
 
 
 def replacement_traverse(obj, opts, replace):
-    """clone the given expression structure, allowing element 
+    """clone the given expression structure, allowing element
     replacement by a given replacement function."""
 
     cloned = util.column_dict()
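
To make the replace() contract above concrete, a sketch that clones an expression while swapping one column for another, the basic move behind clause adaptation (names illustrative):

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.sql import visitors

    t = Table('t', MetaData(),
              Column('x', Integer), Column('y', Integer))
    expr = t.c.x > 5

    def replace(elem):
        if elem is t.c.x:
            return t.c.y        # use this element instead of a plain copy
        return None             # None means: clone as usual

    print(visitors.replacement_traverse(expr, {}, replace))
    # t.y > :x_1  (the bind name still derives from the original column)
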
diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py
index 5714565779298e3fd8967b37aa688b3f0db17cf6..65073328827fba4400fc4d7a40f60327ba2d61b5 100644 (file)
@@ -186,7 +186,7 @@ class Queue:
     def abort(self, context):
         """Issue an 'abort', will force any thread waiting on get()
         to stop waiting and raise SAAbort.
-        
+
         """
         self._sqla_abort_context = context
         if not self.not_full.acquire(False):
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index 2ba86b23d0f10ecae8866fa42c582a8156403a8a..58227af206f389be908e0d591c9079fd811f6d97 100644 (file)
@@ -29,7 +29,7 @@ def sort_as_subsets(tuples, allitems):
         if not output:
             raise CircularDependencyError(
                     "Circular dependency detected.",
-                    find_cycles(tuples, allitems), 
+                    find_cycles(tuples, allitems),
                     _gen_edges(edges)
                 )
 
@@ -56,7 +56,7 @@ def find_cycles(tuples, allitems):
 
     output = set()
 
-    # we'd like to find all nodes that are 
+    # we'd like to find all nodes that are
     # involved in cycles, so we do the full
     # pass through the whole thing for each
     # node in the original list.
@@ -86,7 +86,7 @@ def find_cycles(tuples, allitems):
 
 def _gen_edges(edges):
     return set([
-                    (right, left) 
-                    for left in edges 
-                    for right in edges[left] 
+                    (right, left)
+                    for left in edges
+                    for right in edges[left]
                 ])
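
A tiny sketch of the functions touched above: find_cycles() reports every node that participates in a cycle, using the same (parent, child) edge tuples that sort_as_subsets() consumes (node names illustrative):

    from sqlalchemy.util import topological

    # (parent, child) dependency edges; these three nodes form a cycle
    tuples = [('n1', 'n2'), ('n2', 'n3'), ('n3', 'n1')]
    print(topological.find_cycles(tuples, ['n1', 'n2', 'n3']))
    # -> a set containing all of 'n1', 'n2', 'n3'
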
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index 04cf82a15d44df6271328639ec0387ee68a14891..9e58a10aaedc8e175f8dea54b909fc84f87fe4ef 100644 (file)
@@ -272,7 +272,7 @@ class MemUsageTest(EnsureZeroed):
             x = counter[0]
             dec = 10
             while dec > 0:
-                # trying to count in binary here, 
+                # trying to count in binary here,
                 # works enough to trip the test case
                 if pow(2, dec) < x:
                     setattr(w1, 'col%d' % dec, counter[0])
@@ -523,7 +523,7 @@ class MemUsageTest(EnsureZeroed):
                 pass
 
             mapper(A, table1, properties={
-                'bs':relationship(B, secondary=table3, 
+                'bs':relationship(B, secondary=table3,
                                     backref='as', order_by=table3.c.t1)
             })
             mapper(B, table2)
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index de634d83e61ded80f35da22dc877a3421fbc95d6..b8ef4090da3aed8daebfaca194dca2d41d3e803f 100644 (file)
@@ -377,7 +377,7 @@ class ZooMarkTest(fixtures.TestBase):
     def test_profile_2_insert(self):
         self.test_baseline_2_insert()
 
-    @profiling.function_call_count(3118, {'2.7':3333, 
+    @profiling.function_call_count(3118, {'2.7':3333,
                                         '2.7+cextension':3109, '2.6':3109})
     def test_profile_3_properties(self):
         self.test_baseline_3_properties()
@@ -394,16 +394,16 @@ class ZooMarkTest(fixtures.TestBase):
     def test_profile_5_aggregates(self):
         self.test_baseline_5_aggregates()
 
-    @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647, 
+    @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647,
                                         '2.7+cextension':1698})
     def test_profile_6_editing(self):
         self.test_baseline_6_editing()
 
-    @profiling.function_call_count(2252, {'2.4': 1673, 
+    @profiling.function_call_count(2252, {'2.4': 1673,
                                             '2.6':2412,
                                             '2.7':2412,
                                             '3.2':2396,
-                                            '2.7+cextension':2110, 
+                                            '2.7+cextension':2110,
                                             '2.6+cextension': 2252})
     def test_profile_7_multiview(self):
         self.test_baseline_7_multiview()
diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py
index 4be3c839018defd2a254343ca35eeb64f9599515..f3e19982b0713179056a9d576885b72046162fcc 100644 (file)
@@ -86,7 +86,7 @@ class DependencySortTest(fixtures.TestBase):
             eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
                 'node4']))
             eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
-                ('node2', 'node3'), ('node1', 'node2'), 
+                ('node2', 'node3'), ('node1', 'node2'),
                 ('node4','node5'), ('node5', 'node4')]))
 
     def test_raise_on_cycle_two(self):
@@ -108,7 +108,7 @@ class DependencySortTest(fixtures.TestBase):
         except exc.CircularDependencyError, err:
             eq_(err.cycles, set(['node1', 'node3', 'node2']))
             eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
-                ('node3', 'node2'), ('node1', 'node2'), 
+                ('node3', 'node2'), ('node1', 'node2'),
                 ('node2','node4')]))
 
     def test_raise_on_cycle_three(self):
@@ -224,7 +224,7 @@ class DependencySortTest(fixtures.TestBase):
             ])
         # node6 only became present here once [ticket:2282] was addressed.
         eq_(
-            topological.find_cycles(tuples, allnodes), 
+            topological.find_cycles(tuples, allnodes),
             set(['node1','node2', 'node4', 'node6'])
         )
 
@@ -258,23 +258,23 @@ class DependencySortTest(fixtures.TestBase):
 
     def test_find_multiple_cycles_four(self):
         tuples = [
-            ('node6', 'node2'), 
-            ('node15', 'node19'), 
+            ('node6', 'node2'),
+            ('node15', 'node19'),
             ('node19', 'node2'), ('node4', 'node10'),
             ('node15', 'node13'),
-            ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'), 
-            ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'), 
+            ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
+            ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
             ('node11', 'node2'), ('node10', 'node20'), ('node1', 'node11'),
              ('node20', 'node19'), ('node4', 'node20'), ('node15', 'node20'),
              ('node9', 'node19'), ('node11', 'node10'), ('node11', 'node19'),
               ('node13', 'node6'), ('node3', 'node15'), ('node9', 'node11'),
-              ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'), 
+              ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
               ('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
         ]
         allnodes = ['node%d' % i for i in xrange(1, 21)]
         eq_(
-            topological.find_cycles(tuples, allnodes), 
-            set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17', 
-            'node19', 'node20', 'node8', 'node1', 'node3', 
+            topological.find_cycles(tuples, allnodes),
+            set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
+            'node19', 'node20', 'node8', 'node1', 'node3',
             'node2', 'node4', 'node6'])
         )
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 61a4b9c71b67f09744578ef92c97eb0639117a26..57c06c328fa6976e4c0b747c8ebbff7c958bc29c 100644 (file)
@@ -347,7 +347,7 @@ class TestCustomTargets(fixtures.TestBase):
         )
 
         assert_raises(
-            exc.InvalidRequestError, 
+            exc.InvalidRequestError,
             event.listen,
             listen, "event_one", self.Target
         )
diff --git a/test/base/test_except.py b/test/base/test_except.py
index 18a06b9bd1f106ab1d4f2d4f9f439cc0ec387ae5..597f1132bd6fdec94dec168d5b86e5fb7afc8c91 100644 (file)
@@ -5,8 +5,8 @@ from sqlalchemy import exc as sa_exceptions
 from test.lib import fixtures
 from test.lib.testing import eq_
 
-# Py3K 
-#StandardError = BaseException 
+# Py3K
+#StandardError = BaseException
 # Py2K
 from exceptions import StandardError, KeyboardInterrupt, SystemExit
 # end Py2K
@@ -54,7 +54,7 @@ class WrapTest(fixtures.TestBase):
     def test_tostring_large_dict(self):
         try:
             raise sa_exceptions.DBAPIError.instance('this is a message'
-                    , 
+                    ,
                 {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
                 8, 'i': 9, 'j': 10, 'k': 11,
                 }, OperationalError(), DatabaseError)
@@ -64,8 +64,8 @@ class WrapTest(fixtures.TestBase):
 
     def test_tostring_large_list(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message', 
-                [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,], 
+            raise sa_exceptions.DBAPIError.instance('this is a message',
+                [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
                 OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
             assert str(exc).startswith("(OperationalError)  'this is a "
@@ -73,9 +73,9 @@ class WrapTest(fixtures.TestBase):
 
     def test_tostring_large_executemany(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message', 
-                [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, 
-                {1: 1}, {1:1}, {1: 1}, {1: 1},], 
+            raise sa_exceptions.DBAPIError.instance('this is a message',
+                [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
+                {1: 1}, {1:1}, {1: 1}, {1: 1},],
                 OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
             eq_(str(exc) ,
@@ -84,7 +84,7 @@ class WrapTest(fixtures.TestBase):
                 "1}, {1: 1}, {1: 1}]")
         try:
             raise sa_exceptions.DBAPIError.instance('this is a message', [
-                {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, 
+                {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
                 {1:1}, {1: 1}, {1: 1}, {1: 1},
                 ], OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
@@ -95,7 +95,7 @@ class WrapTest(fixtures.TestBase):
                 "bound parameter sets ...  {1: 1}, {1: 1}]"
             )
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message', 
+            raise sa_exceptions.DBAPIError.instance('this is a message',
                 [
                 (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
                 (1, ),
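
The assertions above pin down how DBAPIError truncates large parameter lists in its string form; a sketch reproducing the behavior (the plain Exception classes stand in for a real DBAPI's error types):

    from sqlalchemy import exc

    e = exc.DBAPIError.instance(
        'this is a message',
        [{1: 1}] * 11,            # more than ten parameter sets
        Exception('boom'),        # stand-in for a DBAPI error instance
        Exception)                # stand-in for the DBAPI's Error base
    print(str(e))   # the middle of the list is elided, as asserted above
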
@@ -127,7 +127,7 @@ class WrapTest(fixtures.TestBase):
 
     def test_db_error_noncompliant_dbapi(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(), 
+            raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
                         DatabaseError)
         except sa_exceptions.DBAPIError, e:
             self.assert_(e.__class__ is sa_exceptions.DBAPIError)
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index f827b5879a9279b979d51f943075b6c4b6345377..a9ff7da9881198f9e3c49ffef746a2ed8fc14d07 100644 (file)
@@ -299,7 +299,7 @@ class IdentitySetTest(fixtures.TestBase):
             ids2 - ids1,
             IdentitySet([o2, o3])
         )
-        
+
         ids2 -= ids1
         eq_(ids2, IdentitySet([o2, o3]))
 
diff --git a/test/bootstrap/noseplugin.py b/test/bootstrap/noseplugin.py
index 89b164cbc5e4ce7fd0b13a29ffecaab2b84d3027..d653fa502074cdfd850ed988f67de8065877e724 100644 (file)
@@ -16,7 +16,7 @@ from test.bootstrap.config import (
     _create_testing_engine, _engine_pool, _engine_strategy, _engine_uri, _list_dbs, _log,
     _prep_testing_database, _require, _reverse_topological, _server_side_cursors,
     _monkeypatch_cdecimal, _zero_timeout,
-    _set_table_options, base_config, db, db_label, db_url, file_config, post_configure, 
+    _set_table_options, base_config, db, db_label, db_url, file_config, post_configure,
     pre_configure)
 
 log = logging.getLogger('nose.plugins.sqlalchemy')
diff --git a/test/dialect/test_firebird.py b/test/dialect/test_firebird.py
index 3241db730ae01836598fc6d4bf158f65ec094f78..41533dbbdc5b302d40527237c6f2801e6497d6d1 100644 (file)
@@ -94,7 +94,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
 
 
 class BuggyDomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
-    """Test Firebird domains (and some other reflection bumps), 
+    """Test Firebird domains (and some other reflection bumps),
     see [ticket:1663] and http://tracker.firebirdsql.org/browse/CORE-356"""
 
     __only_on__ = 'firebird'
@@ -325,7 +325,7 @@ class TypesTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_infinite_float(self):
         metadata = self.metadata
-        t = Table('t', metadata, 
+        t = Table('t', metadata,
             Column('data', Float)
         )
         metadata.create_all()
diff --git a/test/dialect/test_maxdb.py b/test/dialect/test_maxdb.py
index 8d76529d786f7334be69a43e2203e857442f2c8d..e0c3eafbe911d29b8c750418208ce273e29737f7 100644 (file)
@@ -10,7 +10,7 @@ from test.lib import *
 
 
 # TODO
-# - add "Database" test, a quick check for join behavior on different 
+# - add "Database" test, a quick check for join behavior on different
 # max versions
 # - full max-specific reflection suite
 # - datetime tests
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
index 8a880645c2d208d353dca6e446d4fbb4bfd9ec57..d794fd2b8c5756ecd65c5e8f0f8fd78e30e837a1 100644 (file)
@@ -26,7 +26,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("master_ssl_verify_server_cert", Integer))
         x = select([table.c.col1, table.c.master_ssl_verify_server_cert])
 
-        self.assert_compile(x, 
+        self.assert_compile(x,
             '''SELECT mysql_table.col1, mysql_table.`master_ssl_verify_server_cert` FROM mysql_table''')
 
     def test_create_index_simple(self):
@@ -66,7 +66,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_create_pk_plain(self):
         m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)), 
+        tbl = Table('testtbl', m, Column('data', String(255)),
             PrimaryKeyConstraint('data'))
 
         self.assert_compile(schema.CreateTable(tbl),
@@ -75,7 +75,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_create_pk_with_using(self):
         m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)), 
+        tbl = Table('testtbl', m, Column('data', String(255)),
             PrimaryKeyConstraint('data', mysql_using='btree'))
 
         self.assert_compile(schema.CreateTable(tbl),
@@ -86,7 +86,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
 class DialectTest(fixtures.TestBase):
     __only_on__ = 'mysql'
 
-    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'], 
+    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
                     'requires particular SSL arguments')
     def test_ssl_arguments(self):
         dialect = testing.db.dialect
@@ -98,12 +98,12 @@ class DialectTest(fixtures.TestBase):
         for k in ('use_unicode', 'found_rows', 'client_flag'):
             kwarg.pop(k, None)
         eq_(
-            kwarg, 
+            kwarg,
             {
-                'passwd': 'tiger', 'db': 'test', 
-                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem', 
-                        'key': '/key.pem'}, 
-                'host': 'localhost', 'user': 'scott', 
+                'passwd': 'tiger', 'db': 'test',
+                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
+                        'key': '/key.pem'},
+                'host': 'localhost', 'user': 'scott',
                 'port': 3306
             }
         )
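
The dict compared above comes out of the dialect's URL translation; a sketch of that step in isolation, assuming the translation itself does not require the MySQLdb module to be importable:

    from sqlalchemy.engine.url import make_url
    from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb

    url = make_url('mysql+mysqldb://scott:tiger@localhost/test'
                   '?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem')
    arg, kw = MySQLDialect_mysqldb().create_connect_args(url)
    print(kw['ssl'])
    # {'ca': '/ca.pem', 'cert': '/cert.pem', 'key': '/key.pem'}
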
@@ -158,7 +158,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
 
         columns = [
             # column type, args, kwargs, expected ddl
-            # e.g. Column(Integer(10, unsigned=True)) == 
+            # e.g. Column(Integer(10, unsigned=True)) ==
             # 'INTEGER(10) UNSIGNED'
             (mysql.MSNumeric, [], {},
              'NUMERIC'),
@@ -592,7 +592,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
             # there's a slight assumption here that this test can
             # complete within the scope of a single second.
             # if needed, can break out the eq_() just to check for
-            # timestamps that are within a few seconds of "now" 
+            # timestamps that are within a few seconds of "now"
             # using timedelta.
 
             now = testing.db.execute("select now()").scalar()
@@ -730,7 +730,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         enum_table.drop(checkfirst=True)
         enum_table.create()
 
-        assert_raises(exc.DBAPIError, enum_table.insert().execute, 
+        assert_raises(exc.DBAPIError, enum_table.insert().execute,
                         e1=None, e2=None, e3=None, e4=None)
 
         assert_raises(exc.StatementError, enum_table.insert().execute,
@@ -745,8 +745,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
 
         res = enum_table.select().execute().fetchall()
 
-        expected = [(None, 'a', 'a', None, 'a', None, None, None), 
-                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"), 
+        expected = [(None, 'a', 'a', None, 'a', None, None, None),
+                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
                     ('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
 
         # This is known to fail with MySQLDB 1.2.2 beta versions
@@ -786,8 +786,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
             t1.insert().execute(value=u'drôle', value2=u'drôle')
             t1.insert().execute(value=u'réveillé', value2=u'réveillé')
             t1.insert().execute(value=u'S’il', value2=u'S’il')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(), 
-                [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'), 
+            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+                [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
                             (3, u'S’il', u'S’il')]
             )
 
@@ -802,7 +802,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
             assert t2.c.value.type.enums[0:2] == \
                     (u'réveillé', u'drôle') #, u'S’il') # eh ?
             assert t2.c.value2.type.enums[0:2] == \
-                    (u'réveillé', u'drôle') #, u'S’il') # eh ? 
+                    (u'réveillé', u'drôle') #, u'S’il') # eh ?
         finally:
             metadata.drop_all()
 
@@ -1153,7 +1153,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
-        eq_(gen(prefixes=['DISTINCTROW']), 
+        eq_(gen(prefixes=['DISTINCTROW']),
                 'SELECT DISTINCTROW q')
 
         # Interaction with MySQL prefix extensions
@@ -1182,7 +1182,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            select(['q'], distinct='ALL', 
+            select(['q'], distinct='ALL',
                     prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']),
             'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q'
         )
@@ -1211,7 +1211,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
             )
         self.assert_compile(
             select([t]).limit(10),
-            "SELECT t.col1, t.col2 FROM t  LIMIT %s", 
+            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
             {'param_1':10})
 
         self.assert_compile(
@@ -1232,9 +1232,9 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         ):
             type_ = sqltypes.to_instance(type_)
             assert_raises_message(
-                exc.CompileError, 
+                exc.CompileError,
                 "VARCHAR requires a length on dialect mysql",
-                type_.compile, 
+                type_.compile,
             dialect=mysql.dialect())
 
             t1 = Table('sometable', MetaData(),
@@ -1289,7 +1289,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
             # 'SIGNED INTEGER' is a bigint, so this is ok.
             (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
             (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSBigInteger(unsigned=True), 
+            (m.MSBigInteger(unsigned=True),
                             "CAST(t.col AS UNSIGNED INTEGER)"),
             (m.MSBit, "t.col"),
 
@@ -1412,7 +1412,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
         cname = 'zyrenian_zyme_zyzzogeton_zo'
 
-        t1 = Table(tname, MetaData(), 
+        t1 = Table(tname, MetaData(),
                     Column(cname, Integer, index=True),
                 )
         ix1 = list(t1.indexes)[0]
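
One of the CAST cases exercised above, compiled standalone for illustration (mysql.BIGINT is the public name behind MSBigInteger; column() is the lightweight column-expression constructor):

    from sqlalchemy import cast
    from sqlalchemy.sql import column
    from sqlalchemy.dialects import mysql

    expr = cast(column('col'), mysql.BIGINT(unsigned=True))
    print(expr.compile(dialect=mysql.dialect()))
    # CAST(col AS UNSIGNED INTEGER)
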
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index b50b0dcbbfdfdd8e89519dbf9db1824ac53dfb75..a039b02215b78deccbffae3af6a1d8ab0d7bb854 100644 (file)
@@ -155,7 +155,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_create_index_with_ops(self):
         m = MetaData()
         tbl = Table('testtbl', m,
-                    Column('data', String), 
+                    Column('data', String),
                     Column('data2', Integer, key='d2'))
 
         idx = Index('test_idx1', tbl.c.data,
@@ -269,7 +269,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("variadic", Integer))
         x = select([table.c.col1, table.c.variadic])
 
-        self.assert_compile(x, 
+        self.assert_compile(x,
             '''SELECT pg_table.col1, pg_table."variadic" FROM pg_table''')
 
     def test_from_only(self):
@@ -382,7 +382,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
     @testing.provide_metadata
     def test_arrays(self):
         metadata = self.metadata
-        t1 = Table('t', metadata, 
+        t1 = Table('t', metadata,
             Column('x', postgresql.ARRAY(Float)),
             Column('y', postgresql.ARRAY(REAL)),
             Column('z', postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
@@ -392,7 +392,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
         t1.insert().execute(x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")])
         row = t1.select().execute().first()
         eq_(
-            row, 
+            row,
             ([5], [5], [6], [decimal.Decimal("6.4")])
         )
 
@@ -465,7 +465,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         metadata = MetaData(testing.db)
         t1 = Table('table', metadata,
             Column('id', Integer, primary_key=True),
-            Column('value', 
+            Column('value',
                     Enum(u'réveillé', u'drôle', u'S’il',
                             name='onetwothreetype'))
         )
@@ -475,7 +475,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
             t1.insert().execute(value=u'drôle')
             t1.insert().execute(value=u'réveillé')
             t1.insert().execute(value=u'S’il')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(), 
+            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
                 [(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
             )
             m2 = MetaData(testing.db)
@@ -504,11 +504,11 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
     def test_disable_create(self):
         metadata = self.metadata
 
-        e1 = postgresql.ENUM('one', 'two', 'three', 
+        e1 = postgresql.ENUM('one', 'two', 'three',
                             name="myenum",
                             create_type=False)
 
-        t1 = Table('e1', metadata, 
+        t1 = Table('e1', metadata,
             Column('c1', e1)
         )
         # table can be created separately
@@ -529,7 +529,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         """
         metadata = self.metadata
 
-        e1 = Enum('one', 'two', 'three', 
+        e1 = Enum('one', 'two', 'three',
                             name="myenum")
         t1 = Table('e1', metadata,
             Column('c1', e1)
@@ -666,9 +666,9 @@ class NumericInterpretationTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_numeric_default(self):
         metadata = self.metadata
-        # pg8000 appears to fail when the value is 0, 
+        # pg8000 appears to fail when the value is 0,
         # returning an int instead of a Decimal.
-        t =Table('t', metadata, 
+        t = Table('t', metadata,
             Column('id', Integer, primary_key=True),
             Column('nd', Numeric(asdecimal=True), default=1),
             Column('nf', Numeric(asdecimal=False), default=1),
@@ -1245,8 +1245,8 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = postgresql.dialect()
 
     def setup(self):
-        self.table = Table('t', MetaData(), 
-                Column('id',Integer, primary_key=True), 
+        self.table = Table('t', MetaData(),
+                Column('id',Integer, primary_key=True),
                 Column('a', String),
                 Column('b', String),
             )
@@ -1278,7 +1278,7 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_on_columns_inline_list(self):
         self.assert_compile(
-            select([self.table], 
+            select([self.table],
                     distinct=[self.table.c.a, self.table.c.b]).
                     order_by(self.table.c.a, self.table.c.b),
             "SELECT DISTINCT ON (t.a, t.b) t.id, "
@@ -1538,28 +1538,28 @@ class ReflectionTest(fixtures.TestBase):
 
             m1 = MetaData()
 
-            t2_schema = Table('some_other_table', 
-                                m1, 
-                                schema="test_schema_2", 
-                                autoload=True, 
+            t2_schema = Table('some_other_table',
+                                m1,
+                                schema="test_schema_2",
+                                autoload=True,
                                 autoload_with=conn)
-            t1_schema = Table('some_table', 
-                                m1, 
-                                schema="test_schema", 
+            t1_schema = Table('some_table',
+                                m1,
+                                schema="test_schema",
                                 autoload=True,
                                 autoload_with=conn)
 
-            t2_no_schema = Table('some_other_table', 
-                                m1, 
-                                autoload=True, 
+            t2_no_schema = Table('some_other_table',
+                                m1,
+                                autoload=True,
                                 autoload_with=conn)
 
-            t1_no_schema = Table('some_table', 
-                                m1, 
-                                autoload=True, 
+            t1_no_schema = Table('some_table',
+                                m1,
+                                autoload=True,
                                 autoload_with=conn)
 
-            # OK, this is because "test_schema" is 
+            # OK, this is because "test_schema" is
             # in the search path, and might as well be
             # the default too.  why would we assign
             # a "schema" to the Table ?
@@ -1624,7 +1624,7 @@ class ReflectionTest(fixtures.TestBase):
 
     @testing.provide_metadata
     def test_index_reflection_modified(self):
-        """reflect indexes when a column name has changed - PG 9 
+        """reflect indexes when a column name has changed - PG 9
         does not update the name of the column in the index def.
         [ticket:2141]
 
@@ -1721,7 +1721,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         current_encoding = c.connection.connection.encoding
         c.close()
 
-        # attempt to use an encoding that's not 
+        # attempt to use an encoding that's not
         # already set
         if current_encoding == 'UTF8':
             test_encoding = 'LATIN1'
@@ -1992,9 +1992,9 @@ class ArrayTest(fixtures.TestBase, AssertsExecutionResults):
                     for v in value
                 ]
 
-        arrtable = Table('arrtable', metadata, 
-                        Column('id', Integer, primary_key=True), 
-                        Column('intarr',postgresql.ARRAY(Integer)), 
+        arrtable = Table('arrtable', metadata,
+                        Column('id', Integer, primary_key=True),
+                        Column('intarr',postgresql.ARRAY(Integer)),
                          Column('strarr',postgresql.ARRAY(Unicode())),
                         Column('dimarr', ProcValue)
                     )
@@ -2064,15 +2064,15 @@ class ArrayTest(fixtures.TestBase, AssertsExecutionResults):
         metadata.create_all()
         testing.db.execute(t1.insert(), id=1, data=["1","2","3"], data2=[5.4, 5.6])
         testing.db.execute(t1.insert(), id=2, data=["4", "5", "6"], data2=[1.0])
-        testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]], 
+        testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]],
                         data2=[[5.4, 5.6], [1.0, 1.1]])
 
         r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall()
         eq_(
-            r, 
+            r,
             [
-                (1, ('1', '2', '3'), (5.4, 5.6)), 
-                (2, ('4', '5', '6'), (1.0,)), 
+                (1, ('1', '2', '3'), (5.4, 5.6)),
+                (2, ('4', '5', '6'), (1.0,)),
                 (3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1)))
             ]
         )
@@ -2329,7 +2329,7 @@ class UUIDTest(fixtures.TestBase):
     def test_uuid_string(self):
         import uuid
         self._test_round_trip(
-            Table('utable', MetaData(), 
+            Table('utable', MetaData(),
                 Column('data', postgresql.UUID())
             ),
             str(uuid.uuid4()),
@@ -2343,7 +2343,7 @@ class UUIDTest(fixtures.TestBase):
     def test_uuid_uuid(self):
         import uuid
         self._test_round_trip(
-            Table('utable', MetaData(), 
+            Table('utable', MetaData(),
                 Column('data', postgresql.UUID(as_uuid=True))
             ),
             uuid.uuid4(),
@@ -2402,16 +2402,16 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
                                   {'id': 2, 'description': 'Ruby'}])
         matchtable.insert().execute([{'id': 1, 'title'
                                     : 'Agile Web Development with Rails'
-                                    , 'category_id': 2}, 
+                                    , 'category_id': 2},
                                     {'id': 2,
                                     'title': 'Dive Into Python',
-                                    'category_id': 1}, 
+                                    'category_id': 1},
                                     {'id': 3, 'title'
                                     : "Programming Matz's Ruby",
-                                    'category_id': 2}, 
+                                    'category_id': 2},
                                     {'id': 4, 'title'
                                     : 'The Definitive Guide to Django',
-                                    'category_id': 1}, 
+                                    'category_id': 1},
                                     {'id': 5, 'title'
                                     : 'Python in a Nutshell',
                                     'category_id': 1}])
@@ -2504,12 +2504,12 @@ class TupleTest(fixtures.TestBase):
                 testing.db.execute(
                     select([
                             tuple_(
-                                literal_column("'a'"), 
+                                literal_column("'a'"),
                                 literal_column("'b'")
                             ).\
                                 in_([
                                     tuple_(*[
-                                            literal_column("'%s'" % letter) 
+                                            literal_column("'%s'" % letter)
                                             for letter in elem
                                         ]) for elem in test
                                 ])
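
Among the PostgreSQL behaviors exercised above, the DISTINCT ON form from DistinctOnTest is easy to show standalone; a sketch (table and columns illustrative):

    from sqlalchemy import Column, MetaData, String, Table, select
    from sqlalchemy.dialects import postgresql

    t = Table('t', MetaData(), Column('a', String), Column('b', String))
    stmt = select([t], distinct=[t.c.a]).order_by(t.c.a, t.c.b)
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT DISTINCT ON (t.a) t.a, t.b FROM t ORDER BY t.a, t.b
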
diff --git a/test/dialect/test_pyodbc.py b/test/dialect/test_pyodbc.py
index c2aec726bcd42665e01629c48d3e0acebcfbe7f2..52d6bc7c454723e123a50cec7300b21b44fdf7a4 100644 (file)
@@ -12,6 +12,6 @@ class PyODBCTest(fixtures.TestBase):
             ("crap.crap.crap", ()),
         ]:
             eq_(
-                connector._parse_dbapi_version(vers), 
+                connector._parse_dbapi_version(vers),
                 expected
             )
\ No newline at end of file
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index d41d8789955c0e4537cf03579023f10a360df636..4ce3d8fb6e235cce81828deb1d4b2f5023fb9385 100644 (file)
@@ -75,11 +75,11 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
         t.create(engine)
         try:
             engine.execute(t.insert(), {'d1': datetime.date(2010, 5,
-                           10), 
+                           10),
                           'd2': datetime.datetime( 2010, 5, 10, 12, 15, 25,
                           )})
             row = engine.execute(t.select()).first()
-            eq_(row, (1, datetime.date(2010, 5, 10), 
+            eq_(row, (1, datetime.date(2010, 5, 10),
             datetime.datetime( 2010, 5, 10, 12, 15, 25, )))
             r = engine.execute(func.current_date()).scalar()
             assert isinstance(r, basestring)
@@ -336,7 +336,7 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
             m2 = MetaData(db)
             t2 = Table('r_defaults', m2, autoload=True)
             self.assert_compile(
-                CreateTable(t2), 
+                CreateTable(t2),
                 "CREATE TABLE r_defaults (data VARCHAR(40) "
                 "DEFAULT 'my_default', val INTEGER DEFAULT 0 "
                 "NOT NULL)"
@@ -346,7 +346,7 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
 
     @testing.provide_metadata
     def test_boolean_default(self):
-        t= Table("t", self.metadata, 
+        t = Table("t", self.metadata,
                 Column("x", Boolean, server_default=sql.false()))
         t.create(testing.db)
         testing.db.execute(t.insert())
@@ -399,7 +399,7 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
         CREATE TABLE "django_admin_log" (
             "id" integer NOT NULL PRIMARY KEY,
             "action_time" datetime NOT NULL,
-            "content_type_id" integer NULL 
+            "content_type_id" integer NULL
                     REFERENCES "django_content_type" ("id"),
             "object_id" text NULL,
             "change_message" text NOT NULL
@@ -542,26 +542,26 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
             sql.false(), "0"
         )
         self.assert_compile(
-            sql.true(), 
+            sql.true(),
             "1"
         )
 
     def test_constraints_with_schemas(self):
         metadata = MetaData()
-        t1 = Table('t1', metadata, 
+        t1 = Table('t1', metadata,
                         Column('id', Integer, primary_key=True),
                         schema='master')
-        t2 = Table('t2', metadata, 
+        t2 = Table('t2', metadata,
                         Column('id', Integer, primary_key=True),
                         Column('t1_id', Integer, ForeignKey('master.t1.id')),
                         schema='master'
                     )
-        t3 = Table('t3', metadata, 
+        t3 = Table('t3', metadata,
                         Column('id', Integer, primary_key=True),
                         Column('t1_id', Integer, ForeignKey('master.t1.id')),
                         schema='alternate'
                     )
-        t4 = Table('t4', metadata, 
+        t4 = Table('t4', metadata,
                         Column('id', Integer, primary_key=True),
                         Column('t1_id', Integer, ForeignKey('master.t1.id')),
                     )
@@ -692,17 +692,17 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
         metadata = MetaData(testing.db)
         testing.db.execute("""
         CREATE VIRTUAL TABLE cattable using FTS3 (
-            id INTEGER NOT NULL, 
-            description VARCHAR(50), 
+            id INTEGER NOT NULL,
+            description VARCHAR(50),
             PRIMARY KEY (id)
         )
         """)
         cattable = Table('cattable', metadata, autoload=True)
         testing.db.execute("""
         CREATE VIRTUAL TABLE matchtable using FTS3 (
-            id INTEGER NOT NULL, 
+            id INTEGER NOT NULL,
             title VARCHAR(200),
-            category_id INTEGER NOT NULL, 
+            category_id INTEGER NOT NULL,
             PRIMARY KEY (id)
         )
         """)
@@ -867,7 +867,7 @@ class ReflectFKConstraintTest(fixtures.TestBase):
 
     def test_name_not_none(self):
         # we don't have names for PK constraints,
-        # it appears we get back None in the pragma for 
+        # it appears we get back None in the pragma for
         # FKs too (it doesn't even appear to be documented in sqlite's docs
         # at http://www.sqlite.org/pragma.html#pragma_foreign_key_list
         # how did we ever know that's the "name" field ??)
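
The sql.true()/sql.false() rendering asserted earlier in this file also drives server defaults, as in test_boolean_default above; a sketch of the emitted DDL (output approximate):

    from sqlalchemy import Boolean, Column, MetaData, Table, sql
    from sqlalchemy.dialects import sqlite
    from sqlalchemy.schema import CreateTable

    t = Table('t', MetaData(),
              Column('x', Boolean, server_default=sql.false()))
    print(CreateTable(t).compile(dialect=sqlite.dialect()))
    # roughly: CREATE TABLE t (x BOOLEAN DEFAULT 0, CHECK (x IN (0, 1)))
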
diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py
index c1616fcfbd8642b08a21d3c00171728af30eb005..f910dd5eaae8a9951832a047637a8bd4bb35083f 100644 (file)
@@ -264,16 +264,16 @@ class DDLExecutionTest(fixtures.TestBase):
     def test_deprecated_append_ddl_listener_table(self):
         metadata, users, engine = self.metadata, self.users, self.engine
         canary = []
-        users.append_ddl_listener('before-create', 
+        users.append_ddl_listener('before-create',
                             lambda e, t, b:canary.append('mxyzptlk')
                         )
-        users.append_ddl_listener('after-create', 
+        users.append_ddl_listener('after-create',
                             lambda e, t, b:canary.append('klptzyxm')
                         )
-        users.append_ddl_listener('before-drop', 
+        users.append_ddl_listener('before-drop',
                             lambda e, t, b:canary.append('xyzzy')
                         )
-        users.append_ddl_listener('after-drop', 
+        users.append_ddl_listener('after-drop',
                             lambda e, t, b:canary.append('fnord')
                         )
 
@@ -293,16 +293,16 @@ class DDLExecutionTest(fixtures.TestBase):
     def test_deprecated_append_ddl_listener_metadata(self):
         metadata, users, engine = self.metadata, self.users, self.engine
         canary = []
-        metadata.append_ddl_listener('before-create', 
+        metadata.append_ddl_listener('before-create',
                             lambda e, t, b, tables=None:canary.append('mxyzptlk')
                         )
-        metadata.append_ddl_listener('after-create', 
+        metadata.append_ddl_listener('after-create',
                             lambda e, t, b, tables=None:canary.append('klptzyxm')
                         )
-        metadata.append_ddl_listener('before-drop', 
+        metadata.append_ddl_listener('before-drop',
                             lambda e, t, b, tables=None:canary.append('xyzzy')
                         )
-        metadata.append_ddl_listener('after-drop', 
+        metadata.append_ddl_listener('after-drop',
                             lambda e, t, b, tables=None:canary.append('fnord')
                         )
 
@@ -541,7 +541,7 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
         assert DDL('').execute_if(callable_=lambda d, y,z, **kw: True).\
                         _should_execute(tbl, cx)
         assert(DDL('').execute_if(
-                        callable_=lambda d, y,z, **kw: z.engine.name 
+                        callable_=lambda d, y,z, **kw: z.engine.name
                         != 'bogus').
                _should_execute(tbl, cx))
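
The append_ddl_listener() calls above are the deprecated spelling; the event-based replacement looks like this (DDL text and index name illustrative):

    from sqlalchemy import Column, DDL, Integer, MetaData, Table, event

    m = MetaData()
    users = Table('users', m, Column('id', Integer, primary_key=True))
    event.listen(
        users, 'after_create',
        DDL('CREATE INDEX ix_users_id ON users (id)').execute_if(
            dialect='postgresql'))
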
 
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index dcb149be87a84762108f2f22f31c918a91cc8356..622df3fdf385f686a4e801c0dfb09f3697f4c622 100644 (file)
@@ -140,7 +140,7 @@ pool_timeout=10
         assert e.echo is True
 
         for param, values in [
-            ('convert_unicode', ('true', 'false', 'force')), 
+            ('convert_unicode', ('true', 'false', 'force')),
             ('echo', ('true', 'false', 'debug')),
             ('echo_pool', ('true', 'false', 'debug')),
             ('use_native_unicode', ('true', 'false')),
@@ -191,7 +191,7 @@ pool_timeout=10
             assert e.pool._reset_on_return is expected
 
         assert_raises(
-            exc.ArgumentError, 
+            exc.ArgumentError,
             create_engine, "postgresql://",
             pool_reset_on_return='hi', module=dbapi,
             _initialize=False
@@ -250,7 +250,7 @@ pool_timeout=10
         every backend.
 
         """
-        # pretend pysqlite throws the 
+        # pretend pysqlite throws the
         # "Cannot operate on a closed database." error
         # on connect.   IRL we'd be getting Oracle's "shutdown in progress"
 
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index da9e54292546709c00e1c7675f22e8796824d4c7..360a20eb238df0d8ec8867949d2cd81e6f91de1e 100644 (file)
@@ -58,7 +58,7 @@ class MockReconnectTest(fixtures.TestBase):
         # note - using straight create_engine here
         # since we are testing gc
         db = create_engine(
-                    'postgresql://foo:bar@localhost/test', 
+                    'postgresql://foo:bar@localhost/test',
                     module=dbapi, _initialize=False)
 
         # monkeypatch disconnect checker
@@ -205,7 +205,7 @@ class CursorErrTest(fixtures.TestBase):
         dbapi = MDBAPI()
 
         db = testing_engine(
-                    'postgresql://foo:bar@localhost/test', 
+                    'postgresql://foo:bar@localhost/test',
                     options=dict(module=dbapi, _initialize=False))
 
     def test_cursor_explode(self):
@@ -451,7 +451,7 @@ class RecycleTest(fixtures.TestBase):
 
             # set the pool recycle down to 1.
             # we aren't doing this inline with the
-            # engine create since cx_oracle takes way 
+            # engine create since cx_oracle takes way
             # too long to create the 1st connection and don't
             # want to build a huge delay into this test.
 
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index be2acb1f36803b72766ef0414d429e19af941590..2713bd80b0a5d2b89a404c5080c7856d744c98f7 100644 (file)
@@ -135,11 +135,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         t2 = Table('t', m2, old_z, old_q)
         eq_(t2.primary_key.columns, (t2.c.z, ))
         t2 = Table('t', m2, old_y,
-                        extend_existing=True, 
-                        autoload=True, 
+                        extend_existing=True,
+                        autoload=True,
                         autoload_with=testing.db)
         eq_(
-            set(t2.columns.keys()), 
+            set(t2.columns.keys()),
             set(['x', 'y', 'z', 'q', 'id'])
         )
         eq_(t2.primary_key.columns, (t2.c.id, ))
@@ -150,11 +150,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
         m3 = MetaData()
         t3 = Table('t', m3, Column('z', Integer))
-        t3 = Table('t', m3, extend_existing=False, 
-                        autoload=True, 
+        t3 = Table('t', m3, extend_existing=False,
+                        autoload=True,
                         autoload_with=testing.db)
         eq_(
-            set(t3.columns.keys()), 
+            set(t3.columns.keys()),
             set(['z'])
         )
 
@@ -165,12 +165,12 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         t4 = Table('t', m4, old_z, old_q)
         eq_(t4.primary_key.columns, (t4.c.z, ))
         t4 = Table('t', m4, old_y,
-                        extend_existing=True, 
-                        autoload=True, 
+                        extend_existing=True,
+                        autoload=True,
                         autoload_replace=False,
                         autoload_with=testing.db)
         eq_(
-            set(t4.columns.keys()), 
+            set(t4.columns.keys()),
             set(['x', 'y', 'z', 'q', 'id'])
         )
         eq_(t4.primary_key.columns, (t4.c.id, ))
@@ -202,9 +202,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
     @testing.provide_metadata
     def test_autoload_replace_foreign_key_nonpresent(self):
-        """test autoload_replace=False with col plus FK 
+        """test autoload_replace=False with col plus FK
         establishes the FK not present in the DB.
-        
+
         """
         a = Table('a', self.metadata, Column('id', Integer, primary_key=True))
         b = Table('b', self.metadata, Column('id', Integer, primary_key=True),
@@ -214,8 +214,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         m2 = MetaData()
         b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
         a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
-        b2 = Table('b', m2, extend_existing=True, autoload=True, 
-                                autoload_with=testing.db, 
+        b2 = Table('b', m2, extend_existing=True, autoload=True,
+                                autoload_with=testing.db,
                                 autoload_replace=False)
 
         assert b2.c.id is not None
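
A condensed sketch of the reflection pattern these tests exercise, assuming tables a and b already exist on the target database (URL illustrative):

    from sqlalchemy import (Column, ForeignKey, Integer, MetaData,
                            Table, create_engine)

    engine = create_engine('sqlite:///reflected.db')    # illustrative
    m = MetaData()
    # declare the FK in Python first ...
    b = Table('b', m, Column('a_id', Integer, ForeignKey('a.id')))
    # ... then reflect the rest of the table around it, keeping our column
    b = Table('b', m, extend_existing=True, autoload=True,
              autoload_with=engine, autoload_replace=False)
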
@@ -225,9 +225,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     @testing.provide_metadata
     def test_autoload_replace_foreign_key_ispresent(self):
         """test autoload_replace=False with col plus FK mirroring
-        DB-reflected FK skips the reflected FK and installs 
+        DB-reflected FK skips the reflected FK and installs
         the in-python one only.
-        
+
         """
         a = Table('a', self.metadata, Column('id', Integer, primary_key=True))
         b = Table('b', self.metadata, Column('id', Integer, primary_key=True),
@@ -237,8 +237,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         m2 = MetaData()
         b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
         a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
-        b2 = Table('b', m2, extend_existing=True, autoload=True, 
-                                autoload_with=testing.db, 
+        b2 = Table('b', m2, extend_existing=True, autoload=True,
+                                autoload_with=testing.db,
                                 autoload_replace=False)
 
         assert b2.c.id is not None
@@ -259,8 +259,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         m2 = MetaData()
         b2 = Table('b', m2, Column('a_id', Integer))
         a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
-        b2 = Table('b', m2, extend_existing=True, autoload=True, 
-                                autoload_with=testing.db, 
+        b2 = Table('b', m2, extend_existing=True, autoload=True,
+                                autoload_with=testing.db,
                                 autoload_replace=False)
 
         assert b2.c.id is not None
@@ -434,7 +434,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
         meta4 = MetaData(testing.db)
 
-        u4 = Table('users', meta4, 
+        u4 = Table('users', meta4,
                 Column('id', sa.Integer, key='u_id', primary_key=True),
                 autoload=True)
 
@@ -496,7 +496,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
     @testing.provide_metadata
     def test_override_keys(self):
-        """test that columns can be overridden with a 'key', 
+        """test that columns can be overridden with a 'key',
         and that ForeignKey targeting during reflection still works."""
 
         meta = self.metadata
@@ -511,7 +511,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         )
         meta.create_all()
         m2 = MetaData(testing.db)
-        a2 = Table('a', m2, 
+        a2 = Table('a', m2,
                 Column('x', sa.Integer, primary_key=True, key='x1'),
                 autoload=True)
         b2 = Table('b', m2, autoload=True)
@@ -562,7 +562,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
         meta.create_all()
         meta2 = MetaData(testing.db)
-        a2 = Table('addresses', meta2, 
+        a2 = Table('addresses', meta2,
                 Column('user_id',sa.Integer, sa.ForeignKey('users.id')),
                 autoload=True)
         u2 = Table('users', meta2, autoload=True)
@@ -701,7 +701,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
 
     @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
-    @testing.fails_on('+informixdb', 
+    @testing.fails_on('+informixdb',
                         "FIXME: should be supported via the "
                         "DELIMITED env var but that breaks "
                         "everything else for now")
@@ -727,15 +727,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             check_col = 'true'
         quoter = meta.bind.dialect.identifier_preparer.quote_identifier
 
-        table_b = Table('false', meta, 
-                    Column('create', sa.Integer, primary_key=True), 
+        table_b = Table('false', meta,
+                    Column('create', sa.Integer, primary_key=True),
                     Column('true', sa.Integer,sa.ForeignKey('select.not')),
                     sa.CheckConstraint('%s <> 1'
                         % quoter(check_col), name='limit')
                     )
 
-        table_c = Table('is', meta, 
-                Column('or', sa.Integer, nullable=False, primary_key=True), 
+        table_c = Table('is', meta,
+                Column('or', sa.Integer, nullable=False, primary_key=True),
                 Column('join', sa.Integer, nullable=False, primary_key=True),
                 sa.PrimaryKeyConstraint('or', 'join', name='to')
                 )
@@ -885,15 +885,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
             m2.reflect(views=False)
             eq_(
-                set(m2.tables), 
+                set(m2.tables),
                 set(['users', 'email_addresses', 'dingalings'])
             )
 
             m2 = MetaData(testing.db)
             m2.reflect(views=True)
             eq_(
-                set(m2.tables), 
-                set(['email_addresses_v', 'users_v', 
+                set(m2.tables),
+                set(['email_addresses_v', 'users_v',
                             'users', 'dingalings', 'email_addresses'])
             )
         finally:
@@ -905,16 +905,16 @@ class CreateDropTest(fixtures.TestBase):
     def setup_class(cls):
         global metadata, users
         metadata = MetaData()
-        users = Table('users', metadata, 
+        users = Table('users', metadata,
                     Column('user_id', sa.Integer,
                       sa.Sequence('user_id_seq', optional=True),
-                      primary_key=True), 
+                      primary_key=True),
                     Column('user_name',sa.String(40)))
 
         addresses = Table('email_addresses', metadata,
                       Column('address_id', sa.Integer,
                           sa.Sequence('address_id_seq', optional=True),
-                          primary_key=True), 
+                          primary_key=True),
                       Column('user_id',
                           sa.Integer, sa.ForeignKey(users.c.user_id)),
                       Column('email_address', sa.String(40)))
@@ -989,8 +989,8 @@ class SchemaManipulationTest(fixtures.TestBase):
         meta = MetaData()
 
         users = Table('users', meta, Column('id', sa.Integer))
-        addresses = Table('addresses', meta, 
-                        Column('id', sa.Integer), 
+        addresses = Table('addresses', meta,
+                        Column('id', sa.Integer),
                         Column('user_id', sa.Integer))
 
         fk = sa.ForeignKeyConstraint(['user_id'],[users.c.id])
@@ -1027,7 +1027,7 @@ class UnicodeReflectionTest(fixtures.TestBase):
             (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
         ]
 
-        # as you can see, our options for this kind of thing 
+        # as you can see, our options for this kind of thing
         # are really limited unless you're on PG or SQLite
 
         # forget about it on these backends
@@ -1037,7 +1037,7 @@ class UnicodeReflectionTest(fixtures.TestBase):
         elif testing.against("mysql") and \
             not testing.requires._has_mysql_fully_case_sensitive():
             names = no_multibyte_period.union(no_case_sensitivity)
-        # mssql + pyodbc + freetds can't compare multibyte names to 
+        # mssql + pyodbc + freetds can't compare multibyte names to
         # information_schema.tables.table_name
         elif testing.against("mssql"):
             names = no_multibyte_period.union(no_has_table)
@@ -1208,8 +1208,8 @@ class SchemaTest(fixtures.TestBase):
         m2 = MetaData(schema="test_schema", bind=testing.db)
         m2.reflect()
         eq_(
-            set(m2.tables), 
-            set(['test_schema.dingalings', 'test_schema.users', 
+            set(m2.tables),
+            set(['test_schema.dingalings', 'test_schema.users',
                 'test_schema.email_addresses'])
         )
 
@@ -1286,7 +1286,7 @@ def createTables(meta, schema=None):
     )
     dingalings = Table("dingalings", meta,
               Column('dingaling_id', sa.Integer, primary_key=True),
-              Column('address_id', sa.Integer, 
+              Column('address_id', sa.Integer,
                     sa.ForeignKey('%semail_addresses.address_id' % schema_prefix)),
               Column('data', sa.String(30)),
               schema=schema,
@@ -1364,11 +1364,11 @@ class CaseSensitiveTest(fixtures.TablesTest):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('SomeTable', metadata, 
+        Table('SomeTable', metadata,
             Column('x', Integer, primary_key=True),
             test_needs_fk=True
         )
-        Table('SomeOtherTable', metadata, 
+        Table('SomeOtherTable', metadata,
             Column('x', Integer, primary_key=True),
             Column('y', Integer, sa.ForeignKey("SomeTable.x")),
             test_needs_fk=True
@@ -1387,8 +1387,8 @@ class CaseSensitiveTest(fixtures.TablesTest):
         eq_(t1.name, "SomeTable")
         assert t1.c.x is not None
 
-    @testing.fails_if(lambda: 
-            testing.against(('mysql', '<', (5, 5))) and 
+    @testing.fails_if(lambda:
+            testing.against(('mysql', '<', (5, 5))) and
             not testing.requires._has_mysql_fully_case_sensitive()
             )
     def test_reflect_via_fk(self):
index 04a3e642cf74bb086bca4bc1d596aa83c2bc531a..709f0d2f1a10ab1426750f96ffd553154038a85f 100644 (file)
@@ -1204,7 +1204,7 @@ class IsolationLevelTest(fixtures.TestBase):
         eng = testing_engine(options=dict())
         conn = eng.connect()
         eq_(
-            eng.dialect.get_isolation_level(conn.connection), 
+            eng.dialect.get_isolation_level(conn.connection),
             self._default_isolation_level()
         )
 
@@ -1212,13 +1212,13 @@ class IsolationLevelTest(fixtures.TestBase):
                 conn.connection, self._non_default_isolation_level()
             )
         eq_(
-            eng.dialect.get_isolation_level(conn.connection), 
+            eng.dialect.get_isolation_level(conn.connection),
             self._non_default_isolation_level()
         )
 
         eng.dialect.reset_isolation_level(conn.connection)
         eq_(
-            eng.dialect.get_isolation_level(conn.connection), 
+            eng.dialect.get_isolation_level(conn.connection),
             self._default_isolation_level()
         )
 
@@ -1243,17 +1243,17 @@ class IsolationLevelTest(fixtures.TestBase):
     def test_invalid_level(self):
         eng = testing_engine(options=dict(isolation_level='FOO'))
         assert_raises_message(
-            exc.ArgumentError, 
+            exc.ArgumentError,
                 "Invalid value '%s' for isolation_level. "
-                "Valid isolation levels for %s are %s" % 
-                ("FOO", eng.dialect.name, 
+                "Valid isolation levels for %s are %s" %
+                ("FOO", eng.dialect.name,
                 ", ".join(eng.dialect._isolation_lookup)),
             eng.connect)
 
     def test_per_connection(self):
         from sqlalchemy.pool import QueuePool
         eng = testing_engine(options=dict(
-                                poolclass=QueuePool, 
+                                poolclass=QueuePool,
                                 pool_size=2, max_overflow=0))
 
         c1 = eng.connect()
@@ -1292,7 +1292,7 @@ class IsolationLevelTest(fixtures.TestBase):
             r"on Connection.execution_options\(\), or "
             r"per-engine using the isolation_level "
             r"argument to create_engine\(\).",
-            select([1]).execution_options, 
+            select([1]).execution_options,
                     isolation_level=self._non_default_isolation_level()
         )
 
@@ -1305,7 +1305,7 @@ class IsolationLevelTest(fixtures.TestBase):
             r"To set engine-wide isolation level, "
             r"use the isolation_level argument to create_engine\(\).",
             create_engine,
-            testing.db.url, 
+            testing.db.url,
                 execution_options={'isolation_level':
                             self._non_default_isolation_level}
         )
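
These isolation-level assertions exercise the two public entry points named in the error messages above: the isolation_level argument to create_engine() and Connection.execution_options(). A minimal sketch of both, assuming a placeholder PostgreSQL URL:

    from sqlalchemy import create_engine

    # engine-wide: every pooled connection starts at this level
    eng = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="SERIALIZABLE",
    )

    # per-connection override; the dialect restores the default
    # level when the connection goes back to the pool
    conn = eng.connect().execution_options(
        isolation_level="READ COMMITTED",
    )
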
index f260e65cee93e19384d772f7dabf7218a88925f1..7ae4673635c1b7823d27e857da12245b7e9e6a6d 100644 (file)
@@ -641,8 +641,8 @@ class ProxyFactoryTest(ListTest):
                     )
 
         class Parent(object):
-            children = association_proxy('_children', 'name', 
-                        proxy_factory=CustomProxy, 
+            children = association_proxy('_children', 'name',
+                        proxy_factory=CustomProxy,
                         proxy_bulk_set=CustomProxy.extend
                     )
 
@@ -1017,17 +1017,17 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('userkeywords', metadata, 
+        Table('userkeywords', metadata,
           Column('keyword_id', Integer,ForeignKey('keywords.id'), primary_key=True),
           Column('user_id', Integer, ForeignKey('users.id'))
         )
-        Table('users', metadata, 
+        Table('users', metadata,
             Column('id', Integer,
               primary_key=True, test_needs_autoincrement=True),
             Column('name', String(64)),
             Column('singular_id', Integer, ForeignKey('singular.id'))
         )
-        Table('keywords', metadata, 
+        Table('keywords', metadata,
             Column('id', Integer,
               primary_key=True, test_needs_autoincrement=True),
             Column('keyword', String(64)),
@@ -1090,7 +1090,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
         })
 
         mapper(UserKeyword, userkeywords, properties={
-            'user' : relationship(User, backref='user_keywords'), 
+            'user' : relationship(User, backref='user_keywords'),
             'keyword' : relationship(Keyword)
         })
         mapper(Singular, singular, properties={
@@ -1288,7 +1288,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
         User = self.classes.User
         self.assert_compile(
             self.session.query(User).join(
-                        User.keywords.local_attr, 
+                        User.keywords.local_attr,
                         User.keywords.remote_attr),
             "SELECT users.id AS users_id, users.name AS users_name, "
             "users.singular_id AS users_singular_id "
@@ -1321,7 +1321,7 @@ class DictOfTupleUpdateTest(fixtures.TestBase):
 
         m = MetaData()
         a = Table('a', m, Column('id', Integer, primary_key=True))
-        b = Table('b', m, Column('id', Integer, primary_key=True), 
+        b = Table('b', m, Column('id', Integer, primary_key=True),
                     Column('aid', Integer, ForeignKey('a.id')))
         mapper(A, a, properties={
             'orig':relationship(B, collection_class=attribute_mapped_collection('key'))
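
The proxy_factory / proxy_bulk_set hooks above customize the stock association proxy. For reference, the unadorned form exposes a scalar attribute of related objects as a plain collection; a minimal sketch using hypothetical User/Keyword classes:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(64))

        def __init__(self, keyword):
            self.keyword = keyword

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kws = relationship(Keyword)

        # view related Keyword rows as a list of plain strings;
        # appending a string invokes Keyword(keyword) behind the scenes
        keywords = association_proxy('kws', 'keyword')
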
index 4e8a40ff306eef207cf9ab28433be2ca54b4b309..ee64548f54115dc0d2a9ca317c748163a8c62aea 100644 (file)
@@ -123,7 +123,7 @@ class UserDefinedTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_annotations(self):
-        """test that annotated clause constructs use the 
+        """test that annotated clause constructs use the
         decorated class' compiler.
 
         """
@@ -356,7 +356,7 @@ class DefaultOnExistingTest(fixtures.TestBase, AssertsCompiledSQL):
             return "BIND(%s)" % compiler.visit_bindparam(element, **kw)
 
         self.assert_compile(
-            t.select().where(t.c.c == 5), 
+            t.select().where(t.c.c == 5),
             "SELECT t.a, t.b, t.c FROM t WHERE t.c = BIND(:c_1)",
             use_default_dialect=True
         )
@@ -373,7 +373,7 @@ class DefaultOnExistingTest(fixtures.TestBase, AssertsCompiledSQL):
             return "BIND(%s)" % compiler.visit_bindparam(element, **kw)
 
         self.assert_compile(
-            t.insert(), 
+            t.insert(),
             "INSERT INTO t (a, b) VALUES (BIND(:a), BIND(:b))",
             {'a':1, 'b':2},
             use_default_dialect=True
index 36cd598abb54ae8981e600ffe84c6799b4ae5675..e9494b2952b8d2e346b30258cc18644e6a2c8a74 100644 (file)
@@ -279,8 +279,8 @@ class DeclarativeTest(DeclarativeTestBase):
             id = Column(Integer, primary_key=True,
                         test_needs_autoincrement=True)
             email = Column(String(50))
-            user_id = Column(Integer) 
-            user = relationship("User", 
+            user_id = Column(Integer)
+            user = relationship("User",
                 primaryjoin="remote(User.id)==foreign(Address.user_id)"
             )
 
@@ -371,7 +371,7 @@ class DeclarativeTest(DeclarativeTestBase):
             name = Column(String(50))
             props = relationship('Prop', secondary='fooschema.user_to_prop',
                          primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
-                         secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id', 
+                         secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
                          backref='users')
 
         class Prop(Base):
@@ -383,7 +383,7 @@ class DeclarativeTest(DeclarativeTestBase):
             name = Column(String(50))
 
         user_to_prop = Table('user_to_prop', Base.metadata,
-                     Column('user_id', Integer, ForeignKey('fooschema.users.id')), 
+                     Column('user_id', Integer, ForeignKey('fooschema.users.id')),
                      Column('prop_id',Integer, ForeignKey('fooschema.props.id')),
                      schema='fooschema')
         configure_mappers()
@@ -503,7 +503,7 @@ class DeclarativeTest(DeclarativeTestBase):
         except exc.InvalidRequestError:
             assert sa.util.compat.py32
 
-        # the exception is preserved.  Remains the 
+        # the exception is preserved.  Remains the
         # same through repeated calls.
         for i in range(3):
             assert_raises_message(sa.exc.InvalidRequestError,
@@ -1006,9 +1006,9 @@ class DeclarativeTest(DeclarativeTestBase):
 
         class User(Base, fixtures.ComparableEntity):
             __tablename__ = 'user'
-            id = Column(Integer, primary_key=True, 
+            id = Column(Integer, primary_key=True,
                             test_needs_autoincrement=True)
-            address = composite(AddressComposite, 
+            address = composite(AddressComposite,
                 Column('street', String(50)),
                 Column('state', String(2)),
             )
@@ -1016,13 +1016,13 @@ class DeclarativeTest(DeclarativeTestBase):
         Base.metadata.create_all()
         sess = Session()
         sess.add(User(
-                address=AddressComposite('123 anywhere street', 
+                address=AddressComposite('123 anywhere street',
                                 'MD')
                 ))
         sess.commit()
         eq_(
-            sess.query(User).all(), 
-            [User(address=AddressComposite('123 anywhere street', 
+            sess.query(User).all(),
+            [User(address=AddressComposite('123 anywhere street',
                                 'MD'))]
         )
 
@@ -1036,23 +1036,23 @@ class DeclarativeTest(DeclarativeTestBase):
 
         class User(Base, fixtures.ComparableEntity):
             __tablename__ = 'user'
-            id = Column(Integer, primary_key=True, 
+            id = Column(Integer, primary_key=True,
                             test_needs_autoincrement=True)
             street = Column(String(50))
             state = Column(String(2))
-            address = composite(AddressComposite, 
+            address = composite(AddressComposite,
                 street, state)
 
         Base.metadata.create_all()
         sess = Session()
         sess.add(User(
-                address=AddressComposite('123 anywhere street', 
+                address=AddressComposite('123 anywhere street',
                                 'MD')
                 ))
         sess.commit()
         eq_(
-            sess.query(User).all(), 
-            [User(address=AddressComposite('123 anywhere street', 
+            sess.query(User).all(),
+            [User(address=AddressComposite('123 anywhere street',
                                 'MD'))]
         )
 
index db176aa6de60ee606977888878321ba77d0fce28..0876ebe63a9e6e2a83969a1e1c68efd6f942a392 100644 (file)
@@ -540,7 +540,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
             pass
 
         eq_(
-            MyModel.__mapper__.polymorphic_on.name, 
+            MyModel.__mapper__.polymorphic_on.name,
             'type_'
         )
         assert MyModel.__mapper__.polymorphic_on.table is not None
@@ -804,8 +804,8 @@ class DeclarativeMixinTest(DeclarativeTestBase):
 
             class Model(Base, ColumnMixin):
 
-                __table__ = Table('foo', Base.metadata, 
-                                Column('data',Integer), 
+                __table__ = Table('foo', Base.metadata,
+                                Column('data',Integer),
                                 Column('id', Integer,primary_key=True))
                 foo = relationship("Dest")
 
@@ -826,8 +826,8 @@ class DeclarativeMixinTest(DeclarativeTestBase):
 
             class Model(Base, ColumnMixin):
 
-                __table__ = Table('foo', Base.metadata, 
-                                Column('data',Integer), 
+                __table__ = Table('foo', Base.metadata,
+                                Column('data',Integer),
                                 Column('tada', Integer),
                                 Column('id', Integer,primary_key=True))
                 foo = relationship("Dest")
index a99c05af985f0571c22a592ae8031a4834963644..6efc6e64e285bd349087d91dd0882bc1b5078369 100644 (file)
@@ -23,7 +23,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('users', metadata, 
+        Table('users', metadata,
             Column('id', Integer,
                 primary_key=True, test_needs_autoincrement=True),
               Column('name', String(50)), test_needs_fk=True)
@@ -162,7 +162,7 @@ class DeferredReflectPKFKTest(DeferredReflectBase):
             Column('id', Integer,
                 primary_key=True, test_needs_autoincrement=True),
         )
-        Table("b", metadata, 
+        Table("b", metadata,
             Column('id', Integer,
                 ForeignKey('a.id'),
                 primary_key=True),
@@ -170,12 +170,12 @@ class DeferredReflectPKFKTest(DeferredReflectBase):
         )
 
     def test_pk_fk(self):
-        class B(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class B(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'b'
             a = relationship("A")
 
-        class A(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class A(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'a'
 
@@ -185,7 +185,7 @@ class DeferredReflectionTest(DeferredReflectBase):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('users', metadata, 
+        Table('users', metadata,
             Column('id', Integer,
                 primary_key=True, test_needs_autoincrement=True),
               Column('name', String(50)), test_needs_fk=True)
@@ -217,12 +217,12 @@ class DeferredReflectionTest(DeferredReflectBase):
         eq_(a1.user, User(name='u1'))
 
     def test_basic_deferred(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class User(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'users'
             addresses = relationship("Address", backref="user")
 
-        class Address(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Address(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'addresses'
 
@@ -250,12 +250,12 @@ class DeferredReflectionTest(DeferredReflectBase):
         self._roundtrip()
 
     def test_redefine_fk_double(self):
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class User(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'users'
             addresses = relationship("Address", backref="user")
 
-        class Address(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Address(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'addresses'
             user_id = Column(Integer, ForeignKey('users.id'))
@@ -266,7 +266,7 @@ class DeferredReflectionTest(DeferredReflectBase):
     def test_mapper_args_deferred(self):
         """test that __mapper_args__ is not called until *after* table reflection"""
 
-        class User(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class User(decl.DeferredReflection, fixtures.ComparableEntity,
                             Base):
             __tablename__ = 'users'
 
@@ -301,7 +301,7 @@ class DeferredInhReflectBase(DeferredReflectBase):
         Bar = Base._decl_class_registry['Bar']
 
         s = Session(testing.db)
-        
+
         s.add_all([
             Bar(data='d1', bar_data='b1'),
             Bar(data='d2', bar_data='b2'),
@@ -325,7 +325,7 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
     @classmethod
     def define_tables(cls, metadata):
         Table("foo", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
             Column('type', String(32)),
             Column('data', String(30)),
@@ -333,10 +333,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         )
 
     def test_basic(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
 
         class Bar(Foo):
@@ -346,10 +346,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         self._roundtrip()
 
     def test_add_subclass_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
 
         class Bar(Foo):
@@ -360,10 +360,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
         self._roundtrip()
 
     def test_add_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
             id = Column(Integer, primary_key=True)
 
@@ -377,7 +377,7 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
     @classmethod
     def define_tables(cls, metadata):
         Table("foo", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
             Column('type', String(32)),
             Column('data', String(30)),
@@ -390,10 +390,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
         )
 
     def test_basic(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
 
         class Bar(Foo):
@@ -404,10 +404,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
         self._roundtrip()
 
     def test_add_subclass_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
 
         class Bar(Foo):
@@ -419,10 +419,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
         self._roundtrip()
 
     def test_add_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
             id = Column(Integer, primary_key=True)
 
@@ -434,10 +434,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
         self._roundtrip()
 
     def test_add_fk_pk_column(self):
-        class Foo(decl.DeferredReflection, fixtures.ComparableEntity, 
+        class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
                     Base):
             __tablename__ = 'foo'
-            __mapper_args__ = {"polymorphic_on":"type", 
+            __mapper_args__ = {"polymorphic_on":"type",
                         "polymorphic_identity":"foo"}
 
         class Bar(Foo):
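
All of these inheritance variants lean on the same DeferredReflection lifecycle: classes declare only __tablename__ plus any overrides, and column reflection is postponed until prepare() is called with an engine. The documented pattern, sketched with a placeholder URL:

    from sqlalchemy import create_engine
    from sqlalchemy.ext.declarative import (DeferredReflection,
                                            declarative_base)

    Base = declarative_base()

    class Reflected(DeferredReflection, Base):
        __abstract__ = True

    class Foo(Reflected):
        # columns arrive from the database at prepare() time
        __tablename__ = 'foo'

    # assumes a 'foo' table already exists in the target database
    engine = create_engine("sqlite:///some.db")
    Reflected.prepare(engine)
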
index 68876c44791bd2f171a3369e57bf300441310516..b36db71fcaf73e2f7e4a9665da2146d5e11e991c 100644 (file)
@@ -1,6 +1,6 @@
 """Testing environment and utilities.
 
-This package contains base classes and routines used by 
+This package contains base classes and routines used by
 the unit tests.   Tests are based on Nose and bootstrapped
 by noseplugin.NoseSQLAlchemy.
 
index 41a72c9a49906ebf27f8f212c8149a46a13e60f7..451eeb43b27ba52bc25cd6e05f19b20991bf751b 100644 (file)
@@ -268,7 +268,7 @@ class MappedTest(_ORMTest, TablesTest, testing.AssertsExecutionResults):
         """Run a setup method, framing the operation with a Base class
         that will catch new subclasses to be established within
         the "classes" registry.
-        
+
         """
         cls_registry = cls.classes
         class FindFixture(type):
@@ -289,7 +289,7 @@ class MappedTest(_ORMTest, TablesTest, testing.AssertsExecutionResults):
 
     def _teardown_each_mappers(self):
         # some tests create mappers in the test bodies
-        # and will define setup_mappers as None - 
+        # and will define setup_mappers as None -
         # clear mappers in any case
         if self.run_setup_mappers != 'once':
             sa.orm.clear_mappers()
@@ -328,7 +328,7 @@ class DeclarativeMappedTest(MappedTest):
                         cls, classname, bases, dict_)
         class DeclarativeBasic(object):
             __table_cls__ = schema.Table
-        _DeclBase = declarative_base(metadata=cls.declarative_meta, 
+        _DeclBase = declarative_base(metadata=cls.declarative_meta,
                             metaclass=FindFixtureDeclarative,
                             cls=DeclarativeBasic)
         cls.DeclarativeBasic = _DeclBase
index bac9e549fee328a0360f91ba57bd7734e798cf9d..f4714571852a15b9253a9c33c60d7eee4f85f5a6 100644 (file)
@@ -69,12 +69,12 @@ def profiled(target=None, **target_opts):
                 else:
                     stats.print_stats()
 
-                print_callers = target_opts.get('print_callers', 
+                print_callers = target_opts.get('print_callers',
                                                 profile_config['print_callers'])
                 if print_callers:
                     stats.print_callers()
 
-                print_callees = target_opts.get('print_callees', 
+                print_callees = target_opts.get('print_callees',
                                                 profile_config['print_callees'])
                 if print_callees:
                     stats.print_callees()
index 88049d7fb2f68304e74f5f77a2c08e17e6e3df69..31b835b28e1f5d07a9bedd362ae0df1d05130336 100644 (file)
@@ -77,9 +77,9 @@ def identity(fn):
 def reflectable_autoincrement(fn):
     """Target database must support tables that can automatically generate
     PKs assuming they were reflected.
-    
+
     this is essentially all the DBs in "identity" plus Postgresql, which
-    has SERIAL support.  FB and Oracle (and sybase?) require the Sequence to 
+    has SERIAL support.  FB and Oracle (and sybase?) require the Sequence to
     be explicitly added, including if the table was reflected.
     """
     return _chain_decorators_on(
@@ -151,7 +151,7 @@ def update_from(fn):
     """Target must support UPDATE..FROM syntax"""
     return _chain_decorators_on(
         fn,
-        only_on(('postgresql', 'mssql', 'mysql'), 
+        only_on(('postgresql', 'mssql', 'mysql'),
             "Backend does not support UPDATE..FROM")
     )
 
@@ -388,7 +388,7 @@ def python25(fn):
 def cpython(fn):
     return _chain_decorators_on(
          fn,
-         skip_if(lambda: util.jython or util.pypy, 
+         skip_if(lambda: util.jython or util.pypy,
            "cPython interpreter needed"
          )
     )
@@ -424,11 +424,11 @@ def sqlite(fn):
 
 def ad_hoc_engines(fn):
     """Test environment must allow ad-hoc engine/connection creation.
-    
+
     DBs that scale poorly for many connections, even when closed, i.e.
     Oracle, may use the "--low-connections" option which flags this requirement
     as not present.
-    
+
     """
     return _chain_decorators_on(
         fn,
@@ -456,6 +456,6 @@ def selectone(fn):
     """target driver must support the literal statement 'select 1'"""
     return _chain_decorators_on(
         fn,
-        skip_if(lambda: testing.against('oracle'), 
+        skip_if(lambda: testing.against('oracle'),
             "non-standard SELECT scalar syntax")
     )
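
Each of these helpers wraps a test function in skip/fail decorators keyed to the current backend, so a requirement reads as a single decorator at the test site. Roughly, as a hypothetical usage (import path per this test suite's layout):

    from test.lib import testing

    @testing.requires.update_from
    def test_update_from_syntax():
        # executed only where the backend supports UPDATE..FROM;
        # elsewhere the requirement decorator skips the test
        pass
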
index d3bccb53c04cdb5074f5b8293e8252ee39e42e58..02d5922355bd87c54983584a2e26a69c1ba5e938 100644 (file)
@@ -95,7 +95,7 @@ def db_spec(*dbs):
 
 
 def fails_on(dbs, reason):
-    """Mark a test as expected to fail on the specified database 
+    """Mark a test as expected to fail on the specified database
     implementation.
 
     Unlike ``crashes``, tests marked as ``fails_on`` will be run
@@ -425,7 +425,7 @@ def resetwarnings():
     util.warn = util.langhelpers.warn = testing_warn
 
     warnings.filterwarnings('ignore',
-                            category=sa_exc.SAPendingDeprecationWarning) 
+                            category=sa_exc.SAPendingDeprecationWarning)
     warnings.filterwarnings('error', category=sa_exc.SADeprecationWarning)
     warnings.filterwarnings('error', category=sa_exc.SAWarning)
 
@@ -479,9 +479,9 @@ def _chain_decorators_on(fn, *decorators):
 
 def run_as_contextmanager(ctx, fn, *arg, **kw):
     """Run the given function under the given contextmanager,
-    simulating the behavior of 'with' to support older 
+    simulating the behavior of 'with' to support older
     Python versions.
-    
+
     """
 
     obj = ctx.__enter__()
@@ -576,8 +576,8 @@ class adict(dict):
 
 
 class AssertsCompiledSQL(object):
-    def assert_compile(self, clause, result, params=None, 
-                        checkparams=None, dialect=None, 
+    def assert_compile(self, clause, result, params=None,
+                        checkparams=None, dialect=None,
                         checkpositional=None,
                         use_default_dialect=False,
                         allow_dialect_select=False):
index 7431a3a8363caf4f25db65c951762069af0bf6ba..d0d3a9ec4d023f84a581ac21a0571072c2c028b6 100644 (file)
@@ -82,7 +82,7 @@ class FixtureTest(fixtures.MappedTest):
         mapper(Keyword, keywords)
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                 backref=backref('parent', remote_side=[nodes.c.id])
             )
         })
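
The Node fixture is the standard self-referential (adjacency list) mapping: remote_side pins the "parent" side of the backref to the referenced row. In isolation, sketched declaratively:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()

    class Node(Base):
        __tablename__ = 'nodes'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('nodes.id'))

        # one-to-many toward children; the backref resolves the
        # many-to-one direction via remote_side
        children = relationship(
            "Node",
            backref=backref('parent', remote_side=[id]),
        )
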
index e1304e26e5d182239a57ee4c6116b4ab7129c1b3..19d4f923b6d0bee38e4889ad4f558e4796ad2715 100644 (file)
@@ -111,10 +111,10 @@ def produce_test(parent, child, direction):
             parent_class = parent_mapper.class_
             child_class = child_mapper.class_
 
-            parent_mapper.add_property("collection", 
-                                relationship(child_mapper, 
-                                            primaryjoin=relationshipjoin, 
-                                            foreign_keys=foreign_keys, 
+            parent_mapper.add_property("collection",
+                                relationship(child_mapper,
+                                            primaryjoin=relationshipjoin,
+                                            foreign_keys=foreign_keys,
                                             remote_side=remote_side, uselist=True))
 
             sess = create_session()
index 985d892d81bc53f0d45541d802f6bf9d3f7c3a87..6ff989e736333d305871573ac7ce68d34384a4e1 100644 (file)
@@ -29,17 +29,17 @@ class RelationshipTest1(fixtures.MappedTest):
         global people, managers
 
         people = Table('people', metadata,
-           Column('person_id', Integer, Sequence('person_id_seq', 
-                                        optional=True), 
+           Column('person_id', Integer, Sequence('person_id_seq',
+                                        optional=True),
                                         primary_key=True),
-           Column('manager_id', Integer, 
-                                ForeignKey('managers.person_id', 
+           Column('manager_id', Integer,
+                                ForeignKey('managers.person_id',
                                 use_alter=True, name="mpid_fq")),
            Column('name', String(50)),
            Column('type', String(30)))
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
            Column('status', String(30)),
            Column('manager_name', String(50))
@@ -64,7 +64,7 @@ class RelationshipTest1(fixtures.MappedTest):
         mapper(Manager, managers, inherits=Person,
                inherit_condition=people.c.person_id==managers.c.person_id)
 
-        eq_(class_mapper(Person).get_property('manager').synchronize_pairs, 
+        eq_(class_mapper(Person).get_property('manager').synchronize_pairs,
                 [(managers.c.person_id,people.c.manager_id)])
 
         session = create_session()
@@ -86,9 +86,9 @@ class RelationshipTest1(fixtures.MappedTest):
             pass
 
         mapper(Person, people)
-        mapper(Manager, managers, inherits=Person, 
+        mapper(Manager, managers, inherits=Person,
                             inherit_condition=people.c.person_id==
-                                        managers.c.person_id, 
+                                        managers.c.person_id,
         properties={
             'employee':relationship(Person, primaryjoin=(
                                             people.c.manager_id ==
@@ -115,7 +115,7 @@ class RelationshipTest2(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, managers, data
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
            Column('name', String(50)),
            Column('type', String(30)))
@@ -128,7 +128,7 @@ class RelationshipTest2(fixtures.MappedTest):
            )
 
         data = Table('data', metadata,
-            Column('person_id', Integer, ForeignKey('managers.person_id'), 
+            Column('person_id', Integer, ForeignKey('managers.person_id'),
                                                 primary_key=True),
             Column('data', String(30))
             )
@@ -155,14 +155,14 @@ class RelationshipTest2(fixtures.MappedTest):
         if jointype == "join1":
             poly_union = polymorphic_union({
                 'person':people.select(people.c.type=='person'),
-                'manager':join(people, managers, 
+                'manager':join(people, managers,
                         people.c.person_id==managers.c.person_id)
             }, None)
             polymorphic_on=poly_union.c.type
         elif jointype == "join2":
             poly_union = polymorphic_union({
                 'person':people.select(people.c.type=='person'),
-                'manager':managers.join(people, 
+                'manager':managers.join(people,
                         people.c.person_id==managers.c.person_id)
             }, None)
             polymorphic_on=poly_union.c.type
@@ -176,35 +176,35 @@ class RelationshipTest2(fixtures.MappedTest):
                     self.data = data
             mapper(Data, data)
 
-        mapper(Person, people, 
-                            with_polymorphic=('*', poly_union), 
-                            polymorphic_identity='person', 
+        mapper(Person, people,
+                            with_polymorphic=('*', poly_union),
+                            polymorphic_identity='person',
                             polymorphic_on=polymorphic_on)
 
         if usedata:
-            mapper(Manager, managers, 
-                            inherits=Person, 
+            mapper(Manager, managers,
+                            inherits=Person,
                             inherit_condition=people.c.person_id==
-                                    managers.c.person_id, 
+                                    managers.c.person_id,
                             polymorphic_identity='manager',
                   properties={
                     'colleague':relationship(
-                                Person, 
+                                Person,
                                 primaryjoin=managers.c.manager_id==
-                                    people.c.person_id, 
+                                    people.c.person_id,
                                 lazy='select', uselist=False),
                     'data':relationship(Data, uselist=False)
                  }
             )
         else:
-            mapper(Manager, managers, inherits=Person, 
+            mapper(Manager, managers, inherits=Person,
                                 inherit_condition=people.c.person_id==
-                                    managers.c.person_id, 
+                                    managers.c.person_id,
                                     polymorphic_identity='manager',
                   properties={
-                    'colleague':relationship(Person, 
+                    'colleague':relationship(Person,
                                 primaryjoin=managers.c.manager_id==
-                                        people.c.person_id, 
+                                        people.c.person_id,
                                         lazy='select', uselist=False)
                  }
             )
@@ -231,20 +231,20 @@ class RelationshipTest3(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, managers, data
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                             test_needs_autoincrement=True),
            Column('colleague_id', Integer, ForeignKey('people.person_id')),
            Column('name', String(50)),
            Column('type', String(30)))
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                             primary_key=True),
            Column('status', String(30)),
            )
 
         data = Table('data', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                             primary_key=True),
            Column('data', String(30))
            )
@@ -263,13 +263,13 @@ def _generate_test(jointype="join1", usedata=False):
 
         if jointype == "join1":
             poly_union = polymorphic_union({
-                'manager':managers.join(people, 
+                'manager':managers.join(people,
                                     people.c.person_id==managers.c.person_id),
                 'person':people.select(people.c.type=='person')
             }, None)
         elif jointype =="join2":
             poly_union = polymorphic_union({
-                'manager':join(people, managers, 
+                'manager':join(people, managers,
                                     people.c.person_id==managers.c.person_id),
                 'person':people.select(people.c.type=='person')
             }, None)
@@ -282,34 +282,34 @@ def _generate_test(jointype="join1", usedata=False):
             mapper(Data, data)
 
         if usedata:
-            mapper(Person, people, 
-                                with_polymorphic=('*', poly_union), 
-                                polymorphic_identity='person', 
+            mapper(Person, people,
+                                with_polymorphic=('*', poly_union),
+                                polymorphic_identity='person',
                                 polymorphic_on=people.c.type,
                   properties={
-                    'colleagues':relationship(Person, 
+                    'colleagues':relationship(Person,
                                 primaryjoin=people.c.colleague_id==
-                                        people.c.person_id, 
-                                remote_side=people.c.colleague_id, 
+                                        people.c.person_id,
+                                remote_side=people.c.colleague_id,
                                 uselist=True),
                     'data':relationship(Data, uselist=False)
                     }
             )
         else:
-            mapper(Person, people, 
-                    with_polymorphic=('*', poly_union), 
-                    polymorphic_identity='person', 
+            mapper(Person, people,
+                    with_polymorphic=('*', poly_union),
+                    polymorphic_identity='person',
                     polymorphic_on=people.c.type,
                   properties={
-                    'colleagues':relationship(Person, 
+                    'colleagues':relationship(Person,
                         primaryjoin=people.c.colleague_id==people.c.person_id,
                         remote_side=people.c.colleague_id, uselist=True)
                     }
             )
 
-        mapper(Manager, managers, inherits=Person, 
+        mapper(Manager, managers, inherits=Person,
                     inherit_condition=people.c.person_id==
-                        managers.c.person_id, 
+                        managers.c.person_id,
                         polymorphic_identity='manager')
 
         sess = create_session()
@@ -355,22 +355,22 @@ class RelationshipTest4(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, engineers, managers, cars
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
            Column('name', String(50)))
 
         engineers = Table('engineers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
            Column('status', String(30)))
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
            Column('longer_status', String(70)))
 
         cars = Table('cars', metadata,
-           Column('car_id', Integer, primary_key=True, 
+           Column('car_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
            Column('owner', Integer, ForeignKey('people.person_id')))
 
@@ -411,17 +411,17 @@ class RelationshipTest4(fixtures.MappedTest):
                 'manager':people.join(managers),
             }, "type", 'employee_join')
 
-        person_mapper   = mapper(Person, people, 
-                                    with_polymorphic=('*', employee_join), 
-                                    polymorphic_on=employee_join.c.type, 
+        person_mapper   = mapper(Person, people,
+                                    with_polymorphic=('*', employee_join),
+                                    polymorphic_on=employee_join.c.type,
                                     polymorphic_identity='person')
-        engineer_mapper = mapper(Engineer, engineers, 
-                                    inherits=person_mapper, 
+        engineer_mapper = mapper(Engineer, engineers,
+                                    inherits=person_mapper,
                                     polymorphic_identity='engineer')
-        manager_mapper  = mapper(Manager, managers, 
-                                    inherits=person_mapper, 
+        manager_mapper  = mapper(Manager, managers,
+                                    inherits=person_mapper,
                                     polymorphic_identity='manager')
-        car_mapper      = mapper(Car, cars, 
+        car_mapper      = mapper(Car, cars,
                                     properties= {'employee':
                                             relationship(person_mapper)})
 
@@ -485,23 +485,23 @@ class RelationshipTest5(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, engineers, managers, cars
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
            Column('name', String(50)),
            Column('type', String(50)))
 
         engineers = Table('engineers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                 primary_key=True),
            Column('status', String(30)))
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                 primary_key=True),
            Column('longer_status', String(70)))
 
         cars = Table('cars', metadata,
-           Column('car_id', Integer, primary_key=True, 
+           Column('car_id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
            Column('owner', Integer, ForeignKey('people.person_id')))
 
@@ -530,14 +530,14 @@ class RelationshipTest5(fixtures.MappedTest):
             def __repr__(self):
                 return "Car number %d" % self.car_id
 
-        person_mapper   = mapper(Person, people, 
-                                    polymorphic_on=people.c.type, 
+        person_mapper   = mapper(Person, people,
+                                    polymorphic_on=people.c.type,
                                     polymorphic_identity='person')
-        engineer_mapper = mapper(Engineer, engineers, 
-                                    inherits=person_mapper, 
+        engineer_mapper = mapper(Engineer, engineers,
+                                    inherits=person_mapper,
                                     polymorphic_identity='engineer')
-        manager_mapper  = mapper(Manager, managers, 
-                                    inherits=person_mapper, 
+        manager_mapper  = mapper(Manager, managers,
+                                    inherits=person_mapper,
                                     polymorphic_identity='manager')
         car_mapper      = mapper(Car, cars, properties= {
                                     'manager':relationship(
@@ -564,15 +564,15 @@ class RelationshipTest6(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, managers, data
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
            Column('name', String(50)),
            )
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                 primary_key=True),
-           Column('colleague_id', Integer, 
+           Column('colleague_id', Integer,
                             ForeignKey('managers.person_id')),
            Column('status', String(30)),
            )
@@ -585,13 +585,13 @@ class RelationshipTest6(fixtures.MappedTest):
 
         mapper(Person, people)
 
-        mapper(Manager, managers, inherits=Person, 
+        mapper(Manager, managers, inherits=Person,
                             inherit_condition=people.c.person_id==\
                                     managers.c.person_id,
               properties={
-                'colleague':relationship(Manager, 
+                'colleague':relationship(Manager,
                                 primaryjoin=managers.c.colleague_id==\
-                                    managers.c.person_id, 
+                                    managers.c.person_id,
                                     lazy='select', uselist=False)
              }
         )
@@ -613,7 +613,7 @@ class RelationshipTest7(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global people, engineers, managers, cars, offroad_cars
         cars = Table('cars', metadata,
-                Column('car_id', Integer, primary_key=True, 
+                Column('car_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                 Column('name', String(30)))
 
@@ -622,20 +622,20 @@ class RelationshipTest7(fixtures.MappedTest):
                                     nullable=False,primary_key=True))
 
         people = Table('people', metadata,
-                Column('person_id', Integer, primary_key=True, 
+                Column('person_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
-                Column('car_id', Integer, ForeignKey('cars.car_id'), 
+                Column('car_id', Integer, ForeignKey('cars.car_id'),
                                     nullable=False),
                 Column('name', String(50)))
 
         engineers = Table('engineers', metadata,
-                Column('person_id', Integer, ForeignKey('people.person_id'), 
+                Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
                 Column('field', String(30)))
 
 
         managers = Table('managers', metadata,
-                Column('person_id', Integer, ForeignKey('people.person_id'), 
+                Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
                 Column('category', String(70)))
 
@@ -659,12 +659,12 @@ class RelationshipTest7(fixtures.MappedTest):
 
         class Engineer(Person):
             def __repr__(self):
-                return "Engineer %s, field %s" % (self.name, 
+                return "Engineer %s, field %s" % (self.name,
                                                 self.field)
 
         class Manager(Person):
             def __repr__(self):
-                return "Manager %s, category %s" % (self.name, 
+                return "Manager %s, category %s" % (self.name,
                                                 self.category)
 
         class Car(PersistentObject):
@@ -686,7 +686,7 @@ class RelationshipTest7(fixtures.MappedTest):
         car_join = polymorphic_union(
             {
                 'car' : cars.outerjoin(offroad_cars).\
-                        select(offroad_cars.c.car_id == None, 
+                        select(offroad_cars.c.car_id == None,
                                 fold_equivalents=True),
                 'offroad' : cars.join(offroad_cars)
             }, "type", 'car_join')
@@ -695,20 +695,20 @@ class RelationshipTest7(fixtures.MappedTest):
                 with_polymorphic=('*', car_join) ,polymorphic_on=car_join.c.type,
                 polymorphic_identity='car',
                 )
-        offroad_car_mapper = mapper(Offraod_Car, offroad_cars, 
+        offroad_car_mapper = mapper(Offraod_Car, offroad_cars,
                                     inherits=car_mapper, polymorphic_identity='offroad')
         person_mapper = mapper(Person, people,
-                with_polymorphic=('*', employee_join), 
+                with_polymorphic=('*', employee_join),
                 polymorphic_on=employee_join.c.type,
                 polymorphic_identity='person',
                 properties={
                     'car':relationship(car_mapper)
                     })
-        engineer_mapper = mapper(Engineer, engineers, 
-                                        inherits=person_mapper, 
+        engineer_mapper = mapper(Engineer, engineers,
+                                        inherits=person_mapper,
                                         polymorphic_identity='engineer')
-        manager_mapper  = mapper(Manager, managers, 
-                                        inherits=person_mapper, 
+        manager_mapper  = mapper(Manager, managers,
+                                        inherits=person_mapper,
                                         polymorphic_identity='manager')
 
         session = create_session()
@@ -735,13 +735,13 @@ class RelationshipTest8(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global taggable, users
         taggable = Table('taggable', metadata,
-                         Column('id', Integer, primary_key=True, 
+                         Column('id', Integer, primary_key=True,
                                             test_needs_autoincrement=True),
                          Column('type', String(30)),
                          Column('owner_id', Integer, ForeignKey('taggable.id')),
                          )
         users = Table ('users', metadata,
-                       Column('id', Integer, ForeignKey('taggable.id'), 
+                       Column('id', Integer, ForeignKey('taggable.id'),
                                             primary_key=True),
                        Column('data', String(50)),
                        )
@@ -753,9 +753,9 @@ class RelationshipTest8(fixtures.MappedTest):
         class User(Taggable):
             pass
 
-        mapper( Taggable, taggable, 
-                    polymorphic_on=taggable.c.type, 
-                    polymorphic_identity='taggable', 
+        mapper( Taggable, taggable,
+                    polymorphic_on=taggable.c.type,
+                    polymorphic_identity='taggable',
         properties = {
             'owner' : relationship (User,
                        primaryjoin=taggable.c.owner_id ==taggable.c.id,
@@ -764,7 +764,7 @@ class RelationshipTest8(fixtures.MappedTest):
         })
 
 
-        mapper(User, users, inherits=Taggable, 
+        mapper(User, users, inherits=Taggable,
                 polymorphic_identity='user',
                inherit_condition=users.c.id == taggable.c.id,
                )
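
polymorphic_union(), which these RelationshipTest variants construct in several shapes, unions one selectable per class into a single polymorphic selectable. A condensed sketch mirroring the tables above (typecolname=None because each selectable already carries the 'type' discriminator):

    from sqlalchemy import (Column, ForeignKey, Integer, MetaData,
                            String, Table)
    from sqlalchemy.orm import polymorphic_union

    m = MetaData()
    people = Table('people', m,
        Column('person_id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('type', String(30)))
    managers = Table('managers', m,
        Column('person_id', Integer,
               ForeignKey('people.person_id'), primary_key=True),
        Column('status', String(30)))

    # one SELECT per class; rows are tagged by the existing
    # 'type' column rather than a synthetic literal
    poly_union = polymorphic_union({
        'person': people.select(people.c.type == 'person'),
        'manager': people.join(managers),
    }, None, 'pjoin')
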
@@ -796,33 +796,33 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
         metadata = MetaData(testing.db)
         # table definitions
         status = Table('status', metadata,
-           Column('status_id', Integer, primary_key=True, 
+           Column('status_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
            Column('name', String(20)))
 
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
-           Column('status_id', Integer, ForeignKey('status.status_id'), 
+           Column('status_id', Integer, ForeignKey('status.status_id'),
                                     nullable=False),
            Column('name', String(50)))
 
         engineers = Table('engineers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
            Column('field', String(30)))
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                     primary_key=True),
            Column('category', String(70)))
 
         cars = Table('cars', metadata,
-           Column('car_id', Integer, primary_key=True, 
+           Column('car_id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
-           Column('status_id', Integer, ForeignKey('status.status_id'), 
+           Column('status_id', Integer, ForeignKey('status.status_id'),
                                         nullable=False),
-           Column('owner', Integer, ForeignKey('people.person_id'), 
+           Column('owner', Integer, ForeignKey('people.person_id'),
                                         nullable=False))
 
         metadata.create_all()
@@ -868,18 +868,18 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
 
         status_mapper   = mapper(Status, status)
         person_mapper   = mapper(Person, people,
-            with_polymorphic=('*', employee_join), 
+            with_polymorphic=('*', employee_join),
             polymorphic_on=employee_join.c.type,
-            polymorphic_identity='person', 
+            polymorphic_identity='person',
             properties={'status':relationship(status_mapper)})
-        engineer_mapper = mapper(Engineer, engineers, 
-                                        inherits=person_mapper, 
+        engineer_mapper = mapper(Engineer, engineers,
+                                        inherits=person_mapper,
                                         polymorphic_identity='engineer')
-        manager_mapper  = mapper(Manager, managers, 
-                                        inherits=person_mapper, 
+        manager_mapper  = mapper(Manager, managers,
+                                        inherits=person_mapper,
                                         polymorphic_identity='manager')
         car_mapper      = mapper(Car, cars, properties= {
-                    'employee':relationship(person_mapper), 
+                    'employee':relationship(person_mapper),
                     'status':relationship(status_mapper)})
 
         session = create_session()
@@ -891,10 +891,10 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
         session.add(dead)
         session.flush()
 
-        # TODO: we haven't created assertions for all 
+        # TODO: we haven't created assertions for all
         # the data combinations created here
 
-        # creating 5 managers named from M1 to M5 
+        # creating 5 managers named from M1 to M5
         # and 5 engineers named from E1 to E5
         # M4, M5, E4 and E5 are dead
         for i in range(1,5):
@@ -933,13 +933,13 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
                             "status Status active]")
         r = session.query(Engineer).join('status').\
                         filter(Person.name.in_(
-                            ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) & 
+                            ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) &
                             (status.c.name=="active")).order_by(Person.name)
         eq_(str(list(r)), "[Engineer E2, field X, status Status "
                             "active, Engineer E3, field X, status "
                             "Status active]")
 
-        r = session.query(Person).filter(exists([1], 
+        r = session.query(Person).filter(exists([1],
                             Car.owner==Person.person_id))
         eq_(str(list(r)), "[Engineer E4, field X, status Status dead]")
 
@@ -949,20 +949,20 @@ class MultiLevelTest(fixtures.MappedTest):
         global table_Employee, table_Engineer, table_Manager
         table_Employee = Table( 'Employee', metadata,
             Column( 'name', type_= String(100), ),
-            Column( 'id', primary_key= True, type_= Integer, 
+            Column( 'id', primary_key= True, type_= Integer,
                                     test_needs_autoincrement=True),
             Column( 'atype', type_= String(100), ),
         )
 
         table_Engineer = Table( 'Engineer', metadata,
             Column( 'machine', type_= String(100), ),
-            Column( 'id', Integer, ForeignKey( 'Employee.id', ), 
+            Column( 'id', Integer, ForeignKey( 'Employee.id', ),
                                         primary_key= True),
         )
 
         table_Manager = Table( 'Manager', metadata,
             Column( 'duties', type_= String(100), ),
-            Column( 'id', Integer, ForeignKey( 'Engineer.id', ), 
+            Column( 'id', Integer, ForeignKey( 'Engineer.id', ),
                                         primary_key= True, ),
         )
 
@@ -971,23 +971,23 @@ class MultiLevelTest(fixtures.MappedTest):
             def set( me, **kargs):
                 for k,v in kargs.iteritems(): setattr( me, k, v)
                 return me
-            def __str__(me): 
+            def __str__(me):
                 return str(me.__class__.__name__)+':'+str(me.name)
             __repr__ = __str__
-        class Engineer(Employee): 
+        class Engineer(Employee):
             pass
-        class Manager(Engineer): 
+        class Manager(Engineer):
             pass
 
         pu_Employee = polymorphic_union( {
-                    'Manager':  table_Employee.join( 
+                    'Manager':  table_Employee.join(
                                     table_Engineer).join( table_Manager),
-                    'Engineer': select([table_Employee, 
-                                    table_Engineer.c.machine], 
-                                    table_Employee.c.atype == 'Engineer', 
+                    'Engineer': select([table_Employee,
+                                    table_Engineer.c.machine],
+                                    table_Employee.c.atype == 'Engineer',
                                     from_obj=[
                                     table_Employee.join(table_Engineer)]),
-                    'Employee': table_Employee.select( 
+                    'Employee': table_Employee.select(
                                     table_Employee.c.atype == 'Employee'),
                 }, None, 'pu_employee', )
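
The polymorphic_union() call above is the core of the union-based loading strategy: it builds a UNION of one selectable per class, NULL-padding columns that a given class lacks. A self-contained sketch of the call shape; the second argument names a generated discriminator column (the test passes None because atype already plays that role):

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import polymorphic_union

    metadata = MetaData()
    employee = Table('employee', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(100)),
        Column('atype', String(100)))
    engineer = Table('engineer', metadata,
        Column('id', Integer, ForeignKey('employee.id'), primary_key=True),
        Column('machine', String(100)))

    # one entry per class; each value is a selectable yielding that
    # class's full row, and the union NULL-pads 'machine' for plain
    # employees
    pu = polymorphic_union({
        'engineer': employee.join(engineer),
        'employee': employee.select(employee.c.atype == 'employee'),
    }, 'ptype', 'pu_employee')
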
 
@@ -1000,9 +1000,9 @@ class MultiLevelTest(fixtures.MappedTest):
         pu_Engineer = polymorphic_union( {
                     'Manager':  table_Employee.join( table_Engineer).
                                     join( table_Manager),
-                    'Engineer': select([table_Employee, 
-                                    table_Engineer.c.machine], 
-                                    table_Employee.c.atype == 'Engineer', 
+                    'Engineer': select([table_Employee,
+                                    table_Engineer.c.machine],
+                                    table_Employee.c.atype == 'Engineer',
                                     from_obj=[
                                         table_Employee.join(table_Engineer)
                                     ]),
@@ -1025,7 +1025,7 @@ class MultiLevelTest(fixtures.MappedTest):
 
         a = Employee().set( name= 'one')
         b = Engineer().set( egn= 'two', machine= 'any')
-        c = Manager().set( name= 'head', machine= 'fast', 
+        c = Manager().set( name= 'head', machine= 'fast',
                                 duties= 'many')
 
         session = create_session()
@@ -1044,13 +1044,13 @@ class ManyToManyPolyTest(fixtures.MappedTest):
             collection_table
         base_item_table = Table(
             'base_item', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('child_name', String(255), default=None))
 
         item_table = Table(
             'item', metadata,
-            Column('id', Integer, ForeignKey('base_item.id'), 
+            Column('id', Integer, ForeignKey('base_item.id'),
                                         primary_key=True),
             Column('dummy', Integer, default=0))
 
@@ -1061,7 +1061,7 @@ class ManyToManyPolyTest(fixtures.MappedTest):
 
         collection_table = Table(
             'collection', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('name', Unicode(255)))
 
@@ -1084,8 +1084,8 @@ class ManyToManyPolyTest(fixtures.MappedTest):
             with_polymorphic=('*', item_join),
             polymorphic_on=base_item_table.c.child_name,
             polymorphic_identity='BaseItem',
-            properties=dict(collections=relationship(Collection, 
-                                    secondary=base_item_collection_table, 
+            properties=dict(collections=relationship(Collection,
+                                    secondary=base_item_collection_table,
                                     backref="items")))
 
         mapper(
@@ -1102,7 +1102,7 @@ class CustomPKTest(fixtures.MappedTest):
     def define_tables(cls, metadata):
         global t1, t2
         t1 = Table('t1', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('type', String(30), nullable=False),
             Column('data', String(30)))
@@ -1127,9 +1127,9 @@ class CustomPKTest(fixtures.MappedTest):
         d['t2'] = t1.join(t2)
         pjoin = polymorphic_union(d, None, 'pjoin')
 
-        mapper(T1, t1, polymorphic_on=t1.c.type, 
-                            polymorphic_identity='t1', 
-                            with_polymorphic=('*', pjoin), 
+        mapper(T1, t1, polymorphic_on=t1.c.type,
+                            polymorphic_identity='t1',
+                            with_polymorphic=('*', pjoin),
                             primary_key=[pjoin.c.id])
         mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
         ot1 = T1()
@@ -1140,7 +1140,7 @@ class CustomPKTest(fixtures.MappedTest):
         sess.flush()
         sess.expunge_all()
 
-        # query using get(), using only one value.  
+        # query using get(), using only one value.
         # this requires the select_table mapper
         # has the same single-col primary key.
         assert sess.query(T1).get(ot1.id).id == ot1.id
@@ -1165,8 +1165,8 @@ class CustomPKTest(fixtures.MappedTest):
         d['t2'] = t1.join(t2)
         pjoin = polymorphic_union(d, None, 'pjoin')
 
-        mapper(T1, t1, polymorphic_on=t1.c.type, 
-                            polymorphic_identity='t1', 
+        mapper(T1, t1, polymorphic_on=t1.c.type,
+                            polymorphic_identity='t1',
                             with_polymorphic=('*', pjoin))
         mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
         assert len(class_mapper(T1).primary_key) == 1
@@ -1179,7 +1179,7 @@ class CustomPKTest(fixtures.MappedTest):
         sess.flush()
         sess.expunge_all()
 
-        # query using get(), using only one value.  this requires the 
+        # query using get(), using only one value.  this requires the
         # select_table mapper
         # has the same single-col primary key.
         assert sess.query(T1).get(ot1.id).id == ot1.id
@@ -1194,7 +1194,7 @@ class InheritingEagerTest(fixtures.MappedTest):
         global people, employees, tags, peopleTags
 
         people = Table('people', metadata,
-                           Column('id', Integer, primary_key=True, 
+                           Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
                            Column('_type', String(30), nullable=False),
                           )
@@ -1206,7 +1206,7 @@ class InheritingEagerTest(fixtures.MappedTest):
                         )
 
         tags = Table('tags', metadata,
-                           Column('id', Integer, primary_key=True, 
+                           Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                            Column('label', String(50), nullable=False),
                        )
@@ -1233,10 +1233,10 @@ class InheritingEagerTest(fixtures.MappedTest):
            def __init__(self, label):
                self.label = label
 
-        mapper(Person, people, polymorphic_on=people.c._type, 
+        mapper(Person, people, polymorphic_on=people.c._type,
                             polymorphic_identity='person', properties={
-            'tags': relationship(Tag, 
-                            secondary=peopleTags, 
+            'tags': relationship(Tag,
+                            secondary=peopleTags,
                             backref='people', lazy='joined')
         })
         mapper(Employee, employees, inherits=Person,
@@ -1264,24 +1264,24 @@ class InheritingEagerTest(fixtures.MappedTest):
 class MissingPolymorphicOnTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        tablea = Table('tablea', metadata, 
-            Column('id', Integer, primary_key=True, 
+        tablea = Table('tablea', metadata,
+            Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('adata', String(50)),
             )
-        tableb = Table('tableb', metadata, 
-            Column('id', Integer, primary_key=True, 
+        tableb = Table('tableb', metadata,
+            Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('aid', Integer, ForeignKey('tablea.id')),
             Column('data', String(50)),
             )
-        tablec = Table('tablec', metadata, 
-            Column('id', Integer, ForeignKey('tablea.id'), 
+        tablec = Table('tablec', metadata,
+            Column('id', Integer, ForeignKey('tablea.id'),
                                     primary_key=True),
             Column('cdata', String(50)),
             )
-        tabled = Table('tabled', metadata, 
-            Column('id', Integer, ForeignKey('tablec.id'), 
+        tabled = Table('tabled', metadata,
+            Column('id', Integer, ForeignKey('tablec.id'),
                                     primary_key=True),
             Column('ddata', String(50)),
             )
@@ -1303,13 +1303,13 @@ class MissingPolymorphicOnTest(fixtures.MappedTest):
         A, B, C, D = self.classes.A, self.classes.B, self.classes.C, \
             self.classes.D
         poly_select = select(
-                        [tablea, tableb.c.data.label('discriminator')], 
+                        [tablea, tableb.c.data.label('discriminator')],
                         from_obj=tablea.join(tableb)).alias('poly')
 
         mapper(B, tableb)
-        mapper(A, tablea, 
+        mapper(A, tablea,
                     with_polymorphic=('*', poly_select),
-                     polymorphic_on=poly_select.c.discriminator, 
+                     polymorphic_on=poly_select.c.discriminator,
         properties={
             'b':relationship(B, uselist=False)
         })
@@ -1324,9 +1324,9 @@ class MissingPolymorphicOnTest(fixtures.MappedTest):
         sess.flush()
         sess.expunge_all()
         eq_(
-            sess.query(A).all(), 
+            sess.query(A).all(),
             [
-                C(cdata='c1', adata='a1'), 
+                C(cdata='c1', adata='a1'),
                 D(cdata='c2', adata='a2', ddata='d2')
             ]
         )
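
The mapping in MissingPolymorphicOnTest is notable in that the discriminator is not a column of the base table at all; it comes off a labeled column of a join wrapped in a select. A condensed sketch of that shape with the classical API, reusing the fixture names above:

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, select)
    from sqlalchemy.orm import mapper

    metadata = MetaData()
    tablea = Table('tablea', metadata,
        Column('id', Integer, primary_key=True),
        Column('adata', String(50)))
    tableb = Table('tableb', metadata,
        Column('id', Integer, primary_key=True),
        Column('aid', Integer, ForeignKey('tablea.id')),
        Column('data', String(50)))

    class A(object): pass

    # the discriminator exists only on the aliased select, so both
    # with_polymorphic and polymorphic_on point into it
    poly_select = select(
        [tablea, tableb.c.data.label('discriminator')],
        from_obj=tablea.join(tableb)).alias('poly')

    mapper(A, tablea,
           with_polymorphic=('*', poly_select),
           polymorphic_on=poly_select.c.discriminator)
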
@@ -1335,17 +1335,17 @@ class JoinedInhAdjacencyTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('people', metadata,
-                 Column('id', Integer, primary_key=True, 
+                 Column('id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
                  Column('type', String(30)),
                  )
         Table('users', metadata,
-              Column('id', Integer, ForeignKey('people.id'), 
+              Column('id', Integer, ForeignKey('people.id'),
                                 primary_key=True),
               Column('supervisor_id', Integer, ForeignKey('people.id')),
         )
         Table('dudes', metadata,
-              Column('id', Integer, ForeignKey('users.id'), 
+              Column('id', Integer, ForeignKey('users.id'),
                                 primary_key=True),
         )
 
@@ -1463,13 +1463,13 @@ class Ticket2419Test(fixtures.DeclarativeMappedTest):
         class A(Base):
             __tablename__ = "a"
 
-            id = Column(Integer, primary_key=True, 
+            id = Column(Integer, primary_key=True,
                         test_needs_autoincrement=True)
 
         class B(Base):
             __tablename__ = "b"
 
-            id = Column(Integer, primary_key=True, 
+            id = Column(Integer, primary_key=True,
                         test_needs_autoincrement=True)
             ds = relationship("D")
             es = relationship("E")
@@ -1494,7 +1494,7 @@ class Ticket2419Test(fixtures.DeclarativeMappedTest):
                             test_needs_autoincrement=True)
             b_id = Column(Integer, ForeignKey('b.id'))
 
-    @testing.fails_on("oracle", 
+    @testing.fails_on("oracle",
             "seems like oracle's query engine can't "
             "handle this, not clear if there's an "
             "expression-level bug on our end though")
index 840270e589f9e375bef37ffaf95d615d863f02bc..a1118aa86fa65aa44b2cf6940419b7c75f9f53fc 100644
@@ -176,10 +176,10 @@ def _generate_round_trip_test(use_unions=False, use_joins=False):
                 'magazine': relationship(Magazine, backref=backref('pages', order_by=page_table.c.page_no))
             })
 
-        classified_page_mapper = mapper(ClassifiedPage, 
-                                    classified_page_table, 
-                                    inherits=magazine_page_mapper, 
-                                    polymorphic_identity='c', 
+        classified_page_mapper = mapper(ClassifiedPage,
+                                    classified_page_table,
+                                    inherits=magazine_page_mapper,
+                                    polymorphic_identity='c',
                                     primary_key=[page_table.c.id])
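
The primary_key=[page_table.c.id] override above exists because ClassifiedPage spans several joined tables, and in the SQLAlchemy of this era the mapper's identity could otherwise come out as a composite of each table's id column. Pinning it to the base id keeps a single-column identity, which Query.get() depends on. A reduced two-level sketch (names abbreviated from the test):

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.orm import mapper

    metadata = MetaData()
    page = Table('page', metadata,
        Column('id', Integer, primary_key=True),
        Column('page_no', Integer))
    magazine_page = Table('magazine_page', metadata,
        Column('page_id', Integer, ForeignKey('page.id'), primary_key=True))

    class Page(object): pass
    class MagazinePage(Page): pass

    mapper(Page, page)
    # collapse the joined mapper's identity to the single base id
    mapper(MagazinePage, magazine_page, inherits=Page,
           primary_key=[page.c.id])
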
 
 
index 6939479b11cee978296a457f3dfcf3874dc04b44..5b5844b7032c7e3065d427c3010c4e7c87d3d335 100644
@@ -27,20 +27,20 @@ class PolymorphTest(fixtures.MappedTest):
         global companies, people, engineers, managers, boss
 
         companies = Table('companies', metadata,
-           Column('company_id', Integer, primary_key=True, 
+           Column('company_id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
            Column('name', String(50)))
 
         people = Table('people', metadata,
-           Column('person_id', Integer, primary_key=True, 
+           Column('person_id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
-           Column('company_id', Integer, ForeignKey('companies.company_id'), 
+           Column('company_id', Integer, ForeignKey('companies.company_id'),
                                         nullable=False),
            Column('name', String(50)),
            Column('type', String(30)))
 
         engineers = Table('engineers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                         primary_key=True),
            Column('status', String(30)),
            Column('engineer_name', String(50)),
@@ -48,14 +48,14 @@ class PolymorphTest(fixtures.MappedTest):
           )
 
         managers = Table('managers', metadata,
-           Column('person_id', Integer, ForeignKey('people.person_id'), 
+           Column('person_id', Integer, ForeignKey('people.person_id'),
                                         primary_key=True),
            Column('status', String(30)),
            Column('manager_name', String(50))
            )
 
         boss = Table('boss', metadata,
-            Column('boss_id', Integer, ForeignKey('managers.person_id'), 
+            Column('boss_id', Integer, ForeignKey('managers.person_id'),
                                         primary_key=True),
             Column('golf_swing', String(30)),
             )
@@ -74,14 +74,14 @@ class InsertOrderTest(PolymorphTest):
                 'person':people.select(people.c.type=='person'),
             }, None, 'pjoin')
 
-        person_mapper = mapper(Person, people, 
-                                with_polymorphic=('*', person_join), 
-                                polymorphic_on=person_join.c.type, 
+        person_mapper = mapper(Person, people,
+                                with_polymorphic=('*', person_join),
+                                polymorphic_on=person_join.c.type,
                                 polymorphic_identity='person')
 
-        mapper(Engineer, engineers, inherits=person_mapper, 
+        mapper(Engineer, engineers, inherits=person_mapper,
                                 polymorphic_identity='engineer')
-        mapper(Manager, managers, inherits=person_mapper, 
+        mapper(Manager, managers, inherits=person_mapper,
                                 polymorphic_identity='manager')
         mapper(Company, companies, properties={
             'employees': relationship(Person,
@@ -113,16 +113,16 @@ class RoundTripTest(PolymorphTest):
 def _generate_round_trip_test(include_base, lazy_relationship,
                                     redefine_colprop, with_polymorphic):
     """generates a round trip test.
-    
+
     include_base - whether or not to include the base 'person' type in
     the union.
-    
+
     lazy_relationship - whether or not the Company relationship to
     People is lazy or eager.
-    
+
     redefine_colprop - if we redefine the 'name' column to be
     'people_name' on the base Person class
-    
+
     use_literal_join - primary join condition is explicitly specified
     """
     def test_roundtrip(self):
@@ -158,21 +158,21 @@ def _generate_round_trip_test(include_base, lazy_relationship,
             manager_with_polymorphic = None
 
         if redefine_colprop:
-            person_mapper = mapper(Person, people, 
-                                with_polymorphic=person_with_polymorphic, 
-                                polymorphic_on=people.c.type, 
-                                polymorphic_identity='person', 
+            person_mapper = mapper(Person, people,
+                                with_polymorphic=person_with_polymorphic,
+                                polymorphic_on=people.c.type,
+                                polymorphic_identity='person',
                                 properties= {'person_name':people.c.name})
         else:
-            person_mapper = mapper(Person, people, 
-                                with_polymorphic=person_with_polymorphic, 
-                                polymorphic_on=people.c.type, 
+            person_mapper = mapper(Person, people,
+                                with_polymorphic=person_with_polymorphic,
+                                polymorphic_on=people.c.type,
                                 polymorphic_identity='person')
 
-        mapper(Engineer, engineers, inherits=person_mapper, 
+        mapper(Engineer, engineers, inherits=person_mapper,
                                 polymorphic_identity='engineer')
-        mapper(Manager, managers, inherits=person_mapper, 
-                                with_polymorphic=manager_with_polymorphic, 
+        mapper(Manager, managers, inherits=person_mapper,
+                                with_polymorphic=manager_with_polymorphic,
                                 polymorphic_identity='manager')
 
         mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
@@ -190,19 +190,19 @@ def _generate_round_trip_test(include_base, lazy_relationship,
             person_attribute_name = 'name'
 
         employees = [
-                Manager(status='AAB', manager_name='manager1', 
+                Manager(status='AAB', manager_name='manager1',
                             **{person_attribute_name:'pointy haired boss'}),
-                Engineer(status='BBA', engineer_name='engineer1', 
-                            primary_language='java', 
+                Engineer(status='BBA', engineer_name='engineer1',
+                            primary_language='java',
                             **{person_attribute_name:'dilbert'}),
             ]
         if include_base:
             employees.append(Person(**{person_attribute_name:'joesmith'}))
         employees += [
-            Engineer(status='CGG', engineer_name='engineer2', 
-                            primary_language='python', 
+            Engineer(status='CGG', engineer_name='engineer2',
+                            primary_language='python',
                             **{person_attribute_name:'wally'}),
-            Manager(status='ABA', manager_name='manager2', 
+            Manager(status='ABA', manager_name='manager2',
                             **{person_attribute_name:'jsmith'})
         ]
 
@@ -222,7 +222,7 @@ def _generate_round_trip_test(include_base, lazy_relationship,
         session.expunge_all()
 
         eq_(session.query(Person).filter(
-                            Person.person_id==dilbert.person_id).one(), 
+                            Person.person_id==dilbert.person_id).one(),
                             dilbert)
         session.expunge_all()
 
@@ -242,9 +242,9 @@ def _generate_round_trip_test(include_base, lazy_relationship,
             else:
                 self.assert_sql_count(testing.db, go, 6)
 
-        # test selecting from the query, using the base 
+        # test selecting from the query, using the base
         # mapped table (people) as the selection criterion.
-        # in the case of the polymorphic Person query, 
+        # in the case of the polymorphic Person query,
         # the "people" selectable should be adapted to be "person_join"
         eq_(
             session.query(Person).filter(
@@ -264,9 +264,9 @@ def _generate_round_trip_test(include_base, lazy_relationship,
             dilbert
         )
 
-        # test selecting from the query, joining against 
+        # test selecting from the query, joining against
         # an alias of the base "people" table.  test that
-        # the "palias" alias does *not* get sucked up 
+        # the "palias" alias does *not* get sucked up
         # into the "person_join" conversion.
         palias = people.alias("palias")
         dilbert = session.query(Person).get(dilbert.person_id)
@@ -287,35 +287,35 @@ def _generate_round_trip_test(include_base, lazy_relationship,
         session.expunge_all()
 
         def go():
-            session.query(Person).filter(getattr(Person, 
+            session.query(Person).filter(getattr(Person,
                             person_attribute_name)=='dilbert').first()
         self.assert_sql_count(testing.db, go, 1)
         session.expunge_all()
-        dilbert = session.query(Person).filter(getattr(Person, 
+        dilbert = session.query(Person).filter(getattr(Person,
                             person_attribute_name)=='dilbert').first()
         def go():
-            # assert that only primary table is queried for 
+            # assert that only primary table is queried for
             # already-present-in-session
-            d = session.query(Person).filter(getattr(Person, 
+            d = session.query(Person).filter(getattr(Person,
                             person_attribute_name)=='dilbert').first()
         self.assert_sql_count(testing.db, go, 1)
 
         # test standalone orphans
-        daboss = Boss(status='BBB', 
-                        manager_name='boss', 
-                        golf_swing='fore', 
+        daboss = Boss(status='BBB',
+                        manager_name='boss',
+                        golf_swing='fore',
                         **{person_attribute_name:'daboss'})
         session.add(daboss)
         assert_raises(sa_exc.DBAPIError, session.flush)
 
         c = session.query(Company).first()
         daboss.company = c
-        manager_list = [e for e in c.employees 
+        manager_list = [e for e in c.employees
                             if isinstance(e, Manager)]
         session.flush()
         session.expunge_all()
 
-        eq_(session.query(Manager).order_by(Manager.person_id).all(), 
+        eq_(session.query(Manager).order_by(Manager.person_id).all(),
                                 manager_list)
         c = session.query(Company).first()
 
@@ -337,11 +337,11 @@ for lazy_relationship in [True, False]:
         for with_polymorphic in ['unions', 'joins', 'auto', 'none']:
             if with_polymorphic == 'unions':
                 for include_base in [True, False]:
-                    _generate_round_trip_test(include_base, 
-                                    lazy_relationship, 
+                    _generate_round_trip_test(include_base,
+                                    lazy_relationship,
                                     redefine_colprop, with_polymorphic)
             else:
-                _generate_round_trip_test(False, 
-                                    lazy_relationship, 
+                _generate_round_trip_test(False,
+                                    lazy_relationship,
                                     redefine_colprop, with_polymorphic)
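
The module-level loops above stamp out one test function per parameter combination at import time, so the runner discovers them as ordinary test methods. A stripped-down sketch of the idiom; the renaming step and target class here are illustrative, not the suite's exact helpers:

    class RoundTripTest(object):
        """stand-in for the fixture TestCase the real suite uses"""

    def _generate_round_trip_test(include_base, lazy_relationship,
                                  redefine_colprop, with_polymorphic):
        def test_roundtrip(self):
            # the real body builds mappers according to the flags and
            # asserts a full save/load round trip
            pass
        # give each generated test a unique, descriptive name so the
        # runner reports each combination distinctly
        test_roundtrip.__name__ = 'test_roundtrip_%s_%s_%s_%s' % (
            include_base, lazy_relationship,
            redefine_colprop, with_polymorphic)
        setattr(RoundTripTest, test_roundtrip.__name__, test_roundtrip)

    for lazy_relationship in [True, False]:
        for redefine_colprop in [True, False]:
            for with_polymorphic in ['unions', 'joins', 'auto', 'none']:
                if with_polymorphic == 'unions':
                    for include_base in [True, False]:
                        _generate_round_trip_test(
                            include_base, lazy_relationship,
                            redefine_colprop, with_polymorphic)
                else:
                    _generate_round_trip_test(
                        False, lazy_relationship,
                        redefine_colprop, with_polymorphic)
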
 
index 50593d39c74e81f018631a9d5cc01e88c0199ea0..6d7bcb6760df2cfaef7a3b344b6198f536ba31f9 100644
@@ -39,7 +39,7 @@ class _PolymorphicTestBase(object):
 
     def test_loads_at_once(self):
         """
-        Test that all objects load from the full query, when 
+        Test that all objects load from the full query, when
         with_polymorphic is used.
         """
 
@@ -50,7 +50,7 @@ class _PolymorphicTestBase(object):
         self.assert_sql_count(testing.db, go, count)
 
     def test_primary_eager_aliasing_one(self):
-        # For both joinedload() and subqueryload(), if the original q is 
+        # For both joinedload() and subqueryload(), if the original q is
         # not loading the subclass table, the joinedload doesn't happen.
 
         sess = create_session()
@@ -89,7 +89,7 @@ class _PolymorphicTestBase(object):
 
     def test_get_one(self):
         """
-        For all mappers, ensure the primary key has been calculated as 
+        For all mappers, ensure the primary key has been calculated as
         just the "person_id" column.
         """
         sess = create_session()
@@ -405,7 +405,7 @@ class _PolymorphicTestBase(object):
     def test_join_from_columns_or_subclass_six(self):
         sess = create_session()
         if self.select_type == '':
-            # this now raises, due to [ticket:1892].  Manager.person_id 
+            # this now raises, due to [ticket:1892].  Manager.person_id
             # is now the "person_id" column on Manager. SQL is incorrect.
             assert_raises(
                 sa_exc.DBAPIError,
@@ -414,8 +414,8 @@ class _PolymorphicTestBase(object):
                           Manager.person_id == paperwork.c.person_id)
                     .order_by(Person.name).all)
         elif self.select_type == 'Unions':
-            # with the union, not something anyone would really be using 
-            # here, it joins to the full result set.  This is 0.6's 
+            # with the union, not something anyone would really be using
+            # here, it joins to the full result set.  This is 0.6's
             # behavior and is more or less wrong.
             expected = [
                 (u'dilbert',),
@@ -432,7 +432,7 @@ class _PolymorphicTestBase(object):
                     .order_by(Person.name).all(),
                 expected)
         else:
-            # when a join is present and managers.person_id is available, 
+            # when a join is present and managers.person_id is available,
             # you get the managers.
             expected = [
                 (u'dogbert',),
@@ -501,7 +501,7 @@ class _PolymorphicTestBase(object):
     # need it anymore.
     def test_polymorphic_option(self):
         """
-        Test that polymorphic loading sets state.load_path with its 
+        Test that polymorphic loading sets state.load_path with its
         actual mapper on a subclass, and not the superclass mapper.
 
         This only works for non-aliased mappers.
@@ -540,7 +540,7 @@ class _PolymorphicTestBase(object):
 
     def test_expire(self):
         """
-        Test that individual column refresh doesn't get tripped up by 
+        Test that individual column refresh doesn't get tripped up by
         the select_table mapper.
         """
 
@@ -596,7 +596,7 @@ class _PolymorphicTestBase(object):
     def test_with_polymorphic_five(self):
         sess = create_session()
         def go():
-            # limit the polymorphic join down to just "Person", 
+            # limit the polymorphic join down to just "Person",
             # overriding select_table
             eq_(sess.query(Person)
                     .with_polymorphic(Person).all(),
@@ -615,7 +615,7 @@ class _PolymorphicTestBase(object):
 
     def test_with_polymorphic_seven(self):
         sess = create_session()
-        # compare to entities without related collections to prevent 
+        # compare to entities without related collections to prevent
         # additional lazy SQL from firing on loaded entities
         eq_(sess.query(Person).with_polymorphic('*').all(),
             self._emps_wo_relationships_fixture())
@@ -673,8 +673,8 @@ class _PolymorphicTestBase(object):
 
         # query one is company->Person/Engineer->Machines
         # query two is Person/Engineer subq
-        # query three is Machines subq 
-        # (however this test can't tell if the Q was a 
+        # query three is Machines subq
+        # (however this test can't tell if the Q was a
         # lazyload or subqload ...)
         # query four is managers + boss for row #3
         # query five is managers for row #4
@@ -940,8 +940,8 @@ class _PolymorphicTestBase(object):
                 .filter(Engineer.engineer_name == 'vlad').one(),
             c2)
 
-        # same, using explicit join condition.  Query.join() must 
-        # adapt the on clause here to match the subquery wrapped around 
+        # same, using explicit join condition.  Query.join() must
+        # adapt the on clause here to match the subquery wrapped around
         # "people join engineers".
         eq_(sess.query(Company)
                 .join(Engineer, Company.company_id == Engineer.company_id)
@@ -984,10 +984,10 @@ class _PolymorphicTestBase(object):
             expected)
 
     def test_nesting_queries(self):
-        # query.statement places a flag "no_adapt" on the returned 
-        # statement.  This prevents the polymorphic adaptation in the 
-        # second "filter" from hitting it, which would pollute the 
-        # subquery and usually results in recursion overflow errors 
+        # query.statement places a flag "no_adapt" on the returned
+        # statement.  This prevents the polymorphic adaptation in the
+        # second "filter" from hitting it, which would pollute the
+        # subquery and usually results in recursion overflow errors
         # within the adaption.
         sess = create_session()
         subq = (sess.query(engineers.c.person_id)
@@ -1159,8 +1159,8 @@ class _PolymorphicTestBase(object):
 
     #def test_mixed_entities(self):
     #    sess = create_session()
-        # TODO: I think raise error on these for now.  different 
-        # inheritance/loading schemes have different results here, 
+        # TODO: I think raise error on these for now.  different
+        # inheritance/loading schemes have different results here,
         # all incorrect
         #
         # eq_(
@@ -1170,8 +1170,8 @@ class _PolymorphicTestBase(object):
     #def test_mixed_entities(self):
     #    sess = create_session()
         # eq_(sess.query(
-        #             Person.name, 
-        #             Engineer.primary_language, 
+        #             Person.name,
+        #             Engineer.primary_language,
         #             Manager.manager_name)
         #          .all(),
         #     [])
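
The test_nesting_queries comment above describes why Query.statement can be embedded in a second query safely: the rendered SELECT carries a "no_adapt" flag, so the outer polymorphic query will not rewrite it. A sketch of that nesting, assuming the Person/Engineer fixtures and an open sess from the surrounding tests:

    # .statement renders the inner query to a plain Select; the outer
    # polymorphic query leaves it un-adapted per the flag noted above
    subq = (sess.query(engineers.c.person_id)
                .filter(Engineer.primary_language == 'java')
                .statement)
    result = sess.query(Person).filter(
        Person.person_id.in_(subq)).all()
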
index 8db5f6b3b291cb3b0cbaaa3c81aa11771396ff8c..a4e19b988bcc657cb3a2506cf261e40d531095fe 100644
@@ -562,7 +562,7 @@ class SelfReferentialM2MTest(fixtures.MappedTest, AssertsCompiledSQL):
         sess.add(c1)
         sess.flush()
 
-        # test that the splicing of the join works here, doesn't break in 
+        # test that the splicing of the join works here, doesn't break in
         # the middle of "parent join child1"
         q = sess.query(Child1).options(joinedload('left_child2'))
         self.assert_compile(q.limit(1).with_labels().statement,
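
The splice being asserted here is what joinedload() does: it grafts an eager LEFT OUTER JOIN onto whatever join geometry the query already carries, and the test checks that it lands outside the "parent join child1" inheritance join rather than in the middle of it. Basic usage of the option, assuming the Child1 fixture and sess above:

    from sqlalchemy.orm import joinedload

    # eager-load left_child2 in the same statement; limit(1) forces the
    # wrapping subquery whose compiled form the test asserts against
    q = sess.query(Child1).options(joinedload('left_child2'))
    stmt = q.limit(1).with_labels().statement
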
index d05551ef4a93c977fd65edfe1b4dc7217a9fd1a2..774626c48e888b6b7eab18050c6979bd620a220c 100644
@@ -93,7 +93,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
 
         ealias = aliased(Engineer)
         eq_(
-            session.query(Manager, ealias).all(), 
+            session.query(Manager, ealias).all(),
             [(m1, e1), (m1, e2)]
         )
 
@@ -124,7 +124,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
 
         # TODO: I think raise error on this for now
         # self.assertEquals(
-        #    session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(), 
+        #    session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(),
         #    []
         # )
 
@@ -169,7 +169,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
         sess.flush()
 
         eq_(
-            sess.query(Manager).select_from(employees.select().limit(10)).all(), 
+            sess.query(Manager).select_from(employees.select().limit(10)).all(),
             [m1, m2]
         )
 
@@ -389,7 +389,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
             "SELECT companies.company_id AS companies_company_id, "
             "companies.name AS companies_name, employees.name AS employees_name "
             "FROM companies LEFT OUTER JOIN employees ON companies.company_id "
-            "= employees.company_id AND employees.type IN (:type_1)" 
+            "= employees.company_id AND employees.type IN (:type_1)"
         )
 
     def test_outer_join_alias(self):
@@ -450,7 +450,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
         eq_(c2.engineers, [e1])
 
         sess.expunge_all()
-        eq_(sess.query(Company).order_by(Company.name).all(), 
+        eq_(sess.query(Company).order_by(Company.name).all(),
             [
                 Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
                 Company(name='c2', engineers=[Engineer(name='Kurt')])
@@ -459,7 +459,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
 
         # eager load join should limit to only "Engineer"
         sess.expunge_all()
-        eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(), 
+        eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(),
             [
                 Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
                 Company(name='c2', engineers=[Engineer(name='Kurt')])
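
The "employees.type IN (:type_1)" assertion a few hunks up, like the eager-load filtering here, comes from single-table inheritance: a relationship() targeting a subclass that shares the base table gets the discriminator criterion folded into its join automatically. A minimal classical-API sketch of that mapping:

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relationship

    metadata = MetaData()
    companies = Table('companies', metadata,
        Column('company_id', Integer, primary_key=True),
        Column('name', String(50)))
    employees = Table('employees', metadata,
        Column('employee_id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('type', String(20)),
        Column('company_id', Integer, ForeignKey('companies.company_id')))

    class Company(object): pass
    class Employee(object): pass
    class Engineer(Employee): pass

    mapper(Employee, employees, polymorphic_on=employees.c.type,
           polymorphic_identity='employee')
    # no table of its own: single-table inheritance
    mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
    # joining to Engineer adds "employees.type IN ('engineer')" to the
    # relationship's join criterion
    mapper(Company, companies, properties={
        'engineers': relationship(Engineer)})
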
index d0de0aa226850bccdec614f68984d356cc7e2a97..dc0035b6a8e246e532ffc3c1f84eec7380cfd540 100644
@@ -32,12 +32,12 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
 
         eq_(
             sess.query(pa.name, pa.Engineer.primary_language, pa.Manager.manager_name).\
-                filter(or_(pa.Engineer.primary_language=='java', 
+                filter(or_(pa.Engineer.primary_language=='java',
                                 pa.Manager.manager_name=='dogbert')).\
                 order_by(pa.Engineer.type).all(),
             [
                 (u'dilbert', u'java', None),
-                (u'dogbert', None, u'dogbert'), 
+                (u'dogbert', None, u'dogbert'),
             ]
         )
 
@@ -50,7 +50,7 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
         eq_(
             [(p1.name, type(p1), p2.name, type(p2)) for (p1, p2) in sess.query(
                 pa, pa_alias
-            ).join(pa_alias, 
+            ).join(pa_alias,
                     or_(
                         pa.Engineer.primary_language==\
                         pa_alias.Engineer.primary_language,
@@ -62,9 +62,9 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
                     )
                 ).order_by(pa.name, pa_alias.name)],
             [
-                (u'dilbert', Engineer, u'dilbert', Engineer), 
-                (u'dogbert', Manager, u'pointy haired boss', Boss), 
-                (u'vlad', Engineer, u'vlad', Engineer), 
+                (u'dilbert', Engineer, u'dilbert', Engineer),
+                (u'dogbert', Manager, u'pointy haired boss', Boss),
+                (u'vlad', Engineer, u'vlad', Engineer),
                 (u'wally', Engineer, u'wally', Engineer)
             ]
         )
@@ -76,9 +76,9 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
 
         eq_(
             [row for row in sess.query(
-                pa.name, pa.Engineer.primary_language, 
+                pa.name, pa.Engineer.primary_language,
                 pa_alias.name, pa_alias.Engineer.primary_language
-            ).join(pa_alias, 
+            ).join(pa_alias,
                     or_(
                         pa.Engineer.primary_language==\
                         pa_alias.Engineer.primary_language,
@@ -90,9 +90,9 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
                     )
                 ).order_by(pa.name, pa_alias.name)],
             [
-                (u'dilbert', u'java', u'dilbert', u'java'), 
-                (u'dogbert', None, u'pointy haired boss', None), 
-                (u'vlad', u'cobol', u'vlad', u'cobol'), 
+                (u'dilbert', u'java', u'dilbert', u'java'),
+                (u'dogbert', None, u'pointy haired boss', None),
+                (u'vlad', u'cobol', u'vlad', u'cobol'),
                 (u'wally', u'c++', u'wally', u'c++')
             ]
         )
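
The pa and pa_alias entities above come from the 0.8-series orm.with_polymorphic() helper, which wraps the base class plus selected subclasses into one aliased selectable and exposes each subclass namespace as an attribute. A sketch of building and querying one, assuming the Person/Engineer/Manager fixtures and a sess from the surrounding suite:

    from sqlalchemy.orm import with_polymorphic

    # pa.Engineer / pa.Manager reach subclass columns inside the single
    # polymorphic selectable; aliased=True allows self-joins like the
    # pa_alias comparisons in the tests above
    pa = with_polymorphic(Person, [Engineer, Manager], aliased=True)

    rows = (sess.query(pa.name,
                       pa.Engineer.primary_language,
                       pa.Manager.manager_name)
                .filter((pa.Engineer.primary_language == 'java') |
                        (pa.Manager.manager_name == 'dogbert'))
                .order_by(pa.name)
                .all())
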
index 2eddfde9c204f351db06073c6eb811f650b0f7b2..dded00256f2708ddac327e1d832fe01a6f87a587 100644
@@ -2,7 +2,7 @@
 
 Derived from mailing list-reported problems and trac tickets.
 
-These are generally very old 0.1-era tests and at some point should 
+These are generally very old 0.1-era tests and at some point should
 be cleaned up and modernized.
 
 """
index b3214984f43118c8d3ba2eb3feb46e46c6268c10..7e425a457e9ffb1df60162526e86a13e4390c901 100644
@@ -121,7 +121,7 @@ class O2MCollectionTest(_fixtures.FixtureTest):
         # backref fires
         assert a1.user is u2
 
-        # everything expires, no changes in 
+        # everything expires, no changes in
         # u1.addresses, so all is fine
         sess.commit()
         assert a1 not in u1.addresses
@@ -143,7 +143,7 @@ class O2MCollectionTest(_fixtures.FixtureTest):
         u1.addresses
 
         # direct set - the "old" is "fetched",
-        # but only from the local session - not the 
+        # but only from the local session - not the
         # database, due to the PASSIVE_NO_FETCH flag.
         # this is a more fine grained behavior introduced
         # in 0.6
@@ -207,7 +207,7 @@ class O2MCollectionTest(_fixtures.FixtureTest):
         sess.add_all([u1, u2, a1])
         sess.commit()
 
-        # direct set - the fetching of the 
+        # direct set - the fetching of the
         # "old" u1 here allows the backref
         # to remove it from the addresses collection
         a1.user = u2
@@ -230,7 +230,7 @@ class O2MCollectionTest(_fixtures.FixtureTest):
         # u1.addresses is loaded
         u1.addresses
 
-        # direct set - the fetching of the 
+        # direct set - the fetching of the
         # "old" u1 here allows the backref
         # to remove it from the addresses collection
         a1.user = u2
@@ -455,8 +455,8 @@ class O2OScalarOrphanTest(_fixtures.FixtureTest):
 
         mapper(Address, addresses)
         mapper(User, users, properties = {
-            'address':relationship(Address, uselist=False, 
-                backref=backref('user', single_parent=True, 
+            'address':relationship(Address, uselist=False,
+                backref=backref('user', single_parent=True,
                                     cascade="all, delete-orphan"))
         })
 
@@ -491,7 +491,7 @@ class M2MCollectionMoveTest(_fixtures.FixtureTest):
                                 cls.classes.Item)
 
         mapper(Item, items, properties={
-            'keywords':relationship(Keyword, secondary=item_keywords, 
+            'keywords':relationship(Keyword, secondary=item_keywords,
                                     backref='items')
         })
         mapper(Keyword, keywords)
@@ -603,8 +603,8 @@ class M2MScalarMoveTest(_fixtures.FixtureTest):
                                 cls.classes.Item)
 
         mapper(Item, items, properties={
-            'keyword':relationship(Keyword, secondary=item_keywords, 
-                                    uselist=False, 
+            'keyword':relationship(Keyword, secondary=item_keywords,
+                                    uselist=False,
                                     backref=backref("item", uselist=False))
         })
         mapper(Keyword, keywords)
@@ -718,7 +718,7 @@ class M2MStaleBackrefTest(_fixtures.FixtureTest):
                                 cls.classes.Item)
 
         mapper(Item, items, properties={
-            'keywords':relationship(Keyword, secondary=item_keywords, 
+            'keywords':relationship(Keyword, secondary=item_keywords,
                                     backref='items')
         })
         mapper(Keyword, keywords)
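
The backref move/stale tests above all rest on the same wiring: backref='items' installs the reverse attribute, and attribute events keep both sides of the pair in sync in memory, ahead of any flush. A self-contained sketch of the many-to-many shape these fixtures use:

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relationship

    metadata = MetaData()
    items = Table('items', metadata,
        Column('id', Integer, primary_key=True),
        Column('description', String(50)))
    keywords = Table('keywords', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    item_keywords = Table('item_keywords', metadata,
        Column('item_id', Integer, ForeignKey('items.id'),
               primary_key=True),
        Column('keyword_id', Integer, ForeignKey('keywords.id'),
               primary_key=True))

    class Item(object): pass
    class Keyword(object): pass

    mapper(Item, items, properties={
        # backref='items' creates Keyword.items; appending on either
        # side fires the attribute event that mutates the other
        'keywords': relationship(Keyword, secondary=item_keywords,
                                 backref='items')})
    mapper(Keyword, keywords)
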
index 3edf8af65bd8263133b87d04ef5c71d25dbc0011..20088c070eead5b1a4bb715b4b44c2835ee6f937 100644
@@ -65,7 +65,7 @@ class CascadeArgTest(fixtures.MappedTest):
 
     def test_cascade_immutable(self):
         assert isinstance(
-            orm_util.CascadeOptions("all, delete-orphan"), 
+            orm_util.CascadeOptions("all, delete-orphan"),
             frozenset)
 
 class O2MCascadeDeleteOrphanTest(fixtures.MappedTest):
@@ -388,7 +388,7 @@ class O2MCascadeTest(fixtures.MappedTest):
     @classmethod
     def setup_mappers(cls):
         users, User, Address, addresses = (
-                    cls.tables.users, cls.classes.User, 
+                    cls.tables.users, cls.classes.User,
                     cls.classes.Address, cls.tables.addresses)
 
         mapper(Address, addresses)
@@ -508,8 +508,8 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
 
     run_inserts = None
 
-    def _one_to_many_fixture(self, o2m_cascade=True, 
-                                    m2o_cascade=True, 
+    def _one_to_many_fixture(self, o2m_cascade=True,
+                                    m2o_cascade=True,
                                     o2m=False,
                                     m2o=False,
                                     o2m_cascade_backrefs=True,
@@ -523,10 +523,10 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
         if o2m:
             if m2o:
                 addresses_rel = {'addresses':relationship(
-                                Address, 
+                                Address,
                                 cascade_backrefs=o2m_cascade_backrefs,
                                 cascade=o2m_cascade and 'save-update' or '',
-                                backref=backref('user', 
+                                backref=backref('user',
                                             cascade=m2o_cascade and 'save-update' or '',
                                             cascade_backrefs=m2o_cascade_backrefs
                                         )
@@ -534,7 +534,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
 
             else:
                 addresses_rel = {'addresses':relationship(
-                                Address, 
+                                Address,
                                 cascade=o2m_cascade and 'save-update' or '',
                                 cascade_backrefs=o2m_cascade_backrefs,
                                 )}
@@ -552,8 +552,8 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
         mapper(User, users, properties=addresses_rel)
         mapper(Address, addresses, properties=user_rel)
 
-    def _many_to_many_fixture(self, fwd_cascade=True, 
-                                    bkd_cascade=True, 
+    def _many_to_many_fixture(self, fwd_cascade=True,
+                                    bkd_cascade=True,
                                     fwd=False,
                                     bkd=False,
                                     fwd_cascade_backrefs=True,
@@ -568,11 +568,11 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
         if fwd:
             if bkd:
                 keywords_rel = {'keywords':relationship(
-                                Keyword, 
+                                Keyword,
                                 secondary=item_keywords,
                                 cascade_backrefs=fwd_cascade_backrefs,
                                 cascade=fwd_cascade and 'save-update' or '',
-                                backref=backref('items', 
+                                backref=backref('items',
                                             cascade=bkd_cascade and 'save-update' or '',
                                             cascade_backrefs=bkd_cascade_backrefs
                                         )
@@ -580,7 +580,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
 
             else:
                 keywords_rel = {'keywords':relationship(
-                                Keyword, 
+                                Keyword,
                                 secondary=item_keywords,
                                 cascade=fwd_cascade and 'save-update' or '',
                                 cascade_backrefs=fwd_cascade_backrefs,
@@ -664,7 +664,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_o2m_backref_child_transient(self):
         User, Address = self.classes.User, self.classes.Address
 
-        self._one_to_many_fixture(o2m=True, m2o=True, 
+        self._one_to_many_fixture(o2m=True, m2o=True,
                                     o2m_cascade=False)
         sess = Session()
         u1 = User(name='u1')
@@ -680,7 +680,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_o2m_backref_child_transient_nochange(self):
         User, Address = self.classes.User, self.classes.Address
 
-        self._one_to_many_fixture(o2m=True, m2o=True, 
+        self._one_to_many_fixture(o2m=True, m2o=True,
                                     o2m_cascade=False)
         sess = Session()
         u1 = User(name='u1')
@@ -698,7 +698,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_o2m_backref_child_expunged(self):
         User, Address = self.classes.User, self.classes.Address
 
-        self._one_to_many_fixture(o2m=True, m2o=True, 
+        self._one_to_many_fixture(o2m=True, m2o=True,
                                     o2m_cascade=False)
         sess = Session()
         u1 = User(name='u1')
@@ -718,7 +718,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_o2m_backref_child_expunged_nochange(self):
         User, Address = self.classes.User, self.classes.Address
 
-        self._one_to_many_fixture(o2m=True, m2o=True, 
+        self._one_to_many_fixture(o2m=True, m2o=True,
                                     o2m_cascade=False)
         sess = Session()
         u1 = User(name='u1')
@@ -936,7 +936,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_m2m_backref_child_transient(self):
         Item, Keyword = self.classes.Item, self.classes.Keyword
 
-        self._many_to_many_fixture(fwd=True, bkd=True, 
+        self._many_to_many_fixture(fwd=True, bkd=True,
                                     fwd_cascade=False)
         sess = Session()
         i1 = Item(description='i1')
@@ -952,7 +952,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_m2m_backref_child_transient_nochange(self):
         Item, Keyword = self.classes.Item, self.classes.Keyword
 
-        self._many_to_many_fixture(fwd=True, bkd=True, 
+        self._many_to_many_fixture(fwd=True, bkd=True,
                                     fwd_cascade=False)
         sess = Session()
         i1 = Item(description='i1')
@@ -970,7 +970,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_m2m_backref_child_expunged(self):
         Item, Keyword = self.classes.Item, self.classes.Keyword
 
-        self._many_to_many_fixture(fwd=True, bkd=True, 
+        self._many_to_many_fixture(fwd=True, bkd=True,
                                     fwd_cascade=False)
         sess = Session()
         i1 = Item(description='i1')
@@ -990,7 +990,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
     def test_m2m_backref_child_expunged_nochange(self):
         Item, Keyword = self.classes.Item, self.classes.Keyword
 
-        self._many_to_many_fixture(fwd=True, bkd=True, 
+        self._many_to_many_fixture(fwd=True, bkd=True,
                                     fwd_cascade=False)
         sess = Session()
         i1 = Item(description='i1')
@@ -1433,19 +1433,19 @@ class M2OCascadeDeleteNoOrphanTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('t1', metadata, Column('id', Integer, primary_key=True,
-              test_needs_autoincrement=True), 
-              Column('data',String(50)), 
+              test_needs_autoincrement=True),
+              Column('data',String(50)),
               Column('t2id', Integer, ForeignKey('t2.id')))
 
-        Table('t2', metadata, 
+        Table('t2', metadata,
             Column('id', Integer, primary_key=True,
-              test_needs_autoincrement=True), 
-              Column('data',String(50)), 
+              test_needs_autoincrement=True),
+              Column('data',String(50)),
               Column('t3id', Integer, ForeignKey('t3.id')))
 
-        Table('t3', metadata, 
+        Table('t3', metadata,
             Column('id', Integer, primary_key=True,
-              test_needs_autoincrement=True), 
+              test_needs_autoincrement=True),
               Column('data', String(50)))
 
     @classmethod
@@ -1734,7 +1734,7 @@ class M2MCascadeTest(fixtures.MappedTest):
                                 self.tables.atob)
 
         mapper(A, a, properties={
-            'bs':relationship(B, secondary=atob, 
+            'bs':relationship(B, secondary=atob,
                         cascade="all, delete-orphan")
         })
         mapper(B, b)
@@ -1776,8 +1776,8 @@ class M2MCascadeTest(fixtures.MappedTest):
 
 
         mapper(A, a, properties={
-            'bs':relationship(B, 
-                secondary=atob, 
+            'bs':relationship(B,
+                secondary=atob,
                 cascade="all, delete-orphan", single_parent=True,
                 backref=backref('a', uselist=False))
         })
@@ -1801,7 +1801,7 @@ class O2MSelfReferentialDetelOrphanTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('node', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
             Column('parent_id', Integer, ForeignKey('node.id'))
         )
@@ -1817,10 +1817,10 @@ class O2MSelfReferentialDetelOrphanTest(fixtures.MappedTest):
         node = cls.tables.node
         mapper(Node, node, properties={
             "children":relationship(
-                            Node, 
-                            cascade="all, delete-orphan", 
+                            Node,
+                            cascade="all, delete-orphan",
                             backref=backref(
-                                    "parent", 
+                                    "parent",
                                     remote_side=node.c.id
                                 )
                             )
@@ -1856,12 +1856,12 @@ class NoBackrefCascadeTest(_fixtures.FixtureTest):
 
         mapper(Address, addresses)
         mapper(User, users, properties={
-                'addresses':relationship(Address, backref='user', 
+                'addresses':relationship(Address, backref='user',
                             cascade_backrefs=False)
         })
 
         mapper(Dingaling, dingalings, properties={
-                'address' : relationship(Address, backref='dingalings', 
+                'address' : relationship(Address, backref='dingalings',
                             cascade_backrefs=False)
         })
 
@@ -1984,7 +1984,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest):
             pass
 
     def test_pending_standalone_orphan(self):
-        """Standalone 'orphan' objects can now be persisted, if the underlying 
+        """Standalone 'orphan' objects can now be persisted, if the underlying
         constraints of the database allow it.
 
         This now supports persisting of objects based on foreign key
@@ -2021,7 +2021,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest):
         assert_raises(sa_exc.DBAPIError, s.commit)
         s.rollback()
 
-        # can assign o.user_id by foreign key, 
+        # can assign o.user_id by foreign key,
         # flush succeeds
         u = User()
         s.add(u)
@@ -2044,7 +2044,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest):
 
         mapper(Address, addresses)
         mapper(User, users, properties=dict(
-            addresses=relationship(Address, cascade="all,delete-orphan", 
+            addresses=relationship(Address, cascade="all,delete-orphan",
                                         backref="user")
         ))
         s = create_session()
@@ -2398,14 +2398,14 @@ class DoubleParentM2OOrphanTest(fixtures.MappedTest):
 class CollectionAssignmentOrphanTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table('table_a', metadata, 
+        Table('table_a', metadata,
             Column('id', Integer,
               primary_key=True, test_needs_autoincrement=True),
               Column('name', String(30)))
-        Table('table_b', metadata, 
+        Table('table_b', metadata,
             Column('id', Integer,
               primary_key=True, test_needs_autoincrement=True),
-              Column('name', String(30)), 
+              Column('name', String(30)),
               Column('a_id', Integer, ForeignKey('table_a.id')))
 
     def test_basic(self):
@@ -2553,7 +2553,7 @@ class O2MConflictTest(fixtures.MappedTest):
                                 self.tables.child)
 
         mapper(Parent, parent, properties={
-            'child':relationship(Child, uselist=False, 
+            'child':relationship(Child, uselist=False,
                                     cascade="all, delete, delete-orphan")
         })
         mapper(Child, child)
@@ -2567,8 +2567,8 @@ class O2MConflictTest(fixtures.MappedTest):
                                 self.tables.child)
 
         mapper(Parent, parent, properties={
-            'child':relationship(Child, uselist=False, 
-                                        cascade="all, delete, delete-orphan", 
+            'child':relationship(Child, uselist=False,
+                                        cascade="all, delete, delete-orphan",
                                         backref='parent')
         })
         mapper(Child, child)
@@ -2583,8 +2583,8 @@ class O2MConflictTest(fixtures.MappedTest):
 
         mapper(Parent, parent)
         mapper(Child, child, properties = {
-            'parent' : relationship(Parent, uselist=False, single_parent=True, 
-                                backref=backref('child', uselist=False), 
+            'parent' : relationship(Parent, uselist=False, single_parent=True,
+                                backref=backref('child', uselist=False),
                                 cascade="all,delete,delete-orphan")
         })
         self._do_move_test(True)
@@ -2598,8 +2598,8 @@ class O2MConflictTest(fixtures.MappedTest):
 
         mapper(Parent, parent)
         mapper(Child, child, properties = {
-            'parent' : relationship(Parent, uselist=False, single_parent=True, 
-                                backref=backref('child', uselist=True), 
+            'parent' : relationship(Parent, uselist=False, single_parent=True,
+                                backref=backref('child', uselist=True),
                                 cascade="all,delete,delete-orphan")
         })
         self._do_move_test(True)
@@ -2616,7 +2616,7 @@ class PartialFlushTest(fixtures.MappedTest):
             Column("descr", String(50))
         )
 
-        Table("noninh_child", metadata, 
+        Table("noninh_child", metadata,
             Column('id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
             Column('base_id', Integer, ForeignKey('base.id'))
@@ -2654,7 +2654,7 @@ class PartialFlushTest(fixtures.MappedTest):
         sess.flush([b1])
 
         # c1, c2 get cascaded into the session on o2m.
-        # not sure if this is how I like this 
+        # not sure if this is how I like this
         # to work but that's how it works for now.
         assert c1 in sess and c1 not in sess.new
         assert c2 in sess and c2 not in sess.new
@@ -2701,7 +2701,7 @@ class PartialFlushTest(fixtures.MappedTest):
             inherits=Base,
             properties={'parent': relationship(
                 Parent,
-                backref='children', 
+                backref='children',
                 primaryjoin=inh_child.c.parent_id == parent.c.id
             )}
         )
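
The hunks above change trailing whitespace only, but every test they pass through exercises the delete-orphan cascade. A minimal runnable sketch of that pattern, in the same classic-mapping style as the tests; every table, class, and value name below is illustrative rather than taken from the commit:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import backref, mapper, relationship, sessionmaker

metadata = MetaData()
parent = Table('parent', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))
child = Table('child', metadata,
    Column('id', Integer, primary_key=True),
    Column('parent_id', Integer, ForeignKey('parent.id')))

class Parent(object): pass
class Child(object): pass

# "all, delete-orphan": a Child detached from Parent.children is
# DELETEd at flush time instead of being left with a NULL parent_id.
mapper(Parent, parent, properties={
    'children': relationship(Child, cascade="all, delete-orphan",
                             backref=backref('parent'))
})
mapper(Child, child)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

p, c = Parent(), Child()
p.children.append(c)
sess.add(p)
sess.commit()

p.children.remove(c)                # c is now an orphan
sess.commit()
print(sess.query(Child).count())    # 0: the orphan row was deleted
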
index 42a0ded34be38cefa9aa8fae4181eb3c5973a492..b3de03aaef61506d9f1a8020d13e6a8a2d320d48 100644 (file)
@@ -1567,7 +1567,7 @@ class DictHelpersTest(fixtures.MappedTest):
             ((Foo.id, Foo.bar_id), Foo(id=3, bar_id=12), (3, 12))
         ):
             eq_(
-                collections.column_mapped_collection(spec)().keyfunc(obj), 
+                collections.column_mapped_collection(spec)().keyfunc(obj),
                 expected
             )
 
@@ -1622,11 +1622,11 @@ class ColumnMappedWSerialize(fixtures.MappedTest):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('foo', metadata, 
+        Table('foo', metadata,
             Column('id', Integer(), primary_key=True),
             Column('b', String(128))
         )
-        Table('bar', metadata, 
+        Table('bar', metadata,
             Column('id', Integer(), primary_key=True),
             Column('foo_id', Integer, ForeignKey('foo.id')),
             Column('bat_id', Integer),
@@ -1673,7 +1673,7 @@ class ColumnMappedWSerialize(fixtures.MappedTest):
         for spec, obj, expected in specs:
             coll = collections.column_mapped_collection(spec)()
             eq_(
-                coll.keyfunc(obj), 
+                coll.keyfunc(obj),
                 expected
             )
             # ensure we do the right thing with __reduce__
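
These hunks sit in the column_mapped_collection tests. A short sketch of what that helper does: it turns a relationship collection into a dict keyed on a mapped column, and its keyfunc (poked at directly in the tests above) extracts that key from an instance. Names here are illustrative:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker
from sqlalchemy.orm.collections import column_mapped_collection

metadata = MetaData()
item = Table('item', metadata,
    Column('id', Integer, primary_key=True))
note = Table('note', metadata,
    Column('id', Integer, primary_key=True),
    Column('item_id', Integer, ForeignKey('item.id')),
    Column('keyword', String(30)))

class Item(object): pass
class Note(object): pass

# Item.notes becomes a dict keyed on Note.keyword instead of a list.
mapper(Item, item, properties={
    'notes': relationship(Note,
        collection_class=column_mapped_collection(note.c.keyword))
})
mapper(Note, note)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

n = Note(); n.keyword = 'color'
print(column_mapped_collection(note.c.keyword)().keyfunc(n))   # 'color'

i = Item()
i.notes['color'] = n
sess.add(i)
sess.commit()
print(list(i.notes))   # ['color']
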
index 68505b0e69455da8dcebba0cfecb0d23c0c2a01c..1b2714d7001bdb7265f0444ae257b0c5521d8c5f 100644 (file)
@@ -164,7 +164,7 @@ class CompileTest(fixtures.ORMTest):
         meta = MetaData()
 
         a = Table('a', meta, Column('id', Integer, primary_key=True))
-        b = Table('b', meta, Column('id', Integer, primary_key=True), 
+        b = Table('b', meta, Column('id', Integer, primary_key=True),
                                 Column('a_id', Integer, ForeignKey('a.id')))
 
         class A(object):pass
index 81f9c1ccdeed4bbf24ff10b575ec2dab8ac1706a..dce8e04deb10ad51459d630a1df4363de8d2b413 100644 (file)
@@ -478,7 +478,7 @@ class BiDirectionalOneToManyTest(fixtures.MappedTest):
 
 
 class BiDirectionalOneToManyTest2(fixtures.MappedTest):
-    """Two mappers with a one-to-many relationship to each other, 
+    """Two mappers with a one-to-many relationship to each other,
     with a second one-to-many on one of the mappers"""
 
     run_define_tables = 'each'
@@ -667,8 +667,8 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
         sess.delete(p)
 
         self.assert_sql_execution(
-            testing.db, 
-            sess.flush, 
+            testing.db,
+            sess.flush,
             ExactSQL("UPDATE person SET favorite_ball_id=:favorite_ball_id "
                 "WHERE person.id = :person_id",
                 lambda ctx: {'person_id': p.id, 'favorite_ball_id': None}),
@@ -718,7 +718,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
             p2, b1.person
         )
 
-        # do it the other way 
+        # do it the other way
         p3.balls.append(b1)
         sess.commit()
         eq_(
@@ -798,7 +798,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
 
         sess.delete(p)
 
-        self.assert_sql_execution(testing.db, sess.flush, 
+        self.assert_sql_execution(testing.db, sess.flush,
             CompiledSQL("UPDATE ball SET person_id=:person_id "
                 "WHERE ball.id = :ball_id",
                 lambda ctx:[
@@ -912,7 +912,7 @@ class SelfReferentialPostUpdateTest(fixtures.MappedTest):
         # pre-trigger lazy loader on 'cats' to make the test easier
         cats.children
         self.assert_sql_execution(
-            testing.db, 
+            testing.db,
             session.flush,
             AllOf(
             CompiledSQL("UPDATE node SET prev_sibling_id=:prev_sibling_id "
@@ -935,12 +935,12 @@ class SelfReferentialPostUpdateTest(fixtures.MappedTest):
         session.delete(root)
 
         self.assert_sql_execution(
-            testing.db, 
+            testing.db,
             session.flush,
             CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
-                "WHERE node.id = :node_id", 
+                "WHERE node.id = :node_id",
                 lambda ctx: [
-                            {'node_id': about.id, 'next_sibling_id': None}, 
+                            {'node_id': about.id, 'next_sibling_id': None},
                             {'node_id': stories.id, 'next_sibling_id': None}
                         ]
             ),
@@ -1180,7 +1180,7 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
         p1.c3 = c31
 
         self.assert_sql_execution(
-            testing.db, 
+            testing.db,
             sess.flush,
             CompiledSQL(
                 "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
@@ -1192,7 +1192,7 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
         p1.c1 = p1.c2 = p1.c3 = None
 
         self.assert_sql_execution(
-            testing.db, 
+            testing.db,
             sess.flush,
             CompiledSQL(
                 "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
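
The whitespace fixes above land in the post-update tests, which assert that mutually dependent rows are written via a deferred second UPDATE rather than failing on the insert-order cycle. A compact sketch of the post_update=True flag those assertions revolve around, with illustrative names:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker

metadata = MetaData()
node = Table('node', metadata,
    Column('id', Integer, primary_key=True),
    Column('data', String(30)),
    Column('next_sibling_id', Integer, ForeignKey('node.id')))

class Node(object): pass

# Without post_update, a two-node cycle raises CircularDependencyError;
# with it, both rows are INSERTed with a NULL FK and then UPDATEd.
mapper(Node, node, properties={
    'next_sibling': relationship(Node, remote_side=[node.c.id],
                                 post_update=True)
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

n1, n2 = Node(), Node()
n1.next_sibling = n2
n2.next_sibling = n1        # mutually dependent rows
sess.add_all([n1, n2])
sess.commit()               # two INSERTs, then two UPDATEs set the FKs
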
index 7dbc9adcb4d8df7f809997cbaddfcfd78b774025..675cebda8343eecaabdc0f0985bf8b7ef88c8dde 100644 (file)
@@ -16,11 +16,11 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
             # keywords are not part of self.static.user_all_result, so
             # verify all the item keywords were loaded, with no more sql.
             # 'any' verifies at least some items have keywords; we build
-            # a list for any([...]) instead of any(...) to prove we've 
+            # a list for any([...]) instead of any(...) to prove we've
             # iterated all the items with no sql.
             f = util.flatten_iterator
-            assert any([i.keywords for i in 
-                f([o.items for o in f([u.orders for u in users])])]) 
+            assert any([i.keywords for i in
+                f([o.items for o in f([u.orders for u in users])])])
         self.assert_sql_count(testing.db, go, 0)
 
     def _assert_addresses_loaded(self, users):
@@ -85,13 +85,13 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         mapper(User, users, properties=dict(
             addresses=relationship(Address, lazy=True,
                                order_by=addresses.c.id),
-            orders=relationship(Order, 
+            orders=relationship(Order,
                             order_by=orders.c.id)))
 
         return create_session()
 
     def test_downgrade_baseline(self):
-        """Mapper strategy defaults load as expected 
+        """Mapper strategy defaults load as expected
         (compare to rest of DefaultStrategyOptionsTest downgrade tests)."""
         sess = self._downgrade_fixture()
         users = []
@@ -107,11 +107,11 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self._assert_fully_loaded(users)
 
     def test_disable_eagerloads(self):
-        """Mapper eager load strategy defaults can be shut off 
+        """Mapper eager load strategy defaults can be shut off
         with enable_eagerloads(False)."""
 
-        # While this isn't testing a mapper option, it is included 
-        # as baseline reference for how XYZload('*') option 
+        # While this isn't testing a mapper option, it is included
+        # as baseline reference for how XYZload('*') option
         # should work, namely, it shouldn't affect later queries
         # (see other test_select_s)
         sess = self._downgrade_fixture()
@@ -156,8 +156,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
             sa.orm.subqueryload, '*', User.addresses
         )
     def test_select_with_joinedload(self):
-        """Mapper load strategy defaults can be downgraded with 
-        lazyload('*') option, while explicit joinedload() option 
+        """Mapper load strategy defaults can be downgraded with
+        lazyload('*') option, while explicit joinedload() option
         is still honored"""
         sess = self._downgrade_fixture()
         users = []
@@ -181,8 +181,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 3)
 
     def test_select_with_subqueryload(self):
-        """Mapper load strategy defaults can be downgraded with 
-        lazyload('*') option, while explicit subqueryload() option 
+        """Mapper load strategy defaults can be downgraded with
+        lazyload('*') option, while explicit subqueryload() option
         is still honored"""
         sess = self._downgrade_fixture()
         users = []
@@ -215,8 +215,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         eq_(users, self.static.user_all_result)
 
     def test_noload_with_joinedload(self):
-        """Mapper load strategy defaults can be downgraded with 
-        noload('*') option, while explicit joinedload() option 
+        """Mapper load strategy defaults can be downgraded with
+        noload('*') option, while explicit joinedload() option
         is still honored"""
         sess = self._downgrade_fixture()
         users = []
@@ -240,8 +240,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 0)
 
     def test_noload_with_subqueryload(self):
-        """Mapper load strategy defaults can be downgraded with 
-        noload('*') option, while explicit subqueryload() option 
+        """Mapper load strategy defaults can be downgraded with
+        noload('*') option, while explicit subqueryload() option
         is still honored"""
         sess = self._downgrade_fixture()
         users = []
@@ -268,7 +268,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 0)
 
     def test_joined(self):
-        """Mapper load strategy defaults can be upgraded with 
+        """Mapper load strategy defaults can be upgraded with
         joinedload('*') option."""
         sess = self._upgrade_fixture()
         users = []
@@ -285,7 +285,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self._assert_fully_loaded(users)
 
     def test_joined_with_lazyload(self):
-        """Mapper load strategy defaults can be upgraded with 
+        """Mapper load strategy defaults can be upgraded with
         joinedload('*') option, while explicit lazyload() option
         is still honored"""
         sess = self._upgrade_fixture()
@@ -316,7 +316,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 1)
 
     def test_joined_with_subqueryload(self):
-        """Mapper load strategy defaults can be upgraded with 
+        """Mapper load strategy defaults can be upgraded with
         joinedload('*') option, while explicit subqueryload() option
         is still honored"""
         sess = self._upgrade_fixture()
@@ -335,7 +335,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self._assert_fully_loaded(users)
 
     def test_subquery(self):
-        """Mapper load strategy defaults can be upgraded with 
+        """Mapper load strategy defaults can be upgraded with
         subqueryload('*') option."""
         sess = self._upgrade_fixture()
         users = []
@@ -352,7 +352,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self._assert_fully_loaded(users)
 
     def test_subquery_with_lazyload(self):
-        """Mapper load strategy defaults can be upgraded with 
+        """Mapper load strategy defaults can be upgraded with
         subqueryload('*') option, while explicit lazyload() option
         is still honored"""
         sess = self._upgrade_fixture()
@@ -382,8 +382,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 1)
 
     def test_subquery_with_joinedload(self):
-        """Mapper load strategy defaults can be upgraded with 
-        subqueryload('*') option, while multiple explicit 
+        """Mapper load strategy defaults can be upgraded with
+        subqueryload('*') option, while multiple explicit
         joinedload() options are still honored"""
         sess = self._upgrade_fixture()
         users = []
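
The docstrings in these hunks describe the wildcard loader options: lazyload('*') downgrades every mapper-level eager default while an explicit joinedload() is still honored. A compact sketch of that interaction; the mapping and names are illustrative:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import joinedload, lazyload, mapper, relationship, sessionmaker

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))
addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('users.id')),
    Column('email', String(50)))
orders = Table('orders', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('users.id')))

class User(object): pass
class Address(object): pass
class Order(object): pass

# eager by default on both relationships
mapper(User, users, properties={
    'addresses': relationship(Address, lazy='joined'),
    'orders': relationship(Order, lazy='joined'),
})
mapper(Address, addresses)
mapper(Order, orders)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

# lazyload('*') turns off both eager defaults; joinedload() re-enables
# exactly one, so the compiled SQL joins addresses but not orders.
q = sess.query(User).options(lazyload('*'), joinedload('addresses'))
print(q)
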
index 8063d92b71f46901c8465972d1071ce62becb973..a246cddaea470f12e830c1a883a1a04733b4a9cf 100644 (file)
@@ -42,7 +42,7 @@ class TriggerDefaultsTest(fixtures.MappedTest):
             sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                          "FOR EACH ROW BEGIN "
                          "SET NEW.col2='ins'; SET NEW.col4='ins'; END",
-                         on=lambda ddl, event, target, bind, **kw: 
+                         on=lambda ddl, event, target, bind, **kw:
                                 bind.engine.name not in ('oracle', 'mssql', 'sqlite')
                 ),
             ):
@@ -67,7 +67,7 @@ class TriggerDefaultsTest(fixtures.MappedTest):
             sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                         "FOR EACH ROW BEGIN "
                         "SET NEW.col3='up'; SET NEW.col4='up'; END",
-                        on=lambda ddl, event, target, bind, **kw: 
+                        on=lambda ddl, event, target, bind, **kw:
                                 bind.engine.name not in ('oracle', 'mssql', 'sqlite')
                     ),
             ):
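
The two hunks above guard trigger DDL with an on= callable so it only runs on backends that support the syntax. A sketch of the same idea via DDL.execute_if(), the replacement spelling for the on= argument; the trigger body and the sqlite URL are illustrative:

from sqlalchemy import (Column, DDL, Integer, MetaData, String, Table,
                        create_engine, event)

metadata = MetaData()
dt = Table('dt', metadata,
    Column('id', Integer, primary_key=True),
    Column('col2', String(20), server_default='0'))

trigger = DDL(
    "CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
    "FOR EACH ROW BEGIN SET NEW.col2='ins'; END")

# only emit the trigger on MySQL; other backends skip it silently
event.listen(dt, 'after_create', trigger.execute_if(dialect='mysql'))

engine = create_engine('sqlite://')
metadata.create_all(engine)    # table created, trigger DDL not emitted
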
index c306ebd05c3bbc40f8c5490930e23545e1e63199..33308880ea60c8cd9d88ee1757b40d027d775ca1 100644 (file)
@@ -9,7 +9,7 @@ from test.lib import fixtures
 from test.lib.testing import eq_
 
 class TestDescriptor(descriptor_props.DescriptorProperty):
-    def __init__(self, cls, key, descriptor=None, doc=None, 
+    def __init__(self, cls, key, descriptor=None, doc=None,
             comparator_factory = None):
         self.parent = cls.__mapper__
         self.key = key
index ba0fa422052681d1cbc81bf0a5afa3b4a16277fb..07f01822b908d7d07e2d574237e6ed0c29a78ffe 100644 (file)
@@ -49,7 +49,7 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         u = q.filter(User.id==7).first()
         self.assert_compile(
-            u.addresses.statement, 
+            u.addresses.statement,
             "SELECT addresses.id, addresses.user_id, addresses.email_address FROM "
             "addresses WHERE :param_1 = addresses.user_id",
             use_default_dialect=True
@@ -86,7 +86,7 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         u = sess.query(User).get(8)
         eq_(
             list(u.addresses.order_by(desc(Address.email_address))),
-             [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'), 
+             [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'),
               Address(email_address=u'ed@bettyboop.com')]
             )
 
@@ -209,7 +209,7 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         assert o1 in i1.orders.all()
         assert i1 in o1.items.all()
 
-    @testing.exclude('mysql', 'between', 
+    @testing.exclude('mysql', 'between',
             ((5, 1,49), (5, 1, 52)),
             'https://bugs.launchpad.net/ubuntu/+source/mysql-5.1/+bug/706988')
     def test_association_nonaliased(self):
@@ -220,8 +220,8 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                                 self.classes.Item)
 
         mapper(Order, orders, properties={
-            'items':relationship(Item, secondary=order_items, 
-                                lazy="dynamic", 
+            'items':relationship(Item, secondary=order_items,
+                                lazy="dynamic",
                                 order_by=order_items.c.item_id)
         })
         mapper(Item, items)
@@ -239,7 +239,7 @@ class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True
         )
 
-        # filter criterion against the secondary table 
+        # filter criterion against the secondary table
         # works
         eq_(
             o.items.filter(order_items.c.item_id==2).all(),
@@ -506,7 +506,7 @@ class SessionTest(_fixtures.FixtureTest):
         sess.flush()
         sess.commit()
         u1.addresses.append(Address(email_address='foo@bar.com'))
-        eq_(u1.addresses.order_by(Address.id).all(), 
+        eq_(u1.addresses.order_by(Address.id).all(),
                  [Address(email_address='lala@hoho.com'), Address(email_address='foo@bar.com')])
         sess.rollback()
         eq_(u1.addresses.all(), [Address(email_address='lala@hoho.com')])
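
These hunks run through the lazy="dynamic" tests. A dynamic relationship hands back a Query instead of loading the collection, so it can be filtered and ordered before any rows are fetched. A minimal sketch with illustrative names:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))
addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('users.id')),
    Column('email_address', String(50)))

class User(object): pass
class Address(object): pass

mapper(User, users, properties={
    'addresses': relationship(Address, lazy='dynamic')
})
mapper(Address, addresses)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

u = User(); u.name = 'ed'
sess.add(u)
a = Address(); a.email_address = 'ed@wood.com'
u.addresses.append(a)
sess.commit()

# u.addresses is a Query; filtering happens in SQL, not in memory
print(u.addresses.filter(Address.email_address.like('%wood%')).count())  # 1
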
index 182005d38d326e2adf789103b5dd1ebd182c8eb8..33088b5820e495121664ea727d514e8e3fc160bd 100644 (file)
@@ -129,11 +129,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         sess.flush()
         eq_(canary,
             ['init', 'before_insert',
-             'after_insert', 'expire', 'translate_row', 
+             'after_insert', 'expire', 'translate_row',
              'populate_instance', 'refresh',
              'append_result', 'translate_row', 'create_instance',
              'populate_instance', 'load', 'append_result',
-             'before_update', 'after_update', 'before_delete', 
+             'before_update', 'after_update', 'before_delete',
              'after_delete'])
 
     def test_merge(self):
@@ -226,10 +226,10 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         sess.add(k1)
         sess.flush()
         eq_(canary1,
-            ['init', 
+            ['init',
             'before_insert', 'after_insert'])
         eq_(canary2,
-            ['init', 
+            ['init',
             'before_insert', 'after_insert'])
 
         canary1[:]= []
@@ -468,7 +468,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         assert my_listener in s.dispatch.before_flush
 
     def test_sessionmaker_listen(self):
-        """test that listen can be applied to individual 
+        """test that listen can be applied to individual
         scoped_session() classes."""
 
         def my_listener_one(*arg, **kw):
@@ -568,16 +568,16 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
         mapper(User, users)
 
-        sess, canary = self._listener_fixture(autoflush=False, 
+        sess, canary = self._listener_fixture(autoflush=False,
                             autocommit=True, expire_on_commit=False)
 
         u = User(name='u1')
         sess.add(u)
         sess.flush()
         eq_(
-            canary, 
+            canary,
             [ 'before_attach', 'after_attach', 'before_flush', 'after_begin',
-            'after_flush', 'after_flush_postexec', 
+            'after_flush', 'after_flush_postexec',
             'before_commit', 'after_commit',]
         )
 
@@ -597,10 +597,10 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
             sess.commit
         )
         sess.rollback()
-        eq_(canary, ['before_attach', 'after_attach', 'before_commit', 'before_flush', 
-        'after_begin', 'after_flush', 'after_flush_postexec', 
-        'after_commit', 'before_attach', 'after_attach', 'before_commit', 
-        'before_flush', 'after_begin', 'after_rollback', 
+        eq_(canary, ['before_attach', 'after_attach', 'before_commit', 'before_flush',
+        'after_begin', 'after_flush', 'after_flush_postexec',
+        'after_commit', 'before_attach', 'after_attach', 'before_commit',
+        'before_flush', 'after_begin', 'after_rollback',
         'after_soft_rollback', 'after_soft_rollback'])
 
     def test_can_use_session_in_outer_rollback_hook(self):
@@ -760,7 +760,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         u = User(name='u1')
         sess.add(u)
         sess.flush()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [
                 User(name='another u1'),
                 User(name='u1')
@@ -768,7 +768,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         )
 
         sess.flush()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [
                 User(name='another u1'),
                 User(name='u1')
@@ -777,7 +777,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
         u.name='u2'
         sess.flush()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [
                 User(name='another u1'),
                 User(name='another u2'),
@@ -787,7 +787,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
         sess.delete(u)
         sess.flush()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [
                 User(name='another u1'),
             ]
@@ -808,14 +808,14 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         u = User(name='u1')
         sess.add(u)
         sess.flush()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [User(name='u1')]
         )
 
         sess.add(User(name='u2'))
         sess.flush()
         sess.expunge_all()
-        eq_(sess.query(User).order_by(User.name).all(), 
+        eq_(sess.query(User).order_by(User.name).all(),
             [
                 User(name='u1 modified'),
                 User(name='u2')
@@ -825,7 +825,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
 
 class MapperExtensionTest(_fixtures.FixtureTest):
-    """Superseded by MapperEventsTest - test backwards 
+    """Superseded by MapperEventsTest - test backwards
     compatibility of MapperExtension."""
 
     run_inserts = None
@@ -977,10 +977,10 @@ class MapperExtensionTest(_fixtures.FixtureTest):
         sess.add(k1)
         sess.flush()
         eq_(methods1,
-            ['instrument_class', 'init_instance', 
+            ['instrument_class', 'init_instance',
             'before_insert', 'after_insert'])
         eq_(methods2,
-            ['instrument_class', 'init_instance', 
+            ['instrument_class', 'init_instance',
             'before_insert', 'after_insert'])
 
         del methods1[:]
@@ -1060,7 +1060,7 @@ class MapperExtensionTest(_fixtures.FixtureTest):
 class AttributeExtensionTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table('t1', 
+        Table('t1',
             metadata,
             Column('id', Integer, primary_key=True),
             Column('type', String(40)),
@@ -1113,7 +1113,7 @@ class AttributeExtensionTest(fixtures.MappedTest):
         eq_(b1.data, 'ex1b2')
         eq_(c1.data, 'ex2c2')
 
-        eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'", 
+        eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'",
                     "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"])
 
 
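The hunks above thread through the mapper- and session-event tests, which record listener invocations into canary lists. A small sketch of the event.listen() pattern those tests exercise, with illustrative names; note that session events can be attached to a sessionmaker as a whole:

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, event
from sqlalchemy.orm import mapper, sessionmaker

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))

class User(object): pass
mapper(User, users)

canary = []

def before_flush(session, flush_context, instances):
    canary.append('before_flush')

def before_insert(mapper_, connection, target):
    canary.append('before_insert')

def after_insert(mapper_, connection, target):
    canary.append('after_insert')

engine = create_engine('sqlite://')
metadata.create_all(engine)
Session = sessionmaker(bind=engine)

event.listen(Session, 'before_flush', before_flush)   # on the factory
event.listen(User, 'before_insert', before_insert)    # on the mapped class
event.listen(User, 'after_insert', after_insert)

sess = Session()
u = User(); u.name = 'u1'
sess.add(u)
sess.flush()
print(canary)   # ['before_flush', 'before_insert', 'after_insert']
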
index c73306665c55321a23d7e58b31fa9e5a360a15eb..16ca8b0ba29ebe988249baf5a36a70f012ddf266 100644 (file)
@@ -67,7 +67,7 @@ class ExpireTest(_fixtures.FixtureTest):
         u = s.query(User).get(7)
         s.expunge_all()
 
-        assert_raises_message(sa_exc.InvalidRequestError, 
+        assert_raises_message(sa_exc.InvalidRequestError,
                         r"is not persistent within this Session", s.expire, u)
 
     def test_get_refreshes(self):
@@ -138,12 +138,12 @@ class ExpireTest(_fixtures.FixtureTest):
         s.rollback()
 
         assert u in s
-        # but now its back, rollback has occurred, the 
+        # but now its back, rollback has occurred, the
         # _remove_newly_deleted is reverted
         eq_(u.name, 'chuck')
 
     def test_deferred(self):
-        """test that unloaded, deferred attributes aren't included in the 
+        """test that unloaded, deferred attributes aren't included in the
         expiry list."""
 
         Order, orders = self.classes.Order, self.tables.orders
@@ -185,7 +185,7 @@ class ExpireTest(_fixtures.FixtureTest):
                                 self.classes.User)
 
         mapper(User, users, properties={
-            'addresses':relationship(Address, 
+            'addresses':relationship(Address,
                     order_by=addresses.c.email_address)
         })
         mapper(Address, addresses)
@@ -193,21 +193,21 @@ class ExpireTest(_fixtures.FixtureTest):
         u = s.query(User).get(8)
         adlist = u.addresses
         eq_(adlist, [
-            Address(email_address='ed@bettyboop.com'), 
+            Address(email_address='ed@bettyboop.com'),
             Address(email_address='ed@lala.com'),
-            Address(email_address='ed@wood.com'), 
+            Address(email_address='ed@wood.com'),
         ])
         a1 = u.addresses[2]
         a1.email_address = 'aaaaa'
         s.expire(u, ['addresses'])
         eq_(u.addresses, [
-            Address(email_address='aaaaa'), 
-            Address(email_address='ed@bettyboop.com'), 
+            Address(email_address='aaaaa'),
+            Address(email_address='ed@bettyboop.com'),
             Address(email_address='ed@lala.com'),
         ])
 
     def test_refresh_collection_exception(self):
-        """test graceful failure for currently unsupported 
+        """test graceful failure for currently unsupported
         immediate refresh of a collection"""
 
         users, Address, addresses, User = (self.tables.users,
@@ -222,12 +222,12 @@ class ExpireTest(_fixtures.FixtureTest):
         mapper(Address, addresses)
         s = create_session(autoflush=True, autocommit=False)
         u = s.query(User).get(8)
-        assert_raises_message(sa_exc.InvalidRequestError, 
-                        "properties specified for refresh", 
+        assert_raises_message(sa_exc.InvalidRequestError,
+                        "properties specified for refresh",
                         s.refresh, u, ['addresses'])
 
         # in contrast to a regular query with no columns
-        assert_raises_message(sa_exc.InvalidRequestError, 
+        assert_raises_message(sa_exc.InvalidRequestError,
                         "no columns with which to SELECT", s.query().all)
 
     def test_refresh_cancels_expire(self):
@@ -862,7 +862,7 @@ class ExpireTest(_fixtures.FixtureTest):
                                 self.classes.User)
 
         mapper(User, users, properties={
-            'addresses':relationship(Address, backref='user', lazy='joined', 
+            'addresses':relationship(Address, backref='user', lazy='joined',
                                     order_by=addresses.c.id),
             })
         mapper(Address, addresses)
@@ -941,7 +941,7 @@ class ExpireTest(_fixtures.FixtureTest):
         u1 = sess.query(User).options(undefer(User.name)).first()
         assert 'name' not in attributes.instance_state(u1).callables
 
-        # mass expire, the attribute was loaded, 
+        # mass expire, the attribute was loaded,
         # the attribute gets the callable
         sess.expire(u1)
         assert isinstance(
@@ -954,7 +954,7 @@ class ExpireTest(_fixtures.FixtureTest):
         assert 'name' not in attributes.instance_state(u1).callables
 
         # mass expire, attribute was loaded but then deleted,
-        # the callable goes away - the state wants to flip 
+        # the callable goes away - the state wants to flip
         # it back to its "deferred" loader.
         sess.expunge_all()
         u1 = sess.query(User).options(undefer(User.name)).first()
@@ -1164,7 +1164,7 @@ class ExpiredPendingTest(_fixtures.FixtureTest):
         # which attach to u1 will expect to be "pending"
         sess.expire(u1, ['addresses'])
 
-        # attach an Address.  now its "pending" 
+        # attach an Address.  now its "pending"
         # in user.addresses
         a2 = Address(email_address='a2')
         a2.user = u1
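
These hunks cross the expiration tests. Session.expire() marks attributes stale: pending in-memory changes on them are discarded and the next access re-SELECTs from the database. A tiny sketch, names illustrative:

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, sessionmaker

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))

class User(object): pass
mapper(User, users)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

u = User(); u.name = 'jack'
sess.add(u)
sess.commit()

u.name = 'ed'        # modified in memory, never flushed
sess.expire(u)       # discard the pending change, mark attributes stale
print(u.name)        # 'jack': re-SELECTed on access
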
index 3940c03dab710d936c67b901a7c3b66d337be1ca..73b9fb3b2f2efc5511df0aed7674a9a948b6dbad 100644 (file)
@@ -19,7 +19,7 @@ class ParentRemovalTest(fixtures.MappedTest):
     """Test that the 'hasparent' flag gets flipped to False
     only if we're sure this object is the real parent.
 
-    In ambiguous cases a stale data exception is 
+    In ambiguous cases a stale data exception is
     raised.
 
     """
@@ -120,8 +120,8 @@ class ParentRemovalTest(fixtures.MappedTest):
 
         u1 = s.query(User).first()
 
-        # primary key change.  now we 
-        # can't rely on state.key as the 
+        # primary key change.  now we
+        # can't rely on state.key as the
         # identifier.
         u1.id = 5
         a1.user_id = 5
@@ -146,7 +146,7 @@ class ParentRemovalTest(fixtures.MappedTest):
     def test_stale_state_negative_child_expired(self):
         """illustrate the current behavior of
         expiration on the child.
-        
+
         there's some uncertainty here in how
         this use case should work.
 
index 6c43a2f39840fabf43e1d8c0746283eea73556b0..086897186fc5620209b7fe0cdb8b5ee1760032ab 100644 (file)
@@ -57,7 +57,7 @@ class QueryTest(_fixtures.FixtureTest):
         mapper(Keyword, keywords)
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                 backref=backref('parent', remote_side=[nodes.c.id])
             )
         })
@@ -140,17 +140,17 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
 
         mapper(Machine, machines)
 
-        mapper(Person, people, 
-            polymorphic_on=people.c.type, 
-            polymorphic_identity='person', 
-            order_by=people.c.person_id, 
+        mapper(Person, people,
+            polymorphic_on=people.c.type,
+            polymorphic_identity='person',
+            order_by=people.c.person_id,
             properties={
                 'paperwork':relationship(Paperwork, order_by=paperwork.c.paperwork_id)
             })
         mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer', properties={
                 'machines':relationship(Machine, order_by=machines.c.machine_id)
             })
-        mapper(Manager, managers, 
+        mapper(Manager, managers,
                     inherits=Person, polymorphic_identity='manager')
         mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
         mapper(Paperwork, paperwork)
@@ -405,7 +405,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_multi_tuple_form(self):
-        """test the 'tuple' form of join, now superseded 
+        """test the 'tuple' form of join, now superseded
         by the two-element join() form.
 
         Not deprecating this style as of yet.
@@ -432,7 +432,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
 
         self.assert_compile(
             sess.query(User).join(
-                                (Order, User.id==Order.user_id), 
+                                (Order, User.id==Order.user_id),
                                 (Item, Order.items)),
             "SELECT users.id AS users_id, users.name AS users_name "
             "FROM users JOIN orders ON users.id = orders.user_id "
@@ -617,8 +617,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         for oalias,ialias in [(True, True), (False, False), (True, False), (False, True)]:
             eq_(
                 sess.query(User).join('orders', aliased=oalias).\
-                                join('items', 
-                                        from_joinpoint=True, 
+                                join('items',
+                                        from_joinpoint=True,
                                         aliased=ialias).\
                                 filter(Item.description == 'item 4').all(),
                 [User(name='jack')]
@@ -628,7 +628,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
             eq_(
                 sess.query(User).join('orders', aliased=oalias).\
                                 filter(Order.user_id==9).\
-                                join('items', from_joinpoint=True, 
+                                join('items', from_joinpoint=True,
                                             aliased=ialias).\
                                 filter(Item.description=='item 4').all(),
                 []
@@ -637,7 +637,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         orderalias = aliased(Order)
         itemalias = aliased(Item)
         eq_(
-            sess.query(User).join(orderalias, 'orders'). 
+            sess.query(User).join(orderalias, 'orders').
                                 join(itemalias, 'items', from_joinpoint=True).
                                 filter(itemalias.description == 'item 4').all(),
             [User(name='jack')]
@@ -692,7 +692,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
             sess.query(User).join, Address, Address.user,
         )
 
-        # but this one would silently fail 
+        # but this one would silently fail
         adalias = aliased(Address)
         assert_raises(
             sa_exc.InvalidRequestError,
@@ -848,7 +848,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         # be using the aliased flag in this way.
         self.assert_compile(
             sess.query(User).join(User.orders, aliased=True).
-                join(Item, 
+                join(Item,
                     and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
                     from_joinpoint=True, aliased=True
                 ),
@@ -862,7 +862,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         oalias = orders.select()
         self.assert_compile(
             sess.query(User).join(oalias, User.orders).
-                join(Item, 
+                join(Item,
                     and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
                     from_joinpoint=True
                 ),
@@ -938,7 +938,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
         eq_(
-            sess.query(User.name).join(Order, User.id==Order.user_id). 
+            sess.query(User.name).join(Order, User.id==Order.user_id).
                                 join(order_items, Order.id==order_items.c.order_id).
                                 join(Item, order_items.c.item_id==Item.id).
                                 filter(Item.description == 'item 4').all(),
@@ -1063,8 +1063,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
             sess.query(OrderAlias).join('items').filter_by(description='item 3').\
                 order_by(OrderAlias.id).all(),
             [
-                Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), 
-                Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), 
+                Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
+                Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2),
                 Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3)
             ]
         )
@@ -1076,8 +1076,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
                         filter_by(description='item 3').\
                 order_by(User.id, OrderAlias.id).all(),
             [
-                (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'), 
-                (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'), 
+                (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'),
+                (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'),
                 (User(name=u'fred',id=9), Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), u'item 3')
             ]
         )
@@ -1112,7 +1112,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         IAlias = aliased(Item)
         q = sess.query(Order, IAlias).select_from(join(Order, IAlias, 'items')).filter(IAlias.description=='item 3')
         l = q.all()
-        eq_(l, 
+        eq_(l,
             [
                 (order1, item3),
                 (order2, item3),
@@ -1235,10 +1235,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         eq_(
             sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(),
             [
-                (User(id=10,name=u'chuck'), User(id=9,name=u'fred')), 
-                (User(id=10,name=u'chuck'), User(id=8,name=u'ed')), 
-                (User(id=9,name=u'fred'), User(id=8,name=u'ed')), 
-                (User(id=10,name=u'chuck'), User(id=7,name=u'jack')), 
+                (User(id=10,name=u'chuck'), User(id=9,name=u'fred')),
+                (User(id=10,name=u'chuck'), User(id=8,name=u'ed')),
+                (User(id=9,name=u'fred'), User(id=8,name=u'ed')),
+                (User(id=10,name=u'chuck'), User(id=7,name=u'jack')),
                 (User(id=8,name=u'ed'), User(id=7,name=u'jack')),
                 (User(id=9,name=u'fred'), User(id=7,name=u'jack'))
             ]
@@ -1338,7 +1338,7 @@ class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('table1', metadata, 
+        Table('table1', metadata,
             Column('id', Integer, primary_key=True)
         )
         Table('table2', metadata,
@@ -1563,9 +1563,9 @@ class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
                 backref=backref('parent', remote_side=[nodes.c.id])
             ),
             'subs' : relationship(Sub),
-            'assoc':relationship(Node, 
-                            secondary=assoc_table, 
-                            primaryjoin=nodes.c.id==assoc_table.c.left_id, 
+            'assoc':relationship(Node,
+                            secondary=assoc_table,
+                            primaryjoin=nodes.c.id==assoc_table.c.left_id,
                             secondaryjoin=nodes.c.id==assoc_table.c.right_id)
         })
         mapper(Sub, sub_table)
@@ -1618,13 +1618,13 @@ class CreateJoinsTest(fixtures.ORMTest, AssertsCompiledSQL):
     def _inherits_fixture(self):
         m = MetaData()
         base = Table('base', m, Column('id', Integer, primary_key=True))
-        a = Table('a', m, 
+        a = Table('a', m,
                 Column('id', Integer, ForeignKey('base.id'), primary_key=True),
                 Column('b_id', Integer, ForeignKey('b.id')))
-        b = Table('b', m, 
+        b = Table('b', m,
                 Column('id', Integer, ForeignKey('base.id'), primary_key=True),
                 Column('c_id', Integer, ForeignKey('c.id')))
-        c = Table('c', m, 
+        c = Table('c', m,
                 Column('id', Integer, ForeignKey('base.id'), primary_key=True))
         class Base(object):
             pass
@@ -1809,7 +1809,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
                 filter(Node.data=='n122').filter(parent.data=='n12').\
                 filter(grandparent.data=='n1').from_self().limit(1)
 
-        # parent, grandparent *are* inside the from_self(), so they 
+        # parent, grandparent *are* inside the from_self(), so they
         # should get aliased to the outside.
         self.assert_compile(
             q,
@@ -1983,7 +1983,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
         sess = create_session()
         eq_(sess.query(Node).filter(Node.children.any(Node.data=='n1')).all(), [])
         eq_(sess.query(Node).filter(Node.children.any(Node.data=='n12')).all(), [Node(data='n1')])
-        eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(), 
+        eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(),
                 [Node(data='n11'), Node(data='n13'),Node(data='n121'),Node(data='n122'),Node(data='n123'),])
 
     def test_has(self):
@@ -1991,7 +1991,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
 
         sess = create_session()
 
-        eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(), 
+        eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(),
             [Node(data='n121'),Node(data='n122'),Node(data='n123')])
         eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n122')).all(), [])
         eq_(sess.query(Node).filter(~Node.parent.has()).all(), [Node(data='n1')])
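
The hunks above run through the self-referential query tests: joining a Node to its parent through an alias, plus the EXISTS-based any() and has() operators asserted just above. A runnable sketch of those idioms, names illustrative:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import aliased, backref, mapper, relationship, sessionmaker

metadata = MetaData()
nodes = Table('nodes', metadata,
    Column('id', Integer, primary_key=True),
    Column('parent_id', Integer, ForeignKey('nodes.id')),
    Column('data', String(30)))

class Node(object): pass

mapper(Node, nodes, properties={
    'children': relationship(Node,
        backref=backref('parent', remote_side=[nodes.c.id]))
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

n1 = Node(); n1.data = 'n1'
n12 = Node(); n12.data = 'n12'; n12.parent = n1
n121 = Node(); n121.data = 'n121'; n121.parent = n12
sess.add(n1)
sess.commit()

parent = aliased(Node)   # alias so the parent row can be filtered separately
q = sess.query(Node).join(parent, Node.parent).filter(parent.data == 'n12')
print([n.data for n in q])                                               # ['n121']

print([n.data for n in sess.query(Node).filter(~Node.children.any())])  # ['n121']
print([n.data for n in
       sess.query(Node).filter(Node.parent.has(Node.data == 'n1'))])     # ['n12']
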
index dd50dfa3dfb16dd82b7e37a0432c41fa5a0c3323..297d027f5db8c9d854208ca8e46b83cf1dc51aaf 100644 (file)
@@ -264,7 +264,7 @@ class LazyTest(_fixtures.FixtureTest):
                 User(id=8, address=Address(id=3)),
                 User(id=9, address=None),
                 User(id=10, address=None),
-            ], 
+            ],
             list(q)
         )
 
@@ -397,7 +397,7 @@ class LazyTest(_fixtures.FixtureTest):
             SomeDBInteger,
         ]:
             m = sa.MetaData()
-            users = Table('users', m, 
+            users = Table('users', m,
                 Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
                 Column('name', String(30), nullable=False),
             )
@@ -486,7 +486,7 @@ class LazyTest(_fixtures.FixtureTest):
         self.assert_sql_count(testing.db, go, 1)
 
 class GetterStateTest(_fixtures.FixtureTest):
-    """test lazyloader on non-existent attribute returns 
+    """test lazyloader on non-existent attribute returns
     expected attribute symbols, maintain expected state"""
 
     run_inserts = None
@@ -516,8 +516,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(False)
         eq_(
             Address.user.impl.get(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_RETURN_NEVER_SET),
             attributes.NEVER_SET
         )
@@ -528,8 +528,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(False)
         eq_(
             Address.user.impl.get_history(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_RETURN_NEVER_SET),
             ((), (), ())
         )
@@ -540,8 +540,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(False)
         eq_(
             Address.user.impl.get(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_NO_INITIALIZE),
             attributes.PASSIVE_NO_RESULT
         )
@@ -552,8 +552,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(False)
         eq_(
             Address.user.impl.get_history(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_NO_INITIALIZE),
             attributes.HISTORY_BLANK
         )
@@ -564,8 +564,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(True)
         eq_(
             Address.user.impl.get(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_NO_INITIALIZE),
             attributes.PASSIVE_NO_RESULT
         )
@@ -576,8 +576,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(True)
         eq_(
             Address.user.impl.get_history(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_NO_INITIALIZE),
             attributes.HISTORY_BLANK
         )
@@ -588,8 +588,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(True)
         eq_(
             Address.user.impl.get(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_RETURN_NEVER_SET),
             User(name='ed')
         )
@@ -598,8 +598,8 @@ class GetterStateTest(_fixtures.FixtureTest):
         User, Address, sess, a1 = self._u_ad_fixture(True)
         eq_(
             Address.user.impl.get_history(
-                attributes.instance_state(a1), 
-                attributes.instance_dict(a1), 
+                attributes.instance_state(a1),
+                attributes.instance_dict(a1),
                 passive=attributes.PASSIVE_RETURN_NEVER_SET),
             ((), [User(name='ed'), ], ())
         )
index be355808e1c665dab15b4f239ca792dd91ef3a5f..05b78ccb8785c2edb1e3f8dfa6b3b4f224f07106 100644 (file)
@@ -251,7 +251,7 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
                             #if manualflush and (not loadrel or fake_autoexpire):
                             #    # a flush occurs, we get p2
                             #    assert c1.parent is p2
-                            #elif not loadrel and not loadfk: 
+                            #elif not loadrel and not loadfk:
                             #    # problematically - we get None since committed state
                             #    # is empty when c1.parent_id was mutated, since we want
                             #    # to save on selects.  this is
index 9fc2ea0740a641223c8b990cd466ef5f8caa2fc9..b2bc608659bdb72c02b5c988d769dc7a47b9ab98 100644 (file)
@@ -43,8 +43,8 @@ class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         User = self.classes.User
         sess = Session()
         assert_raises_message(
-            Exception, "Unknown lockmode 'unknown_mode'", 
-            self.assert_compile, 
+            Exception, "Unknown lockmode 'unknown_mode'",
+            self.assert_compile,
             sess.query(User.id).with_lockmode('unknown_mode'), None,
             dialect=default.DefaultDialect()
         )
index db7f635658f26cc3b28dcaf6f15471b130521616..ed9075833bcb0059706d64e78dabba4d442e2ada 100644 (file)
@@ -107,7 +107,7 @@ class M2MTest(fixtures.MappedTest):
         mapper(Place, place, properties={
             'places': relationship(
                         Place,
-                        secondary=place_place, 
+                        secondary=place_place,
                         primaryjoin=place.c.place_id==place_place.c.pl1_id,
                         secondaryjoin=place.c.place_id==place_place.c.pl2_id,
                         order_by=place_place.c.pl2_id
@@ -162,7 +162,7 @@ class M2MTest(fixtures.MappedTest):
         mapper(Place, place, properties={
             'child_places': relationship(
                         Place,
-                        secondary=place_place, 
+                        secondary=place_place,
                         primaryjoin=place.c.place_id==place_place.c.pl1_id,
                         secondaryjoin=place.c.place_id==place_place.c.pl2_id,
                         order_by=place_place.c.pl2_id,
@@ -268,7 +268,7 @@ class M2MTest(fixtures.MappedTest):
                                 self.tables.transition)
 
         mapper(Place, place, properties={
-            'transitions':relationship(Transition, secondary=place_input, 
+            'transitions':relationship(Transition, secondary=place_input,
                                             passive_updates=False)
         })
         mapper(Transition, transition)
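
These hunks belong to the self-referential many-to-many tests: both sides of the association table point at the same table, so primaryjoin and secondaryjoin must be spelled out explicitly. A compact sketch, names illustrative:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker

metadata = MetaData()
place = Table('place', metadata,
    Column('place_id', Integer, primary_key=True),
    Column('name', String(30)))
place_place = Table('place_place', metadata,
    Column('pl1_id', Integer, ForeignKey('place.place_id'), primary_key=True),
    Column('pl2_id', Integer, ForeignKey('place.place_id'), primary_key=True))

class Place(object): pass

# the two join conditions disambiguate which FK is the "parent" side
mapper(Place, place, properties={
    'places': relationship(Place, secondary=place_place,
        primaryjoin=place.c.place_id == place_place.c.pl1_id,
        secondaryjoin=place.c.place_id == place_place.c.pl2_id)
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

p1, p2 = Place(), Place()
p1.places.append(p2)
sess.add(p1)
sess.commit()
print(len(sess.query(Place).get(p1.place_id).places))   # 1
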
index c45a0e9da277cd8257a24c3d1914b855c801c42f..47203b874e2b81c0f51855dd3f2465b49f9d5b8e 100644 (file)
@@ -48,7 +48,7 @@ class MergeTest(_fixtures.FixtureTest):
         eq_(sess.query(User).first(), User(id=7, name='fred'))
 
     def test_transient_to_pending_no_pk(self):
-        """test that a transient object with no PK attribute 
+        """test that a transient object with no PK attribute
         doesn't trigger a needless load."""
 
         User, users = self.classes.User, self.tables.users
@@ -219,7 +219,7 @@ class MergeTest(_fixtures.FixtureTest):
                 Address(id=3, email_address='fred3')])))
 
     def test_unsaved_cascade(self):
-        """Merge of a transient entity with two child transient 
+        """Merge of a transient entity with two child transient
         entities, with a bidirectional relationship."""
 
         users, Address, addresses, User = (self.tables.users,
@@ -271,7 +271,7 @@ class MergeTest(_fixtures.FixtureTest):
         sess = create_session()
 
         # merge empty stuff.  goes in as NULL.
-        # not sure what this was originally trying to 
+        # not sure what this was originally trying to
         # test.
         u1 = sess.merge(User(id=1))
         sess.flush()
@@ -309,7 +309,7 @@ class MergeTest(_fixtures.FixtureTest):
         sess.flush()
 
         # blow it away from u5, but don't
-        # mark as expired.  so it would just 
+        # mark as expired.  so it would just
         # be blank.
         del u5.data
 
@@ -349,7 +349,7 @@ class MergeTest(_fixtures.FixtureTest):
         assert u1.addresses.keys() == ['foo@bar.com']
 
     def test_attribute_cascade(self):
-        """Merge of a persistent entity with two child 
+        """Merge of a persistent entity with two child
         persistent entities."""
 
         users, Address, addresses, User = (self.tables.users,
@@ -359,7 +359,7 @@ class MergeTest(_fixtures.FixtureTest):
 
 
         mapper(User, users, properties={
-            'addresses':relationship(mapper(Address, addresses), 
+            'addresses':relationship(mapper(Address, addresses),
                         backref='user')
         })
         load = self.load_tracker(User)
@@ -557,7 +557,7 @@ class MergeTest(_fixtures.FixtureTest):
         sess2 = create_session()
         a2 = sess2.merge(a1)
         eq_(
-            attributes.get_history(a2, 'user'), 
+            attributes.get_history(a2, 'user'),
             ([u2], (), ())
         )
         assert a2 in sess2.dirty
@@ -567,7 +567,7 @@ class MergeTest(_fixtures.FixtureTest):
         sess2 = create_session()
         a2 = sess2.merge(a1, load=False)
         eq_(
-            attributes.get_history(a2, 'user'), 
+            attributes.get_history(a2, 'user'),
             ((), [u1], ())
         )
         assert a2 not in sess2.dirty
@@ -581,7 +581,7 @@ class MergeTest(_fixtures.FixtureTest):
 
 
         mapper(Order, orders, properties={
-            'items':relationship(mapper(Item, items), 
+            'items':relationship(mapper(Item, items),
                         secondary=order_items)})
 
         load = self.load_tracker(Order)
@@ -672,7 +672,7 @@ class MergeTest(_fixtures.FixtureTest):
                                 uselist = False, backref='user')
         })
         sess = sessionmaker()()
-        u = User(id=7, name="fred", 
+        u = User(id=7, name="fred",
                     address=Address(id=1, email_address='foo@bar.com'))
         sess.add(u)
         sess.commit()
@@ -696,12 +696,12 @@ class MergeTest(_fixtures.FixtureTest):
 
         sess = create_session()
         u = User()
-        assert_raises_message(sa.exc.InvalidRequestError, 
-                "load=False option does not support", 
+        assert_raises_message(sa.exc.InvalidRequestError,
+                "load=False option does not support",
                 sess.merge, u, load=False)
 
     def test_no_load_with_backrefs(self):
-        """load=False populates relationships in both 
+        """load=False populates relationships in both
         directions without requiring a load"""
 
         users, Address, addresses, User = (self.tables.users,
@@ -710,7 +710,7 @@ class MergeTest(_fixtures.FixtureTest):
                                 self.classes.User)
 
         mapper(User, users, properties={
-            'addresses':relationship(mapper(Address, addresses), 
+            'addresses':relationship(mapper(Address, addresses),
                                 backref='user')
         })
 
@@ -740,7 +740,7 @@ class MergeTest(_fixtures.FixtureTest):
 
     def test_dontload_with_eager(self):
         """
-        
+
         This test illustrates that with load=False, we can't just copy
         the committed_state of the merged instance over; since it
         references collection objects which themselves are to be merged.
@@ -749,7 +749,7 @@ class MergeTest(_fixtures.FixtureTest):
         moment I'd rather not support this use case; if you are merging
         with load=False, you're typically dealing with caching and the
         merged objects shouldnt be 'dirty'.
-        
+
         """
 
         users, Address, addresses, User = (self.tables.users,
@@ -852,7 +852,7 @@ class MergeTest(_fixtures.FixtureTest):
     def test_no_load_preserves_parents(self):
         """Merge with load=False does not trigger a 'delete-orphan'
         operation.
-        
+
         merge with load=False sets attributes without using events.
         this means the 'hasparent' flag is not propagated to the newly
         merged instance. in fact this works out OK, because the
@@ -861,7 +861,7 @@ class MergeTest(_fixtures.FixtureTest):
         this collection when _is_orphan() is called, it does not count
         as an orphan (i.e. this is the 'optimistic' logic in
         mapper._is_orphan().)
-        
+
         """
 
         users, Address, addresses, User = (self.tables.users,
@@ -871,7 +871,7 @@ class MergeTest(_fixtures.FixtureTest):
 
         mapper(User, users, properties={
             'addresses':relationship(mapper(Address, addresses),
-                                 backref='user', 
+                                 backref='user',
                                  cascade="all, delete-orphan")})
         sess = create_session()
         u = User()
@@ -972,7 +972,7 @@ class MergeTest(_fixtures.FixtureTest):
 
         a1 = Address(user=s.merge(User(id=1, name='ed')), email_address='x')
         before_id = id(a1.user)
-        a2 = Address(user=s.merge(User(id=1, name='jack')), 
+        a2 = Address(user=s.merge(User(id=1, name='jack')),
                             email_address='x')
         after_id = id(a1.user)
         other_id = id(a2.user)
@@ -991,7 +991,7 @@ class MergeTest(_fixtures.FixtureTest):
         m = mapper(User, users, properties={
             'addresses':relationship(mapper(Address, addresses),
                                 backref='user')})
-        user = User(id=8, name='fred', 
+        user = User(id=8, name='fred',
                         addresses=[Address(email_address='user')])
         merged_user = sess.merge(user)
         assert merged_user in sess.new
@@ -1104,7 +1104,7 @@ class MergeTest(_fixtures.FixtureTest):
 
 
 class M2ONoUseGetLoadingTest(fixtures.MappedTest):
-    """Merge a one-to-many.  The many-to-one on the other side is set up 
+    """Merge a one-to-many.  The many-to-one on the other side is set up
     so that use_get is False.   See if skipping the "m2o" merge
     vs. doing it saves on SQL calls.
 
@@ -1113,12 +1113,12 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('user', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('name', String(50)),
         )
         Table('address', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('user_id', Integer, ForeignKey('user.id')),
             Column('email', String(50)),
@@ -1137,11 +1137,11 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         user, address = cls.tables.user, cls.tables.address
         mapper(User, user, properties={
             'addresses':relationship(Address, backref=
-                    backref('user', 
+                    backref('user',
                         # needlessly complex primaryjoin so that the
                         # use_get flag is False
                         primaryjoin=and_(
-                                user.c.id==address.c.user_id, 
+                                user.c.id==address.c.user_id,
                                 user.c.id==user.c.id
                            )
                     )
@@ -1156,20 +1156,20 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         User, Address = cls.classes.User, cls.classes.Address
         s = Session()
         s.add_all([
-            User(id=1, name='u1', addresses=[Address(id=1, email='a1'), 
+            User(id=1, name='u1', addresses=[Address(id=1, email='a1'),
                                         Address(id=2, email='a2')])
         ])
         s.commit()
 
     # "persistent" - we get at an Address that was already present.
-    # With the "skip bidirectional" check removed, the "set" emits SQL 
+    # With the "skip bidirectional" check removed, the "set" emits SQL
     # for the "previous" version in any case,
     # address.user_id is 1, you get a load.
     def test_persistent_access_none(self):
         User, Address = self.classes.User, self.classes.Address
         s = Session()
         def go():
-            u1 = User(id=1, 
+            u1 = User(id=1,
                 addresses =[Address(id=1), Address(id=2)]
             )
             u2 = s.merge(u1)
@@ -1179,7 +1179,7 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         User, Address = self.classes.User, self.classes.Address
         s = Session()
         def go():
-            u1 = User(id=1, 
+            u1 = User(id=1,
                 addresses =[Address(id=1), Address(id=2)]
             )
             u2 = s.merge(u1)
@@ -1191,7 +1191,7 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         User, Address = self.classes.User, self.classes.Address
         s = Session()
         def go():
-            u1 = User(id=1, 
+            u1 = User(id=1,
                 addresses =[Address(id=1), Address(id=2)]
             )
             u2 = s.merge(u1)
@@ -1210,8 +1210,8 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         User, Address = self.classes.User, self.classes.Address
         s = Session()
         def go():
-            u1 = User(id=1, 
-                addresses =[Address(id=1), Address(id=2), 
+            u1 = User(id=1,
+                addresses =[Address(id=1), Address(id=2),
                                 Address(id=3, email='a3')]
             )
             u2 = s.merge(u1)
@@ -1223,8 +1223,8 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
         User, Address = self.classes.User, self.classes.Address
         s = Session()
         def go():
-            u1 = User(id=1, 
-                addresses =[Address(id=1), Address(id=2), 
+            u1 = User(id=1,
+                addresses =[Address(id=1), Address(id=2),
                                 Address(id=3, email='a3')]
             )
             u2 = s.merge(u1)
@@ -1237,8 +1237,8 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest):
 class MutableMergeTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table("data", metadata, 
-            Column('id', Integer, primary_key=True, 
+        Table("data", metadata,
+            Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('data', PickleType(comparator=operator.eq))
         )
@@ -1265,7 +1265,7 @@ class MutableMergeTest(fixtures.MappedTest):
 class CompositeNullPksTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table("data", metadata, 
+        Table("data", metadata,
             Column('pk1', String(10), primary_key=True),
             Column('pk2', String(10), primary_key=True),
         )
@@ -1334,7 +1334,7 @@ class LoadOnPendingTest(fixtures.MappedTest):
         r = self.classes.Rock(id=0, description='moldy')
         r.bug = bug
         m = self.sess.merge(r)
-        # we've already passed ticket #2374 problem since merge() returned, 
+        # we've already passed ticket #2374 problem since merge() returned,
         # but for good measure:
         assert m is not r
         eq_(m,r)
@@ -1354,13 +1354,13 @@ class LoadOnPendingTest(fixtures.MappedTest):
         self._merge_delete_orphan_o2o_with(self.classes.Bug(id=1))
 
 class PolymorphicOnTest(fixtures.MappedTest):
-    """Test merge() of polymorphic object when polymorphic_on 
+    """Test merge() of polymorphic object when polymorphic_on
     isn't a Column"""
 
     @classmethod
     def define_tables(cls, metadata):
         Table('employees', metadata,
-            Column('employee_id', Integer, primary_key=True, 
+            Column('employee_id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('type', String(1), nullable=False),
             Column('data', String(50)),
@@ -1376,9 +1376,9 @@ class PolymorphicOnTest(fixtures.MappedTest):
             pass
 
     def _setup_polymorphic_on_mappers(self):
-        employee_mapper = mapper(self.classes.Employee, 
+        employee_mapper = mapper(self.classes.Employee,
             self.tables.employees,
-            polymorphic_on=case(value=self.tables.employees.c.type, 
+            polymorphic_on=case(value=self.tables.employees.c.type,
                 whens={
                     'E': 'employee',
                     'M': 'manager',
@@ -1398,7 +1398,7 @@ class PolymorphicOnTest(fixtures.MappedTest):
         """
         self._setup_polymorphic_on_mappers()
 
-        m = self.classes.Manager(employee_id=55, type='M', 
+        m = self.classes.Manager(employee_id=55, type='M',
                                 data='original data')
         self.sess.add(m)
         self.sess.commit()
@@ -1407,7 +1407,7 @@ class PolymorphicOnTest(fixtures.MappedTest):
         m = self.classes.Manager(employee_id=55, data='updated data')
         merged = self.sess.merge(m)
 
-        # we've already passed ticket #2449 problem since 
+        # we've already passed ticket #2449 problem since
         # merge() returned, but for good measure:
         assert m is not merged
         eq_(m,merged)
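
The MergeTest hunks above all revolve around one contract: Session.merge() never adopts the instance it is given; it copies state onto a copy owned by the Session (loading it if needed) and returns that copy. A small sketch of that behavior, in declarative form with illustrative names:

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    sess = sessionmaker(bind=engine)()

    sess.add(User(id=7, name='fred'))
    sess.commit()

    # a transient object carrying an existing PK: merge() locates the
    # persistent row, copies 'name' onto it, and returns that copy
    u = User(id=7, name='freddy')
    merged = sess.merge(u)
    assert merged is not u and merged in sess
    sess.commit()
    assert sess.query(User).get(7).name == 'freddy'
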
index 8f4f4e6962a9aa6cc19ce7084ff1f54a59e2e8d6..f52fe3a8c5ca28d2813b2772003c63ef3540e845 100644
@@ -271,7 +271,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
             cls.classes.SubJob
         return [
             ParentThing(
-                container=DataContainer(name="d1", 
+                container=DataContainer(name="d1",
                     jobs=[
                         SubJob(attr="s1"),
                         SubJob(attr="s2")
@@ -385,7 +385,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
         q = s.query(ParentThing).\
                         options(
                             subqueryload_all(
-                                ParentThing.container, 
+                                ParentThing.container,
                                 DataContainer.jobs.of_type(SubJob)
                         ))
         def go():
@@ -405,7 +405,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
         q = s.query(ParentThing).\
                         options(
                             joinedload_all(
-                                ParentThing.container, 
+                                ParentThing.container,
                                 DataContainer.jobs.of_type(SubJob)
                         ))
         def go():
@@ -430,7 +430,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                             DataContainer.jobs.of_type(Job_P).\
                                 any(Job_P.id < Job.id)
                         )
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT job.id AS job_id, job.type AS job_type, "
             "job.container_id "
             "AS job_container_id "
@@ -459,7 +459,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                             DataContainer.jobs.of_type(Job_A).\
                                 any(and_(Job_A.id < Job.id, Job_A.type=='fred'))
                         )
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT job.id AS job_id, job.type AS job_type, "
             "job.container_id AS job_container_id "
             "FROM data_container JOIN job ON data_container.id = job.container_id "
@@ -480,7 +480,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
         s = Session()
         q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT data_container.id AS data_container_id, "
             "data_container.name AS data_container_name "
             "FROM data_container JOIN (SELECT job.id AS job_id, "
@@ -498,12 +498,12 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
         s = Session()
         q = s.query(DataContainer).join(DataContainer.jobs.of_type(SubJob))
-        # note the of_type() here renders JOIN for the Job->SubJob. 
+        # note the of_type() here renders JOIN for the Job->SubJob.
         # this is because it's using the SubJob mapper directly within
         # query.join().  When we do joinedload() etc., we're instead
         # doing a with_polymorphic(), and there we need the join to be
         # outer by default.
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT data_container.id AS data_container_id, "
             "data_container.name AS data_container_name "
             "FROM data_container JOIN (SELECT job.id AS job_id, "
@@ -524,7 +524,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
         s = Session()
         q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT data_container.id AS data_container_id, "
             "data_container.name AS data_container_name "
             "FROM data_container JOIN (SELECT job.id AS job_id, "
@@ -544,7 +544,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
         s = Session()
         q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_A))
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT data_container.id AS data_container_id, "
             "data_container.name AS data_container_name "
             "FROM data_container JOIN job AS job_1 "
@@ -561,7 +561,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
         s = Session()
         q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
-        self.assert_compile(q, 
+        self.assert_compile(q,
             "SELECT data_container.id AS data_container_id, "
             "data_container.name AS data_container_name "
             "FROM data_container JOIN (SELECT job.id AS job_id, "
index bb5bca984d3c032810ab7a14346009ac03558719..88867edcba900613ddf6159248d870d58925f025 100644
@@ -185,7 +185,7 @@ class PickleTest(fixtures.MappedTest):
         sess = Session()
         u1 = User(name='ed', addresses=[
                         Address(
-                            email_address='ed@bar.com', 
+                            email_address='ed@bar.com',
                         )
                 ])
 
@@ -219,7 +219,7 @@ class PickleTest(fixtures.MappedTest):
         sess.expunge_all()
 
         u1 = sess.query(User).\
-                options(sa.orm.defer('name'), 
+                options(sa.orm.defer('name'),
                         sa.orm.defer('addresses.email_address')).\
                         get(u1.id)
         assert 'name' not in u1.__dict__
@@ -305,16 +305,16 @@ class PickleTest(fixtures.MappedTest):
         u2 = pickle.loads(pickle.dumps(u1))
 
     def test_collection_setstate(self):
-        """test a particular cycle that requires CollectionAdapter 
+        """test a particular cycle that requires CollectionAdapter
         to not rely upon InstanceState to deserialize."""
 
         m = MetaData()
-        c1 = Table('c1', m, 
-            Column('parent_id', String, 
+        c1 = Table('c1', m,
+            Column('parent_id', String,
                         ForeignKey('p.id'), primary_key=True)
         )
         c2 = Table('c2', m,
-            Column('parent_id', String, 
+            Column('parent_id', String,
                         ForeignKey('p.id'), primary_key=True)
         )
         p = Table('p', m,
@@ -354,7 +354,7 @@ class PickleTest(fixtures.MappedTest):
 
         mapper(User, users, properties={
             'addresses':relationship(
-                            Address, 
+                            Address,
                             collection_class=
                             attribute_mapped_collection('email_address')
                         )
@@ -365,7 +365,7 @@ class PickleTest(fixtures.MappedTest):
         for loads, dumps in picklers():
             repickled = loads(dumps(u1))
             eq_(u1.addresses, repickled.addresses)
-            eq_(repickled.addresses['email1'], 
+            eq_(repickled.addresses['email1'],
                     Address(email_address="email1"))
 
     def test_column_mapped_collection(self):
@@ -373,7 +373,7 @@ class PickleTest(fixtures.MappedTest):
 
         mapper(User, users, properties={
             'addresses':relationship(
-                            Address, 
+                            Address,
                             collection_class=
                             column_mapped_collection(
                                 addresses.c.email_address)
@@ -388,7 +388,7 @@ class PickleTest(fixtures.MappedTest):
         for loads, dumps in picklers():
             repickled = loads(dumps(u1))
             eq_(u1.addresses, repickled.addresses)
-            eq_(repickled.addresses['email1'], 
+            eq_(repickled.addresses['email1'],
                     Address(email_address="email1"))
 
     def test_composite_column_mapped_collection(self):
@@ -396,7 +396,7 @@ class PickleTest(fixtures.MappedTest):
 
         mapper(User, users, properties={
             'addresses':relationship(
-                            Address, 
+                            Address,
                             collection_class=
                             column_mapped_collection([
                                 addresses.c.id,
@@ -412,7 +412,7 @@ class PickleTest(fixtures.MappedTest):
         for loads, dumps in picklers():
             repickled = loads(dumps(u1))
             eq_(u1.addresses, repickled.addresses)
-            eq_(repickled.addresses[(1, 'email1')], 
+            eq_(repickled.addresses[(1, 'email1')],
                     Address(id=1, email_address="email1"))
 
 class PolymorphicDeferredTest(fixtures.MappedTest):
@@ -536,7 +536,7 @@ class CustomSetupTeardownTest(fixtures.MappedTest):
               test_needs_fk=True
         )
     def test_rebuild_state(self):
-        """not much of a 'test', but illustrate how to 
+        """not much of a 'test', but illustrate how to
         remove instance-level state before pickling.
 
         """
index 3f43762be6dfcb5c9ab08ed5e316272dc6153814..f5fa1d4c9748a6247943006046cbaf4914d0be77 100644
@@ -66,23 +66,23 @@ class _JoinFixtures(object):
             Column('sub_id', Integer, ForeignKey('rel_sub.id'))
         )
         cls.rel_sub = Table('rel_sub', m,
-            Column('id', Integer, ForeignKey('base_w_sub_rel.id'), 
+            Column('id', Integer, ForeignKey('base_w_sub_rel.id'),
                                 primary_key=True)
         )
         cls.base = Table('base', m,
             Column('id', Integer, primary_key=True),
         )
         cls.sub = Table('sub', m,
-            Column('id', Integer, ForeignKey('base.id'), 
+            Column('id', Integer, ForeignKey('base.id'),
                                 primary_key=True),
         )
         cls.sub_w_base_rel = Table('sub_w_base_rel', m,
-            Column('id', Integer, ForeignKey('base.id'), 
+            Column('id', Integer, ForeignKey('base.id'),
                                 primary_key=True),
             Column('base_id', Integer, ForeignKey('base.id'))
         )
-        cls.sub_w_sub_rel = Table('sub_w_sub_rel', m, 
-            Column('id', Integer, ForeignKey('base.id'), 
+        cls.sub_w_sub_rel = Table('sub_w_sub_rel', m,
+            Column('id', Integer, ForeignKey('base.id'),
                                 primary_key=True),
             Column('sub_id', Integer, ForeignKey('sub.id'))
         )
@@ -91,14 +91,14 @@ class _JoinFixtures(object):
             Column('base_id', Integer, ForeignKey('base.id'))
         )
 
-        cls.three_tab_a = Table('three_tab_a', m, 
+        cls.three_tab_a = Table('three_tab_a', m,
             Column('id', Integer, primary_key=True),
         )
-        cls.three_tab_b = Table('three_tab_b', m, 
+        cls.three_tab_b = Table('three_tab_b', m,
             Column('id', Integer, primary_key=True),
             Column('aid', Integer, ForeignKey('three_tab_a.id'))
         )
-        cls.three_tab_c = Table('three_tab_c', m, 
+        cls.three_tab_c = Table('three_tab_c', m,
             Column('id', Integer, primary_key=True),
             Column('aid', Integer, ForeignKey('three_tab_a.id')),
             Column('bid', Integer, ForeignKey('three_tab_b.id'))
@@ -112,9 +112,9 @@ class _JoinFixtures(object):
             else:
                 return True
         return relationships.JoinCondition(
-            self.three_tab_a, 
+            self.three_tab_a,
             self.three_tab_b,
-            self.three_tab_a, 
+            self.three_tab_a,
             self.three_tab_b,
             support_sync=False,
             can_be_synced_fn=_can_sync,
@@ -127,9 +127,9 @@ class _JoinFixtures(object):
 
     def _join_fixture_m2m(self, **kw):
         return relationships.JoinCondition(
-                    self.m2mleft, 
-                    self.m2mright, 
-                    self.m2mleft, 
+                    self.m2mleft,
+                    self.m2mright,
+                    self.m2mleft,
                     self.m2mright,
                     secondary=self.m2msecondary,
                     **kw
@@ -137,17 +137,17 @@ class _JoinFixtures(object):
 
     def _join_fixture_o2m(self, **kw):
         return relationships.JoinCondition(
-                    self.left, 
-                    self.right, 
-                    self.left, 
+                    self.left,
+                    self.right,
+                    self.left,
                     self.right,
                     **kw
                 )
 
     def _join_fixture_m2o(self, **kw):
         return relationships.JoinCondition(
-                    self.right, 
-                    self.left, 
+                    self.right,
+                    self.left,
                     self.right,
                     self.left,
                     **kw
@@ -187,7 +187,7 @@ class _JoinFixtures(object):
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
-            remote_side=set([self.composite_selfref.c.id, 
+            remote_side=set([self.composite_selfref.c.id,
                             self.composite_selfref.c.group_id]),
             **kw
         )
@@ -278,7 +278,7 @@ class _JoinFixtures(object):
         )
 
     def _join_fixture_o2m_joined_sub_to_base(self, **kw):
-        left = self.base.join(self.sub_w_base_rel, 
+        left = self.base.join(self.sub_w_base_rel,
                         self.base.c.id==self.sub_w_base_rel.c.id)
         return relationships.JoinCondition(
             left,
@@ -290,12 +290,12 @@ class _JoinFixtures(object):
 
     def _join_fixture_m2o_joined_sub_to_sub_on_base(self, **kw):
         # this is a late add - a variant of the test case
-        # in #2491 where we join on the base cols instead.  only 
+        # in #2491 where we join on the base cols instead.  only
         # m2o has a problem at the time of this test.
         left = self.base.join(self.sub, self.base.c.id==self.sub.c.id)
         right = self.base.join(self.sub_w_base_rel, self.base.c.id==self.sub_w_base_rel.c.id)
         return relationships.JoinCondition(
-            left, 
+            left,
             right,
             self.sub,
             self.sub_w_base_rel,
@@ -315,7 +315,7 @@ class _JoinFixtures(object):
 
     def _join_fixture_m2o_sub_to_joined_sub(self, **kw):
         # see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
-        right = self.base.join(self.right_w_base_rel, 
+        right = self.base.join(self.right_w_base_rel,
                         self.base.c.id==self.right_w_base_rel.c.id)
         return relationships.JoinCondition(
             self.right_w_base_rel,
@@ -326,7 +326,7 @@ class _JoinFixtures(object):
 
     def _join_fixture_m2o_sub_to_joined_sub_func(self, **kw):
         # see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
-        right = self.base.join(self.right_w_base_rel, 
+        right = self.base.join(self.right_w_base_rel,
                         self.base.c.id==self.right_w_base_rel.c.id)
         return relationships.JoinCondition(
             self.right_w_base_rel,
@@ -338,22 +338,22 @@ class _JoinFixtures(object):
         )
 
     def _join_fixture_o2o_joined_sub_to_base(self, **kw):
-        left = self.base.join(self.sub, 
+        left = self.base.join(self.sub,
                         self.base.c.id==self.sub.c.id)
 
         # see test_relationships->AmbiguousJoinInterpretedAsSelfRef
         return relationships.JoinCondition(
             left,
             self.sub,
-            left, 
+            left,
             self.sub,
         )
 
     def _join_fixture_o2m_to_annotated_func(self, **kw):
         return relationships.JoinCondition(
-                    self.left, 
-                    self.right, 
-                    self.left, 
+                    self.left,
+                    self.right,
+                    self.left,
                     self.right,
                     primaryjoin=self.left.c.id==
                         foreign(func.foo(self.right.c.lid)),
@@ -362,9 +362,9 @@ class _JoinFixtures(object):
 
     def _join_fixture_o2m_to_oldstyle_func(self, **kw):
         return relationships.JoinCondition(
-                    self.left, 
-                    self.right, 
-                    self.left, 
+                    self.left,
+                    self.right,
+                    self.left,
                     self.right,
                     primaryjoin=self.left.c.id==
                         func.foo(self.right.c.lid),
@@ -382,10 +382,10 @@ class _JoinFixtures(object):
             fn
         )
 
-    def _assert_raises_no_relevant_fks(self, fn, expr, relname, 
+    def _assert_raises_no_relevant_fks(self, fn, expr, relname,
         primary, *arg, **kw):
         assert_raises_message(
-            exc.ArgumentError, 
+            exc.ArgumentError,
             r"Could not locate any relevant foreign key columns "
             r"for %s join condition '%s' on relationship %s.  "
             r"Ensure that referencing columns are associated with "
@@ -397,10 +397,10 @@ class _JoinFixtures(object):
             fn, *arg, **kw
         )
 
-    def _assert_raises_no_equality(self, fn, expr, relname, 
+    def _assert_raises_no_equality(self, fn, expr, relname,
         primary, *arg, **kw):
         assert_raises_message(
-            sa.exc.ArgumentError, 
+            sa.exc.ArgumentError,
             "Could not locate any simple equality expressions "
             "involving locally mapped foreign key columns for %s join "
             "condition '%s' on relationship %s.  "
@@ -434,7 +434,7 @@ class _JoinFixtures(object):
                 exc.AmbiguousForeignKeysError,
                 "Could not determine join condition between "
                 "parent/child tables on relationship %s - "
-                "there are no foreign keys linking these tables.  " 
+                "there are no foreign keys linking these tables.  "
                 % (relname,),
                 fn, *arg, **kw)
 
@@ -544,8 +544,8 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
         eq_(
             joincond.local_remote_pairs,
             [
-                (self.left.c.x, self.right.c.x), 
-                (self.left.c.x, self.right.c.y), 
+                (self.left.c.x, self.right.c.x),
+                (self.left.c.x, self.right.c.y),
                 (self.left.c.y, self.right.c.x),
                 (self.left.c.y, self.right.c.y)
             ]
@@ -557,8 +557,8 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
         eq_(
             joincond.local_remote_pairs,
             [
-                (self.left.c.x, self.right.c.x), 
-                (self.left.c.x, self.right.c.y), 
+                (self.left.c.x, self.right.c.x),
+                (self.left.c.x, self.right.c.y),
                 (self.left.c.y, self.right.c.x),
                 (self.left.c.y, self.right.c.y)
             ]
@@ -640,7 +640,7 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
         joincond = self._join_fixture_m2o_composite_selfref()
         eq_(
             joincond.remote_columns,
-            set([self.composite_selfref.c.id, 
+            set([self.composite_selfref.c.id,
                 self.composite_selfref.c.group_id])
         )
 
@@ -683,7 +683,7 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
         joincond = self._join_fixture_m2m()
         eq_(
             joincond.local_remote_pairs,
-            [(self.m2mleft.c.id, self.m2msecondary.c.lid), 
+            [(self.m2mleft.c.id, self.m2msecondary.c.lid),
             (self.m2mright.c.id, self.m2msecondary.c.rid)]
         )
 
@@ -695,7 +695,7 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
         )
         eq_(
             joincond.local_remote_pairs,
-            [(self.m2mleft.c.id, self.m2msecondary.c.lid), 
+            [(self.m2mleft.c.id, self.m2msecondary.c.lid),
             (self.m2mright.c.id, self.m2msecondary.c.rid)]
         )
 
@@ -809,20 +809,20 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
             "should be counted as containing a foreign "
             "key reference to the parent table.",
             relationships.JoinCondition,
-                    self.left, 
-                    self.right_multi_fk, 
-                    self.left, 
-                    self.right_multi_fk, 
+                    self.left,
+                    self.right_multi_fk,
+                    self.left,
+                    self.right_multi_fk,
         )
 
     def test_determine_join_no_fks_o2m(self):
         self._assert_raises_no_join(
             relationships.JoinCondition,
             "None", None,
-                    self.left, 
-                    self.selfref, 
-                    self.left, 
-                    self.selfref, 
+                    self.left,
+                    self.selfref,
+                    self.left,
+                    self.selfref,
         )
 
 
@@ -831,10 +831,10 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         self._assert_raises_ambig_join(
             relationships.JoinCondition,
             "None", self.m2msecondary_ambig_fks,
-            self.m2mleft, 
-            self.m2mright, 
-            self.m2mleft, 
-            self.m2mright, 
+            self.m2mleft,
+            self.m2mright,
+            self.m2mleft,
+            self.m2mright,
             secondary=self.m2msecondary_ambig_fks
         )
 
@@ -842,22 +842,22 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         self._assert_raises_no_join(
             relationships.JoinCondition,
             "None", self.m2msecondary_no_fks,
-                    self.m2mleft, 
-                    self.m2mright, 
-                    self.m2mleft, 
-                    self.m2mright, 
+                    self.m2mleft,
+                    self.m2mright,
+                    self.m2mleft,
+                    self.m2mright,
                     secondary=self.m2msecondary_no_fks
         )
 
     def _join_fixture_fks_ambig_m2m(self):
         return relationships.JoinCondition(
-                    self.m2mleft, 
-                    self.m2mright, 
-                    self.m2mleft, 
-                    self.m2mright, 
+                    self.m2mleft,
+                    self.m2mright,
+                    self.m2mleft,
+                    self.m2mright,
                     secondary=self.m2msecondary_ambig_fks,
                     consider_as_foreign_keys=[
-                        self.m2msecondary_ambig_fks.c.lid1, 
+                        self.m2msecondary_ambig_fks.c.lid1,
                         self.m2msecondary_ambig_fks.c.rid1]
         )
 
@@ -879,8 +879,8 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         joincond = self._join_fixture_o2m_selfref()
         left = select([joincond.parent_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    left, 
-                                    joincond.child_selectable, 
+                                    left,
+                                    joincond.child_selectable,
                                     True)
         self.assert_compile(
             pj, "pj.id = selfref.sid"
@@ -888,8 +888,8 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
 
         right = select([joincond.child_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    joincond.parent_selectable, 
-                                    right, 
+                                    joincond.parent_selectable,
+                                    right,
                                     True)
         self.assert_compile(
             pj, "selfref.id = pj.sid"
@@ -899,8 +899,8 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
     def test_join_targets_o2m_plain(self):
         joincond = self._join_fixture_o2m()
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    joincond.parent_selectable, 
-                                    joincond.child_selectable, 
+                                    joincond.parent_selectable,
+                                    joincond.child_selectable,
                                     False)
         self.assert_compile(
             pj, "lft.id = rgt.lid"
@@ -910,8 +910,8 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         joincond = self._join_fixture_o2m()
         left = select([joincond.parent_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    left, 
-                                    joincond.child_selectable, 
+                                    left,
+                                    joincond.child_selectable,
                                     True)
         self.assert_compile(
             pj, "pj.id = rgt.lid"
@@ -921,8 +921,8 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         joincond = self._join_fixture_o2m()
         right = select([joincond.child_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    joincond.parent_selectable, 
-                                    right, 
+                                    joincond.parent_selectable,
+                                    right,
                                     True)
         self.assert_compile(
             pj, "lft.id = pj.lid"
@@ -932,11 +932,11 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         joincond = self._join_fixture_o2m_composite_selfref()
         right = select([joincond.child_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    joincond.parent_selectable, 
-                                    right, 
+                                    joincond.parent_selectable,
+                                    right,
                                     True)
         self.assert_compile(
-            pj, 
+            pj,
             "pj.group_id = composite_selfref.group_id "
             "AND composite_selfref.id = pj.parent_id"
         )
@@ -945,11 +945,11 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         joincond = self._join_fixture_m2o_composite_selfref()
         right = select([joincond.child_selectable]).alias('pj')
         pj, sj, sec, adapter, ds = joincond.join_targets(
-                                    joincond.parent_selectable, 
-                                    right, 
+                                    joincond.parent_selectable,
+                                    right,
                                     True)
         self.assert_compile(
-            pj, 
+            pj,
             "pj.group_id = composite_selfref.group_id "
             "AND pj.id = composite_selfref.parent_id"
         )
@@ -966,7 +966,7 @@ class LazyClauseTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
     def _test_lazy_clause_o2m_reverse(self):
         joincond = self._join_fixture_o2m()
         self.assert_compile(
-            relationships.create_lazy_clause(joincond, 
+            relationships.create_lazy_clause(joincond,
                                 reverse_direction=True),
             ""
         )
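
The _JoinFixtures hunks above drive relationships.JoinCondition, an ORM internal, directly. The user-facing counterpart those fixtures model is a relationship() whose custom primaryjoin is annotated with foreign(), letting the ORM work out direction without a schema-level ForeignKey. A hedged sketch of that pattern (the late-bound Left.rights assignment and all names are illustrative, not taken from the test suite):

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, sessionmaker, foreign

    Base = declarative_base()

    class Left(Base):
        __tablename__ = 'lft'
        id = Column(Integer, primary_key=True)

    class Right(Base):
        __tablename__ = 'rgt'
        id = Column(Integer, primary_key=True)
        lid = Column(Integer)   # deliberately no ForeignKey

    # foreign() marks Right.lid as the foreign side of the join condition
    Left.rights = relationship(
        Right,
        primaryjoin=Left.id == foreign(Right.lid),
        viewonly=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    sess = sessionmaker(bind=engine)()
    sess.add_all([Left(id=1), Right(id=1, lid=1)])
    sess.commit()
    assert [r.id for r in sess.query(Left).one().rights] == [1]
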
index 6e610b0cfb3a91c8a029a43f6d256c7cb99e0495..394a1fe7ac44137c5fe1e8a3203a00817b977cf3 100644
@@ -16,10 +16,10 @@ from test.orm import _fixtures
 from sqlalchemy import exc
 
 class _RelationshipErrors(object):
-    def _assert_raises_no_relevant_fks(self, fn, expr, relname, 
+    def _assert_raises_no_relevant_fks(self, fn, expr, relname,
         primary, *arg, **kw):
         assert_raises_message(
-            sa.exc.ArgumentError, 
+            sa.exc.ArgumentError,
             "Could not locate any relevant foreign key columns "
             "for %s join condition '%s' on relationship %s.  "
             "Ensure that referencing columns are associated with "
@@ -31,10 +31,10 @@ class _RelationshipErrors(object):
             fn, *arg, **kw
         )
 
-    def _assert_raises_no_equality(self, fn, expr, relname, 
+    def _assert_raises_no_equality(self, fn, expr, relname,
         primary, *arg, **kw):
         assert_raises_message(
-            sa.exc.ArgumentError, 
+            sa.exc.ArgumentError,
             "Could not locate any simple equality expressions "
             "involving locally mapped foreign key columns for %s join "
             "condition '%s' on relationship %s.  "
@@ -139,23 +139,23 @@ class DependencyTwoParentTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table("tbl_a", metadata,
-            Column("id", Integer, primary_key=True, 
+            Column("id", Integer, primary_key=True,
                                 test_needs_autoincrement=True),
             Column("name", String(128)))
         Table("tbl_b", metadata,
-            Column("id", Integer, primary_key=True, 
+            Column("id", Integer, primary_key=True,
                                 test_needs_autoincrement=True),
             Column("name", String(128)))
         Table("tbl_c", metadata,
-            Column("id", Integer, primary_key=True, 
+            Column("id", Integer, primary_key=True,
                                 test_needs_autoincrement=True),
-            Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), 
+            Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
                                 nullable=False),
             Column("name", String(128)))
         Table("tbl_d", metadata,
-            Column("id", Integer, primary_key=True, 
+            Column("id", Integer, primary_key=True,
                                 test_needs_autoincrement=True),
-            Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), 
+            Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
                                 nullable=False),
             Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
             Column("name", String(128)))
@@ -183,11 +183,11 @@ class DependencyTwoParentTest(fixtures.MappedTest):
                                 cls.tables.tbl_d)
 
         mapper(A, tbl_a, properties=dict(
-            c_rows=relationship(C, cascade="all, delete-orphan", 
+            c_rows=relationship(C, cascade="all, delete-orphan",
                                     backref="a_row")))
         mapper(B, tbl_b)
         mapper(C, tbl_c, properties=dict(
-            d_rows=relationship(D, cascade="all, delete-orphan", 
+            d_rows=relationship(D, cascade="all, delete-orphan",
                                     backref="c_row")))
         mapper(D, tbl_d, properties=dict(
             b_row=relationship(B)))
@@ -232,7 +232,7 @@ class DependencyTwoParentTest(fixtures.MappedTest):
 
 class CompositeSelfRefFKTest(fixtures.MappedTest):
     """Tests a composite FK where, in
-    the relationship(), one col points 
+    the relationship(), one col points
     to itself in the same table.
 
     this is a very unusual case::
@@ -255,7 +255,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('company_t', metadata,
-              Column('company_id', Integer, primary_key=True, 
+              Column('company_id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
               Column('name', String(30)))
 
@@ -292,9 +292,9 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
 
         mapper(Company, company_t)
         mapper(Employee, employee_t, properties= {
-            'company':relationship(Company, 
+            'company':relationship(Company,
                             primaryjoin=employee_t.c.company_id==
-                                                company_t.c.company_id, 
+                                                company_t.c.company_id,
                                                 backref='employees'),
             'reports_to':relationship(Employee, primaryjoin=
                 sa.and_(
@@ -303,8 +303,8 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
                 ),
                 remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
                 foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id],
-                backref=backref('employees', 
-                    foreign_keys=[employee_t.c.reports_to_id, 
+                backref=backref('employees',
+                    foreign_keys=[employee_t.c.reports_to_id,
                                 employee_t.c.company_id]))
         })
 
@@ -321,7 +321,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
             'company':relationship(Company, backref='employees'),
             'reports_to':relationship(Employee,
                 remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
-                foreign_keys=[employee_t.c.reports_to_id, 
+                foreign_keys=[employee_t.c.reports_to_id,
                                     employee_t.c.company_id],
                 backref=backref('employees', foreign_keys=
                     [employee_t.c.reports_to_id, employee_t.c.company_id])
@@ -361,7 +361,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
                         (employee_t.c.reports_to_id, employee_t.c.emp_id),
                         (employee_t.c.company_id, employee_t.c.company_id)
                 ],
-                foreign_keys=[employee_t.c.reports_to_id, 
+                foreign_keys=[employee_t.c.reports_to_id,
                                     employee_t.c.company_id],
                 backref=backref('employees', foreign_keys=
                     [employee_t.c.reports_to_id, employee_t.c.company_id])
@@ -477,7 +477,7 @@ class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
             Column('z', Integer),
         )
         Table("child", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
             Column('x', Integer),
             Column('y', Integer),
@@ -520,7 +520,7 @@ class FKsAsPksTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table("tableA", metadata,
-              Column("id",Integer,primary_key=True, 
+              Column("id",Integer,primary_key=True,
                             test_needs_autoincrement=True),
               Column("foo",Integer,),
               test_needs_fk=True)
@@ -538,7 +538,7 @@ class FKsAsPksTest(fixtures.MappedTest):
             pass
 
     def test_onetoone_switch(self):
-        """test that active history is enabled on a 
+        """test that active history is enabled on a
         one-to-many/one that has use_get==True"""
 
         tableB, A, B, tableA = (self.tables.tableB,
@@ -643,7 +643,7 @@ class FKsAsPksTest(fixtures.MappedTest):
         sess.flush()
 
     def test_delete_cascade_BtoA(self):
-        """No 'blank the PK' error when the child is to 
+        """No 'blank the PK' error when the child is to
         be deleted as part of a cascade"""
 
         tableB, A, B, tableA = (self.tables.tableB,
@@ -674,7 +674,7 @@ class FKsAsPksTest(fixtures.MappedTest):
             sa.orm.clear_mappers()
 
     def test_delete_cascade_AtoB(self):
-        """No 'blank the PK' error when the child is to 
+        """No 'blank the PK' error when the child is to
         be deleted as part of a cascade"""
 
         tableB, A, B, tableA = (self.tables.tableB,
@@ -754,23 +754,23 @@ class FKsAsPksTest(fixtures.MappedTest):
         assert b1 not in sess
 
 class UniqueColReferenceSwitchTest(fixtures.MappedTest):
-    """test a relationship based on a primary 
+    """test a relationship based on a primary
     join against a unique non-pk column"""
 
     @classmethod
     def define_tables(cls, metadata):
         Table("table_a", metadata,
-                        Column("id", Integer, primary_key=True, 
+                        Column("id", Integer, primary_key=True,
                                         test_needs_autoincrement=True),
-                        Column("ident", String(10), nullable=False, 
+                        Column("ident", String(10), nullable=False,
                                         unique=True),
                         )
 
         Table("table_b", metadata,
-                        Column("id", Integer, primary_key=True, 
+                        Column("id", Integer, primary_key=True,
                                         test_needs_autoincrement=True),
-                        Column("a_ident", String(10), 
-                                        ForeignKey('table_a.ident'), 
+                        Column("a_ident", String(10),
+                                        ForeignKey('table_a.ident'),
                                         nullable=False),
                         )
 
@@ -873,21 +873,21 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
             eq_(old.id, new.id)
 
 class FKEquatedToConstantTest(fixtures.MappedTest):
-    """test a relationship with a non-column entity in the primary join, 
-    is not viewonly, and also has the non-column's clause mentioned in the 
+    """test a relationship with a non-column entity in the primary join,
+    is not viewonly, and also has the non-column's clause mentioned in the
     foreign keys list.
 
     """
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('tags', metadata, Column("id", Integer, primary_key=True, 
+        Table('tags', metadata, Column("id", Integer, primary_key=True,
                                             test_needs_autoincrement=True),
             Column("data", String(50)),
         )
 
-        Table('tag_foo', metadata, 
-            Column("id", Integer, primary_key=True, 
+        Table('tag_foo', metadata,
+            Column("id", Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('tagid', Integer),
             Column("data", String(50)),
@@ -902,7 +902,7 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
             pass
 
         mapper(Tag, tags, properties={
-            'foo':relationship(TagInstance, 
+            'foo':relationship(TagInstance,
                primaryjoin=sa.and_(tag_foo.c.data=='iplc_case',
                                 tag_foo.c.tagid==tags.c.id),
                foreign_keys=[tag_foo.c.tagid, tag_foo.c.data],
@@ -921,13 +921,13 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
 
         # relationship works
         eq_(
-            sess.query(Tag).all(), 
+            sess.query(Tag).all(),
             [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])]
         )
 
         # both TagInstances were persisted
         eq_(
-            sess.query(TagInstance).order_by(TagInstance.data).all(), 
+            sess.query(TagInstance).order_by(TagInstance.data).all(),
             [TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')]
         )
 
@@ -935,13 +935,13 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
 
     @classmethod
     def define_tables(cls, metadata):
-        Table('users', metadata, 
-            Column('id', Integer, primary_key=True, 
+        Table('users', metadata,
+            Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('name', String(50))
         )
-        Table('addresses', metadata, 
-            Column('id', Integer, primary_key=True, 
+        Table('addresses', metadata,
+            Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('user_id', Integer),
             Column('email', String(50))
@@ -962,8 +962,8 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
 
 
         mapper(User, users, properties={
-            'addresses':relationship(Address, 
-                        primaryjoin=addresses.c.user_id==users.c.id, 
+            'addresses':relationship(Address,
+                        primaryjoin=addresses.c.user_id==users.c.id,
                         foreign_keys=addresses.c.user_id,
                         backref='user')
         })
@@ -991,13 +991,13 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         subscriber_table = Table('subscriber', metadata,
-           Column('id', Integer, primary_key=True, 
+           Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
           )
 
         address_table = Table('address',
                  metadata,
-                 Column('subscriber_id', Integer, 
+                 Column('subscriber_id', Integer,
                             ForeignKey('subscriber.id'), primary_key=True),
                  Column('type', String(1), primary_key=True),
                  )
@@ -1006,8 +1006,8 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
     def setup_mappers(cls):
         subscriber, address = cls.tables.subscriber, cls.tables.address
 
-        subscriber_and_address = subscriber.join(address, 
-            and_(address.c.subscriber_id==subscriber.c.id, 
+        subscriber_and_address = subscriber.join(address,
+            and_(address.c.subscriber_id==subscriber.c.id,
                 address.c.type.in_(['A', 'B', 'C'])))
 
         class Address(cls.Comparable):
@@ -1020,7 +1020,7 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
 
         mapper(Subscriber, subscriber_and_address, properties={
            'id':[subscriber.c.id, address.c.subscriber_id],
-           'addresses' : relationship(Address, 
+           'addresses' : relationship(Address,
                 backref=backref("customer"))
            })
 
@@ -1050,8 +1050,8 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
         eq_(
             sess.query(Subscriber).order_by(Subscriber.type).all(),
             [
-                Subscriber(id=1, type=u'A'), 
-                Subscriber(id=2, type=u'B'), 
+                Subscriber(id=1, type=u'A'),
+                Subscriber(id=2, type=u'B'),
                 Subscriber(id=2, type=u'C')
             ]
         )
@@ -1123,11 +1123,11 @@ class ManualBackrefTest(_fixtures.FixtureTest):
             'dingaling':relationship(Dingaling)
         })
 
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
             r"reverse_property 'dingaling' on relationship "
             "User.addresses references "
             "relationship Address.dingaling, which does not "
-            "reference mapper Mapper\|User\|users", 
+            "reference mapper Mapper\|User\|users",
             configure_mappers)
 
 class JoinConditionErrorTest(fixtures.TestBase):
@@ -1162,7 +1162,7 @@ class JoinConditionErrorTest(fixtures.TestBase):
 
     def test_only_column_elements(self):
         m = MetaData()
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('id', Integer, primary_key=True),
             Column('foo_id', Integer, ForeignKey('t2.id')),
         )
@@ -1204,16 +1204,16 @@ class JoinConditionErrorTest(fixtures.TestBase):
                 c2 = relationship(C1, **kw)
 
             assert_raises_message(
-                sa.exc.ArgumentError, 
+                sa.exc.ArgumentError,
                 "Column-based expression object expected "
-                "for argument '%s'; got: '%s', type %r" % 
+                "for argument '%s'; got: '%s', type %r" %
                 (argname, arg[0], type(arg[0])),
                 configure_mappers)
 
 
     def test_fk_error_not_raised_unrelated(self):
         m = MetaData()
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('id', Integer, primary_key=True),
             Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
         )
@@ -1237,7 +1237,7 @@ class JoinConditionErrorTest(fixtures.TestBase):
 
     def test_join_error_raised(self):
         m = MetaData()
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('id', Integer, primary_key=True),
         )
         t2 = Table('t2', m,
@@ -1263,27 +1263,27 @@ class JoinConditionErrorTest(fixtures.TestBase):
         clear_mappers()
 
 class TypeMatchTest(fixtures.MappedTest):
-    """test errors raised when trying to add items 
+    """test errors raised when trying to add items
         whose type is not handled by a relationship"""
 
     @classmethod
     def define_tables(cls, metadata):
         Table("a", metadata,
-              Column('aid', Integer, primary_key=True, 
+              Column('aid', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
               Column('data', String(30)))
         Table("b", metadata,
-               Column('bid', Integer, primary_key=True, 
+               Column('bid', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
                Column("a_id", Integer, ForeignKey("a.aid")),
                Column('data', String(30)))
         Table("c", metadata,
-              Column('cid', Integer, primary_key=True, 
+              Column('cid', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
               Column("b_id", Integer, ForeignKey("b.bid")),
               Column('data', String(30)))
         Table("d", metadata,
-              Column('did', Integer, primary_key=True, 
+              Column('did', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
               Column("a_id", Integer, ForeignKey("a.aid")),
               Column('data', String(30)))
@@ -1336,7 +1336,7 @@ class TypeMatchTest(fixtures.MappedTest):
         sess.add(b1)
         sess.add(c1)
         assert_raises_message(sa.orm.exc.FlushError,
-                                 "Attempting to flush an item", 
+                                 "Attempting to flush an item",
                                  sess.flush)
 
     def test_o2m_nopoly_onflush(self):
@@ -1361,7 +1361,7 @@ class TypeMatchTest(fixtures.MappedTest):
         sess.add(b1)
         sess.add(c1)
         assert_raises_message(sa.orm.exc.FlushError,
-                                 "Attempting to flush an item", 
+                                 "Attempting to flush an item",
                                  sess.flush)
 
     def test_m2o_nopoly_onflush(self):
@@ -1382,7 +1382,7 @@ class TypeMatchTest(fixtures.MappedTest):
         sess.add(b1)
         sess.add(d1)
         assert_raises_message(sa.orm.exc.FlushError,
-                                 "Attempting to flush an item", 
+                                 "Attempting to flush an item",
                                  sess.flush)
 
     def test_m2o_oncascade(self):
@@ -1401,7 +1401,7 @@ class TypeMatchTest(fixtures.MappedTest):
         d1.a = b1
         sess = create_session()
         assert_raises_message(AssertionError,
-                             "doesn't handle objects of type", 
+                             "doesn't handle objects of type",
                              sess.add, d1)
 
 class TypedAssociationTable(fixtures.MappedTest):
@@ -1462,11 +1462,11 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table("t1", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
             Column('data', String(40)))
         Table("t2", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                 test_needs_autoincrement=True),
             Column('data', String(40)),
         )
@@ -1484,7 +1484,7 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
         class B(fixtures.ComparableEntity):pass
 
         mapper(A, t1, properties={
-            'bs':relationship(B, secondary=t1t2, 
+            'bs':relationship(B, secondary=t1t2,
                                 backref=backref('as_', viewonly=True))
         })
         mapper(B, t2)
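
viewonly=True on the backref above produces a read-only reverse collection: it loads across the t1t2 secondary table but is ignored entirely by the flush. A hedged sketch of the observable behavior, assuming a Session named sess:

    a1, b1 = A(data='a1'), B(data='b1')
    a1.bs.append(b1)     # the writable side persists the t1t2 row
    sess.add(a1)
    sess.commit()

    b1.as_.remove(a1)    # changes to the viewonly side are not flushed;
    sess.commit()        # the association row is still present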
@@ -1508,16 +1508,16 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table("t1", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(40)))
         Table("t2", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(40)),
             Column('t1id', Integer, ForeignKey('t1.id')))
         Table("t3", metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(40)),
             Column('t2id', Integer, ForeignKey('t2.id')))
@@ -1575,16 +1575,16 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table("t1", metadata,
-            Column('t1id', Integer, primary_key=True, 
+            Column('t1id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('data', String(40)))
         Table("t2", metadata,
-            Column('t2id', Integer, primary_key=True, 
+            Column('t2id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('data', String(40)),
             Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
         Table("t3", metadata,
-            Column('t3id', Integer, primary_key=True, 
+            Column('t3id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('data', String(40)),
             Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
@@ -1765,11 +1765,11 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('foos', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
               Column('data', String(50)))
 
-        Table('bars', metadata, Column('id', Integer, primary_key=True, 
+        Table('bars', metadata, Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
               Column('fid1', Integer, ForeignKey('foos.id')),
               Column('fid2', Integer, ForeignKey('foos.id')),
@@ -1816,16 +1816,16 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('t1', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(50)))
         Table('t2', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(50)),
             Column('t1id', Integer, ForeignKey('t1.id')))
         Table('t3', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
             Column('data', String(50)))
         Table('t2tot3', metadata,
@@ -1902,11 +1902,11 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('t1', metadata,
-            Column('id', String(50), primary_key=True, 
+            Column('id', String(50), primary_key=True,
                                         test_needs_autoincrement=True),
             Column('data', String(50)))
         Table('t2', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                                         test_needs_autoincrement=True),
             Column('data', String(50)),
             Column('t1id', String(50)))
@@ -2104,25 +2104,25 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
             't1s':relationship(T1, backref='parent')
         })
 
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
             "T1.t1s and back-reference T1.parent are "
             "both of the same direction <symbol 'ONETOMANY>.  Did you "
-            "mean to set remote_side on the many-to-one side ?", 
+            "mean to set remote_side on the many-to-one side ?",
             configure_mappers)
 
     def test_m2o_backref(self):
         T1, t1 = self.classes.T1, self.tables.t1
 
         mapper(T1, t1, properties={
-            't1s':relationship(T1, 
-                        backref=backref('parent', remote_side=t1.c.id), 
+            't1s':relationship(T1,
+                        backref=backref('parent', remote_side=t1.c.id),
                         remote_side=t1.c.id)
         })
 
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
             "T1.t1s and back-reference T1.parent are "
             "both of the same direction <symbol 'MANYTOONE>.  Did you "
-            "mean to set remote_side on the many-to-one side ?", 
+            "mean to set remote_side on the many-to-one side ?",
             configure_mappers)
 
     def test_o2m_explicit(self):
@@ -2134,25 +2134,25 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
         })
 
         # can't be sure of ordering here
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
             "both of the same direction <symbol 'ONETOMANY>.  Did you "
-            "mean to set remote_side on the many-to-one side ?", 
+            "mean to set remote_side on the many-to-one side ?",
             configure_mappers)
 
     def test_m2o_explicit(self):
         T1, t1 = self.classes.T1, self.tables.t1
 
         mapper(T1, t1, properties={
-            't1s':relationship(T1, back_populates='parent', 
+            't1s':relationship(T1, back_populates='parent',
                                 remote_side=t1.c.id),
-            'parent':relationship(T1, back_populates='t1s', 
+            'parent':relationship(T1, back_populates='t1s',
                                 remote_side=t1.c.id)
         })
 
         # can't be sure of ordering here
-        assert_raises_message(sa.exc.ArgumentError, 
+        assert_raises_message(sa.exc.ArgumentError,
             "both of the same direction <symbol 'MANYTOONE>.  Did you "
-            "mean to set remote_side on the many-to-one side ?", 
+            "mean to set remote_side on the many-to-one side ?",
             configure_mappers)
 
 class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
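
The errors asserted above arise when both halves of a self-referential pair compute the same direction. remote_side resolves the tie: set on the many-to-one side, it declares the primary key as the "remote" column of the self-join. A sketch of the accepted form for this fixture:

    mapper(T1, t1, properties={
        't1s': relationship(T1,
                    backref=backref('parent', remote_side=t1.c.id))
    })
    # 't1s' is then one-to-many (remote side: the foreign key column),
    # while 'parent' is many-to-one (remote side: t1.c.id)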
@@ -2166,11 +2166,11 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
             Column('aid_1', Integer, ForeignKey('a.id')),
             Column('aid_2', Integer, ForeignKey('a.id')),
         )
-        Table("atob", metadata, 
+        Table("atob", metadata,
             Column('aid', Integer),
             Column('bid', Integer),
         )
-        Table("atob_ambiguous", metadata, 
+        Table("atob_ambiguous", metadata,
             Column('aid1', Integer, ForeignKey('a.id')),
             Column('bid1', Integer, ForeignKey('b.id')),
             Column('aid2', Integer, ForeignKey('a.id')),
@@ -2276,7 +2276,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
         A, B = self.classes.A, self.classes.B
         a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
         mapper(A, a, properties={
-            'bs':relationship(B, secondary=a_to_b, 
+            'bs':relationship(B, secondary=a_to_b,
                         foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
         })
         mapper(B, b)
@@ -2378,7 +2378,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
                                 self.tables.bars_with_fks,
                                 self.tables.foos)
 
-        # very unique - the join between parent/child 
+        # very unique - the join between parent/child
         # has no fks, but there is an fk join between two other
         # tables in the join condition, for those users that try creating
         # these big-long-string-of-joining-many-tables primaryjoins.
@@ -2396,7 +2396,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
         self._assert_raises_no_equality(
             sa.orm.configure_mappers,
             "bars_with_fks.fid = foos_with_fks.id "
-            "AND foos_with_fks.id = foos.id", 
+            "AND foos_with_fks.id = foos.id",
             "Foo.bars", "primary"
         )
 
@@ -2470,7 +2470,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
                             primaryjoin=foos.c.id>foos.c.fid)})
         mapper(Bar, bars)
 
-        self._assert_raises_no_relevant_fks(configure_mappers, 
+        self._assert_raises_no_relevant_fks(configure_mappers,
                 "foos.id > foos.fid", "Foo.foos", "primary"
             )
 
@@ -2487,7 +2487,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
                             foreign_keys=[foos.c.fid])})
         mapper(Bar, bars)
 
-        self._assert_raises_no_equality(configure_mappers, 
+        self._assert_raises_no_equality(configure_mappers,
                 "foos.id > foos.fid", "Foo.foos", "primary"
             )
 
@@ -2623,16 +2623,16 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
               Column('id', Integer, primary_key=True))
 
         Table('foobars_with_fks', metadata,
-            Column('fid', Integer, ForeignKey('foos.id')), 
+            Column('fid', Integer, ForeignKey('foos.id')),
             Column('bid', Integer, ForeignKey('bars.id'))
         )
 
         Table('foobars_with_many_columns', metadata,
-              Column('fid', Integer), 
+              Column('fid', Integer),
               Column('bid', Integer),
-              Column('fid1', Integer), 
+              Column('fid1', Integer),
               Column('bid1', Integer),
-              Column('fid2', Integer), 
+              Column('fid2', Integer),
               Column('bid2', Integer),
               )
 
@@ -2656,7 +2656,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
 
         self._assert_raises_no_join(
             configure_mappers,
-            "Foo.bars", 
+            "Foo.bars",
             "foobars"
         )
 
@@ -2675,7 +2675,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
 
         self._assert_raises_no_join(
             configure_mappers,
-            "Foo.bars", 
+            "Foo.bars",
             "foobars"
         )
 
@@ -2688,7 +2688,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
                                 self.tables.foos)
 
         mapper(Foo, foos, properties={
-            'bars': relationship(Bar, secondary=foobars, 
+            'bars': relationship(Bar, secondary=foobars,
                                 primaryjoin=foos.c.id==foobars.c.fid,
                                 secondaryjoin=foobars.c.bid==bars.c.id)})
         mapper(Bar, bars)
@@ -2704,8 +2704,8 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
 
         sa.orm.clear_mappers()
         mapper(Foo, foos, properties={
-                        'bars': relationship(Bar, 
-                                secondary=foobars_with_many_columns, 
+                        'bars': relationship(Bar,
+                                secondary=foobars_with_many_columns,
                               primaryjoin=foos.c.id==
                                         foobars_with_many_columns.c.fid,
                               secondaryjoin=foobars_with_many_columns.c.bid==
@@ -2738,7 +2738,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
         mapper(Bar, bars)
 
         self._assert_raises_no_equality(
-                configure_mappers, 
+                configure_mappers,
                 'foos.id > foobars.fid',
                 "Foo.bars",
                 "primary")
@@ -2751,7 +2751,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
                              secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)})
         mapper(Bar, bars)
         self._assert_raises_no_equality(
-                configure_mappers, 
+                configure_mappers,
                 'foos.id > foobars_with_fks.fid',
                 "Foo.bars",
                 "primary")
@@ -2832,7 +2832,7 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
         User, users = self.classes.User, self.tables.users
 
         mapper(User, users, properties={
-            'name':column_property(users.c.name, 
+            'name':column_property(users.c.name,
                                 active_history=True)
         })
         u1 = User(name='jack')
@@ -2867,8 +2867,8 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
                     other.description == self.description
         mapper(Order, orders, properties={
             'composite':composite(
-                                MyComposite, 
-                                orders.c.description, 
+                                MyComposite,
+                                orders.c.description,
                                 orders.c.isopen,
                                 active_history=True)
         })
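
active_history=True in both hunks above forces the attribute's existing value to load before it is replaced, so the prior value is available in the attribute history at change time rather than only at flush. A hedged sketch using the public history API, assuming the row's name starts as 'jack':

    from sqlalchemy.orm.attributes import get_history

    u1 = sess.query(User).first()
    u1.name = 'newname'
    hist = get_history(u1, 'name')
    assert list(hist.deleted) == ['jack']   # old value was loaded eagerly
    assert list(hist.added) == ['newname']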
index 97849f845b35fb8ed9c726ac3194bf76ce9f3dac..1a46e3b6d799ae7295045c96a1be4fc58d1d3eec 100644
@@ -44,7 +44,7 @@ class SelectableNoFromsTest(fixtures.MappedTest, AssertsCompiledSQL):
 
         selectable = select(["x", "y", "z"]).alias()
         assert_raises_message(
-            sa.exc.ArgumentError, 
+            sa.exc.ArgumentError,
             "could not assemble any primary key columns",
             mapper, Subset, selectable
         )
index 828571f9d0a10f1ef10892ebfd7e81046aed7a56..81188dec931ff51626f05c9f942edef218cbaba3 100644
@@ -1071,7 +1071,7 @@ class IsModifiedTest(_fixtures.FixtureTest):
         s.expire_all()
         u.name = 'newname'
 
-        # can't predict result here 
+        # can't predict result here
         # deterministically, depending on if
         # 'name' or 'addresses' is tested first
         mod  = s.is_modified(u)
index 53c50634eb0e9822d86022c9e3a418ce4340d704..37b7edb6b8bce1be52ce7a83fd1563c1041a193c 100644
@@ -809,7 +809,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         User, Order, Item = self.classes.User, \
             self.classes.Order, self.classes.Item
         mapper(User, self.tables.users, properties={
-            'orders':relationship(Order), 
+            'orders':relationship(Order),
         })
         mapper(Order, self.tables.orders, properties={
             'items':relationship(Item, secondary=self.tables.order_items),
@@ -1197,7 +1197,7 @@ class SelfReferentialTest(fixtures.MappedTest):
 class InheritanceToRelatedTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table('foo', metadata, 
+        Table('foo', metadata,
             Column("id", Integer, primary_key=True),
             Column("type", String(50)),
             Column("related_id", Integer, ForeignKey("related.id"))
@@ -1254,9 +1254,9 @@ class InheritanceToRelatedTest(fixtures.MappedTest):
         mapper(cls.classes.Foo, cls.tables.foo, properties={
             'related':relationship(cls.classes.Related)
         }, polymorphic_on=cls.tables.foo.c.type)
-        mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar', 
+        mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar',
                     inherits=cls.classes.Foo)
-        mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz', 
+        mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz',
                     inherits=cls.classes.Foo)
         mapper(cls.classes.Related, cls.tables.related)
 
index 9faf54579ccfe51bc97aec849613e3a78192555f..b657cd4640bf0a3e0b86fedf48f26c9645849821 100644
@@ -65,7 +65,7 @@ class SessionTransactionTest(FixtureTest):
         try:
             conn = testing.db.connect()
             trans = conn.begin()
-            sess = create_session(bind=conn, autocommit=False, 
+            sess = create_session(bind=conn, autocommit=False,
                                 autoflush=True)
             u1 = User(name='u1')
             sess.add(u1)
@@ -133,7 +133,7 @@ class SessionTransactionTest(FixtureTest):
         mapper(Address, addresses)
 
         engine2 = engines.testing_engine()
-        sess = create_session(autocommit=True, autoflush=False, 
+        sess = create_session(autocommit=True, autoflush=False,
                             twophase=True)
         sess.bind_mapper(User, testing.db)
         sess.bind_mapper(Address, engine2)
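
For the two-phase test above: each mapper is bound to its own engine, and twophase=True makes commit run a prepare phase on every participating connection before any is committed. A sketch, reusing engine2 and the fixtures as above:

    sess = create_session(autocommit=True, autoflush=False, twophase=True)
    sess.bind_mapper(User, testing.db)   # User rows flush to one database
    sess.bind_mapper(Address, engine2)   # Address rows to the other
    sess.begin()
    sess.add_all([User(name='u1'), Address(email_address='a1')])
    sess.commit()   # PREPARE on both connections, then COMMIT on both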
@@ -338,7 +338,7 @@ class SessionTransactionTest(FixtureTest):
         sess.add(u2)
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -351,7 +351,7 @@ class SessionTransactionTest(FixtureTest):
         u1.name = 'newname'
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -364,7 +364,7 @@ class SessionTransactionTest(FixtureTest):
         sess.delete(u1)
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -412,7 +412,7 @@ class _LocalFixture(FixtureTest):
         users, addresses = cls.tables.users, cls.tables.addresses
         mapper(User, users, properties={
             'addresses':relationship(Address, backref='user',
-                                 cascade="all, delete-orphan", 
+                                 cascade="all, delete-orphan",
                                     order_by=addresses.c.id),
             })
         mapper(Address, addresses)
@@ -456,7 +456,7 @@ class FixtureDataTest(_LocalFixture):
 class CleanSavepointTest(FixtureTest):
     """test the behavior for [ticket:2452] - rollback on begin_nested()
     only expires objects tracked as being modified in that transaction.
-    
+
     """
     run_inserts = None
 
@@ -491,7 +491,7 @@ class CleanSavepointTest(FixtureTest):
     def test_rollback_ignores_clean_on_savepoint_agg_upd_eval(self):
         User, users = self.classes.User, self.tables.users
         def update_fn(s, u2):
-            s.query(User).filter_by(name='u2').update(dict(name='u2modified'), 
+            s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
                                     synchronize_session='evaluate')
         self._run_test(update_fn)
 
@@ -499,7 +499,7 @@ class CleanSavepointTest(FixtureTest):
     def test_rollback_ignores_clean_on_savepoint_agg_upd_fetch(self):
         User, users = self.classes.User, self.tables.users
         def update_fn(s, u2):
-            s.query(User).filter_by(name='u2').update(dict(name='u2modified'), 
+            s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
                                     synchronize_session='fetch')
         self._run_test(update_fn)
 
@@ -614,7 +614,7 @@ class AutoExpireTest(_LocalFixture):
         u1.addresses.remove(a1)
 
         s.flush()
-        eq_(s.query(Address).filter(Address.email_address=='foo').all(), 
+        eq_(s.query(Address).filter(Address.email_address=='foo').all(),
                 [])
         s.rollback()
         assert a1 not in s.deleted
@@ -688,7 +688,7 @@ class RollbackRecoverTest(_LocalFixture):
         s.commit()
         eq_(
             s.query(User).all(),
-            [User(id=1, name='edward', 
+            [User(id=1, name='edward',
                 addresses=[Address(email_address='foober')])]
         )
 
@@ -719,7 +719,7 @@ class RollbackRecoverTest(_LocalFixture):
         s.commit()
         eq_(
             s.query(User).all(),
-            [User(id=1, name='edward', 
+            [User(id=1, name='edward',
                 addresses=[Address(email_address='foober')])]
         )
 
@@ -740,17 +740,17 @@ class SavepointTest(_LocalFixture):
         u1.name = 'edward'
         u2.name = 'jackward'
         s.add_all([u3, u4])
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
                     [('edward',), ('jackward',), ('wendy',), ('foo',)])
         s.rollback()
         assert u1.name == 'ed'
         assert u2.name == 'jack'
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
                     [('ed',), ('jack',)])
         s.commit()
         assert u1.name == 'ed'
         assert u2.name == 'jack'
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
                     [('ed',), ('jack',)])
 
     @testing.requires.savepoints
@@ -781,18 +781,18 @@ class SavepointTest(_LocalFixture):
         u1.name = 'edward'
         u2.name = 'jackward'
         s.add_all([u3, u4])
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
                 [('edward',), ('jackward',), ('wendy',), ('foo',)])
         s.commit()
         def go():
             assert u1.name == 'edward'
             assert u2.name == 'jackward'
-            eq_(s.query(User.name).order_by(User.id).all(), 
+            eq_(s.query(User.name).order_by(User.id).all(),
                     [('edward',), ('jackward',), ('wendy',), ('foo',)])
         self.assert_sql_count(testing.db, go, 1)
 
         s.commit()
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
                 [('edward',), ('jackward',), ('wendy',), ('foo',)])
 
     @testing.requires.savepoints
@@ -810,7 +810,7 @@ class SavepointTest(_LocalFixture):
         s.add(u2)
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                         Address(email_address='bar')]),
                 User(name='jack', addresses=[Address(email_address='bat')])
             ]
@@ -818,14 +818,14 @@ class SavepointTest(_LocalFixture):
         s.rollback()
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                         Address(email_address='bar')]),
             ]
         )
         s.commit()
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                         Address(email_address='bar')]),
             ]
         )
@@ -949,7 +949,7 @@ class AccountingFlagsTest(_LocalFixture):
     def test_preflush_no_accounting(self):
         User, users = self.classes.User, self.tables.users
 
-        sess = Session(_enable_transaction_accounting=False, 
+        sess = Session(_enable_transaction_accounting=False,
                         autocommit=True, autoflush=False)
         u1 = User(name='ed')
         sess.add(u1)
index 30557edefac6d0ed61c014588b4b06e2ebccc839..0dbe5091027393cbb4a988879fdd37b02031d22f 100644
@@ -28,7 +28,7 @@ class AssertsUOW(object):
         print postsort_actions
         eq_(len(postsort_actions), expected, postsort_actions)
 
-class UOWTest(_fixtures.FixtureTest, 
+class UOWTest(_fixtures.FixtureTest,
                 testing.AssertsExecutionResults, AssertsUOW):
     run_inserts = None
 
@@ -55,17 +55,17 @@ class RudimentaryFlushTest(UOWTest):
                 sess.flush,
                 CompiledSQL(
                     "INSERT INTO users (name) VALUES (:name)",
-                    {'name': 'u1'} 
+                    {'name': 'u1'}
                 ),
                 CompiledSQL(
                     "INSERT INTO addresses (user_id, email_address) "
                     "VALUES (:user_id, :email_address)",
-                    lambda ctx: {'email_address': 'a1', 'user_id':u1.id} 
+                    lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
                 ),
                 CompiledSQL(
                     "INSERT INTO addresses (user_id, email_address) "
                     "VALUES (:user_id, :email_address)",
-                    lambda ctx: {'email_address': 'a2', 'user_id':u1.id} 
+                    lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
                 ),
             )
 
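
A note on the lambda ctx form used throughout these assertions: bind parameters that depend on a primary key generated earlier in the same flush cannot be written down up front, so CompiledSQL accepts a callable that is evaluated only after the prior statements have run:

    CompiledSQL(
        "INSERT INTO addresses (user_id, email_address) "
        "VALUES (:user_id, :email_address)",
        lambda ctx: {'email_address': 'a1', 'user_id': u1.id}
    )   # u1.id exists by the time the lambda is invoked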
@@ -160,17 +160,17 @@ class RudimentaryFlushTest(UOWTest):
                 sess.flush,
                 CompiledSQL(
                     "INSERT INTO users (name) VALUES (:name)",
-                    {'name': 'u1'} 
+                    {'name': 'u1'}
                 ),
                 CompiledSQL(
                     "INSERT INTO addresses (user_id, email_address) "
                     "VALUES (:user_id, :email_address)",
-                    lambda ctx: {'email_address': 'a1', 'user_id':u1.id} 
+                    lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
                 ),
                 CompiledSQL(
                     "INSERT INTO addresses (user_id, email_address) "
                     "VALUES (:user_id, :email_address)",
-                    lambda ctx: {'email_address': 'a2', 'user_id':u1.id} 
+                    lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
                 ),
             )
 
@@ -280,8 +280,8 @@ class RudimentaryFlushTest(UOWTest):
         session.delete(c2)
         session.delete(parent)
 
-        # testing that relationships 
-        # are loaded even if all ids/references are 
+        # testing that relationships
+        # are loaded even if all ids/references are
         # expired
         self.assert_sql_execution(
             testing.db,
@@ -462,7 +462,7 @@ class RudimentaryFlushTest(UOWTest):
             testing.db,
             sess.flush,
             CompiledSQL(
-                "INSERT INTO users (id, name) VALUES (:id, :name)", 
+                "INSERT INTO users (id, name) VALUES (:id, :name)",
                 {'id':1, 'name':'u1'}),
             CompiledSQL(
                 "INSERT INTO addresses (id, user_id, email_address) "
@@ -511,9 +511,9 @@ class RudimentaryFlushTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO nodes (id, parent_id, data) VALUES "
-                "(:id, :parent_id, :data)", 
-                [{'parent_id': None, 'data': None, 'id': 1}, 
-                {'parent_id': 1, 'data': None, 'id': 2}, 
+                "(:id, :parent_id, :data)",
+                [{'parent_id': None, 'data': None, 'id': 1},
+                {'parent_id': 1, 'data': None, 'id': 2},
                 {'parent_id': 2, 'data': None, 'id': 3}]
                 ),
         )
@@ -561,7 +561,7 @@ class RudimentaryFlushTest(UOWTest):
                 testing.db,
                 sess.flush,
                 CompiledSQL("UPDATE items SET description=:description "
-                            "WHERE items.id = :items_id", 
+                            "WHERE items.id = :items_id",
                             lambda ctx:{'description':'i2', 'items_id':i1.id})
         )
 
@@ -689,9 +689,9 @@ class SingleCycleTest(UOWTest):
         self.assert_sql_execution(
                 testing.db,
                 sess.flush,
-                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                         lambda ctx:[{'id':n2.id}, {'id':n3.id}]),
-                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                         lambda ctx: {'id':n1.id})
         )
 
@@ -715,13 +715,13 @@ class SingleCycleTest(UOWTest):
                 sess.flush,
                 AllOf(
                     CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
-                        "WHERE nodes.id = :nodes_id", 
+                        "WHERE nodes.id = :nodes_id",
                         lambda ctx: {'nodes_id':n3.id, 'parent_id':None}),
                     CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
-                        "WHERE nodes.id = :nodes_id", 
+                        "WHERE nodes.id = :nodes_id",
                         lambda ctx: {'nodes_id':n2.id, 'parent_id':None}),
                 ),
-                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                     lambda ctx:{'id':n1.id})
         )
 
@@ -781,9 +781,9 @@ class SingleCycleTest(UOWTest):
         self.assert_sql_execution(
                 testing.db,
                 sess.flush,
-                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                         lambda ctx:[{'id':n2.id},{'id':n3.id}]),
-                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+                CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                         lambda ctx: {'id':n1.id})
         )
 
@@ -834,7 +834,7 @@ class SingleCycleTest(UOWTest):
         Node, nodes = self.classes.Node, self.tables.nodes
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                                     backref=backref('parent',
                                                 remote_side=nodes.c.id))
         })
@@ -856,7 +856,7 @@ class SingleCycleTest(UOWTest):
         Node, nodes = self.classes.Node, self.tables.nodes
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                 backref=backref('parent', remote_side=nodes.c.id)
             )
         })
@@ -875,37 +875,37 @@ class SingleCycleTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':None, 'data':'n1'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n11'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n12'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n13'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n121'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n122'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n123'}
             ),
         )
@@ -975,8 +975,8 @@ class SingleCycleTest(UOWTest):
         session.delete(c2)
         session.delete(parent)
 
-        # testing that relationships 
-        # are loaded even if all ids/references are 
+        # testing that relationships
+        # are loaded even if all ids/references are
         # expired
         self.assert_sql_execution(
             testing.db,
@@ -1060,29 +1060,29 @@ class SingleCyclePlusAttributeTest(fixtures.MappedTest,
 
         n1.foobars.append(FooBar())
         # saveupdateall/deleteall for FooBar added here,
-        # plus processstate node.foobars 
+        # plus processstate node.foobars
         # currently the "all" procs stay in pairs
         self._assert_uow_size(sess, 6)
 
         sess.flush()
 
-class SingleCycleM2MTest(fixtures.MappedTest, 
+class SingleCycleM2MTest(fixtures.MappedTest,
                     testing.AssertsExecutionResults, AssertsUOW):
 
     @classmethod
     def define_tables(cls, metadata):
         nodes = Table('nodes', metadata,
-            Column('id', Integer, 
-                            primary_key=True, 
+            Column('id', Integer,
+                            primary_key=True,
                             test_needs_autoincrement=True),
             Column('data', String(30)),
             Column('favorite_node_id', Integer, ForeignKey('nodes.id'))
         )
 
         node_to_nodes =Table('node_to_nodes', metadata,
-            Column('left_node_id', Integer, 
+            Column('left_node_id', Integer,
                             ForeignKey('nodes.id'),primary_key=True),
-            Column('right_node_id', Integer, 
+            Column('right_node_id', Integer,
                             ForeignKey('nodes.id'),primary_key=True),
             )
 
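
Because both columns of node_to_nodes reference nodes.id, a self-referential many-to-many cannot infer its direction; the mapping needs both primaryjoin and secondaryjoin. A hedged sketch of the kind of mapping these tests drive:

    mapper(Node, nodes, properties={
        'children': relationship(Node,
            secondary=node_to_nodes,
            primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
            secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id,
            backref='parents')
    })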
@@ -1127,10 +1127,10 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                             node_to_nodes.c.right_node_id).\
                     order_by(node_to_nodes.c.left_node_id,
                             node_to_nodes.c.right_node_id).\
-                    all(), 
+                    all(),
             sorted([
-                    (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id), 
-                    (n2.id, n3.id), (n2.id, n5.id), 
+                    (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id),
+                    (n2.id, n3.id), (n2.id, n5.id),
                     (n3.id, n5.id), (n3.id, n4.id)
                 ])
         )
@@ -1155,8 +1155,8 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                     "node_to_nodes.left_node_id = :left_node_id AND "
                     "node_to_nodes.right_node_id = :right_node_id",
                     lambda ctx:[
-                        {'right_node_id': n2.id, 'left_node_id': n1.id}, 
-                        {'right_node_id': n3.id, 'left_node_id': n1.id}, 
+                        {'right_node_id': n2.id, 'left_node_id': n1.id},
+                        {'right_node_id': n3.id, 'left_node_id': n1.id},
                         {'right_node_id': n4.id, 'left_node_id': n1.id}
                     ]
                 ),
@@ -1182,9 +1182,9 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                 "= :left_node_id AND node_to_nodes.right_node_id = "
                 ":right_node_id",
                 lambda ctx:[
-                    {'right_node_id': n5.id, 'left_node_id': n3.id}, 
-                    {'right_node_id': n4.id, 'left_node_id': n3.id}, 
-                    {'right_node_id': n3.id, 'left_node_id': n2.id}, 
+                    {'right_node_id': n5.id, 'left_node_id': n3.id},
+                    {'right_node_id': n4.id, 'left_node_id': n3.id},
+                    {'right_node_id': n3.id, 'left_node_id': n2.id},
                     {'right_node_id': n5.id, 'left_node_id': n2.id}
                 ]
             ),
@@ -1204,7 +1204,7 @@ class RowswitchAccountingTest(fixtures.MappedTest):
         Table('parent', metadata,
             Column('id', Integer, primary_key=True)
         )
-        Table('child', metadata, 
+        Table('child', metadata,
             Column('id', Integer, ForeignKey('parent.id'), primary_key=True)
         )
 
@@ -1219,7 +1219,7 @@ class RowswitchAccountingTest(fixtures.MappedTest):
             pass
 
         mapper(Parent, parent, properties={
-            'child':relationship(Child, uselist=False, 
+            'child':relationship(Child, uselist=False,
                                     cascade="all, delete-orphan",
                                     backref="parent")
         })
@@ -1255,14 +1255,14 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
     @classmethod
     def define_tables(cls, metadata):
         Table('t', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
             Column('data', String(50)),
             Column('def_', String(50), server_default='def1')
         )
 
     def test_batch_interaction(self):
-        """test batching groups same-structured, primary 
+        """test batching groups same-structured, primary
         key present statements together.
 
         """
@@ -1299,8 +1299,8 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
             ),
             CompiledSQL(
                 "INSERT INTO t (id, data) VALUES (:id, :data)",
-                [{'data': 't3', 'id': 3}, 
-                    {'data': 't4', 'id': 4}, 
+                [{'data': 't3', 'id': 3},
+                    {'data': 't4', 'id': 4},
                     {'data': 't5', 'id': 5}]
             ),
             CompiledSQL(
@@ -1313,7 +1313,7 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
             ),
             CompiledSQL(
                 "INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
-                [{'data': 't9', 'id': 9, 'def_':'def2'}, 
+                [{'data': 't9', 'id': 9, 'def_':'def2'},
                 {'data': 't10', 'id': 10, 'def_':'def3'}]
             ),
             CompiledSQL(
index af23cd1fc39a4c02a5cb658e69f40d45f8e8f1d3..252c1cfa335c07fb7c0846018fd4089324e367ee 100644
@@ -12,7 +12,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
               Column('name', String(32)),
               Column('age', Integer))
@@ -66,13 +66,13 @@ class UpdateDeleteTest(fixtures.MappedTest):
             (s.query(User).distinct(), "distinct")
         ):
             assert_raises_message(
-                exc.InvalidRequestError, 
-                r"Can't call Query.update\(\) when %s\(\) has been called" % mname, 
-                q.update, 
+                exc.InvalidRequestError,
+                r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+                q.update,
                 {'name':'ed'})
             assert_raises_message(
-                exc.InvalidRequestError, 
-                r"Can't call Query.delete\(\) when %s\(\) has been called" % mname, 
+                exc.InvalidRequestError,
+                r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
                 q.delete)
 
 
@@ -157,7 +157,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
 
         assert_raises(exc.InvalidRequestError,
             sess.query(User).
-                filter(User.name == select([func.max(User.name)])).delete, 
+                filter(User.name == select([func.max(User.name)])).delete,
                 synchronize_session='evaluate'
         )
 
@@ -328,7 +328,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         john.name = 'j2'
 
         sess.query(User).filter_by(name='j2').\
-                            update({'age':42}, 
+                            update({'age':42},
                             synchronize_session='evaluate')
         eq_(john.age, 42)
 
@@ -340,7 +340,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         john.name = 'j2'
 
         sess.query(User).filter_by(name='j2').\
-                            update({'age':42}, 
+                            update({'age':42},
                             synchronize_session='fetch')
         eq_(john.age, 42)
 
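
The two tests above differ only in strategy: 'evaluate' applies the UPDATE's criteria and values to matching objects already in the Session purely in Python, with no extra SQL, while 'fetch' first SELECTs the matching primary keys and expires those objects so they reload on next access:

    sess.query(User).filter_by(name='j2').update(
        {'age': 42}, synchronize_session='evaluate')  # in-memory sync
    sess.query(User).filter_by(name='j2').update(
        {'age': 42}, synchronize_session='fetch')     # SELECT, then expire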
@@ -376,10 +376,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
         sess.expire(john, ['age'])
 
         # eval must be before the update.  otherwise
-        # we eval john, age has been expired and doesn't 
+        # we eval john, age has been expired and doesn't
         # match the new value coming in
         sess.query(User).filter_by(name='john').filter_by(age=25).\
-                            update({'name':'j2', 'age':40}, 
+                            update({'name':'j2', 'age':40},
                             synchronize_session='evaluate')
         eq_(john.name, 'j2')
         eq_(john.age, 40)
@@ -392,7 +392,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         sess.expire(john, ['age'])
 
         sess.query(User).filter_by(name='john').filter_by(age=25).\
-                            update({'name':'j2', 'age':40}, 
+                            update({'name':'j2', 'age':40},
                             synchronize_session='fetch')
         eq_(john.name, 'j2')
         eq_(john.age, 40)
@@ -427,13 +427,13 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
               Column('name', String(32)),
               Column('age', Integer))
 
         Table('documents', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
               Column('user_id', None, ForeignKey('users.id')),
               Column('title', String(32)))
@@ -474,7 +474,7 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
 
         mapper(User, users)
         mapper(Document, documents, properties={
-            'user': relationship(User, lazy='joined', 
+            'user': relationship(User, lazy='joined',
                         backref=backref('documents', lazy='select'))
         })
 
@@ -488,7 +488,7 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
                 update({'title': Document.title+Document.title}, synchronize_session='fetch')
 
         eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
-        eq_(sess.query(Document.title).order_by(Document.id).all(), 
+        eq_(sess.query(Document.title).order_by(Document.id).all(),
                 zip(['foofoo','barbar', 'baz']))
 
     def test_update_with_explicit_joinedload(self):
@@ -517,7 +517,7 @@ class ExpressionUpdateTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         data = Table('data', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
             Column('counter', Integer, nullable=False, default=0)
         )
index accce0372fb45b8dc729a0824a1255cb93e6e420..721bd1286d2bbe16c364a5b3e4e1edbc5bdb023d 100644
@@ -51,7 +51,7 @@ class VersioningTest(fixtures.MappedTest):
     def _fixture(self):
         Foo, version_table = self.classes.Foo, self.tables.version_table
 
-        mapper(Foo, version_table, 
+        mapper(Foo, version_table,
                 version_id_col=version_table.c.version_id)
         s1 = Session()
         return s1
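
With version_id_col configured as in this fixture, every UPDATE and DELETE qualifies its WHERE clause with the last-committed version, and UPDATEs bump the counter; a rowcount of zero surfaces as StaleDataError on dialects with sane rowcounts. Roughly:

    mapper(Foo, version_table,
            version_id_col=version_table.c.version_id)
    # flush then emits, for a modified Foo:
    #   UPDATE version_table SET value=:value, version_id=:new_version
    #   WHERE version_table.id = :id
    #     AND version_table.version_id = :last_committed_version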
@@ -97,7 +97,7 @@ class VersioningTest(fixtures.MappedTest):
         # Only dialects with a sane rowcount can detect the
         # StaleDataError
         if testing.db.dialect.supports_sane_rowcount:
-            assert_raises_message(sa.orm.exc.StaleDataError, 
+            assert_raises_message(sa.orm.exc.StaleDataError,
             r"UPDATE statement on table 'version_table' expected "
             r"to update 1 row\(s\); 0 were matched.",
             s1.commit),
@@ -117,7 +117,7 @@ class VersioningTest(fixtures.MappedTest):
 
         if testing.db.dialect.supports_sane_rowcount:
             assert_raises_message(
-                sa.orm.exc.StaleDataError, 
+                sa.orm.exc.StaleDataError,
                 r"DELETE statement on table 'version_table' expected "
                 r"to delete 2 row\(s\); 1 were matched.",
                 s1.commit)
@@ -128,8 +128,8 @@ class VersioningTest(fixtures.MappedTest):
     def test_bump_version(self):
         """test that version number can be bumped.
 
-        Ensures that the UPDATE or DELETE is against the 
-        last committed version of version_id_col, not the modified 
+        Ensures that the UPDATE or DELETE is against the
+        last committed version of version_id_col, not the modified
         state.
 
         """
@@ -177,7 +177,7 @@ class VersioningTest(fixtures.MappedTest):
 
         # load, version is wrong
         assert_raises_message(
-                sa.orm.exc.StaleDataError, 
+                sa.orm.exc.StaleDataError,
                 r"Instance .* has version id '\d+' which does not "
                 r"match database-loaded version id '\d+'",
                 s1.query(Foo).with_lockmode('read').get, f1s1.id
@@ -351,7 +351,7 @@ class RowSwitchTest(fixtures.MappedTest):
                                 cls.classes.C,
                                 cls.classes.P)
 
-        mapper(P, p, version_id_col=p.c.version_id, 
+        mapper(P, p, version_id_col=p.c.version_id,
             properties={
             'c':relationship(C, uselist=False, cascade='all, delete-orphan')
         })
@@ -418,7 +418,7 @@ class AlternateGeneratorTest(fixtures.MappedTest):
                                 cls.classes.C,
                                 cls.classes.P)
 
-        mapper(P, p, version_id_col=p.c.version_id, 
+        mapper(P, p, version_id_col=p.c.version_id,
             version_id_generator=lambda x:make_uuid(),
             properties={
             'c':relationship(C, uselist=False, cascade='all, delete-orphan')
@@ -466,7 +466,7 @@ class AlternateGeneratorTest(fixtures.MappedTest):
 
         Session = sessionmaker()
 
-        # TODO: not sure this test is 
+        # TODO: not sure this test is
         # testing exactly what its looking for
 
         sess1 = Session()
@@ -528,7 +528,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                                 self.tables.base,
                                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
         mapper(Sub, sub, inherits=Base)
 
@@ -546,7 +546,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                                 self.tables.base,
                                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
         mapper(Sub, sub, inherits=Base)
 
@@ -568,7 +568,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                                 self.classes.Sub)
 
         mapper(Base, base)
-        mapper(Sub, sub, inherits=Base, 
+        mapper(Sub, sub, inherits=Base,
                 version_id_col=sub.c.version_id)
 
         session = Session()
@@ -588,7 +588,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                                 self.tables.base,
                                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
 
         assert_raises_message(
@@ -599,5 +599,5 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
             "version_id_col should only be specified on "
             "the base-most mapper that includes versioning.",
             mapper,
-            Sub, sub, inherits=Base, 
+            Sub, sub, inherits=Base,
                 version_id_col=sub.c.version_id)
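
The assertion above encodes the joined-inheritance rule: versioning is declared once, on the base-most mapper, and subclass mappers inherit it; declaring a second version_id_col on Sub raises. The accepted shape:

    mapper(Base, base, version_id_col=base.c.version_id)
    mapper(Sub, sub, inherits=Base)   # shares the base version counter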
index a19be95795ea0640b8279bba96d25ef4507417ec..b5d210eefe68f8798f80f08068d7987c83b9738d 100644
@@ -137,7 +137,7 @@ unicodetest = (Unicode(20, assert_unicode=False), genunicodevalue,
 if test_types:
     tests = [booleantest, datetimetest, decimaltest, intervaltest,
              pickletypetest, typedecoratortest, unicodetest]
-    for engineurl in ('postgresql://scott:tiger@localhost/test', 
+    for engineurl in ('postgresql://scott:tiger@localhost/test',
                         'sqlite://', 'mysql://scott:tiger@localhost/test'):
         print "\n%s\n" % engineurl
         for datatype, genvalue, kwargs in tests:
@@ -156,7 +156,7 @@ if test_methods:
                getitem_str_results, getitem_fallback_results,
                getitem_int_results, getitem_long_results, getitem_obj_results,
                slice_results]
-    for engineurl in ('postgresql://scott:tiger@localhost/test', 
+    for engineurl in ('postgresql://scott:tiger@localhost/test',
                        'sqlite://', 'mysql://scott:tiger@localhost/test'):
         print "\n%s\n" % engineurl
         test_table = prepare(Unicode(20, assert_unicode=False),
index fcc6c085e46c37a085ab306a0619c45a6138c973..2869839dcdcad13fa71f9925d7e842803cc73882 100644
@@ -194,7 +194,7 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
                 ('sometable', 'this_name_alsois_long', 'ix_sometable_t_3cf1'),
             ]:
 
-                t1 = Table(tname, MetaData(), 
+                t1 = Table(tname, MetaData(),
                             Column(cname, Integer, index=True),
                         )
                 ix1 = list(t1.indexes)[0]
@@ -213,24 +213,24 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
         assert_raises(
             exc.IdentifierError,
             schema.CreateIndex(Index(
-                        "this_other_name_is_too_long_for_what_were_doing", 
+                        "this_other_name_is_too_long_for_what_were_doing",
                         t1.c.c)).compile,
             dialect=dialect
         )
 
     def test_index_declartion_inline(self):
-        t1 = Table('t1', metadata, 
+        t1 = Table('t1', metadata,
             Column('x', Integer),
             Column('y', Integer),
             Index('foo', 'x', 'y')
         )
         self.assert_compile(
-            schema.CreateIndex(list(t1.indexes)[0]), 
+            schema.CreateIndex(list(t1.indexes)[0]),
             "CREATE INDEX foo ON t1 (x, y)"
         )
 
     def test_index_asserts_cols_standalone(self):
-        t1 = Table('t1', metadata, 
+        t1 = Table('t1', metadata,
             Column('x', Integer)
         )
         t2 = Table('t2', metadata,
@@ -244,7 +244,7 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
         )
 
     def test_index_asserts_cols_inline(self):
-        t1 = Table('t1', metadata, 
+        t1 = Table('t1', metadata,
             Column('x', Integer)
         )
         assert_raises_message(
@@ -384,7 +384,7 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_multiple(self):
         m = MetaData()
-        foo = Table("foo", m, 
+        foo = Table("foo", m,
             Column('id', Integer, primary_key=True),
             Column('bar', Integer, primary_key=True)
         )
@@ -433,11 +433,11 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL):
         m.drop_all(e)
 
         e.assert_sql([
-            'CREATE TABLE t (a INTEGER)', 
-            'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))', 
-            'ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)', 
-            'ALTER TABLE t2 DROP CONSTRAINT fk_ta', 
-            'DROP TABLE t2', 
+            'CREATE TABLE t (a INTEGER)',
+            'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))',
+            'ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)',
+            'ALTER TABLE t2 DROP CONSTRAINT fk_ta',
+            'DROP TABLE t2',
             'DROP TABLE t'
         ])
 
@@ -456,7 +456,7 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         constraint = CheckConstraint('a < b',name="my_test_constraint",
-                                        deferrable=True,initially='DEFERRED', 
+                                        deferrable=True,initially='DEFERRED',
                                         table=t)
 
 
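
These hunks sit in the constraint/index tests. A short sketch of the two constructs they touch, using illustrative names: an Index declared inline within a Table, and a CheckConstraint carrying DEFERRABLE/INITIALLY options:

    from sqlalchemy import (Table, Column, Integer, MetaData, Index,
                            CheckConstraint, schema)

    m = MetaData()

    # the Index may be declared inline with the Table definition
    t1 = Table('t1', m,
        Column('x', Integer),
        Column('y', Integer),
        Index('foo', 'x', 'y'))
    print(schema.CreateIndex(list(t1.indexes)[0]))
    # CREATE INDEX foo ON t1 (x, y)

    t2 = Table('t2', m,
        Column('a', Integer),
        Column('b', Integer),
        CheckConstraint('a < b', name='my_test_constraint',
                        deferrable=True, initially='DEFERRED'))
    print(schema.CreateTable(t2))
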
index ec08cd28e4d314c6c48bb4eca8b4105c828b07c1..7a6c6d0097c9391a06b7f5006647ac46b924b37f 100644 (file)
@@ -305,7 +305,7 @@ class DefaultTest(fixtures.TestBase):
 
     def test_no_embed_in_sql(self):
         """Using a DefaultGenerator, Sequence, DefaultClause
-        in the columns, where clause of a select, or in the values 
+        in the columns, where clause of a select, or in the values
         clause of insert, update, raises an informative error"""
 
         for const in (
@@ -330,7 +330,7 @@ class DefaultTest(fixtures.TestBase):
             )
 
     def test_missing_many_param(self):
-        assert_raises_message(exc.StatementError, 
+        assert_raises_message(exc.StatementError,
             "A value is required for bind parameter 'col7', in parameter group 1",
             t.insert().execute,
             {'col4':7, 'col7':12, 'col8':19},
@@ -558,8 +558,8 @@ class AutoIncrementTest(fixtures.TablesTest):
             Column('id', Integer(), primary_key=True)
         )
         x = Table('x', m,
-            Column('id', Integer(), 
-                ForeignKey('y.id'), 
+            Column('id', Integer(),
+                ForeignKey('y.id'),
                 autoincrement="ignore_fk", primary_key=True)
         )
         assert x._autoincrement_column is x.c.id
@@ -570,8 +570,8 @@ class AutoIncrementTest(fixtures.TablesTest):
             Column('id', Integer(), primary_key=True)
         )
         x = Table('x', m,
-            Column('id', Integer(), 
-                ForeignKey('y.id'), 
+            Column('id', Integer(),
+                ForeignKey('y.id'),
                 primary_key=True)
         )
         assert x._autoincrement_column is None
@@ -652,7 +652,7 @@ class SequenceExecTest(fixtures.TestBase):
         self._assert_seq_result(s.execute(testing.db))
 
     def test_explicit_optional(self):
-        """test dialect executes a Sequence, returns nextval, whether 
+        """test dialect executes a Sequence, returns nextval, whether
         or not "optional" is set """
 
         s = Sequence("my_sequence", optional=True)
@@ -721,7 +721,7 @@ class SequenceExecTest(fixtures.TestBase):
 
     @testing.provide_metadata
     def test_inserted_pk_no_returning(self):
-        """test inserted_primary_key contains [None] when 
+        """test inserted_primary_key contains [None] when
         pk_col=next_value(), implicit returning is not used."""
 
         metadata = self.metadata
@@ -740,7 +740,7 @@ class SequenceExecTest(fixtures.TestBase):
     @testing.requires.returning
     @testing.provide_metadata
     def test_inserted_pk_implicit_returning(self):
-        """test inserted_primary_key contains the result when 
+        """test inserted_primary_key contains the result when
         pk_col=next_value(), when implicit returning is used."""
 
         metadata = self.metadata
@@ -762,8 +762,8 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
     @testing.fails_on('firebird', 'no FB support for start/increment')
     def test_start_increment(self):
         for seq in (
-                Sequence('foo_seq'), 
-                Sequence('foo_seq', start=8), 
+                Sequence('foo_seq'),
+                Sequence('foo_seq', start=8),
                 Sequence('foo_seq', increment=5)):
             seq.create(testing.db)
             try:
@@ -782,11 +782,11 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
         return testing.db.dialect.has_sequence(testing.db, name)
 
     def test_nextval_render(self):
-        """test dialect renders the "nextval" construct, 
+        """test dialect renders the "nextval" construct,
         whether or not "optional" is set """
 
         for s in (
-                Sequence("my_seq"), 
+                Sequence("my_seq"),
                 Sequence("my_seq", optional=True)):
             assert str(s.next_value().
                     compile(dialect=testing.db.dialect)) in (
@@ -796,7 +796,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
             )
 
     def test_nextval_unsupported(self):
-        """test next_value() used on non-sequence platform 
+        """test next_value() used on non-sequence platform
         raises NotImplementedError."""
 
         s = Sequence("my_seq")
@@ -844,7 +844,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
         s1 = Sequence("s1", metadata=metadata)
         s2 = Sequence("s2", metadata=metadata)
         s3 = Sequence("s3")
-        t = Table('t', metadata, 
+        t = Table('t', metadata,
                     Column('c', Integer, s3, primary_key=True))
         assert s3.metadata is metadata
 
@@ -1017,7 +1017,7 @@ class SpecialTypePKTest(fixtures.TestBase):
 class ServerDefaultsOnPKTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_string_default_none_on_insert(self):
-        """Test that without implicit returning, we return None for 
+        """Test that without implicit returning, we return None for
         a string server default.
 
         That is, we don't want to attempt to pre-execute "server_default"
@@ -1027,7 +1027,7 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
         """
 
         metadata = self.metadata
-        t = Table('x', metadata, 
+        t = Table('x', metadata,
                 Column('y', String(10), server_default='key_one', primary_key=True),
                 Column('data', String(10)),
                 implicit_returning=False
@@ -1046,7 +1046,7 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
         """With implicit_returning, we get a string PK default back no problem."""
 
         metadata = self.metadata
-        t = Table('x', metadata, 
+        t = Table('x', metadata,
                 Column('y', String(10), server_default='key_one', primary_key=True),
                 Column('data', String(10))
                 )
@@ -1061,8 +1061,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_int_default_none_on_insert(self):
         metadata = self.metadata
-        t = Table('x', metadata, 
-                Column('y', Integer, 
+        t = Table('x', metadata,
+                Column('y', Integer,
                         server_default='5', primary_key=True),
                 Column('data', String(10)),
                 implicit_returning=False
@@ -1084,8 +1084,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_autoincrement_reflected_from_server_default(self):
         metadata = self.metadata
-        t = Table('x', metadata, 
-                Column('y', Integer, 
+        t = Table('x', metadata,
+                Column('y', Integer,
                         server_default='5', primary_key=True),
                 Column('data', String(10)),
                 implicit_returning=False
@@ -1100,8 +1100,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_int_default_none_on_insert_reflected(self):
         metadata = self.metadata
-        t = Table('x', metadata, 
-                Column('y', Integer, 
+        t = Table('x', metadata,
+                Column('y', Integer,
                         server_default='5', primary_key=True),
                 Column('data', String(10)),
                 implicit_returning=False
@@ -1128,8 +1128,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_int_default_on_insert_with_returning(self):
         metadata = self.metadata
-        t = Table('x', metadata, 
-                Column('y', Integer, 
+        t = Table('x', metadata,
+                Column('y', Integer,
                         server_default='5', primary_key=True),
                 Column('data', String(10))
                 )
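
Several hunks above are in the Sequence tests. One behavior worth recalling: an explicit next_value() renders on sequence-supporting dialects whether or not optional=True, which can be verified without a database (a sketch):

    from sqlalchemy import Sequence
    from sqlalchemy.dialects import postgresql

    for s in (Sequence('my_seq'), Sequence('my_seq', optional=True)):
        # "optional" only lets the sequence be skipped for implicit
        # primary key generation; explicit next_value() always renders
        print(s.next_value().compile(dialect=postgresql.dialect()))
    # nextval('my_seq')  (printed twice)
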
index 961845bac701d93f792b3638184f162914fd8159..2f9c6f90867e2f8cd822fe002dd5cf4a78b308c5 100644 (file)
@@ -36,13 +36,13 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
                     GenericFunction.__init__(self, args=[arg], **kwargs)
 
             self.assert_compile(
-                            fake_func('foo'), 
-                            "fake_func(%s)" % 
-                            bindtemplate % {'name':'param_1', 'position':1}, 
+                            fake_func('foo'),
+                            "fake_func(%s)" %
+                            bindtemplate % {'name':'param_1', 'position':1},
                             dialect=dialect)
 
     def test_use_labels(self):
-        self.assert_compile(select([func.foo()], use_labels=True), 
+        self.assert_compile(select([func.foo()], use_labels=True),
             "SELECT foo() AS foo_1"
         )
     def test_underscores(self):
@@ -105,12 +105,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
 
         for fn in [func.coalesce, func.max, func.min, func.sum]:
             for args, type_ in [
-                            ((datetime.date(2007, 10, 5), 
+                            ((datetime.date(2007, 10, 5),
                                 datetime.date(2005, 10, 15)), sqltypes.Date),
                             ((3, 5), sqltypes.Integer),
                             ((decimal.Decimal(3), decimal.Decimal(5)), sqltypes.Numeric),
                             (("foo", "bar"), sqltypes.String),
-                            ((datetime.datetime(2007, 10, 5, 8, 3, 34), 
+                            ((datetime.datetime(2007, 10, 5, 8, 3, 34),
                                 datetime.datetime(2005, 10, 15, 14, 45, 33)), sqltypes.DateTime)
                         ]:
                 assert isinstance(fn(*args).type, type_), "%s / %s" % (fn(), type_)
@@ -149,7 +149,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         self.assert_compile(func.lala.hoho(7), "lala.hoho(:hoho_1)")
 
         # test None becomes NULL
-        self.assert_compile(func.my_func(1,2,None,3), 
+        self.assert_compile(func.my_func(1,2,None,3),
                         "my_func(:my_func_1, :my_func_2, NULL, :my_func_3)")
 
         # test pickling
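
The function-compilation hunks above reduce to a few behaviors, mirrored in this sketch: generic functions bind their arguments, render None as NULL, and derive a return type from their inputs:

    from sqlalchemy import func, select, Integer

    # None among the arguments renders as a NULL literal
    print(func.my_func(1, 2, None, 3))
    # my_func(:my_func_1, :my_func_2, NULL, :my_func_3)

    # the return type is derived from the argument types
    assert isinstance(func.coalesce(3, 5).type, Integer)

    # use_labels assigns an anonymous label to the function column
    print(select([func.foo()], use_labels=True))
    # SELECT foo() AS foo_1
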
index 29b7cd482f9a9b76c9498caddb44805adfdca3f4..b785599607e6dc438c7eb4e6816641e6301fc67f 100644 (file)
@@ -8,7 +8,7 @@ from sqlalchemy.sql import util as sql_util
 from test.lib.testing import eq_, ne_, assert_raises
 
 class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
-    """test ClauseVisitor's traversal, particularly its 
+    """test ClauseVisitor's traversal, particularly its
     ability to copy and modify a ClauseElement in place."""
 
     @classmethod
@@ -16,7 +16,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
         global A, B
 
         # establish two fictitious ClauseElements.
-        # define deep equality semantics as well as deep 
+        # define deep equality semantics as well as deep
         # identity semantics.
         class A(ClauseElement):
             __visit_name__ = 'a'
@@ -79,7 +79,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
         a1 = A("expr1")
         struct = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3"))
         struct2 = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3"))
-        struct3 = B(a1, A("expr2"), B(A("expr1b"), 
+        struct3 = B(a1, A("expr2"), B(A("expr1b"),
                         A("expr2bmodified")), A("expr3"))
 
         assert a1.is_other(a1)
@@ -90,7 +90,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
         assert not struct.is_other(struct3)
 
     def test_clone(self):
-        struct = B(A("expr1"), A("expr2"), B(A("expr1b"), 
+        struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
                         A("expr2b")), A("expr3"))
 
         class Vis(CloningVisitor):
@@ -105,7 +105,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
         assert not struct.is_other(s2)
 
     def test_no_clone(self):
-        struct = B(A("expr1"), A("expr2"), B(A("expr1b"), 
+        struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
                                 A("expr2b")), A("expr3"))
 
         class Vis(ClauseVisitor):
@@ -120,11 +120,11 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
         assert struct.is_other(s2)
 
     def test_change_in_place(self):
-        struct = B(A("expr1"), A("expr2"), B(A("expr1b"), 
+        struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
                                 A("expr2b")), A("expr3"))
-        struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"), 
+        struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"),
                                 A("expr2b")), A("expr3"))
-        struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"), 
+        struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"),
                                 A("expr2bmodified")), A("expr3"))
 
         class Vis(CloningVisitor):
@@ -280,7 +280,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
             column("col2"),
             column("col3"),
             )
-        t3 = Table('table3', MetaData(), 
+        t3 = Table('table3', MetaData(),
             Column('col1', Integer),
             Column('col2', Integer)
         )
@@ -294,7 +294,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
 
         f = t.c.col1 * 5
-        self.assert_compile(select([f]), 
+        self.assert_compile(select([f]),
                     "SELECT t1.col1 * :col1_1 AS anon_1 FROM t1")
 
         f.anon_label
@@ -302,7 +302,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         a = t.alias()
         f = sql_util.ClauseAdapter(a).traverse(f)
 
-        self.assert_compile(select([f]), 
+        self.assert_compile(select([f]),
                     "SELECT t1_1.col1 * :col1_1 AS anon_1 FROM t1 AS t1_1")
 
     def test_join(self):
@@ -323,7 +323,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
         aliased = t1.select().alias()
         aliased2 = t1.alias()
-
+
         adapter = sql_util.ColumnAdapter(aliased)
 
         f = select([
@@ -333,7 +333,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
         s = select([aliased2]).select_from(aliased)
         eq_(str(s), str(f))
-
+
         f = select([
             adapter.columns[func.count(aliased2.c.col1)]
         ]).select_from(aliased)
@@ -352,7 +352,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
         # fixed by [ticket:2419].   the inside columns
         # on aliased3 have _is_clone_of pointers to those of
-        # aliased2.  corresponding_column checks these 
+        # aliased2.  corresponding_column checks these
         # now.
         adapter = sql_util.ColumnAdapter(aliased1)
         f1 = select([
@@ -376,7 +376,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         aliased3 = cloned_traverse(aliased2, {}, {})
 
         # also fixed by [ticket:2419].  When we look at the
-        # *outside* columns of aliased3, they previously did not 
+        # *outside* columns of aliased3, they previously did not
         # have an _is_clone_of pointer.   But we now modified _make_proxy
         # to assign this.
         adapter = sql_util.ColumnAdapter(aliased1)
@@ -401,7 +401,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         aliased3 = cloned_traverse(aliased2, {}, {})
 
         # also fixed by [ticket:2419].  When we look at the
-        # *outside* columns of aliased3, they previously did not 
+        # *outside* columns of aliased3, they previously did not
         # have an _is_clone_of pointer.   But we now modified _make_proxy
         # to assign this.
         adapter = sql_util.ColumnAdapter(aliased1)
@@ -517,14 +517,14 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         assert sql_util.ClauseAdapter(u).traverse(t1) is u
 
     def test_binds(self):
-        """test that unique bindparams change their name upon clone() 
+        """test that unique bindparams change their name upon clone()
         to prevent conflicts"""
 
         s = select([t1], t1.c.col1==bindparam(None, unique=True)).alias()
         s2 = CloningVisitor().traverse(s).alias()
         s3 = select([s], s.c.col2==s2.c.col2)
 
-        self.assert_compile(s3, 
+        self.assert_compile(s3,
             "SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM "
             "(SELECT table1.col1 AS col1, table1.col2 AS col2, "
             "table1.col3 AS col3 FROM table1 WHERE table1.col1 = :param_1) "
@@ -536,7 +536,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         s = select([t1], t1.c.col1==4).alias()
         s2 = CloningVisitor().traverse(s).alias()
         s3 = select([s], s.c.col2==s2.c.col2)
-        self.assert_compile(s3, 
+        self.assert_compile(s3,
             "SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM "
             "(SELECT table1.col1 AS col1, table1.col2 AS col2, "
             "table1.col3 AS col3 FROM table1 WHERE table1.col1 = :col1_1) "
@@ -547,14 +547,14 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_extract(self):
         s = select([extract('foo', t1.c.col1).label('col1')])
-        self.assert_compile(s, 
+        self.assert_compile(s,
                 "SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1")
 
         s2 = CloningVisitor().traverse(s).alias()
         s3 = select([s2.c.col1])
-        self.assert_compile(s, 
+        self.assert_compile(s,
                 "SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1")
-        self.assert_compile(s3, 
+        self.assert_compile(s3,
                 "SELECT anon_1.col1 FROM (SELECT EXTRACT(foo FROM "
                 "table1.col1) AS col1 FROM table1) AS anon_1")
 
@@ -562,8 +562,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
     @testing.emits_warning('.*replaced by another column with the same key')
     def test_alias(self):
         subq = t2.select().alias('subq')
-        s = select([t1.c.col1, subq.c.col1], 
-                    from_obj=[t1, subq, 
+        s = select([t1.c.col1, subq.c.col1],
+                    from_obj=[t1, subq,
                         t1.join(subq, t1.c.col1==subq.c.col2)]
                     )
         orig = str(s)
@@ -580,21 +580,21 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         assert orig == str(s) == str(s3) == str(s4)
 
         subq = subq.alias('subq')
-        s = select([t1.c.col1, subq.c.col1], 
-                    from_obj=[t1, subq, 
+        s = select([t1.c.col1, subq.c.col1],
+                    from_obj=[t1, subq,
                         t1.join(subq, t1.c.col1==subq.c.col2)]
                     )
         s5 = CloningVisitor().traverse(s)
         assert orig == str(s) == str(s5)
 
     def test_correlated_select(self):
-        s = select(['*'], t1.c.col1==t2.c.col1, 
+        s = select(['*'], t1.c.col1==t2.c.col1,
                     from_obj=[t1, t2]).correlate(t2)
         class Vis(CloningVisitor):
             def visit_select(self, select):
                 select.append_whereclause(t1.c.col2==7)
 
-        self.assert_compile(Vis().traverse(s), 
+        self.assert_compile(Vis().traverse(s),
                     "SELECT * FROM table1 WHERE table1.col1 = table2.col1 "
                     "AND table1.col2 = :col2_1")
 
@@ -851,8 +851,8 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
         m = MetaData()
         a=Table( 'a',m,
           Column( 'id',    Integer, primary_key=True),
-          Column( 'xxx_id', Integer, 
-                        ForeignKey( 'a.id', name='adf',use_alter=True ) 
+          Column( 'xxx_id', Integer,
+                        ForeignKey( 'a.id', name='adf',use_alter=True )
         )
         )
 
@@ -887,7 +887,7 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
 
         alias = select([a]).select_from(a.join(b, a.c.x==b.c.x)).alias()
 
-        # two levels of indirection from c.x->b.x->a.x, requires recursive 
+        # two levels of indirection from c.x->b.x->a.x, requires recursive
         # corresponding_column call
         adapt = sql_util.ClauseAdapter(alias,
                 equivalents={b.c.x: set([a.c.x]), c.c.x: set([b.c.x])})
@@ -1267,7 +1267,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
         assert_raises(
             exc.ArgumentError,
-            select().execution_options, 
+            select().execution_options,
                 isolation_level='READ_COMMITTED'
         )
 
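
Much of the file above exercises ClauseAdapter/ColumnAdapter. The core idea, rewriting an existing expression so it references an alias instead of the original table, fits in a few lines (illustrative names, following the tests above):

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.sql import util as sql_util

    m = MetaData()
    t = Table('t1', m, Column('col1', Integer))

    f = t.c.col1 * 5
    print(select([f]))
    # SELECT t1.col1 * :col1_1 AS anon_1 FROM t1

    # traverse() returns a copy of the expression with t swapped
    # for the alias
    a = t.alias()
    f = sql_util.ClauseAdapter(a).traverse(f)
    print(select([f]))
    # SELECT t1_1.col1 * :col1_1 AS anon_1 FROM t1 AS t1_1
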
index 7c65a1d0138bfed692fc72541bc88def1404d5f4..c814a01308ce7721753901c9ed175620abafe285 100644 (file)
@@ -27,14 +27,14 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
     def define_tables(cls, metadata):
         table1 = Table("some_large_named_table", metadata,
             Column("this_is_the_primarykey_column", Integer,
-                            primary_key=True, 
+                            primary_key=True,
                             test_needs_autoincrement=True),
             Column("this_is_the_data_column", String(30))
             )
 
         table2 = Table("table_with_exactly_29_characs", metadata,
             Column("this_is_the_primarykey_column", Integer,
-                            primary_key=True, 
+                            primary_key=True,
                             test_needs_autoincrement=True),
             Column("this_is_the_data_column", String(30))
             )
@@ -46,13 +46,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
         table1 = cls.tables.table1
         table2 = cls.tables.table2
         for data in [
-            {"this_is_the_primarykey_column":1, 
+            {"this_is_the_primarykey_column":1,
                         "this_is_the_data_column":"data1"},
-            {"this_is_the_primarykey_column":2, 
+            {"this_is_the_primarykey_column":2,
                         "this_is_the_data_column":"data2"},
-            {"this_is_the_primarykey_column":3, 
+            {"this_is_the_primarykey_column":3,
                         "this_is_the_data_column":"data3"},
-            {"this_is_the_primarykey_column":4, 
+            {"this_is_the_primarykey_column":4,
                         "this_is_the_data_column":"data4"}
         ]:
             testing.db.execute(
@@ -61,7 +61,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
             )
         testing.db.execute(
             table2.insert(),
-            {"this_is_the_primary_key_column":1, 
+            {"this_is_the_primary_key_column":1,
             "this_is_the_data_column":"data"}
         )
 
@@ -78,7 +78,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_too_long_name_disallowed(self):
         m = MetaData(testing.db)
-        t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test", 
+        t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test",
                         m, Column('foo', Integer))
         assert_raises(exceptions.IdentifierError, m.create_all)
         assert_raises(exceptions.IdentifierError, m.drop_all)
@@ -87,11 +87,11 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_basic_result(self):
         table1 = self.tables.table1
-        s = table1.select(use_labels=True, 
+        s = table1.select(use_labels=True,
                         order_by=[table1.c.this_is_the_primarykey_column])
 
         result = [
-            (row[table1.c.this_is_the_primarykey_column], 
+            (row[table1.c.this_is_the_primarykey_column],
             row[table1.c.this_is_the_data_column])
             for row in testing.db.execute(s)
         ]
@@ -104,18 +104,18 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_result_limit(self):
         table1 = self.tables.table1
-        # some dialects such as oracle (and possibly ms-sql 
+        # some dialects such as oracle (and possibly ms-sql
         # in a future version)
         # generate a subquery for limits/offsets.
-        # ensure that the generated result map corresponds 
+        # ensure that the generated result map corresponds
         # to the selected table, not
         # the select query
-        s = table1.select(use_labels=True, 
+        s = table1.select(use_labels=True,
                         order_by=[table1.c.this_is_the_primarykey_column]).\
                         limit(2)
 
         result = [
-            (row[table1.c.this_is_the_primarykey_column], 
+            (row[table1.c.this_is_the_primarykey_column],
             row[table1.c.this_is_the_data_column])
             for row in testing.db.execute(s)
         ]
@@ -127,12 +127,12 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
     @testing.requires.offset
     def test_result_limit_offset(self):
         table1 = self.tables.table1
-        s = table1.select(use_labels=True, 
+        s = table1.select(use_labels=True,
                         order_by=[table1.c.this_is_the_primarykey_column]).\
                         limit(2).offset(1)
 
         result = [
-            (row[table1.c.this_is_the_primarykey_column], 
+            (row[table1.c.this_is_the_primarykey_column],
             row[table1.c.this_is_the_data_column])
             for row in testing.db.execute(s)
         ]
@@ -170,7 +170,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
         dialect.max_identifier_length = IDENT_LENGTH
         self.assert_compile(
             select([table1, ta]).select_from(
-                        table1.join(ta, 
+                        table1.join(ta,
                             table1.c.this_is_the_data_column==
                             ta.c.this_is_the_data_column)).\
                         where(ta.c.this_is_the_data_column=='data3'),
@@ -210,13 +210,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
     @testing.provide_metadata
     def test_insert_no_pk(self):
         t = Table("some_other_large_named_table", self.metadata,
-            Column("this_is_the_primarykey_column", Integer, 
-                            Sequence("this_is_some_large_seq"), 
+            Column("this_is_the_primarykey_column", Integer,
+                            Sequence("this_is_some_large_seq"),
                             primary_key=True),
             Column("this_is_the_data_column", String(30))
             )
         t.create(testing.db, checkfirst=True)
-        testing.db.execute(t.insert(), 
+        testing.db.execute(t.insert(),
                 **{"this_is_the_data_column":"data1"})
 
     @testing.requires.subqueries
@@ -238,7 +238,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
         q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
         x = select([q], use_labels=True)
 
-        self.assert_compile(x, 
+        self.assert_compile(x,
             "SELECT anon_1.this_is_the_primarykey_column AS "
             "anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column "
             "AS anon_1_this_is_the_data_2 "
@@ -249,7 +249,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
             "AS this_is_the_data_column "
             "FROM some_large_named_table "
             "WHERE some_large_named_table.this_is_the_primarykey_column "
-            "= :this_is_the_primarykey__1) AS anon_1", 
+            "= :this_is_the_primarykey__1) AS anon_1",
             dialect=compile_dialect)
 
         eq_(
@@ -264,13 +264,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
         x = select([q])
 
         compile_dialect = default.DefaultDialect(label_length=10)
-        self.assert_compile(x, 
+        self.assert_compile(x,
             "SELECT foo.this_1, foo.this_2 FROM "
             "(SELECT some_large_named_table."
             "this_is_the_primarykey_column AS this_1, "
             "some_large_named_table.this_is_the_data_column AS this_2 "
             "FROM some_large_named_table WHERE "
-            "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo", 
+            "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo",
             dialect=compile_dialect)
 
         compile_dialect = default.DefaultDialect(label_length=4)
@@ -278,19 +278,19 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
             "(SELECT some_large_named_table.this_is_the_primarykey_column "
             "AS _1, some_large_named_table.this_is_the_data_column AS _2 "
             "FROM some_large_named_table WHERE "
-            "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo", 
+            "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo",
         dialect=compile_dialect)
 
         q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
         x = select([q], use_labels=True)
 
         compile_dialect = default.DefaultDialect(label_length=10)
-        self.assert_compile(x, 
+        self.assert_compile(x,
             "SELECT anon_1.this_2 AS anon_1, anon_1.this_4 AS anon_3 FROM "
             "(SELECT some_large_named_table.this_is_the_primarykey_column "
             "AS this_2, some_large_named_table.this_is_the_data_column AS this_4 "
             "FROM some_large_named_table WHERE "
-            "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1", 
+            "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1",
             dialect=compile_dialect)
 
         compile_dialect = default.DefaultDialect(label_length=4)
@@ -298,7 +298,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
             "(SELECT some_large_named_table.this_is_the_primarykey_column "
             "AS _2, some_large_named_table.this_is_the_data_column AS _4 "
             "FROM some_large_named_table WHERE "
-            "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1", 
+            "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1",
             dialect=compile_dialect)
 
     def test_adjustable_result_schema_column(self):
@@ -321,7 +321,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_adjustable_result_lightweight_column(self):
 
-        table1 = table("some_large_named_table", 
+        table1 = table("some_large_named_table",
             column("this_is_the_primarykey_column"),
             column("this_is_the_data_column")
         )
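
The file above tests label truncation: label_length on a dialect caps generated column labels independently of max_identifier_length. A sketch (the output is the truncated, anonymized form asserted in the hunks above):

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.engine import default

    m = MetaData()
    t = Table('some_large_named_table', m,
        Column('this_is_the_primarykey_column', Integer,
               primary_key=True))

    # labels longer than label_length come out truncated
    dialect = default.DefaultDialect(label_length=10)
    print(select([t], use_labels=True).compile(dialect=dialect))
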
index ecbf8ad7545a07279af18ccda399519cb06f09ca..3e9f87fe7052822fce9f8fbfb1db140e8ec1c256 100644 (file)
@@ -19,7 +19,7 @@ from test.lib.testing import eq_
 class MetaDataTest(fixtures.TestBase, ComparesTables):
     def test_metadata_connect(self):
         metadata = MetaData()
-        t1 = Table('table1', metadata, 
+        t1 = Table('table1', metadata,
             Column('col1', Integer, primary_key=True),
             Column('col2', String(20)))
         metadata.bind = testing.db
@@ -58,7 +58,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
             Column('bar', Integer(), info={'foo':'bar'}),
         ]:
             c2 = col.copy()
-            for attr in ('name', 'type', 'nullable', 
+            for attr in ('name', 'type', 'nullable',
                         'primary_key', 'key', 'unique', 'info',
                         'doc'):
                 eq_(getattr(col, attr), getattr(c2, attr))
@@ -148,14 +148,14 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
     @testing.provide_metadata
     def test_dupe_tables(self):
         metadata = self.metadata
-        t1 = Table('table1', metadata, 
+        t1 = Table('table1', metadata,
             Column('col1', Integer, primary_key=True),
             Column('col2', String(20)))
 
         metadata.create_all()
         t1 = Table('table1', metadata, autoload=True)
         def go():
-            t2 = Table('table1', metadata, 
+            t2 = Table('table1', metadata,
                 Column('col1', Integer, primary_key=True),
                 Column('col2', String(20)))
         assert_raises_message(
@@ -173,11 +173,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
         m = MetaData()
         t1 = Table('t', m, c1, c2)
 
-        kw = dict(onupdate="X", 
+        kw = dict(onupdate="X",
                         ondelete="Y", use_alter=True, name='f1',
                         deferrable="Z", initially="Q", link_to_name=True)
 
-        fk1 = ForeignKey(c1, **kw) 
+        fk1 = ForeignKey(c1, **kw)
         fk2 = ForeignKeyConstraint((c1,), (c2,), **kw)
 
         t1.append_constraint(fk2)
@@ -190,10 +190,10 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
 
     def test_check_constraint_copy(self):
         r = lambda x: x
-        c = CheckConstraint("foo bar", 
-                            name='name', 
-                            initially=True, 
-                            deferrable=True, 
+        c = CheckConstraint("foo bar",
+                            name='name',
+                            initially=True,
+                            deferrable=True,
                             _create_rule = r)
         c2 = c.copy()
         eq_(c2.name, 'name')
@@ -273,7 +273,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
 
         table2 = Table('othertable', meta,
             Column('id', Integer, Sequence('foo_seq'), primary_key=True),
-            Column('myid', Integer, 
+            Column('myid', Integer,
                         ForeignKey('mytable.myid'),
                     ),
             test_needs_fk=True,
@@ -359,7 +359,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
         m2 = pickle.loads(pickle.dumps(m1))
 
         s2 = Sequence("x_seq")
-        t2 = Table('a', m2, 
+        t2 = Table('a', m2,
              Column('id',Integer,primary_key=True),
              Column('x', Integer, s2),
              extend_existing=True)
@@ -373,7 +373,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
 
         m1 = MetaData()
         s1 = Sequence("x_seq")
-        t = Table('a', m1, 
+        t = Table('a', m1,
              Column('x', Integer, s1)
         )
         assert m1._sequences['x_seq'] is s1
@@ -601,9 +601,9 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
         table_c = table.tometadata(meta2, schema=None)
         table2_c = table2.tometadata(meta2, schema=None)
 
-        eq_(str(table_c.join(table2_c).onclause), 
+        eq_(str(table_c.join(table2_c).onclause),
                 str(table_c.c.myid == table2_c.c.myid))
-        eq_(str(table_c.join(table2_c).onclause), 
+        eq_(str(table_c.join(table2_c).onclause),
                 "someschema.mytable.myid = someschema.othertable.myid")
 
     def test_tometadata_strip_schema(self):
@@ -644,21 +644,21 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
         ck = schema.CheckConstraint("x > y", name="someconstraint")
 
         for const, exp in (
-            (Sequence("my_seq"), 
+            (Sequence("my_seq"),
                 "Sequence('my_seq')"),
-            (Sequence("my_seq", start=5), 
+            (Sequence("my_seq", start=5),
                 "Sequence('my_seq', start=5)"),
-            (Column("foo", Integer), 
+            (Column("foo", Integer),
                 "Column('foo', Integer(), table=None)"),
-            (Table("bar", MetaData(), Column("x", String)), 
+            (Table("bar", MetaData(), Column("x", String)),
                 "Table('bar', MetaData(bind=None), "
                 "Column('x', String(), table=<bar>), schema=None)"),
-            (schema.DefaultGenerator(for_update=True), 
+            (schema.DefaultGenerator(for_update=True),
                 "DefaultGenerator(for_update=True)"),
             (schema.Index("bar", "c"), "Index('bar')"),
             (i1, "Index('bar', Column('x', Integer(), table=<foo>))"),
             (schema.FetchedValue(), "FetchedValue()"),
-            (ck, 
+            (ck,
                     "CheckConstraint("
                     "%s"
                     ", name='someconstraint')" % repr(ck.sqltext)),
@@ -675,7 +675,7 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
                       prefixes = ["TEMPORARY"])
 
         self.assert_compile(
-            schema.CreateTable(table1), 
+            schema.CreateTable(table1),
             "CREATE TEMPORARY TABLE temporary_table_1 (col1 INTEGER)"
         )
 
@@ -683,7 +683,7 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
                       Column("col1", Integer),
                       prefixes = ["VIRTUAL"])
         self.assert_compile(
-          schema.CreateTable(table2), 
+          schema.CreateTable(table2),
           "CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)"
         )
 
@@ -741,7 +741,7 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_default_schema_metadata_fk_alt_remote(self):
         m = MetaData(schema="foo")
         t1 = Table('t1', m, Column('x', Integer))
-        t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')), 
+        t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')),
                                 schema="bar")
         assert t2.c.x.references(t1.c.x)
 
@@ -797,8 +797,8 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
 class UseExistingTest(fixtures.TablesTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table('users', metadata, 
-                    Column('id', Integer, primary_key=True), 
+        Table('users', metadata,
+                    Column('id', Integer, primary_key=True),
                     Column('name', String(30)))
 
     def _useexisting_fixture(self):
@@ -836,7 +836,7 @@ class UseExistingTest(fixtures.TablesTest):
         meta2 = self._useexisting_fixture()
         assert_raises(
             exc.ArgumentError,
-            Table, 'users', meta2, keep_existing=True, 
+            Table, 'users', meta2, keep_existing=True,
                 extend_existing=True
         )
 
@@ -845,13 +845,13 @@ class UseExistingTest(fixtures.TablesTest):
         meta2 = self._useexisting_fixture()
         assert_raises(
             exc.ArgumentError,
-            Table, 'users', meta2, useexisting=True, 
+            Table, 'users', meta2, useexisting=True,
                 extend_existing=True
         )
 
     def test_keep_existing_no_dupe_constraints(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
             Column('id', Integer),
             Column('name', Unicode),
             UniqueConstraint('name'),
@@ -861,7 +861,7 @@ class UseExistingTest(fixtures.TablesTest):
         assert 'id' in users.c
         eq_(len(users.constraints), 2)
 
-        u2 = Table('users', meta2, 
+        u2 = Table('users', meta2,
             Column('id', Integer),
             Column('name', Unicode),
             UniqueConstraint('name'),
@@ -871,7 +871,7 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_extend_existing_dupes_constraints(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
             Column('id', Integer),
             Column('name', Unicode),
             UniqueConstraint('name'),
@@ -881,7 +881,7 @@ class UseExistingTest(fixtures.TablesTest):
         assert 'id' in users.c
         eq_(len(users.constraints), 2)
 
-        u2 = Table('users', meta2, 
+        u2 = Table('users', meta2,
             Column('id', Integer),
             Column('name', Unicode),
             UniqueConstraint('name'),
@@ -904,7 +904,7 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_keep_existing_add_column(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                         autoload=True,
                       keep_existing=True)
@@ -918,14 +918,14 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_keep_existing_quote_no_orig(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, quote=True, 
+        users = Table('users', meta2, quote=True,
                         autoload=True,
                       keep_existing=True)
         assert users.quote
 
     def test_keep_existing_add_column_no_orig(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                         autoload=True,
                       keep_existing=True)
@@ -939,13 +939,13 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_keep_existing_quote_no_reflection(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, quote=True, 
+        users = Table('users', meta2, quote=True,
                       keep_existing=True)
         assert not users.quote
 
     def test_keep_existing_add_column_no_reflection(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                       keep_existing=True)
         assert "foo" not in users.c
@@ -964,7 +964,7 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_extend_existing_add_column(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                         autoload=True,
                       extend_existing=True)
@@ -978,14 +978,14 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_extend_existing_quote_no_orig(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, quote=True, 
+        users = Table('users', meta2, quote=True,
                         autoload=True,
                       extend_existing=True)
         assert users.quote
 
     def test_extend_existing_add_column_no_orig(self):
         meta2 = self._notexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                         autoload=True,
                       extend_existing=True)
@@ -999,13 +999,13 @@ class UseExistingTest(fixtures.TablesTest):
 
     def test_extend_existing_quote_no_reflection(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, quote=True, 
+        users = Table('users', meta2, quote=True,
                       extend_existing=True)
         assert users.quote
 
     def test_extend_existing_add_column_no_reflection(self):
         meta2 = self._useexisting_fixture()
-        users = Table('users', meta2, 
+        users = Table('users', meta2,
                         Column('foo', Integer),
                       extend_existing=True)
         assert "foo" in users.c
@@ -1014,16 +1014,16 @@ class ConstraintTest(fixtures.TestBase):
     def _single_fixture(self):
         m = MetaData()
 
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('a', Integer),
             Column('b', Integer)
         )
 
-        t2 = Table('t2', m, 
+        t2 = Table('t2', m,
             Column('a', Integer, ForeignKey('t1.a'))
         )
 
-        t3 = Table('t3', m, 
+        t3 = Table('t3', m,
             Column('a', Integer)
         )
         return t1, t2, t3
@@ -1090,7 +1090,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
 
         c = Column(Integer)
         assert_raises_message(
-            exc.ArgumentError, 
+            exc.ArgumentError,
             "Column must be constructed with a non-blank name or assign a "
             "non-blank .name ",
             Table, 't', MetaData(), c)
@@ -1099,7 +1099,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
 
         c = Column('', Integer)
         assert_raises_message(
-            exc.ArgumentError, 
+            exc.ArgumentError,
             "Column must be constructed with a non-blank name or assign a "
             "non-blank .name ",
             Table, 't', MetaData(), c)
@@ -1109,7 +1109,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
         t = Table('t', MetaData(), c)
 
         assert_raises_message(
-            exc.ArgumentError, 
+            exc.ArgumentError,
             "Column object already assigned to Table 't'",
             Table, 'q', MetaData(), c)
 
@@ -1335,7 +1335,7 @@ class CatchAllEventsTest(fixtures.TestBase):
         event.listen(schema.SchemaItem, "after_parent_attach", after_attach)
 
         m = MetaData()
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('id', Integer, Sequence('foo_id'), primary_key=True),
             Column('bar', String, ForeignKey('t2.id'))
         )
@@ -1375,7 +1375,7 @@ class CatchAllEventsTest(fixtures.TestBase):
             evt(target)
 
         m = MetaData()
-        t1 = Table('t1', m, 
+        t1 = Table('t1', m,
             Column('id', Integer, Sequence('foo_id'), primary_key=True),
             Column('bar', String, ForeignKey('t2.id')),
             Column('bat', Integer, unique=True),
@@ -1390,10 +1390,10 @@ class CatchAllEventsTest(fixtures.TestBase):
         eq_(
             canary,
             [
-            'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1', 
+            'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
             'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
             'UniqueConstraint->Table', 'UniqueConstraint->t1',
-            'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2', 
+            'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
             'CheckConstraint->Table', 'CheckConstraint->t2',
             'UniqueConstraint->Table', 'UniqueConstraint->t2'
             ]
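
Among the metadata hunks above, tometadata() is the least obvious construct: it copies a Table, foreign keys included, into another MetaData, optionally re-homing it under a schema. A sketch mirroring the tometadata tests:

    from sqlalchemy import Table, Column, Integer, ForeignKey, MetaData

    m1 = MetaData()
    table = Table('mytable', m1, Column('myid', Integer, primary_key=True))
    table2 = Table('othertable', m1,
        Column('id', Integer, primary_key=True),
        Column('myid', Integer, ForeignKey('mytable.myid')))

    m2 = MetaData()
    table_c = table.tometadata(m2, schema='someschema')
    table2_c = table2.tometadata(m2, schema='someschema')

    # the copied FK points at the copied, schema-qualified table
    print(table_c.join(table2_c).onclause)
    # someschema.mytable.myid = someschema.othertable.myid
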
index 8a439c15cab1bd15d2ec0693aec7d68b52e4da76..0aefd40078e430df928465a780ac3b866dd76721 100644 (file)
@@ -49,7 +49,7 @@ class QueryTest(fixtures.TestBase):
     def test_insert_heterogeneous_params(self):
         """test that executemany parameters are asserted to match the parameter set of the first."""
 
-        assert_raises_message(exc.StatementError, 
+        assert_raises_message(exc.StatementError,
             r"A value is required for bind parameter 'user_name', in "
             "parameter group 2 \(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
             "value is required for bind parameter 'user_name', in "
@@ -60,7 +60,7 @@ class QueryTest(fixtures.TestBase):
             {'user_id':9}
         )
 
-        # this succeeds however.   We aren't yet doing 
+        # this succeeds however.   We aren't yet doing
         # a length check on all subsequent parameters.
         users.insert().execute(
             {'user_id':7},
@@ -99,7 +99,7 @@ class QueryTest(fixtures.TestBase):
                 ret[col.key] = id
 
             if result.lastrow_has_defaults():
-                criterion = and_(*[col==id for col, id in 
+                criterion = and_(*[col==id for col, id in
                                     zip(table.primary_key, result.inserted_primary_key)])
                 row = engine.execute(table.select(criterion)).first()
                 for c in table.c:
@@ -217,7 +217,7 @@ class QueryTest(fixtures.TestBase):
 
         for engine in test_engines:
 
-            r = engine.execute(users.insert(), 
+            r = engine.execute(users.insert(),
                 {'user_name':'jack'},
             )
             assert r.closed
@@ -312,7 +312,7 @@ class QueryTest(fixtures.TestBase):
         content = Table('content', self.metadata,
             Column('type', String(30)),
         )
-        bar = Table('bar', self.metadata, 
+        bar = Table('bar', self.metadata,
             Column('content_type', String(30))
         )
         self.metadata.create_all(testing.db)
@@ -348,7 +348,7 @@ class QueryTest(fixtures.TestBase):
                     result = util.pickle.loads(util.pickle.dumps(result))
 
                 eq_(
-                    result, 
+                    result,
                     [(7, "jack"), (8, "ed"), (9, "fred")]
                 )
                 if use_labels:
@@ -365,7 +365,7 @@ class QueryTest(fixtures.TestBase):
                 if not pickle or use_labels:
                     assert_raises(exc.NoSuchColumnError, lambda: result[0][addresses.c.user_id])
                 else:
-                    # test with a different table.  name resolution is 
+                    # test with a different table.  name resolution is
                     # causing 'user_id' to match when use_labels wasn't used.
                     eq_(result[0][addresses.c.user_id], 7)
 
@@ -387,7 +387,7 @@ class QueryTest(fixtures.TestBase):
             (unprintable(), "unprintable element.*"),
         ]:
             assert_raises_message(
-                exc.NoSuchColumnError, 
+                exc.NoSuchColumnError,
                 msg % repl,
                 lambda: row[accessor]
             )
@@ -740,7 +740,7 @@ class QueryTest(fixtures.TestBase):
             dict(user_id=1, user_name='john'),
         )
 
-        # test a little sqlite weirdness - with the UNION, 
+        # test a little sqlite weirdness - with the UNION,
         # cols come back as "query_users.user_id" in cursor.description
         r = text("select query_users.user_id, query_users.user_name from query_users "
             "UNION select query_users.user_id, query_users.user_name from query_users",
@@ -785,7 +785,7 @@ class QueryTest(fixtures.TestBase):
         )
         # test using literal tablename.colname
         r = text('select query_users.user_id AS "query_users.user_id", '
-                'query_users.user_name AS "query_users.user_name" from query_users', 
+                'query_users.user_name AS "query_users.user_name" from query_users',
                 bind=testing.db).execution_options(sqlite_raw_colnames=True).execute().first()
         eq_(r['query_users.user_id'], 1)
         eq_(r['query_users.user_name'], "john")
@@ -1054,9 +1054,9 @@ class QueryTest(fixtures.TestBase):
         )
         shadowed.create(checkfirst=True)
         try:
-            shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light', 
-                                            row='Without light there is no shadow', 
-                                            _parent='Hidden parent', 
+            shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light',
+                                            row='Without light there is no shadow',
+                                            _parent='Hidden parent',
                                             _row='Hidden row')
             r = shadowed.select(shadowed.c.shadow_id==1).execute().first()
             self.assert_(r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
@@ -1104,7 +1104,7 @@ class QueryTest(fixtures.TestBase):
     @testing.fails_on('firebird', "uses sql-92 rules")
     @testing.fails_on('sybase', "uses sql-92 rules")
     @testing.fails_on('mssql+mxodbc', "uses sql-92 rules")
-    @testing.fails_if(lambda: 
+    @testing.fails_if(lambda:
                          testing.against('mssql+pyodbc') and not testing.db.dialect.freetds,
                          "uses sql-92 rules")
     def test_bind_in(self):
@@ -1144,7 +1144,7 @@ class QueryTest(fixtures.TestBase):
     @testing.emits_warning('.*empty sequence.*')
     @testing.requires.boolean_col_expressions
     def test_in_filtering_advanced(self):
-        """test the behavior of the in_() function when 
+        """test the behavior of the in_() function when
         comparing against an empty collection, specifically
         that a proper boolean value is generated.
 
@@ -1167,7 +1167,7 @@ class QueryTest(fixtures.TestBase):
 class PercentSchemaNamesTest(fixtures.TestBase):
     """tests using percent signs, spaces in table and column names.
 
-    Doesn't pass for mysql, postgresql, but this is really a 
+    Doesn't pass for mysql, postgresql, but this is really a
     SQLAlchemy bug - we should be escaping out %% signs for this
     operation the same way we do for text() and column labels.
 
@@ -1181,7 +1181,7 @@ class PercentSchemaNamesTest(fixtures.TestBase):
             Column("percent%", Integer),
             Column("spaces % more spaces", Integer),
         )
-        lightweight_percent_table = sql.table('percent%table', 
+        lightweight_percent_table = sql.table('percent%table',
             sql.column("percent%"),
             sql.column("spaces % more spaces"),
         )
@@ -1194,7 +1194,7 @@ class PercentSchemaNamesTest(fixtures.TestBase):
     def teardown_class(cls):
         metadata.drop_all()
 
-    @testing.skip_if(lambda: testing.against('postgresql'), 
+    @testing.skip_if(lambda: testing.against('postgresql'),
                     "psycopg2 2.4 no longer accepts % in bind placeholders")
     def test_single_roundtrip(self):
         percent_table.insert().execute(
@@ -1211,7 +1211,7 @@ class PercentSchemaNamesTest(fixtures.TestBase):
         )
         self._assert_table()
 
-    @testing.skip_if(lambda: testing.against('postgresql'), 
+    @testing.skip_if(lambda: testing.against('postgresql'),
                 "psycopg2 2.4 no longer accepts % in bind placeholders")
     @testing.crashes('mysql+mysqldb', "MySQLdb handles executemany() "
                         "inconsistently vs. execute()")
@@ -1228,9 +1228,9 @@ class PercentSchemaNamesTest(fixtures.TestBase):
 
     def _assert_table(self):
         for table in (
-                    percent_table, 
-                    percent_table.alias(), 
-                    lightweight_percent_table, 
+                    percent_table,
+                    percent_table.alias(),
+                    lightweight_percent_table,
                     lightweight_percent_table.alias()):
             eq_(
                 list(
@@ -1314,7 +1314,7 @@ class KeyTargetingTest(fixtures.TablesTest):
         content = Table('content', metadata,
             Column('t', String(30), key="type"),
         )
-        bar = Table('bar', metadata, 
+        bar = Table('bar', metadata,
             Column('ctype', String(30), key="content_type")
         )
 
@@ -1611,7 +1611,7 @@ class CompoundTest(fixtures.TestBase):
         eq_(found2, wanted)
 
     def test_union_all_lightweight(self):
-        """like test_union_all, but breaks the sub-union into 
+        """like test_union_all, but breaks the sub-union into
         a subquery with an explicit column reference on the outside,
         more palatable to a wider variety of engines.
 
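A minimal sketch of the lightweight form the docstring describes, using hypothetical tables on the 0.7-era select() API; the sub-union becomes a named subquery whose columns the outer statement references explicitly:

    from sqlalchemy import select, union_all
    from sqlalchemy.sql import table, column
    t = table('t', column('col3'), column('col4'))
    inner = union_all(                        # the sub-union, wrapped as a subquery
        select([t.c.col3, t.c.col4]),
        select([t.c.col3, t.c.col4]),
    ).alias('u')
    outer = union_all(                        # outer select references inner's columns
        select([t.c.col3, t.c.col4]),
        select([inner.c.col3, inner.c.col4]),
    )
    print(outer)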
index 952b14763868e75742f92b49af20d67712913114..a714002b1a9d7eabbead5c66f9031be2fc72f780 100644
@@ -94,7 +94,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
         self.assert_compile(t1.select().apply_labels(), '''SELECT "foo"."t1"."col1" AS "foo_t1_col1" FROM "foo"."t1"''')
         a = t1.select().alias('anon')
         b = select([1], a.c.col1==2, from_obj=a)
-        self.assert_compile(b, 
+        self.assert_compile(b,
             '''SELECT 1 FROM (SELECT "foo"."t1"."col1" AS "col1" FROM '''\
             '''"foo"."t1") AS anon WHERE anon."col1" = :col1_1'''
         )
@@ -104,15 +104,15 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
             Column('ColumnOne', Integer, quote=False), quote=False, schema="FooBar", quote_schema=False)
         self.assert_compile(t1.select(), "SELECT FooBar.TableOne.ColumnOne FROM FooBar.TableOne")
 
-        self.assert_compile(t1.select().apply_labels(), 
+        self.assert_compile(t1.select().apply_labels(),
             "SELECT FooBar.TableOne.ColumnOne AS "\
-            "FooBar_TableOne_ColumnOne FROM FooBar.TableOne"   # TODO: is this what we really want here ?  what if table/schema 
+            "FooBar_TableOne_ColumnOne FROM FooBar.TableOne"   # TODO: is this what we really want here ?  what if table/schema
                                                                # *are* quoted?
         )
 
         a = t1.select().alias('anon')
         b = select([1], a.c.ColumnOne==2, from_obj=a)
-        self.assert_compile(b, 
+        self.assert_compile(b,
             "SELECT 1 FROM (SELECT FooBar.TableOne.ColumnOne AS "\
             "ColumnOne FROM FooBar.TableOne) AS anon WHERE anon.ColumnOne = :ColumnOne_1"
         )
@@ -142,8 +142,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
         if labels arent quoted, a query in postgresql in particular will fail since it produces:
 
         SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC"
-        FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase, 
-                "WorstCase1"."UPPERCASE" AS UPPERCASE, 
+        FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase,
+                "WorstCase1"."UPPERCASE" AS UPPERCASE,
                 "WorstCase1"."MixedCase" AS MixedCase, "WorstCase1"."ASC" AS ASC \nFROM "WorstCase1") AS LaLa
 
         where the "UPPERCASE" column of "LaLa" doesnt exist.
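The failure comes down to case folding: postgresql folds the unquoted label UPPERCASE to lowercase, so the outer, quoted reference no longer matches. SQLAlchemy's default compiler therefore quotes case-sensitive labels; a quick sketch:

    from sqlalchemy import select
    from sqlalchemy.sql import table, column
    t = table('t', column('lowercase'))
    # the case-sensitive label comes out quoted:
    # SELECT t.lowercase AS "UPPERCASE" FROM t
    print(select([t.c.lowercase.label('UPPERCASE')]))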
@@ -179,7 +179,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("order", Integer))
         x = select([table.c.col1, table.c['from'], table.c.louisville, table.c.order])
 
-        self.assert_compile(x, 
+        self.assert_compile(x,
             '''SELECT "ImATable".col1, "ImATable"."from", "ImATable".louisville, "ImATable"."order" FROM "ImATable"''')
 
 
index 5d95baa81c4b91fb23931407228924772fa3e76b..c14fa22a1322f6c361f9c277dc7940fe42e11963 100644
@@ -13,8 +13,8 @@ class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
         metadata = MetaData(testing.db)
 
         employees_table = Table('employees', metadata,
-            Column('employee_id', Integer, 
-                        Sequence('employee_id_seq', optional=True), 
+            Column('employee_id', Integer,
+                        Sequence('employee_id_seq', optional=True),
                         primary_key=True),
             Column('name', String(50)),
             Column('department', String(1)),
index 659c7f2ae3f6578e6a2fd44429407ad777ccc5b3..91bf17175f0219fc656c8fce9ce84ce570dae0e8 100644
@@ -26,7 +26,7 @@ class AdaptTest(fixtures.TestBase):
         ]
 
     def _all_dialects(self):
-        return [d.base.dialect() for d in 
+        return [d.base.dialect() for d in
                 self._all_dialect_modules()]
 
     def _types_for_mod(self, mod):
@@ -327,11 +327,11 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
         self.assert_compile(t, "VARCHAR(50)", dialect=sl)
         self.assert_compile(t, "FLOAT", dialect=pg)
         eq_(
-            t.dialect_impl(dialect=sl).impl.__class__, 
+            t.dialect_impl(dialect=sl).impl.__class__,
             String().dialect_impl(dialect=sl).__class__
         )
         eq_(
-                t.dialect_impl(dialect=pg).impl.__class__, 
+                t.dialect_impl(dialect=pg).impl.__class__,
                 Float().dialect_impl(pg).__class__
         )
 
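The hunk above asserts that a single user-defined type resolves to different implementations per dialect. A hedged sketch of one way to get that effect with the long-standing TypeDecorator.load_dialect_impl() hook (class name hypothetical):

    from sqlalchemy.types import TypeDecorator, String, Float

    class MyVariantType(TypeDecorator):
        """renders FLOAT on postgresql, VARCHAR(50) elsewhere."""
        impl = String

        def load_dialect_impl(self, dialect):
            if dialect.name == 'postgresql':
                return dialect.type_descriptor(Float())
            return dialect.type_descriptor(String(50))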
@@ -695,8 +695,8 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults):
                 ('oracle','cx_oracle'),
             )), \
             "name: %s driver %s returns_unicode_strings=%s" % \
-                                        (testing.db.name, 
-                                         testing.db.driver, 
+                                        (testing.db.name,
+                                         testing.db.driver,
                                          testing.db.dialect.returns_unicode_strings)
 
     def test_round_trip(self):
@@ -787,7 +787,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults):
 
             eq_(uni(unicodedata), unicodedata.encode('utf-8'))
 
-        # using convert unicode at engine level - 
+        # using convert unicode at engine level -
         # this should not be raising a warning
         unicode_engine = engines.utf8_engine(options={'convert_unicode':True,})
         unicode_engine.dialect.supports_unicode_binds = False
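For reference, the engine-level conversion exercised above is the 0.7-era convert_unicode argument to create_engine(), which applies unicode encode/decode to all string columns on that engine rather than per String type; a minimal sketch:

    from sqlalchemy import create_engine
    # engine-wide unicode conversion (0.7-era flag)
    unicode_engine = create_engine("sqlite://", convert_unicode=True)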
@@ -836,7 +836,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults):
         engine = engines.testing_engine(options={'encoding':'ascii'})
         m.create_all(engine)
         try:
-            # insert a row that should be ascii and 
+            # insert a row that should be ascii and
             # coerce from unicode with ignore on the bind side
             engine.execute(
                 table.insert(),
@@ -871,7 +871,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults):
 
             # one row will be ascii with ignores,
             # the other will be either ascii with the ignores
-            # or just the straight unicode+ utf8 value if the 
+            # or just the straight unicode+ utf8 value if the
             # dialect just returns unicode
             result = engine.execute(table.select().order_by(table.c.sort))
             ascii_row = result.fetchone()
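The "ascii with the ignores" wording refers to Python 2's errors='ignore' codec mode, which silently drops characters the target codec can't represent; a one-line illustration (Python 2 semantics):

    # accented characters are dropped rather than raising UnicodeEncodeError
    print(u'r\xe9veill\xe9'.encode('ascii', 'ignore'))   # -> 'rveill'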
@@ -929,10 +929,10 @@ class EnumTest(fixtures.TestBase):
     def teardown_class(cls):
         metadata.drop_all()
 
-    @testing.fails_on('postgresql+zxjdbc', 
+    @testing.fails_on('postgresql+zxjdbc',
                         'zxjdbc fails on ENUM: column "XXX" is of type XXX '
                         'but expression is of type character varying')
-    @testing.fails_on('postgresql+pg8000', 
+    @testing.fails_on('postgresql+pg8000',
                         'zxjdbc fails on ENUM: column "XXX" is of type XXX '
                         'but expression is of type text')
     def test_round_trip(self):
@@ -943,7 +943,7 @@ class EnumTest(fixtures.TestBase):
         ])
 
         eq_(
-            enum_table.select().order_by(enum_table.c.id).execute().fetchall(), 
+            enum_table.select().order_by(enum_table.c.id).execute().fetchall(),
             [
                 (1, 'two'),
                 (2, 'two'),
@@ -960,7 +960,7 @@ class EnumTest(fixtures.TestBase):
 
         eq_(
             non_native_enum_table.select().
-                    order_by(non_native_enum_table.c.id).execute().fetchall(), 
+                    order_by(non_native_enum_table.c.id).execute().fetchall(),
             [
                 (1, 'two'),
                 (2, 'two'),
@@ -978,19 +978,19 @@ class EnumTest(fixtures.TestBase):
         eq_(e1.adapt(ENUM).name, 'foo')
         eq_(e1.adapt(ENUM).schema, 'bar')
 
-    @testing.crashes('mysql', 
+    @testing.crashes('mysql',
                     'Inconsistent behavior across various OS/drivers'
                 )
     def test_constraint(self):
-        assert_raises(exc.DBAPIError, 
+        assert_raises(exc.DBAPIError,
             enum_table.insert().execute,
             {'id':4, 'someenum':'four'}
         )
 
-    @testing.fails_on('mysql', 
+    @testing.fails_on('mysql',
                     "the CHECK constraint doesn't raise an exception for unknown reason")
     def test_non_native_constraint(self):
-        assert_raises(exc.DBAPIError, 
+        assert_raises(exc.DBAPIError,
             non_native_enum_table.insert().execute,
             {'id':4, 'someenum':'four'}
         )
@@ -1004,7 +1004,7 @@ class EnumTest(fixtures.TestBase):
             Column('x', Enum("x", "y", name="pge"))
         )
         t.create(e, checkfirst=False)
-        # basically looking for the start of 
+        # basically looking for the start of
         # the constraint, or the ENUM def itself,
         # depending on backend.
         assert "('x'," in e.print_sql()
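The assertion above accepts either the inline CHECK constraint or a native ENUM in the emitted DDL. A hedged sketch of the non-native rendering on the default compiler (output approximate):

    from sqlalchemy import MetaData, Table, Column, Enum
    from sqlalchemy.schema import CreateTable
    m = MetaData()
    t = Table('t', m, Column('x', Enum('x', 'y', name='pge', native_enum=False)))
    # roughly: CREATE TABLE t (x VARCHAR(1), CHECK (x IN ('x', 'y')))
    print(CreateTable(t))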
@@ -1058,32 +1058,32 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
         stream1 =self.load_stream('binary_data_one.dat')
         stream2 =self.load_stream('binary_data_two.dat')
         binary_table.insert().execute(
-                            primary_id=1, 
-                            misc='binary_data_one.dat', 
-                            data=stream1, 
-                            data_slice=stream1[0:100], 
-                            pickled=testobj1, 
+                            primary_id=1,
+                            misc='binary_data_one.dat',
+                            data=stream1,
+                            data_slice=stream1[0:100],
+                            pickled=testobj1,
                             mypickle=testobj3)
         binary_table.insert().execute(
-                            primary_id=2, 
-                            misc='binary_data_two.dat', 
-                            data=stream2, 
-                            data_slice=stream2[0:99], 
+                            primary_id=2,
+                            misc='binary_data_two.dat',
+                            data=stream2,
+                            data_slice=stream2[0:99],
                             pickled=testobj2)
         binary_table.insert().execute(
-                            primary_id=3, 
-                            misc='binary_data_two.dat', 
-                            data=None, 
-                            data_slice=stream2[0:99], 
+                            primary_id=3,
+                            misc='binary_data_two.dat',
+                            data=None,
+                            data_slice=stream2[0:99],
                             pickled=None)
 
         for stmt in (
             binary_table.select(order_by=binary_table.c.primary_id),
             text(
-                "select * from binary_table order by binary_table.primary_id", 
-                typemap={'pickled':PickleType, 
-                        'mypickle':MyPickleType, 
-                        'data':LargeBinary, 'data_slice':LargeBinary}, 
+                "select * from binary_table order by binary_table.primary_id",
+                typemap={'pickled':PickleType,
+                        'mypickle':MyPickleType,
+                        'data':LargeBinary, 'data_slice':LargeBinary},
                 bind=testing.db)
         ):
             l = stmt.execute().fetchall()
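The typemap argument used above maps result-column names to types for a textual statement (a 0.7-era signature; later releases moved this job to text().columns()). A minimal construction-only sketch:

    from sqlalchemy import text, Integer, LargeBinary
    stmt = text(
        "select primary_id, data from binary_table",
        typemap={'primary_id': Integer, 'data': LargeBinary},
    )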
@@ -1154,9 +1154,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
         meta.create_all()
 
         test_table.insert().execute({
-                                'id':1, 
-                                'data':'somedata', 
-                                'atimestamp':datetime.date(2007, 10, 15), 
+                                'id':1,
+                                'data':'somedata',
+                                'atimestamp':datetime.date(2007, 10, 15),
                                 'avalue':25, 'bvalue':'foo'})
 
     @classmethod
@@ -1180,7 +1180,7 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
         eq_(
             testing.db.execute(
                     select([test_table.c.id, test_table.c.data, test_table.c.atimestamp])
-                    .where(expr), 
+                    .where(expr),
                     {"thedate":datetime.date(2007, 10, 15)}).fetchall(),
             [(1, 'somedata', datetime.date(2007, 10, 15))]
         )
@@ -1199,9 +1199,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
         eq_(expr.right.type._type_affinity, String)
 
         eq_(
-            testing.db.execute(test_table.select().where(expr), 
+            testing.db.execute(test_table.select().where(expr),
                 {"somevalue":"foo"}).fetchall(),
-            [(1, 'somedata', 
+            [(1, 'somedata',
                 datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
         )
 
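The 'BIND_INfooBIND_OUT' value above is the round-trip signature of a decorating type; a hedged reconstruction of the pattern (class name hypothetical):

    from sqlalchemy.types import TypeDecorator, String

    class TaggingType(TypeDecorator):
        impl = String

        def process_bind_param(self, value, dialect):
            return 'BIND_IN' + value     # applied on the way into the database

        def process_result_value(self, value, dialect):
            return value + 'BIND_OUT'    # applied on the way back out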
@@ -1358,7 +1358,7 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
 
     def test_null_comparison(self):
         eq_(
-            str(column('a', types.NullType()) + column('b', types.NullType())), 
+            str(column('a', types.NullType()) + column('b', types.NullType())),
             "a + b"
         )
 
@@ -1577,7 +1577,7 @@ class NumericTest(fixtures.TestBase):
         self._do_test(
             Numeric(precision=8, scale=4),
             [15.7563, decimal.Decimal("15.7563"), None],
-            [decimal.Decimal("15.7563"), None], 
+            [decimal.Decimal("15.7563"), None],
         )
 
     def test_numeric_as_float(self):
@@ -1597,7 +1597,7 @@ class NumericTest(fixtures.TestBase):
         self._do_test(
             Float(precision=8, asdecimal=True),
             [15.7563, decimal.Decimal("15.7563"), None],
-            [decimal.Decimal("15.7563"), None], 
+            [decimal.Decimal("15.7563"), None],
             filter_ = lambda n:n is not None and round(n, 5) or None
         )
 
@@ -1613,8 +1613,8 @@ class NumericTest(fixtures.TestBase):
     def test_precision_decimal(self):
         numbers = set([
             decimal.Decimal("54.234246451650"),
-            decimal.Decimal("0.004354"), 
-            decimal.Decimal("900.0"), 
+            decimal.Decimal("0.004354"),
+            decimal.Decimal("900.0"),
         ])
 
         self._do_test(
@@ -1627,7 +1627,7 @@ class NumericTest(fixtures.TestBase):
     def test_enotation_decimal(self):
         """test exceedingly small decimals.
 
-        Decimal reports values with E notation when the exponent 
+        Decimal reports values with E notation when the exponent
         is greater than 6.
 
         """
@@ -1652,7 +1652,7 @@ class NumericTest(fixtures.TestBase):
             numbers
         )
 
-    @testing.fails_on("sybase+pyodbc", 
+    @testing.fails_on("sybase+pyodbc",
                         "Don't know how do get these values through FreeTDS + Sybase")
     @testing.fails_on("firebird", "Precision must be from 1 to 18")
     def test_enotation_decimal_large(self):
@@ -1692,7 +1692,7 @@ class NumericTest(fixtures.TestBase):
         "this may be a bug due to the difficulty in handling "
         "oracle precision numerics"
     )
-    @testing.fails_on('postgresql+pg8000', 
+    @testing.fails_on('postgresql+pg8000',
         "pg-8000 does native decimal but truncates the decimals.")
     def test_numeric_no_decimal(self):
         numbers = set([
@@ -1794,8 +1794,8 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults):
         small_delta = datetime.timedelta(days=15, seconds=5874)
         delta = datetime.timedelta(414)
         interval_table.insert().execute(
-                                native_interval=small_delta, 
-                                native_interval_args=delta, 
+                                native_interval=small_delta,
+                                native_interval_args=delta,
                                 non_native_interval=delta
                                 )
         row = interval_table.select().execute().first()
@@ -1851,22 +1851,22 @@ class BooleanTest(fixtures.TestBase, AssertsExecutionResults):
         res3 = select([bool_table.c.id, bool_table.c.value]).\
                 order_by(bool_table.c.id).\
                 execute().fetchall()
-        eq_(res3, [(1, True), (2, False), 
-                    (3, True), (4, True), 
+        eq_(res3, [(1, True), (2, False),
+                    (3, True), (4, True),
                     (5, True), (6, None)])
 
         # ensure we're getting True/False, not just ints
         assert res3[0][1] is True
         assert res3[1][1] is False
 
-    @testing.fails_on('mysql', 
+    @testing.fails_on('mysql',
             "The CHECK clause is parsed but ignored by all storage engines.")
-    @testing.fails_on('mssql', 
+    @testing.fails_on('mssql',
             "FIXME: MS-SQL 2005 doesn't honor CHECK ?!?")
     @testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
     def test_constraint(self):
         assert_raises((exc.IntegrityError, exc.ProgrammingError),
-                        testing.db.execute, 
+                        testing.db.execute,
                         "insert into booltest (id, value) values(1, 5)")
 
     @testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
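The is True / is False checks above matter because drivers without a native boolean type hand back plain integers; a quick illustration with the sqlite3 DBAPI:

    import sqlite3
    conn = sqlite3.connect(':memory:')
    value = conn.execute('select 1 = 1').fetchone()[0]
    print(repr(value))   # 1 -- an int; SQLAlchemy's Boolean type coerces it back to True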
@@ -1885,7 +1885,7 @@ class PickleTest(fixtures.TestBase):
         ):
             assert p1.compare_values(p1.copy_value(obj), obj)
 
-        assert_raises(NotImplementedError, 
+        assert_raises(NotImplementedError,
                         p1.compare_values,
                         pickleable.BrokenComparable('foo'),
                         pickleable.BrokenComparable('foo'))
index 19f672f77e3eaf0e1677b454370b4e4607f1d203..e3fa0a4b3337791de5b1cdc2769d5084da9694dc 100644
@@ -130,7 +130,7 @@ class EscapesDefaultsTest(fixtures.TestBase):
             select([column(u'special_col')]).select_from(t1).execute().close()
             assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode)
 
-            # now execute, run the sequence.  it should run in u"Special_col.nextid" or similar as 
+            # now execute, run the sequence.  it should run in u"Special_col.nextid" or similar as
             # a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru).
             # ensure that executioncontext._exec_default() is encoding.
             t1.insert().execute(data='foo')
index f900a164cf73cc6aebc2b2aa898a8124d7844f36..79079e5127977d4c16e25953d05ecee1c25c9dd5 100644
@@ -11,13 +11,13 @@ class _UpdateFromTestBase(object):
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('name', String(30), nullable=False),
         )
 
         Table('addresses', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('user_id', None, ForeignKey('users.id')),
               Column('name', String(30), nullable=False),
@@ -25,7 +25,7 @@ class _UpdateFromTestBase(object):
         )
 
         Table("dingalings", metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('address_id', None, ForeignKey('addresses.id')),
               Column('data', String(30)),
@@ -93,7 +93,7 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom
             "AND addresses.email_address = :email_address_1 "
             "AND addresses.id = dingalings.address_id AND "
             "dingalings.id = :id_1",
-            checkparams={u'email_address_1': 'e1', u'id_1': 2, 
+            checkparams={u'email_address_1': 'e1', u'id_1': 2,
                                 'name': 'newname'}
         )
 
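For context, the statement compiled above is the UPDATE..FROM form: when the WHERE clause brings in a second table, the default compiler emits it in a FROM clause. A minimal sketch with hypothetical tables:

    from sqlalchemy import update
    from sqlalchemy.sql import table, column
    users = table('users', column('id'), column('name'))
    addresses = table('addresses', column('user_id'))
    stmt = update(users).values(name='newname').where(
        users.c.id == addresses.c.user_id)
    # roughly: UPDATE users SET name=:name FROM addresses
    #          WHERE users.id = addresses.user_id
    print(stmt)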
@@ -113,8 +113,8 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom
 
     def test_render_subquery(self):
         users, addresses = self.tables.users, self.tables.addresses
-        subq = select([addresses.c.id, 
-                        addresses.c.user_id, 
+        subq = select([addresses.c.id,
+                        addresses.c.user_id,
                         addresses.c.email_address]).\
                             where(addresses.c.id==7).alias()
         self.assert_compile(
@@ -128,7 +128,7 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom
             "email_address FROM addresses WHERE addresses.id = "
             ":id_1) AS anon_1 WHERE users.id = anon_1.user_id "
             "AND anon_1.email_address = :email_address_1",
-            checkparams={u'email_address_1': 'e1', 
+            checkparams={u'email_address_1': 'e1',
                             u'id_1': 7, 'name': 'newname'}
         )
 
@@ -214,7 +214,7 @@ class UpdateFromRoundTripTest(_UpdateFromTestBase, fixtures.TablesTest):
         testing.db.execute(
             addresses.update().\
                 values({
-                        addresses.c.email_address:users.c.name, 
+                        addresses.c.email_address:users.c.name,
                         users.c.name:'ed2'
                 }).\
                 where(users.c.id==addresses.c.user_id).\
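The values() dictionary in the hunk above assigns to columns of two different tables at once, which renders as MySQL's multi-table UPDATE syntax. A hedged sketch compiled against the mysql dialect (output approximate):

    from sqlalchemy import update
    from sqlalchemy.sql import table, column
    from sqlalchemy.dialects import mysql
    users = table('users', column('id'), column('name'))
    addresses = table('addresses', column('user_id'), column('email_address'))
    stmt = update(addresses).values({
        addresses.c.email_address: users.c.name,
        users.c.name: 'ed2',
    }).where(users.c.id == addresses.c.user_id)
    # roughly: UPDATE addresses, users SET email_address=users.name,
    #          users.name=%s WHERE users.id = addresses.user_id
    print(stmt.compile(dialect=mysql.dialect()))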
@@ -246,14 +246,14 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('name', String(30), nullable=False),
               Column('some_update', String(30), onupdate="im the update")
         )
 
         Table('addresses', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('user_id', None, ForeignKey('users.id')),
               Column('email_address', String(50), nullable=False),
@@ -282,7 +282,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table
         ret = testing.db.execute(
             addresses.update().\
                 values({
-                        addresses.c.email_address:users.c.name, 
+                        addresses.c.email_address:users.c.name,
                         users.c.name:'ed2'
                 }).\
                 where(users.c.id==addresses.c.user_id).\
@@ -316,7 +316,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table
         ret = testing.db.execute(
             addresses.update().\
                 values({
-                        'email_address':users.c.name, 
+                        'email_address':users.c.name,
                 }).\
                 where(users.c.id==addresses.c.user_id).\
                 where(users.c.name=='ed')
@@ -333,7 +333,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table
                 (4, 9, "fred@fred.com")
             ]
         )
-        # users table not actually updated, 
+        # users table not actually updated,
         # so no onupdate
         eq_(
             testing.db.execute(