git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
upgrade to black 20.8b1
author Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 28 Sep 2020 18:24:11 +0000 (14:24 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 28 Sep 2020 18:24:11 +0000 (14:24 -0400)
It's better; the majority of these changes look more readable to me.
Also found some docstrings that had formatting / quoting issues.

Cross-implemented with master

Change-Id: I582a45fde3a5648b2f36bab96bad56881321899b

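Most of the hunks below follow from two formatting behaviors introduced in black 20.8: docstrings are re-indented (and short ones collapsed onto a single line), and a pre-existing "magic" trailing comma in a call or literal now forces black to keep it exploded with one element per line. A minimal sketch of the second behavior, using black's format_str/FileMode Python API purely for illustration (it is not a stable public interface), with line length 79 assumed:

    import black

    # a call as black 19.10b0 wrapped it: the arguments share one line and
    # end in a trailing comma before the closing parenthesis
    src = (
        "util.raise_(\n"
        "    exc.UnmappedInstanceError(instance), replace_context=err,\n"
        ")\n"
    )

    # under 20.8b1 the trailing comma is treated as "magic", so the call is
    # re-exploded one argument per line, matching the session.py hunks below
    print(black.format_str(src, mode=black.FileMode(line_length=79)))

The tree itself would typically be reformatted by re-running the pre-commit hook pinned in .pre-commit-config.yaml (for example, pre-commit run black --all-files).
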
105 files changed:
.github/workflows/scripts/can_install.py
.pre-commit-config.yaml
examples/dogpile_caching/caching_query.py
examples/versioned_rows/versioned_rows_w_versionid.py
examples/vertical/dictlike-polymorphic.py
lib/sqlalchemy/__init__.py
lib/sqlalchemy/connectors/mxodbc.py
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mssql/mxodbc.py
lib/sqlalchemy/dialects/mysql/cymysql.py
lib/sqlalchemy/dialects/mysql/types.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/dialects/postgresql/hstore.py
lib/sqlalchemy/dialects/postgresql/json.py
lib/sqlalchemy/dialects/postgresql/ranges.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/event/attr.py
lib/sqlalchemy/event/base.py
lib/sqlalchemy/event/registry.py
lib/sqlalchemy/exc.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/orderinglist.py
lib/sqlalchemy/inspection.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/dml.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/testing/assertions.py
lib/sqlalchemy/testing/provision.py
lib/sqlalchemy/testing/requirements.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_select.py
lib/sqlalchemy/testing/suite/test_types.py
lib/sqlalchemy/util/compat.py
lib/sqlalchemy/util/langhelpers.py
setup.cfg
test/aaa_profiling/test_zoomark_orm.py
test/base/test_warnings.py
test/dialect/mssql/test_engine.py
test/dialect/mssql/test_query.py
test/dialect/mssql/test_reflection.py
test/dialect/mssql/test_types.py
test/dialect/oracle/test_dialect.py
test/dialect/oracle/test_reflection.py
test/dialect/postgresql/test_compiler.py
test/dialect/postgresql/test_reflection.py
test/dialect/postgresql/test_types.py
test/dialect/test_sqlite.py
test/engine/test_deprecations.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/ext/declarative/test_inheritance.py
test/ext/test_associationproxy.py
test/ext/test_horizontal_shard.py
test/orm/_fixtures.py
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_productspec.py
test/orm/inheritance/test_relationship.py
test/orm/test_attributes.py
test/orm/test_cascade.py
test/orm/test_cycles.py
test/orm/test_defaults.py
test/orm/test_deferred.py
test/orm/test_deprecations.py
test/orm/test_eager_relations.py
test/orm/test_events.py
test/orm/test_expire.py
test/orm/test_joins.py
test/orm/test_merge.py
test/orm/test_naturalpks.py
test/orm/test_of_type.py
test/orm/test_query.py
test/orm/test_relationships.py
test/orm/test_selectin_relations.py
test/orm/test_subquery_relations.py
test/orm/test_unitofwork.py
test/requirements.py
test/sql/test_compiler.py
test/sql/test_cte.py
test/sql/test_defaults.py
test/sql/test_metadata.py
test/sql/test_operators.py
test/sql/test_resultset.py
test/sql/test_sequences.py

index 61685d9789a4c84808b5d37fd2c070f61920d59f..ecb24b5623f65476858191bde88b3b3d22ae0ee4 100644 (file)
@@ -1,4 +1,5 @@
 import sys
+
 from packaging import tags
 
 to_check = "--"
index de29c68c6b1248b324ccae3ab46585b7878fcba2..91111eb0ba2745f0bb6eb7503ebeae3ab143daf9 100644 (file)
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 -   repo: https://github.com/python/black
-    rev: 19.10b0
+    rev: 20.8b1
     hooks:
     -   id: black
 
index 3d528b880de2aaad5a5ddbee687468a9c85fc85d..20e04ebed340db6322753ce8deddac64f600914a 100644 (file)
@@ -47,15 +47,15 @@ class CachingQuery(Query):
 
     def __iter__(self):
         """override __iter__ to pull results from dogpile
-           if particular attributes have been configured.
+        if particular attributes have been configured.
 
-           Note that this approach does *not* detach the loaded objects from
-           the current session. If the cache backend is an in-process cache
-           (like "memory") and lives beyond the scope of the current session's
-           transaction, those objects may be expired. The method here can be
-           modified to first expunge() each loaded item from the current
-           session before returning the list of items, so that the items
-           in the cache are not the same ones in the current Session.
+        Note that this approach does *not* detach the loaded objects from
+        the current session. If the cache backend is an in-process cache
+        (like "memory") and lives beyond the scope of the current session's
+        transaction, those objects may be expired. The method here can be
+        modified to first expunge() each loaded item from the current
+        session before returning the list of items, so that the items
+        in the cache are not the same ones in the current Session.
 
         """
         super_ = super(CachingQuery, self)
@@ -67,15 +67,15 @@ class CachingQuery(Query):
 
     def _execute_and_instances(self, context):
         """override _execute_and_instances to pull results from dogpile
-            if the query is invoked directly from an external context.
+         if the query is invoked directly from an external context.
 
-           This method is necessary in order to maintain compatibility
-           with the "baked query" system now used by default in some
-           relationship loader scenarios.   Note also the
-           RelationshipCache._generate_cache_key method which enables
-           the baked query to be used within lazy loads.
+        This method is necessary in order to maintain compatibility
+        with the "baked query" system now used by default in some
+        relationship loader scenarios.   Note also the
+        RelationshipCache._generate_cache_key method which enables
+        the baked query to be used within lazy loads.
 
-           .. versionadded:: 1.2.7
+        .. versionadded:: 1.2.7
         """
         super_ = super(CachingQuery, self)
 
@@ -208,7 +208,7 @@ class FromCache(MapperOption):
 
 class RelationshipCache(MapperOption):
     """Specifies that a Query as called within a "lazy load"
-       should load results from a cache."""
+    should load results from a cache."""
 
     propagate_to_loaders = True
 
index 4861fb3669c4eab080e139bdda5efe88e25a1f64..c1531dc06c727f88b1df84fb4fe4d648796bdc4f 100644 (file)
@@ -101,14 +101,17 @@ session.commit()
 e1.data = "e2"
 session.commit()
 
-assert session.query(
-    Example.id,
-    Example.version_id,
-    Example.is_current_version,
-    Example.calc_is_current_version,
-    Example.data,
-).order_by(Example.id, Example.version_id).all() == (
-    [(1, 1, False, False, "e1"), (1, 2, True, True, "e2")]
+assert (
+    session.query(
+        Example.id,
+        Example.version_id,
+        Example.is_current_version,
+        Example.calc_is_current_version,
+        Example.data,
+    )
+    .order_by(Example.id, Example.version_id)
+    .all()
+    == ([(1, 1, False, False, "e1"), (1, 2, True, True, "e2")])
 )
 
 # example 2, versioning with a parent
@@ -159,12 +162,15 @@ session.commit()
 assert p1.child_id == 1
 assert p1.child.version_id == 2
 
-assert session.query(
-    Child.id,
-    Child.version_id,
-    Child.is_current_version,
-    Child.calc_is_current_version,
-    Child.data,
-).order_by(Child.id, Child.version_id).all() == (
-    [(1, 1, False, False, "c1"), (1, 2, True, True, "c2")]
+assert (
+    session.query(
+        Child.id,
+        Child.version_id,
+        Child.is_current_version,
+        Child.calc_is_current_version,
+        Child.data,
+    )
+    .order_by(Child.id, Child.version_id)
+    .all()
+    == ([(1, 1, False, False, "c1"), (1, 2, True, True, "c2")])
 )
index 73d12ee4f2592db307fd29bffbe9ce1f938f3cc6..3a79fade878bf0427dcea289608dea298d289f0f 100644 (file)
@@ -67,9 +67,8 @@ class PolymorphicVerticalProperty(object):
 
     @value.comparator
     class value(PropComparator):
-        """A comparator for .value, builds a polymorphic comparison via CASE.
-
-        """
+        """A comparator for .value, builds a polymorphic comparison via
+        CASE."""
 
         def __init__(self, cls):
             self.cls = cls
index ca351aee429704da0badd41d7e3c88bef97cd0dc..406911a3b16db6d1357307e8fb7dcd8f48652a3c 100644 (file)
@@ -124,7 +124,7 @@ from .engine import create_engine  # noqa nosort
 from .engine import engine_from_config  # noqa nosort
 
 
-__version__ = '1.3.20'
+__version__ = "1.3.20"
 
 
 def __go(lcls):
index 0a34cfe003964876f53cbf07f9a5dcbb2765c1aa..57c7e0b2a6c5dece50759ef7d92d4344867d0a36 100644 (file)
@@ -54,7 +54,7 @@ class MxODBCConnector(Connector):
 
     @classmethod
     def _load_mx_exceptions(cls):
-        """ Import mxODBC exception classes into the module namespace,
+        """Import mxODBC exception classes into the module namespace,
         as if they had been imported normally. This is done here
         to avoid requiring all SQLAlchemy users to install mxODBC.
         """
@@ -72,7 +72,7 @@ class MxODBCConnector(Connector):
         return connect
 
     def _error_handler(self):
-        """ Return a handler that adjusts mxODBC's raised Warnings to
+        """Return a handler that adjusts mxODBC's raised Warnings to
         emit Python standard warnings.
         """
         from mx.ODBC.Error import Warning as MxOdbcWarning
index 53a8fc8b5e28efe72b808b8699900985c18e1771..7fccea35b31cb58d9224a5c64882abc18d667311 100644 (file)
@@ -968,7 +968,7 @@ class TIME(sqltypes.TIME):
                     self.__zero_date, value.time()
                 )
             elif isinstance(value, datetime.time):
-                """ issue #5339
+                """issue #5339
                 per: https://github.com/mkleehammer/pyodbc/wiki/Tips-and-Tricks-by-Database-Platform#time-columns
                 pass TIME value as string
                 """  # noqa
@@ -1195,9 +1195,7 @@ class SQL_VARIANT(sqltypes.TypeEngine):
 
 
 class TryCast(sql.elements.Cast):
-    """Represent a SQL Server TRY_CAST expression.
-
-    """
+    """Represent a SQL Server TRY_CAST expression."""
 
     __visit_name__ = "try_cast"
 
index 5df1351d2bce20914b3739026e6a70f21bcc314c..011086db15684a8307360af845030f226f34f5fc 100644 (file)
@@ -55,8 +55,7 @@ from ...connectors.mxodbc import MxODBCConnector
 
 
 class _MSNumeric_mxodbc(_MSNumeric_pyodbc):
-    """Include pyodbc's numeric processor.
-    """
+    """Include pyodbc's numeric processor."""
 
 
 class _MSDate_mxodbc(_MSDate):
index 2b45f5ddba27f0830a44533f9c2b22f5efe87bc0..d51c03a052c880bee1f64e28642adb8e31d9a29d 100644 (file)
@@ -28,8 +28,8 @@ from ... import util
 
 class _cymysqlBIT(BIT):
     def result_processor(self, dialect, coltype):
-        """Convert a MySQL's 64 bit, variable length binary string to a long.
-        """
+        """Convert a MySQL's 64 bit, variable length binary string to a
+        long."""
 
         def process(value):
             if value is not None:
index 3b455cfb1fa373d676721185ea890eb1304d319c..59497500015401da341c41c476eb28bbc0293d2b 100644 (file)
@@ -440,9 +440,7 @@ class TIME(sqltypes.TIME):
 
 
 class TIMESTAMP(sqltypes.TIMESTAMP):
-    """MySQL TIMESTAMP type.
-
-    """
+    """MySQL TIMESTAMP type."""
 
     __visit_name__ = "TIMESTAMP"
 
@@ -467,9 +465,7 @@ class TIMESTAMP(sqltypes.TIMESTAMP):
 
 
 class DATETIME(sqltypes.DATETIME):
-    """MySQL DATETIME type.
-
-    """
+    """MySQL DATETIME type."""
 
     __visit_name__ = "DATETIME"
 
index ece6401f32787d89706be67d4611f88734a5ef93..18150bd334ceaf28e1f97a7aa5ac4b79ae97e1d6 100644 (file)
@@ -1001,7 +1001,11 @@ class OracleDialect_cx_oracle(OracleDialect):
             # allow all strings to come back natively as Unicode
             elif (
                 dialect.coerce_to_unicode
-                and default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR,)
+                and default_type
+                in (
+                    cx_Oracle.STRING,
+                    cx_Oracle.FIXED_CHAR,
+                )
                 and default_type is not cx_Oracle.CLOB
                 and default_type is not cx_Oracle.NCLOB
             ):
@@ -1049,7 +1053,9 @@ class OracleDialect_cx_oracle(OracleDialect):
                 cx_Oracle.BLOB,
             ):
                 return cursor.var(
-                    cx_Oracle.LONG_BINARY, size, cursor.arraysize,
+                    cx_Oracle.LONG_BINARY,
+                    size,
+                    cursor.arraysize,
                 )
 
         return output_type_handler
index 2c68a5969b867fd4a804ac335dfc2a2b06dfc2e9..1295f76597b088703f67d9bb457c0e8cbcb815b6 100644 (file)
@@ -1311,7 +1311,7 @@ class UUID(sqltypes.TypeEngine):
          as Python uuid objects, converting to/from string via the
          DBAPI.
 
-         """
+        """
         if as_uuid and _python_UUID is None:
             raise NotImplementedError(
                 "This version of Python does not support "
@@ -2609,10 +2609,14 @@ class PGDialect(default.DefaultDialect):
 
     def initialize(self, connection):
         super(PGDialect, self).initialize(connection)
-        self.implicit_returning = self.server_version_info > (
-            8,
-            2,
-        ) and self.__dict__.get("implicit_returning", True)
+        self.implicit_returning = (
+            self.server_version_info
+            > (
+                8,
+                2,
+            )
+            and self.__dict__.get("implicit_returning", True)
+        )
         self.supports_native_enum = self.server_version_info >= (8, 3)
         if not self.supports_native_enum:
             self.colspecs = self.colspecs.copy()
index 4e048feb05ec9ca88cd014f8dae0981db8aaccda..2ef700771c2896cc71ec661f229686054b7775bd 100644 (file)
@@ -160,13 +160,11 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
             return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
 
         def has_all(self, other):
-            """Boolean expression.  Test for presence of all keys in jsonb
-            """
+            """Boolean expression.  Test for presence of all keys in jsonb"""
             return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
 
         def has_any(self, other):
-            """Boolean expression.  Test for presence of any key in jsonb
-            """
+            """Boolean expression.  Test for presence of any key in jsonb"""
             return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
 
         def contains(self, other, **kwargs):
index 255f1af21dd2f86d5a2ee69fa7a4bfa848b587f1..47e924eb972b4f091f634281c64dab0a6ce5a0da 100644 (file)
@@ -195,7 +195,7 @@ class JSON(sqltypes.JSON):
 
          .. versionadded:: 1.1
 
-         """
+        """
         super(JSON, self).__init__(none_as_null=none_as_null)
         if astext_type is not None:
             self.astext_type = astext_type
@@ -291,13 +291,11 @@ class JSONB(JSON):
             return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
 
         def has_all(self, other):
-            """Boolean expression.  Test for presence of all keys in jsonb
-            """
+            """Boolean expression.  Test for presence of all keys in jsonb"""
             return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
 
         def has_any(self, other):
-            """Boolean expression.  Test for presence of any key in jsonb
-            """
+            """Boolean expression.  Test for presence of any key in jsonb"""
             return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
 
         def contains(self, other, **kwargs):
index a31d958ed93b9dc7baa82947ef3f3ac25a929bf9..ddc12c096d3dd61090493717837161417e6c26de 100644 (file)
@@ -100,48 +100,36 @@ class RangeOperators(object):
 
 
 class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL INT4RANGE type.
-
-    """
+    """Represent the PostgreSQL INT4RANGE type."""
 
     __visit_name__ = "INT4RANGE"
 
 
 class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL INT8RANGE type.
-
-    """
+    """Represent the PostgreSQL INT8RANGE type."""
 
     __visit_name__ = "INT8RANGE"
 
 
 class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL NUMRANGE type.
-
-    """
+    """Represent the PostgreSQL NUMRANGE type."""
 
     __visit_name__ = "NUMRANGE"
 
 
 class DATERANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL DATERANGE type.
-
-    """
+    """Represent the PostgreSQL DATERANGE type."""
 
     __visit_name__ = "DATERANGE"
 
 
 class TSRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL TSRANGE type.
-
-    """
+    """Represent the PostgreSQL TSRANGE type."""
 
     __visit_name__ = "TSRANGE"
 
 
 class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL TSTZRANGE type.
-
-    """
+    """Represent the PostgreSQL TSTZRANGE type."""
 
     __visit_name__ = "TSTZRANGE"
index 6c9eb41ccdc3ae088b1a8619146f9593c0cb4185..5f46021984353d4c6fc491243625811bd604b95a 100644 (file)
@@ -344,7 +344,7 @@ class Connection(Connectable):
         return c
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded:: 1.3
 
@@ -1746,9 +1746,7 @@ class Transaction(object):
         self.connection._discard_transaction(self)
 
     def rollback(self):
-        """Roll back this :class:`.Transaction`.
-
-        """
+        """Roll back this :class:`.Transaction`."""
         if self._parent.is_active:
             self._do_rollback()
             self.is_active = False
@@ -2020,7 +2018,7 @@ class Engine(Connectable, log.Identified):
         return OptionEngine(self, opt)
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded: 1.3
 
index b424ce90df3c0576b21bd7579f27606e887f8914..e57ec6847186b7a795b0947e0f6f6529f556ca4e 100644 (file)
@@ -532,7 +532,7 @@ class Dialect(object):
         :param dbapi_connection: a DBAPI connection, typically
          proxied within a :class:`.ConnectionFairy`.
 
-         """
+        """
 
         raise NotImplementedError()
 
@@ -543,7 +543,7 @@ class Dialect(object):
         :param dbapi_connection: a DBAPI connection, typically
          proxied within a :class:`.ConnectionFairy`.
 
-         """
+        """
 
         raise NotImplementedError()
 
@@ -1294,8 +1294,7 @@ class Connectable(object):
         ":meth:`_schema.MetaData.create_all`.",
     )
     def create(self, entity, **kwargs):
-        """Emit CREATE statements for the given schema entity.
-        """
+        """Emit CREATE statements for the given schema entity."""
 
         raise NotImplementedError()
 
@@ -1308,8 +1307,7 @@ class Connectable(object):
         ":meth:`_schema.MetaData.drop_all`.",
     )
     def drop(self, entity, **kwargs):
-        """Emit DROP statements for the given schema entity.
-        """
+        """Emit DROP statements for the given schema entity."""
 
         raise NotImplementedError()
 
index 93d5827d7c0ffdce54d63fa88d4db397a2c32b0a..283c6f31977f73566d6746a4617104ddac6b3f94 100644 (file)
@@ -150,8 +150,7 @@ class Inspector(object):
         return self.dialect.default_schema_name
 
     def get_schema_names(self):
-        """Return all schema names.
-        """
+        """Return all schema names."""
 
         if hasattr(self.dialect, "get_schema_names"):
             return self.dialect.get_schema_names(
index cefb640a180c16b273cfe56507643a4f83d19f3d..d9f31a8006ddfff072dfc14ae7665e56922517c9 100644 (file)
@@ -373,7 +373,7 @@ class _ListenerCollection(_CompoundListener):
 
     def _update(self, other, only_propagate=True):
         """Populate from the listeners in another :class:`_Dispatch`
-            object."""
+        object."""
 
         existing_listeners = self.listeners
         existing_listener_set = set(existing_listeners)
index a87c1fe4484962d77911b5278307284f79cd49a9..560717cd27330b42737be0d0e2b2ccfce003e40c 100644 (file)
@@ -142,7 +142,7 @@ class _Dispatch(object):
 
     def _update(self, other, only_propagate=True):
         """Populate from the listeners in another :class:`_Dispatch`
-            object."""
+        object."""
         for ls in other._event_descriptors:
             if isinstance(ls, _EmptyListener):
                 continue
index 19b9174b71c41e7621df3fac8460add8a1caee77..56b310276d4214dd2bc36ae6834f9ac8b5854d5a 100644 (file)
@@ -139,8 +139,7 @@ def _clear(owner, elements):
 
 
 class _EventKey(object):
-    """Represent :func:`.listen` arguments.
-    """
+    """Represent :func:`.listen` arguments."""
 
     __slots__ = (
         "target",
@@ -239,8 +238,7 @@ class _EventKey(object):
                 collection.remove(self.with_wrapper(listener_fn))
 
     def contains(self):
-        """Return True if this event key is registered to listen.
-        """
+        """Return True if this event key is registered to listen."""
         return self._key in _key_to_collection
 
     def base_listen(
index b86803097d30ab64835ae2fb504bdc49e70ef9f9..a03fd8bb52a4477bbf39e987b8f4133e93fa384b 100644 (file)
@@ -35,7 +35,11 @@ class SQLAlchemyError(Exception):
         else:
             return (
                 "(Background on this error at: "
-                "http://sqlalche.me/e/%s/%s)" % (_version_token, self.code,)
+                "http://sqlalche.me/e/%s/%s)"
+                % (
+                    _version_token,
+                    self.code,
+                )
             )
 
     def _message(self, as_unicode=compat.py3k):
index 5151ddb2a37534c99492bf23e8730fdadb46f203..2f8d8627c31abc4b825596fcdbd8b20135dfc481 100644 (file)
@@ -840,8 +840,7 @@ class AmbiguousAssociationProxyInstance(AssociationProxyInstance):
 
 
 class ObjectAssociationProxyInstance(AssociationProxyInstance):
-    """an :class:`.AssociationProxyInstance` that has an object as a target.
-    """
+    """an :class:`.AssociationProxyInstance` that has an object as a target."""
 
     _target_is_object = True
     _is_canonical = True
index 229ad1506fd0dba4d5e864ddbc8ed0cd6662bf03..d510f6451e5d0fb475ae122a6160729c35e0b975 100644 (file)
@@ -171,8 +171,7 @@ class BakedQuery(object):
         return self._cache_key + (session._query_cls,)
 
     def _with_lazyload_options(self, options, effective_path, cache_path=None):
-        """Cloning version of _add_lazyload_options.
-        """
+        """Cloning version of _add_lazyload_options."""
         q = self._clone()
         q._add_lazyload_options(options, effective_path, cache_path=cache_path)
         return q
index 3361c95efe7964cb712b44a59dc214d883a86b78..c654313cf14221cdd61214f61d0a4b300baac5c9 100644 (file)
@@ -950,7 +950,7 @@ class hybrid_property(interfaces.InspectionAttrInfo):
 
             :ref:`hybrid_reuse_subclass`
 
-         """
+        """
         return self
 
     def getter(self, fget):
index 1f0e9d7e13198b11a88fd0b42fe64e8b6509b805..6882d647147034bdbaae8ceb3da0ab161c86cc20 100644 (file)
@@ -377,7 +377,7 @@ class OrderingList(list):
 
 
 def _reconstitute(cls, dict_, items):
-    """ Reconstitute an :class:`.OrderingList`.
+    """Reconstitute an :class:`.OrderingList`.
 
     This is the adjoint to :meth:`.OrderingList.__reduce__`.  It is used for
     unpickling :class:`.OrderingList` objects.
index 270f189bef702efd298d62f23175611aba00e4ca..4d9ee30c44fcaded93a8a8e9e794878d124c2371 100644 (file)
@@ -54,7 +54,7 @@ def inspect(subject, raiseerr=True):
      :class:`sqlalchemy.exc.NoInspectionAvailable`
      is raised.  If ``False``, ``None`` is returned.
 
-     """
+    """
     type_ = type(subject)
     for cls in type_.__mro__:
         if cls in _registrars:
index 32be3b5aa33b13b093a901da2a6b73671ef3c5ed..15476a2c46de4b6290ce89d4834ba0208f756545 100644 (file)
@@ -886,9 +886,9 @@ class ScalarAttributeImpl(AttributeImpl):
 
 class ScalarObjectAttributeImpl(ScalarAttributeImpl):
     """represents a scalar-holding InstrumentedAttribute,
-       where the target object is also instrumented.
+    where the target object is also instrumented.
 
-       Adds events to delete/set operations.
+    Adds events to delete/set operations.
 
     """
 
@@ -983,9 +983,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
         check_old=None,
         pop=False,
     ):
-        """Set a value on the given InstanceState.
-
-        """
+        """Set a value on the given InstanceState."""
         if self.dispatch._active_history:
             old = self.get(
                 state,
index ad407f5e6a70103c30b1069b56d96d6ab87f7809..fcc41bb67db93b356cea8c1bbf6111995ccecaaf 100644 (file)
@@ -398,7 +398,7 @@ class SessionExtension(object):
         """Execute before flush process has started.
 
         `instances` is an optional list of objects which were passed to
-        the ``flush()`` method. """
+        the ``flush()`` method."""
 
     def after_flush(self, session, flush_context):
         """Execute after flush has completed, but before commit has been
@@ -415,18 +415,18 @@ class SessionExtension(object):
         This will be when the 'new', 'dirty', and 'deleted' lists are in
         their final state.  An actual commit() may or may not have
         occurred, depending on whether or not the flush started its own
-        transaction or participated in a larger transaction. """
+        transaction or participated in a larger transaction."""
 
     def after_begin(self, session, transaction, connection):
         """Execute after a transaction is begun on a connection
 
         `transaction` is the SessionTransaction. This method is called
-        after an engine level transaction is begun on a connection. """
+        after an engine level transaction is begun on a connection."""
 
     def after_attach(self, session, instance):
         """Execute after an instance is attached to a session.
 
-        This is called after an add, delete or merge. """
+        This is called after an add, delete or merge."""
 
     def after_bulk_update(self, session, query, query_context, result):
         """Execute after a bulk update operation to the session.
index c23197adadf064a0cc3df6c0474758fac7d6b5b3..ca84a30988c6bf7ceec80d16df51c4ca0b35e0bf 100644 (file)
@@ -27,7 +27,7 @@ from ..sql import expression
 
 class DescriptorProperty(MapperProperty):
     """:class:`.MapperProperty` which proxies access to a
-        user-defined descriptor."""
+    user-defined descriptor."""
 
     doc = None
 
index a255cccc7b5eb89a1b66056f1ab98d7163a6818d..9b4119ef8b4987d2b34474aed5dba75f9d10f7fc 100644 (file)
@@ -362,7 +362,7 @@ def _instance_processor(
     _polymorphic_from=None,
 ):
     """Produce a mapper level row processor callable
-       which processes rows into mapped instances."""
+    which processes rows into mapped instances."""
 
     # note that this method, most of which exists in a closure
     # called _instance(), resists being broken out, as
@@ -878,9 +878,7 @@ def _decorate_polymorphic_switch(
 
 
 class PostLoad(object):
-    """Track loaders and states for "post load" operations.
-
-    """
+    """Track loaders and states for "post load" operations."""
 
     __slots__ = "loaders", "states", "load_keys"
 
index 016953255734429392ea9221c3ce638caa3a732a..6f3170a9edce1fd17ad460e715070bf186c99bd6 100644 (file)
@@ -1348,8 +1348,7 @@ class Mapper(InspectionAttr):
 
     @classmethod
     def _configure_all(cls):
-        """Class-level path to the :func:`.configure_mappers` call.
-        """
+        """Class-level path to the :func:`.configure_mappers` call."""
         configure_mappers()
 
     def dispose(self):
@@ -1864,7 +1863,7 @@ class Mapper(InspectionAttr):
 
     def _property_from_column(self, key, prop):
         """generate/update a :class:`.ColumnProprerty` given a
-        :class:`_schema.Column` object. """
+        :class:`_schema.Column` object."""
 
         # we were passed a Column or a list of Columns;
         # generate a properties.ColumnProperty
@@ -2052,8 +2051,7 @@ class Mapper(InspectionAttr):
         return key in self._props
 
     def get_property(self, key, _configure_mappers=True):
-        """return a MapperProperty associated with the given key.
-        """
+        """return a MapperProperty associated with the given key."""
 
         if _configure_mappers and Mapper._new_mappers:
             configure_mappers()
index 7de2d939c8e33efbfd5c71fba209dfd1f4e23288..e7115dd317f3578057d8f2f85e144ff6badfb32b 100644 (file)
@@ -1705,7 +1705,7 @@ class Query(object):
         return self.with_hint(None, text, dialect_name)
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded:: 1.3
 
@@ -1717,7 +1717,7 @@ class Query(object):
 
     @_generative()
     def execution_options(self, **kwargs):
-        """ Set non-SQL options which take effect during execution.
+        """Set non-SQL options which take effect during execution.
 
         The options are the same as those accepted by
         :meth:`_engine.Connection.execution_options`.
index 8e4bcd63ca3f396d5155691d48f8ea055f02bcf0..59044d35b777e3c28bee00959363372e6e78190c 100644 (file)
@@ -3525,7 +3525,7 @@ class JoinCondition(object):
 class _ColInAnnotations(object):
     """Seralizable equivalent to:
 
-        lambda c: "name" in c._annotations
+    lambda c: "name" in c._annotations
     """
 
     def __init__(self, name):
index b3cc1334db106b3c054efc514848e603f51cab3a..5ed79ea5039750fe491ac5bebd738e09808aa4eb 100644 (file)
@@ -1539,7 +1539,8 @@ class Session(_SessionClassMethods):
             except sa_exc.NoInspectionAvailable as err:
                 if isinstance(mapper, type):
                     util.raise_(
-                        exc.UnmappedClassError(mapper), replace_context=err,
+                        exc.UnmappedClassError(mapper),
+                        replace_context=err,
                     )
                 else:
                     raise
@@ -1685,7 +1686,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._expire_state(state, attribute_names)
@@ -1797,7 +1799,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         self._expire_state(state, attribute_names)
 
@@ -1854,7 +1857,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         if state.session_id is not self.hash_key:
             raise sa_exc.InvalidRequestError(
@@ -2008,7 +2012,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._save_or_update_state(state)
@@ -2045,7 +2050,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._delete_impl(state, instance, head=True)
@@ -2476,7 +2482,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         return self._contains_state(state)
 
@@ -2575,7 +2582,8 @@ class Session(_SessionClassMethods):
 
                 except exc.NO_STATE as err:
                     util.raise_(
-                        exc.UnmappedInstanceError(o), replace_context=err,
+                        exc.UnmappedInstanceError(o),
+                        replace_context=err,
                     )
                 objset.add(state)
         else:
@@ -3445,7 +3453,8 @@ def object_session(instance):
         state = attributes.instance_state(instance)
     except exc.NO_STATE as err:
         util.raise_(
-            exc.UnmappedInstanceError(instance), replace_context=err,
+            exc.UnmappedInstanceError(instance),
+            replace_context=err,
         )
     else:
         return _state_session(state)
index e1aa4ddc294d53d63e77a88cbcf572fc4b263a22..70ff8d80d0f8b52870a75c3719bbf85da5677311 100644 (file)
@@ -212,7 +212,7 @@ class InstanceState(interfaces.InspectionAttrInfo):
 
             :ref:`session_object_states`
 
-            """
+        """
         return self.key is not None and self._attached and not self._deleted
 
     @property
@@ -529,7 +529,7 @@ class InstanceState(interfaces.InspectionAttrInfo):
 
     def _reset(self, dict_, key):
         """Remove the given attribute and any
-           callables associated with it."""
+        callables associated with it."""
 
         old = dict_.pop(key, None)
         if old is not None and self.manager[key].impl.collection:
index cbc3716a930184cfc31142b12c415bad98abdb37..1bf849a92a8ce43eaab30f3eb9a7d745e893123a 100644 (file)
@@ -48,8 +48,8 @@ from .sql.schema import DefaultGenerator  # noqa
 from .sql.schema import FetchedValue  # noqa
 from .sql.schema import ForeignKey  # noqa
 from .sql.schema import ForeignKeyConstraint  # noqa
-from .sql.schema import Index  # noqa
 from .sql.schema import IdentityOptions  # noqa
+from .sql.schema import Index  # noqa
 from .sql.schema import MetaData  # noqa
 from .sql.schema import PassiveDefault  # noqa
 from .sql.schema import PrimaryKeyConstraint  # noqa
index 0c350d3f48c4e45bf2b7ba1fa8572cac5391a84d..ed85cd4d5e30cc09e89fe36349fcdbfd241968b4 100644 (file)
@@ -403,9 +403,7 @@ class Executable(Generative):
         return self._execution_options
 
     def execute(self, *multiparams, **params):
-        """Compile and execute this :class:`.Executable`.
-
-        """
+        """Compile and execute this :class:`.Executable`."""
         e = self.bind
         if e is None:
             label = getattr(self, "description", self.__class__.__name__)
@@ -492,18 +490,18 @@ class ColumnCollection(util.OrderedProperties):
 
     def replace(self, column):
         """Add the given column to this collection, removing unaliased
-           versions of this column  as well as existing columns with the
-           same key.
+        versions of this column  as well as existing columns with the
+        same key.
 
-           E.g.::
+        E.g.::
 
-                t = Table('sometable', metadata, Column('col1', Integer))
-                t.columns.replace(Column('col1', Integer, key='columnone'))
+             t = Table('sometable', metadata, Column('col1', Integer))
+             t.columns.replace(Column('col1', Integer, key='columnone'))
 
-           will remove the original 'col1' from the collection, and add
-           the new column under the name 'columnname'.
+        will remove the original 'col1' from the collection, and add
+        the new column under the name 'columnname'.
 
-           Used by schema.Column to override columns during table reflection.
+        Used by schema.Column to override columns during table reflection.
 
         """
         remove_col = None
index f06429bb9410dfa87e0eb358dca5a0af1bd45e19..3864af45b383c4c3fbd46cb7b03f20271388e521 100644 (file)
@@ -329,8 +329,7 @@ class Compiled(object):
         "does nothing.",
     )
     def compile(self):
-        """Produce the internal string representation of this element.
-        """
+        """Produce the internal string representation of this element."""
         pass
 
     def _execute_on_connection(self, connection, multiparams, params):
index 3b16394dfd35895cc8cf89a981ed7a397450451b..1e3d110b69d4cb7b16ed577cbd366ff6608b8e3e 100644 (file)
@@ -1038,7 +1038,9 @@ class SchemaDropper(DDLBase):
 
 
 def sort_tables(
-    tables, skip_fn=None, extra_dependencies=None,
+    tables,
+    skip_fn=None,
+    extra_dependencies=None,
 ):
     """Sort a collection of :class:`_schema.Table` objects based on
     dependency.
index c653dcef3ca6a2cc4eb1e70a13b94f585f527fe2..0362435d2fce6be8afe67a0c4470d48333b56878 100644 (file)
@@ -31,9 +31,7 @@ from .. import util
 class UpdateBase(
     HasCTE, DialectKWArgs, HasPrefixes, Executable, ClauseElement
 ):
-    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
-
-    """
+    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""
 
     __visit_name__ = "update_base"
 
index 64824df42db532e603acdeccf85ff8166857fae2..86cf950000bf44a96d4a744f2e05b27042d085e2 100644 (file)
@@ -3351,8 +3351,8 @@ class Slice(ColumnElement):
 
 
 class IndexExpression(BinaryExpression):
-    """Represent the class of expressions that are like an "index" operation.
-    """
+    """Represent the class of expressions that are like an "index"
+    operation."""
 
     pass
 
index 2d8097114a3a9c7381b92dfaffc2ab9b85566ec4..469a3af72b083d767a221861412305f4d086484c 100644 (file)
@@ -875,9 +875,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
         ":class:`.DDLEvents`.",
     )
     def append_ddl_listener(self, event_name, listener):
-        """Append a DDL event listener to this ``Table``.
-
-        """
+        """Append a DDL event listener to this ``Table``."""
 
         def adapt_listener(target, connection, **kw):
             listener(event_name, target, connection)
@@ -2815,8 +2813,7 @@ class DefaultClause(FetchedValue):
     "future release.  Please refer to :class:`.DefaultClause`.",
 )
 class PassiveDefault(DefaultClause):
-    """A DDL-specified DEFAULT column value.
-    """
+    """A DDL-specified DEFAULT column value."""
 
     def __init__(self, *arg, **kw):
         DefaultClause.__init__(self, *arg, **kw)
@@ -3698,10 +3695,14 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
             if col.autoincrement is True:
                 _validate_autoinc(col, True)
                 return col
-            elif col.autoincrement in (
-                "auto",
-                "ignore_fk",
-            ) and _validate_autoinc(col, False):
+            elif (
+                col.autoincrement
+                in (
+                    "auto",
+                    "ignore_fk",
+                )
+                and _validate_autoinc(col, False)
+            ):
                 return col
 
         else:
@@ -4519,10 +4520,7 @@ class MetaData(SchemaItem):
         ":class:`.DDLEvents`.",
     )
     def append_ddl_listener(self, event_name, listener):
-        """Append a DDL event listener to this ``MetaData``.
-
-
-        """
+        """Append a DDL event listener to this ``MetaData``."""
 
         def adapt_listener(target, connection, **kw):
             tables = kw["tables"]
index 1e73d64552b9e1c990c1e62f7d4e74737ac84728..5a13444edbeb5d1fb611071ea9e2e8e828ce33dc 100644 (file)
@@ -2278,8 +2278,7 @@ class GenerativeSelect(SelectBase):
 
     @property
     def for_update(self):
-        """Provide legacy dialect support for the ``for_update`` attribute.
-        """
+        """Provide legacy dialect support for the ``for_update`` attribute."""
         if self._for_update_arg is not None:
             return self._for_update_arg.legacy_for_update_value
         else:
@@ -4033,9 +4032,7 @@ class ScalarSelect(Generative, Grouping):
 
 
 class Exists(UnaryExpression):
-    """Represent an ``EXISTS`` clause.
-
-    """
+    """Represent an ``EXISTS`` clause."""
 
     __visit_name__ = UnaryExpression.__visit_name__
     _from_objects = []
index f6d1c49ea3b871ee60e17af0bf3f16637ced48de..ac8e9ed060ec0670416070ddf0f69c90a6ec9c40 100644 (file)
@@ -2224,7 +2224,7 @@ class JSON(Indexable, TypeEngine):
 
               :attr:`.types.JSON.NULL`
 
-         """
+        """
         self.none_as_null = none_as_null
 
     class JSONElementType(TypeEngine):
index 75d8b9f86ee8310fb08bed48a20b8da0e4f51362..fb7b2ec321146b8002350e7c5e8275cd40a43761 100644 (file)
@@ -1144,10 +1144,7 @@ class TypeDecorator(SchemaEventTarget, TypeEngine):
 
     @util.memoized_property
     def _has_literal_processor(self):
-        """memoized boolean, check if process_literal_param is implemented.
-
-
-        """
+        """memoized boolean, check if process_literal_param is implemented."""
 
         return (
             self.__class__.process_literal_param.__code__
index 74d38ad20d0f3e27f8bf23c2fd931ccc6f25ce56..2c9baf36f9c5603389daa84538ba3f4bcb150fa6 100644 (file)
@@ -334,7 +334,10 @@ def assert_raises(except_cls, callable_, *args, **kw):
 
 def assert_raises_context_ok(except_cls, callable_, *args, **kw):
     _assert_raises(
-        except_cls, callable_, args, kw,
+        except_cls,
+        callable_,
+        args,
+        kw,
     )
 
 
@@ -369,7 +372,10 @@ def _assert_raises(
         if msg is not None:
             assert re.search(
                 msg, util.text_type(err), re.UNICODE
-            ), "%r !~ %s" % (msg, err,)
+            ), "%r !~ %s" % (
+                msg,
+                err,
+            )
         if check_context and not are_we_already_in_a_traceback:
             _assert_proper_exception_context(err)
         print(util.text_type(err).encode("utf-8"))
@@ -504,9 +510,12 @@ class ComparesTables(object):
             assert reflected_table.primary_key.columns[c.name] is not None
 
     def assert_types_base(self, c1, c2):
-        assert c1.type._compare_type_affinity(c2.type), (
-            "On column %r, type '%s' doesn't correspond to type '%s'"
-            % (c1.name, c1.type, c2.type)
+        assert c1.type._compare_type_affinity(
+            c2.type
+        ), "On column %r, type '%s' doesn't correspond to type '%s'" % (
+            c1.name,
+            c1.type,
+            c2.type,
         )
 
 
index 660a3bd24c16b24398924b58e9327764e56e5c98..7669cdc32e0e852f794c7871f7d5f3953af42f53 100644 (file)
@@ -110,8 +110,7 @@ def drop_db(cfg, eng, ident):
 
 @register.init
 def update_db_opts(db_url, db_opts):
-    """Set database options (db_opts) for a test database that we created.
-    """
+    """Set database options (db_opts) for a test database that we created."""
     pass
 
 
index df3aa4613b38da69114cb24de2e9c303188cdf78..0008b8c7b1957aa547f7d7094a34fe6fe8116a33 100644 (file)
@@ -46,7 +46,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def on_update_cascade(self):
-        """"target database must support ON UPDATE..CASCADE behavior in
+        """target database must support ON UPDATE..CASCADE behavior in
         foreign keys."""
 
         return exclusions.open()
@@ -335,7 +335,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def implements_get_lastrowid(self):
-        """"target dialect implements the executioncontext.get_lastrowid()
+        """target dialect implements the executioncontext.get_lastrowid()
         method without reliance on RETURNING.
 
         """
@@ -343,7 +343,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def emulated_lastrowid(self):
-        """"target dialect retrieves cursor.lastrowid, or fetches
+        """target dialect retrieves cursor.lastrowid, or fetches
         from a database-side function after an insert() construct executes,
         within the get_lastrowid() method.
 
@@ -355,7 +355,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def dbapi_lastrowid(self):
-        """"target platform includes a 'lastrowid' accessor on the DBAPI
+        """target platform includes a 'lastrowid' accessor on the DBAPI
         cursor object.
 
         """
@@ -376,17 +376,16 @@ class SuiteRequirements(Requirements):
 
     @property
     def cross_schema_fk_reflection(self):
-        """target system must support reflection of inter-schema foreign keys
-
-        """
+        """target system must support reflection of inter-schema foreign
+        keys"""
         return exclusions.closed()
 
     @property
     def implicit_default_schema(self):
         """target system has a strong concept of 'default' schema that can
-           be referred to implicitly.
+        be referred to implicitly.
 
-           basically, PostgreSQL.
+        basically, PostgreSQL.
 
         """
         return exclusions.closed()
@@ -464,8 +463,8 @@ class SuiteRequirements(Requirements):
 
     @property
     def view_reflection(self):
-        """target database must support inspection of the full CREATE VIEW definition.
-        """
+        """target database must support inspection of the full CREATE VIEW
+        definition."""
         return self.views
 
     @property
@@ -563,9 +562,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def symbol_names_w_double_quote(self):
-        """Target driver can create tables with a name like 'some " table'
-
-        """
+        """Target driver can create tables with a name like 'some " table'"""
         return exclusions.open()
 
     @property
@@ -713,7 +710,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def json_array_indexes(self):
-        """"target platform supports numeric array indexes
+        """target platform supports numeric array indexes
         within a JSON structure"""
 
         return self.json_type
index ff9d2fd2ec528c6759e2aa3910d3434d3381bd19..ce14b042edfe3f8e0ec76821491a802ca61d7516 100644 (file)
@@ -93,7 +93,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
             Column("related_id", Integer),
             sa.PrimaryKeyConstraint("id", name="pk quote ' one"),
             sa.Index("ix quote ' one", "name"),
-            sa.UniqueConstraint("data", name="uq quote' one",),
+            sa.UniqueConstraint(
+                "data",
+                name="uq quote' one",
+            ),
             sa.ForeignKeyConstraint(
                 ["id"], ["related.id"], name="fk quote ' one"
             ),
@@ -112,7 +115,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
                 Column("related_id", Integer),
                 sa.PrimaryKeyConstraint("id", name='pk quote " two'),
                 sa.Index('ix quote " two', "name"),
-                sa.UniqueConstraint("data", name='uq quote" two',),
+                sa.UniqueConstraint(
+                    "data",
+                    name='uq quote" two',
+                ),
                 sa.ForeignKeyConstraint(
                     ["id"], ["related.id"], name='fk quote " two'
                 ),
@@ -1298,11 +1304,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_column(
-                data, "computed_virtual", "normal+2", False,
+                data,
+                "computed_virtual",
+                "normal+2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_column(
-                data, "computed_stored", "normal-42", True,
+                data,
+                "computed_stored",
+                "normal-42",
+                True,
             )
 
     @testing.requires.schemas
@@ -1322,11 +1334,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_column(
-                data, "computed_virtual", "normal/2", False,
+                data,
+                "computed_virtual",
+                "normal/2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_column(
-                data, "computed_stored", "normal*42", True,
+                data,
+                "computed_stored",
+                "normal*42",
+                True,
             )
 
 
index 23e66a3fae34d770b2d899bc710273398ff41b9b..ff0daec3ffcbac58dce059ab5f56446f3590ea42 100644 (file)
@@ -789,7 +789,8 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
             tbl.select(tbl.c.col_a.is_distinct_from(tbl.c.col_b))
         ).fetchall()
         eq_(
-            len(result), expected_row_count_for_is,
+            len(result),
+            expected_row_count_for_is,
         )
 
         expected_row_count_for_isnot = (
@@ -799,5 +800,6 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
             tbl.select(tbl.c.col_a.isnot_distinct_from(tbl.c.col_b))
         ).fetchall()
         eq_(
-            len(result), expected_row_count_for_isnot,
+            len(result),
+            expected_row_count_for_isnot,
         )
index e3f362f3203902d0d82fb65a4a41eb2b9ab0e315..9bf8b7b4afd3624b0b73def7162420b14090d3ed 100644 (file)
@@ -604,9 +604,7 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
 
     @testing.requires.precision_numerics_enotation_large
     def test_enotation_decimal_large(self):
-        """test exceedingly large decimals.
-
-        """
+        """test exceedingly large decimals."""
 
         numbers = set(
             [
@@ -928,7 +926,8 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
 
         conn = connection
         conn.execute(
-            self.tables.data_table.insert(), {"name": "r1", "data": JSON.NULL},
+            self.tables.data_table.insert(),
+            {"name": "r1", "data": JSON.NULL},
         )
 
         eq_(
index c1f4988217f5551895adee12f864e0f0795408d3..4ba37b5778b2d2776f32cdfaa33cf6a38c542875 100644 (file)
@@ -303,8 +303,7 @@ else:
 if py35:
 
     def _formatannotation(annotation, base_module=None):
-        """vendored from python 3.7
-        """
+        """vendored from python 3.7"""
 
         if getattr(annotation, "__module__", None) == "typing":
             return repr(annotation).replace("typing.", "")
index 1fedda256447ed62adffa11e10bc80aced47354b..ffa5e03958db115897505306088c87a25d1cdee9 100644 (file)
@@ -66,7 +66,8 @@ class safe_reraise(object):
             self._exc_info = None  # remove potential circular references
             if not self.warn_only:
                 compat.raise_(
-                    exc_value, with_traceback=exc_tb,
+                    exc_value,
+                    with_traceback=exc_tb,
                 )
         else:
             if not compat.py3k and self._exc_info and self._exc_info[1]:
@@ -698,10 +699,10 @@ def class_hierarchy(cls):
 
 def iterate_attributes(cls):
     """iterate all the keys and attributes associated
-       with a class, without using getattr().
+    with a class, without using getattr().
 
-       Does not use getattr() so that class-sensitive
-       descriptors (i.e. property.__get__()) are not called.
+    Does not use getattr() so that class-sensitive
+    descriptors (i.e. property.__get__()) are not called.
 
     """
     keys = dir(cls)
@@ -1739,8 +1740,8 @@ def inject_param_text(doctext, inject_params):
 
 
 def repr_tuple_names(names):
-    """ Trims a list of strings from the middle and return a string of up to
-        four elements. Strings greater than 11 characters will be truncated"""
+    """Trims a list of strings from the middle and return a string of up to
+    four elements. Strings greater than 11 characters will be truncated"""
     if len(names) == 0:
         return None
     flag = len(names) <= 4
index e5e58c48b9a80d2ae1ee371072c1f79f73eddb40..ff68592b8f86e7b3e10de1b331cf0c714deac5d5 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,7 +13,7 @@ sign = 1
 identity = C4DAFEE1
 
 [flake8]
-show-source = true
+show-source = false
 enable-extensions = G
 # E203 is due to https://github.com/PyCQA/pycodestyle/issues/373
 ignore =
index 62b154436c4b65356305e57e2d57cffb0b3352a9..71ff06250000e46ba4b5cad4fc0464f89ddacca5 100644 (file)
@@ -31,10 +31,7 @@ Zoo = Animal = session = None
 
 class ZooMarkTest(replay_fixture.ReplayFixtureTest):
 
-    """Runs the ZooMark and squawks if method counts vary from the norm.
-
-
-    """
+    """Runs the ZooMark and squawks if method counts vary from the norm."""
 
     __requires__ = ("cpython",)
     __only_on__ = "postgresql+psycopg2"
index c8807df09a9500d043d1df54331b5f0322a192a5..0cbab7f28247ce194806efeee7557b34d686bee7 100644 (file)
@@ -15,7 +15,7 @@ class WarnDeprecatedLimitedTest(fixtures.TestBase):
             )
 
     def test_warn_deprecated_limited_cap(self):
-        """ warn_deprecated_limited() and warn_limited() use
+        """warn_deprecated_limited() and warn_limited() use
         _hash_limit_string
 
         actually just verifying that _hash_limit_string works as expected
index d99799a6a4dc22542fadde5401816bafd85bf5ad..d99fef22c5cbabc05ce911d16e36d5f238ba3995 100644 (file)
@@ -515,7 +515,9 @@ class IsolationLevelDetectTest(fixtures.TestBase):
 
         result = []
 
-        def fail_on_exec(stmt,):
+        def fail_on_exec(
+            stmt,
+        ):
             if view is not None and view in stmt:
                 result.append(("SERIALIZABLE",))
             else:
@@ -568,7 +570,8 @@ class InvalidTransactionFalsePositiveTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.error_t.insert(), [{"error_code": "01002"}],
+            cls.tables.error_t.insert(),
+            [{"error_code": "01002"}],
         )
 
     def test_invalid_transaction_detection(self, connection):
index bfd984ce7fba0f9dbd595a1c53f0c61042b24a0d..9969b84df8879c6aa498ad8225f9f94cedc04264 100644 (file)
@@ -261,9 +261,12 @@ class QueryUnicodeTest(fixtures.TestBase):
                 ).encode("UTF-8")
             )
             r = con.execute(t1.select()).first()
-            assert isinstance(r[1], util.text_type), (
-                "%s is %s instead of unicode, working on %s"
-                % (r[1], type(r[1]), meta.bind)
+            assert isinstance(
+                r[1], util.text_type
+            ), "%s is %s instead of unicode, working on %s" % (
+                r[1],
+                type(r[1]),
+                meta.bind,
             )
             eq_(r[1], util.ue("abc \xc3\xa9 def"))
 
index 6d74d19f70eaeaf81b4138e0d1ec452d45c0c44f..a7718e2e1cd55ee68588dd01227cf1aefc40ad50 100644 (file)
@@ -395,13 +395,11 @@ class ReflectHugeViewTest(fixtures.TestBase):
                 for i in range(self.col_num)
             ]
         )
-        self.view_str = view_str = (
-            "CREATE VIEW huge_named_view AS SELECT %s FROM base_table"
-            % (
-                ",".join(
-                    "long_named_column_number_%d" % i
-                    for i in range(self.col_num)
-                )
+        self.view_str = (
+            view_str
+        ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
+            ",".join(
+                "long_named_column_number_%d" % i for i in range(self.col_num)
             )
         )
         assert len(view_str) > 4000
@@ -505,7 +503,8 @@ class OwnerPlusDBTest(fixtures.TestBase):
         eq_(schema, expected_schema)
 
         mock_connection = mock.Mock(
-            dialect=dialect, scalar=mock.Mock(return_value="Some  Database"),
+            dialect=dialect,
+            scalar=mock.Mock(return_value="Some  Database"),
         )
         mock_lambda = mock.Mock()
         base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
index 8c9679a70f6802060895308c0070a11c117bd0f8..6ea5b48efe6b8f58412a33198b54de22a99082cd 100644 (file)
@@ -789,7 +789,9 @@ class TypeRoundTripTest(
     @testing.metadata_fixture()
     def datetimeoffset_fixture(self, metadata):
         t = Table(
-            "test_dates", metadata, Column("adatetimeoffset", DATETIMEOFFSET),
+            "test_dates",
+            metadata,
+            Column("adatetimeoffset", DATETIMEOFFSET),
         )
 
         return t
@@ -886,7 +888,8 @@ class TypeRoundTripTest(
                 return
 
             conn.execute(
-                t.insert(), adatetimeoffset=dto_param_value,
+                t.insert(),
+                adatetimeoffset=dto_param_value,
             )
 
             row = conn.execute(t.select()).first()
index 181667e63f3d69862c9cc8747a490c655f740894..b077669e579a7ca838d7b495958c899512885b7b 100644 (file)
@@ -103,10 +103,22 @@ class EncodingErrorsTest(fixtures.TestBase):
         )
 
     _oracle_char_combinations = testing.combinations(
-        ("STRING", cx_Oracle_STRING,),
-        ("FIXED_CHAR", cx_Oracle_FIXED_CHAR,),
-        ("CLOB", cx_Oracle_CLOB,),
-        ("NCLOB", cx_Oracle_NCLOB,),
+        (
+            "STRING",
+            cx_Oracle_STRING,
+        ),
+        (
+            "FIXED_CHAR",
+            cx_Oracle_FIXED_CHAR,
+        ),
+        (
+            "CLOB",
+            cx_Oracle_CLOB,
+        ),
+        (
+            "NCLOB",
+            cx_Oracle_NCLOB,
+        ),
         argnames="cx_oracle_type",
         id_="ia",
     )
@@ -149,7 +161,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python2
     def test_encoding_errors_sqla_py2k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         ignore_dialect = cx_oracle.dialect(
             dbapi=cx_Oracle, encoding_errors="ignore"
@@ -167,7 +181,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python2
     def test_no_encoding_errors_sqla_py2k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
 
@@ -183,7 +199,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python3
     def test_encoding_errors_cx_oracle_py3k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         ignore_dialect = cx_oracle.dialect(
             dbapi=cx_Oracle, encoding_errors="ignore"
@@ -200,7 +218,10 @@ class EncodingErrorsTest(fixtures.TestBase):
             cursor.mock_calls,
             [
                 mock.call.var(
-                    mock.ANY, None, cursor.arraysize, encodingErrors="ignore",
+                    mock.ANY,
+                    None,
+                    cursor.arraysize,
+                    encodingErrors="ignore",
                 )
             ],
         )
@@ -208,7 +229,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python3
     def test_no_encoding_errors_cx_oracle_py3k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
 
index 458906b78727b85349d94e8297ca3f562dbc7efc..4ce60db4c36dfd5fcf144264806ddad2bd1b370a 100644 (file)
@@ -212,7 +212,12 @@ drop synonym %(test_schema)s.local_table;
             ),
             {"text": "my table comment"},
         )
-        eq_(insp.get_table_comment("parent",), {"text": "my local comment"})
+        eq_(
+            insp.get_table_comment(
+                "parent",
+            ),
+            {"text": "my local comment"},
+        )
         eq_(
             insp.get_table_comment(
                 "parent", schema=testing.db.dialect.default_schema_name
index 7e082166ba7ad3088d911343228f13a32c2c648b..1a92f6b66f0e41b8cf008ba8d0e2f9fc3751b0d2 100644 (file)
@@ -1228,7 +1228,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            c.any(5), "%(param_1)s = ANY (x)", checkparams={"param_1": 5},
+            c.any(5),
+            "%(param_1)s = ANY (x)",
+            checkparams={"param_1": 5},
         )
 
         self.assert_compile(
@@ -1238,7 +1240,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            c.all(5), "%(param_1)s = ALL (x)", checkparams={"param_1": 5},
+            c.all(5),
+            "%(param_1)s = ALL (x)",
+            checkparams={"param_1": 5},
         )
 
         self.assert_compile(
@@ -2216,8 +2220,7 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
 
 class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL):
 
-    """Tests for full text searching
-    """
+    """Tests for full text searching"""
 
     __dialect__ = postgresql.dialect()
 
@@ -2238,8 +2241,8 @@ class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def _raise_query(self, q):
         """
-            useful for debugging. just do...
-            self._raise_query(q)
+        useful for debugging. just do...
+        self._raise_query(q)
         """
         c = q.compile(dialect=postgresql.dialect())
         raise ValueError(c)
index d7f5b5381c7ab47968328880e85e365e6a06a2e7..b92d46c7bea7f284614e861f95ba4b0bc317f188 100644 (file)
@@ -877,8 +877,7 @@ class ReflectionTest(fixtures.TestBase):
 
     @testing.provide_metadata
     def test_index_reflection(self):
-        """ Reflecting partial & expression-based indexes should warn
-        """
+        """Reflecting partial & expression-based indexes should warn"""
 
         metadata = self.metadata
 
index 9735540ee4dd703d489d0e9d141c4153445165af..4aa85cc926a9def1bf468c95e62b6a42f17f02f9 100644 (file)
@@ -1880,7 +1880,10 @@ class ArrayEnum(fixtures.TestBase):
                 c = "ccc"
 
             tbl.append_column(
-                Column("pyenum_col", array_cls(enum_cls(MyEnum)),),
+                Column(
+                    "pyenum_col",
+                    array_cls(enum_cls(MyEnum)),
+                ),
             )
 
         self.metadata.create_all(connection)
@@ -1917,7 +1920,10 @@ class ArrayJSON(fixtures.TestBase):
             "json_table",
             self.metadata,
             Column("id", Integer, primary_key=True),
-            Column("json_col", array_cls(json_cls),),
+            Column(
+                "json_col",
+                array_cls(json_cls),
+            ),
         )
 
         self.metadata.create_all(connection)
index c6ac94aeb66ee5900ee95f9cda415efa0b4daedb..46ff6c9e7a6b35d82b1b750f9bf153416829ecbc 100644 (file)
@@ -65,9 +65,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
     __only_on__ = "sqlite"
 
     def test_boolean(self):
-        """Test that the boolean only treats 1 as True
-
-        """
+        """Test that the boolean only treats 1 as True"""
 
         meta = MetaData(testing.db)
         t = Table(
@@ -604,7 +602,7 @@ class DialectTest(
         'true', 'false', and 'column' are undocumented reserved words
         when used as column identifiers (as of 3.5.1).  Covering them
         here to ensure they remain in place if the dialect's
-        reserved_words set is updated in the future. """
+        reserved_words set is updated in the future."""
 
         meta = MetaData(testing.db)
         t = Table(
@@ -654,7 +652,7 @@ class DialectTest(
 
     @testing.provide_metadata
     def test_quoted_identifiers_functional_two(self):
-        """"test the edgiest of edge cases, quoted table/col names
+        """ "test the edgiest of edge cases, quoted table/col names
         that start and end with quotes.
 
         SQLite claims to have fixed this in
index 8bae599a9d02b62124291ed3779e6d04309eadd4..39b8aee384104ef767f3a8ac2d45aa8daaea0c12 100644 (file)
@@ -1729,7 +1729,7 @@ class ExplicitAutoCommitDeprecatedTest(fixtures.TestBase):
     """test the 'autocommit' flag on select() and text() objects.
 
     Requires PostgreSQL so that we may define a custom function which
-    modifies the database. """
+    modifies the database."""
 
     __only_on__ = "postgresql"
 
index fef8363cfbb1137877b237678c0ca95c5c94dc84..b31be17629680bd25900239a6410d34ae1d3764e 100644 (file)
@@ -101,7 +101,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         self.assert_tables_equal(addresses, reflected_addresses)
 
     @testing.provide_metadata
-    def test_autoload_with_imply_autoload(self,):
+    def test_autoload_with_imply_autoload(
+        self,
+    ):
         meta = self.metadata
         t = Table(
             "t",
@@ -880,7 +882,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     def test_override_existing_fk(self):
         """test that you can override columns and specify new foreign
         keys to other reflected tables, on columns which *do* already
-        have that foreign key, and that the FK is not duped. """
+        have that foreign key, and that the FK is not duped."""
 
         meta = self.metadata
         Table(
@@ -2293,9 +2295,15 @@ class ComputedColumnTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_table_column(
-                table, "computed_virtual", "normal+2", False,
+                table,
+                "computed_virtual",
+                "normal+2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_table_column(
-                table, "computed_stored", "normal-42", True,
+                table,
+                "computed_stored",
+                "normal-42",
+                True,
             )
index 6ca018df8487d63eb5e924d2ab3db8b8cdb38af3..d59d8b31bb7caabc340007ed42f1dd26fe58e08e 100644 (file)
@@ -787,7 +787,7 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
     """test the 'autocommit' flag on select() and text() objects.
 
     Requires PostgreSQL so that we may define a custom function which
-    modifies the database. """
+    modifies the database."""
 
     __only_on__ = "postgresql"
 
index aba6d41915f32bce5827ec47a8859415cfb18c89..e8db1e959986b692232a42cf2961c021e6b717bb 100644 (file)
@@ -894,9 +894,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
         )
 
     def test_columns_single_inheritance_cascading_resolution_pk(self):
-        """An additional test for #4352 in terms of the requested use case.
-
-        """
+        """An additional test for #4352 in terms of the requested use case."""
 
         class TestBase(Base):
             __abstract__ = True
index 41d1bdd45e8d3cb5210374c4333cb349062f2d2c..a1d707ccd2ce4b86042d4e3b815f4ee3b21bb59a 100644 (file)
@@ -3176,7 +3176,10 @@ class MultiOwnerTest(
         self._assert_raises_ambiguous(lambda: D.c_data == 5)
 
     def test_rel_expressions_not_available(self):
-        B, D, = self.classes("B", "D")
+        (
+            B,
+            D,
+        ) = self.classes("B", "D")
 
         self._assert_raises_ambiguous(lambda: D.c_data.any(B.id == 5))
 
index a3d1d957fa096dc0cb6bba6521c1563ddb060667..75dabf64fceae49fadf98fd318a93365e45c8cbc 100644 (file)
@@ -464,8 +464,7 @@ class AttachedFileShardTest(ShardTest, fixtures.TestBase):
 
 
 class SelectinloadRegressionTest(fixtures.DeclarativeMappedTest):
-    """test #4175
-    """
+    """test #4175"""
 
     @classmethod
     def setup_classes(cls):
index 2b4fca148932b02c789c7347678b59a6da1c80d1..3c4acf4bbf3dfe825b312e74769b7360d729593a 100644 (file)
@@ -15,9 +15,7 @@ __all__ = ()
 
 
 class FixtureTest(fixtures.MappedTest):
-    """A MappedTest pre-configured with a common set of fixtures.
-
-    """
+    """A MappedTest pre-configured with a common set of fixtures."""
 
     run_define_tables = "once"
     run_setup_classes = "once"
index a94f82cd25bc23d1818ff46797efab096dd46e07..9c131591a6bf671b0269988e61f15fd7f5bf07c8 100644 (file)
@@ -893,8 +893,7 @@ class PolymorphicAttributeManagementTest(fixtures.MappedTest):
         )
 
     def test_entirely_oob_assignment(self):
-        """test warn on an unknown polymorphic identity.
-        """
+        """test warn on an unknown polymorphic identity."""
         B = self.classes.B
 
         sess = Session()
@@ -2533,7 +2532,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
         )
 
     def test_optimized_passes(self):
-        """"test that the 'optimized load' routine doesn't crash when
+        """ "test that the 'optimized load' routine doesn't crash when
         a column in the join condition is not available."""
 
         base, sub = self.tables.base, self.tables.sub
index ad9b57015081548e356ee4ee83b0741ad8df6ef0..544da404ff8f780df76ae16cbf084f719399168f 100644 (file)
@@ -1395,7 +1395,8 @@ class _PolymorphicTestBase(object):
             ("vlad", "Elbonia, Inc."),
         ]
         eq_(
-            q(self, sess).all(), expected,
+            q(self, sess).all(),
+            expected,
         )
 
     def test_mixed_entities_two(self):
index aab3570c9daa13aa2ada29066f00f6e32b709e05..9516ed4319a237f72fb1066575eea81893332136 100644 (file)
@@ -325,7 +325,7 @@ class InheritTest(fixtures.MappedTest):
         """this tests the RasterDocument being attached to the Assembly, but
         *not* the Document.  this means only a "sub-class" task, i.e.
         corresponding to an inheriting mapper but not the base mapper,
-        is created. """
+        is created."""
 
         product_mapper = mapper(
             Product,
index 48c71209f0d28a204b772ca41a7950a289454162..59087ec1be10bed8b2a239a06ca9a088e78297eb 100644 (file)
@@ -1351,9 +1351,7 @@ class SameNamedPropTwoPolymorphicSubClassesTest(fixtures.MappedTest):
 
 
 class SubClassToSubClassFromParentTest(fixtures.MappedTest):
-    """test #2617
-
-    """
+    """test #2617"""
 
     run_setup_classes = "once"
     run_setup_mappers = "once"
index 49e3ecb8efb495ffc71569c1b5d30367cbf04392..2d5829ed298addd7d07d0e95f7853c8333ef7780 100644 (file)
@@ -575,7 +575,7 @@ class AttributesTest(fixtures.ORMTest):
 
     def test_lazytrackparent(self):
         """test that the "hasparent" flag works properly
-           when lazy loaders and backrefs are used
+        when lazy loaders and backrefs are used
 
         """
 
@@ -837,7 +837,7 @@ class AttributesTest(fixtures.ORMTest):
         """changeset: 1633 broke ability to use ORM to map classes with
         unusual descriptor attributes (for example, classes that inherit
         from ones implementing zope.interface.Interface). This is a
-        simple regression test to prevent that defect. """
+        simple regression test to prevent that defect."""
 
         class des(object):
             def __get__(self, instance, owner):
@@ -1093,7 +1093,7 @@ class UtilTest(fixtures.ORMTest):
 
     def test_set_commited_value_none_uselist(self):
         """test that set_committed_value->None to a uselist generates an
-        empty list """
+        empty list"""
 
         class Foo(object):
             pass
index e726551ab7510120beb8eeb2dc92225441b378d4..114759a0bd52a87eb737c8506b88f58995001590 100644 (file)
@@ -3619,8 +3619,8 @@ class O2MConflictTest(fixtures.MappedTest):
 
 
 class PartialFlushTest(fixtures.MappedTest):
-    """test cascade behavior as it relates to object lists passed to flush().
-    """
+    """test cascade behavior as it relates to object lists passed to
+    flush()."""
 
     @classmethod
     def define_tables(cls, metadata):
index 7dd349a74ab1ca43e8fa1187ce5dd064ff85f636..dd406303005a3394c0343845323fa3e75a30d2de 100644 (file)
@@ -1162,10 +1162,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
 
 
 class SelfReferentialPostUpdateTest(fixtures.MappedTest):
-    """Post_update on a single self-referential mapper.
-
-
-    """
+    """Post_update on a single self-referential mapper."""
 
     @classmethod
     def define_tables(cls, metadata):
@@ -1502,8 +1499,8 @@ class SelfReferentialPostUpdateTest3(fixtures.MappedTest):
 
 
 class PostUpdateBatchingTest(fixtures.MappedTest):
-    """test that lots of post update cols batch together into a single UPDATE.
-    """
+    """test that lots of post update cols batch together into a single
+    UPDATE."""
 
     @classmethod
     def define_tables(cls, metadata):
index 4184483e84ae48591aff56908a22cb506b61db35..2c87a47590a23907ecd17f6ba08c4b85b9ad2cef 100644 (file)
@@ -309,7 +309,10 @@ class ComputedDefaultsOnUpdateTest(fixtures.MappedTest):
             True,
             testing.requires.computed_columns_on_update_returning,
         ),
-        ("noneagerload", False,),
+        (
+            "noneagerload",
+            False,
+        ),
         id_="ia",
     )
     def test_update_computed(self, eager):
index 28d771234ef9e30664ea2183cb7f580e474d0a26..f8817bbd76bc3ca03551c4ce62ff5a0975f0f360 100644 (file)
@@ -268,8 +268,8 @@ class DeferredTest(AssertsCompiledSQL, _fixtures.FixtureTest):
         self.sql_count_(0, go)
 
     def test_preserve_changes(self):
-        """A deferred load operation doesn't revert modifications on attributes
-        """
+        """A deferred load operation doesn't revert modifications on
+        attributes"""
 
         orders, Order = self.tables.orders, self.classes.Order
 
@@ -821,7 +821,7 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
 
     def test_locates_col(self):
         """changed in 1.0 - we don't search for deferred cols in the result
-        now.  """
+        now."""
 
         orders, Order = self.tables.orders, self.classes.Order
 
index 4cc75eca8585dbc626a103d853285d9f6e798be9..156898fcedeeb1f5cbb4c6afbd3b5f46ed992090 100644 (file)
@@ -2237,7 +2237,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest):
 
     def test_selectload(self):
         """tests lazy loading with two relationships simultaneously,
-        from the same table, using aliases.  """
+        from the same table, using aliases."""
 
         users, orders, User, Address, Order, addresses = (
             self.tables.users,
@@ -2288,7 +2288,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest):
 
     def test_joinedload(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         users, orders, User, Address, Order, addresses = (
             self.tables.users,
index b7909a8e9c490e629db7e0a5daa2171b833cf667..eebf4f115b3c82d32e1b53d2f0a55d73753d89c7 100644 (file)
@@ -221,7 +221,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
@@ -714,7 +714,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_double_w_ac(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         (
             users,
@@ -788,7 +788,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_double_w_ac_against_subquery(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         (
             users,
index 54ad88a20e75d36a8928b151c17cbeba55547640..e76cba03bad9552d6ea51ac30911f2c0b917c271 100644 (file)
@@ -766,7 +766,7 @@ class DeclarativeEventListenTest(
 
 class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
-    """"test event listeners against unmapped classes.
+    """ "test event listeners against unmapped classes.
 
     This incurs special logic.  Note if we ever do the "remove" case,
     it has to get all of these, too.
index fafaba48424c739d23579ebd689a1cc48e03b37f..b8b0baa6dc31f2da0c509b9e1e0f375368c91c20 100644 (file)
@@ -1069,8 +1069,8 @@ class ExpireTest(_fixtures.FixtureTest):
         eq_(len(list(sess)), 9)
 
     def test_state_change_col_to_deferred(self):
-        """Behavioral test to verify the current activity of loader callables
-        """
+        """Behavioral test to verify the current activity of loader
+        callables"""
 
         users, User = self.tables.users, self.classes.User
 
@@ -1117,8 +1117,8 @@ class ExpireTest(_fixtures.FixtureTest):
         assert "name" not in attributes.instance_state(u1).callables
 
     def test_state_deferred_to_col(self):
-        """Behavioral test to verify the current activity of loader callables
-        """
+        """Behavioral test to verify the current activity of loader
+        callables"""
 
         users, User = self.tables.users, self.classes.User
 
@@ -1157,8 +1157,8 @@ class ExpireTest(_fixtures.FixtureTest):
         assert "name" not in attributes.instance_state(u1).callables
 
     def test_state_noload_to_lazy(self):
-        """Behavioral test to verify the current activity of loader callables
-        """
+        """Behavioral test to verify the current activity of loader
+        callables"""
 
         users, Address, addresses, User = (
             self.tables.users,
index 845c6e6f246fd414d2e2b9719e0136380feda6b8..fdc26aba1dd3918cac4c781eca4bbff3623f0d0f 100644 (file)
@@ -771,7 +771,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_4(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         oalias1 = aliased(Order)
@@ -787,7 +790,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_5(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         self.assert_compile(
@@ -829,7 +835,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_8(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         # same as before using an aliased() for User as well
index 2317ec8cecd436ca2ab0547f3dfb5ff307c6b769..7c96c93bd0259207f551508cc6e2dece3450457e 100644 (file)
@@ -770,7 +770,7 @@ class MergeTest(_fixtures.FixtureTest):
 
     def test_no_relationship_cascade(self):
         """test that merge doesn't interfere with a relationship()
-           target that specifically doesn't include 'merge' cascade.
+        target that specifically doesn't include 'merge' cascade.
         """
 
         Address, addresses, users, User = (
index d7a4b0fab4be2b8e1f856fa07f7e493fcd34753b..b13ab00057a700d7adf5fb37dfae35e887b1071c 100644 (file)
@@ -266,10 +266,13 @@ class NaturalPKTest(fixtures.MappedTest):
             # test passive_updates=True; update user
             self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
-        assert User(
-            username="jack",
-            addresses=[Address(username="jack"), Address(username="jack")],
-        ) == sess.query(User).get("jack")
+        assert (
+            User(
+                username="jack",
+                addresses=[Address(username="jack"), Address(username="jack")],
+            )
+            == sess.query(User).get("jack")
+        )
 
         u1 = sess.query(User).get("jack")
         u1.addresses = []
@@ -1117,10 +1120,13 @@ class NonPKCascadeTest(fixtures.MappedTest):
             # test passive_updates=True; update user
             self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
-        assert User(
-            username="jack",
-            addresses=[Address(username="jack"), Address(username="jack")],
-        ) == sess.query(User).get(u1.id)
+        assert (
+            User(
+                username="jack",
+                addresses=[Address(username="jack"), Address(username="jack")],
+            )
+            == sess.query(User).get(u1.id)
+        )
         sess.expunge_all()
 
         u1 = sess.query(User).get(u1.id)
index bf7ee2828675feeae65c6308989adc3d72fd97ca..0cb9b7d1a2ec26ee4f1bb366ee45015e2ecc965a 100644 (file)
@@ -770,7 +770,10 @@ class SubclassRelationshipTest(
         )
 
     def test_any_walias(self):
-        DataContainer, Job, = (self.classes.DataContainer, self.classes.Job)
+        (
+            DataContainer,
+            Job,
+        ) = (self.classes.DataContainer, self.classes.Job)
 
         Job_A = aliased(Job)
 
@@ -860,7 +863,10 @@ class SubclassRelationshipTest(
         )
 
     def test_join_walias(self):
-        DataContainer, Job, = (self.classes.DataContainer, self.classes.Job)
+        (
+            DataContainer,
+            Job,
+        ) = (self.classes.DataContainer, self.classes.Job)
 
         Job_A = aliased(Job)
 
index eb27e039459d85e3e5d361893bb637b3a12871a6..40ec724eeca4593e22fb9ccb8647b90f3dbc490c 100644 (file)
@@ -2831,15 +2831,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
         ).all()
 
         # test that the contents are not adapted by the aliased join
-        assert (
-            [User(id=7), User(id=8)]
-            == sess.query(User)
-            .join("addresses", aliased=True)
-            .filter(
-                ~User.addresses.any(Address.email_address == "fred@fred.com")
-            )
-            .all()
-        )
+        assert [User(id=7), User(id=8)] == sess.query(User).join(
+            "addresses", aliased=True
+        ).filter(
+            ~User.addresses.any(Address.email_address == "fred@fred.com")
+        ).all()
 
         assert [User(id=10)] == sess.query(User).outerjoin(
             "addresses", aliased=True
@@ -2853,15 +2849,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
         sess = create_session()
 
         # test that any() doesn't overcorrelate
-        assert (
-            [User(id=7), User(id=8)]
-            == sess.query(User)
-            .join("addresses")
-            .filter(
-                ~User.addresses.any(Address.email_address == "fred@fred.com")
-            )
-            .all()
-        )
+        assert [User(id=7), User(id=8)] == sess.query(User).join(
+            "addresses"
+        ).filter(
+            ~User.addresses.any(Address.email_address == "fred@fred.com")
+        ).all()
 
     def test_has(self):
         # see also HasAnyTest, a newer suite which tests these at the level of
@@ -2877,41 +2869,40 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
             Address.user.has(name="fred")
         ).all()
 
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4), Address(id=5)]
-            == sess.query(Address)
-            .filter(Address.user.has(User.name.like("%ed%")))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [
+            Address(id=2),
+            Address(id=3),
+            Address(id=4),
+            Address(id=5),
+        ] == sess.query(Address).filter(
+            Address.user.has(User.name.like("%ed%"))
+        ).order_by(
+            Address.id
+        ).all()
 
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).filter(Address.user.has(User.name.like("%ed%"), id=8)).order_by(
+            Address.id
+        ).all()
 
         # test has() doesn't overcorrelate
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .join("user")
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).join("user").filter(
+            Address.user.has(User.name.like("%ed%"), id=8)
+        ).order_by(
+            Address.id
+        ).all()
 
         # test has() doesn't get subquery contents adapted by aliased join
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .join("user", aliased=True)
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).join("user", aliased=True).filter(
+            Address.user.has(User.name.like("%ed%"), id=8)
+        ).order_by(
+            Address.id
+        ).all()
 
         dingaling = sess.query(Dingaling).get(2)
         assert [User(id=9)] == sess.query(User).filter(
@@ -3436,7 +3427,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
 
     def test_union_literal_expressions_compile(self):
         """test that column expressions translate during
-            the _from_statement() portion of union(), others"""
+        the _from_statement() portion of union(), others"""
 
         User = self.classes.User
 
@@ -3628,25 +3619,20 @@ class AggregateTest(QueryTest):
         User, Address = self.classes.User, self.classes.Address
 
         sess = create_session()
-        assert (
-            [User(name="ed", id=8)]
-            == sess.query(User)
-            .order_by(User.id)
-            .group_by(User)
-            .join("addresses")
-            .having(func.count(Address.id) > 2)
-            .all()
-        )
+        assert [User(name="ed", id=8)] == sess.query(User).order_by(
+            User.id
+        ).group_by(User).join("addresses").having(
+            func.count(Address.id) > 2
+        ).all()
 
-        assert (
-            [User(name="jack", id=7), User(name="fred", id=9)]
-            == sess.query(User)
-            .order_by(User.id)
-            .group_by(User)
-            .join("addresses")
-            .having(func.count(Address.id) < 2)
-            .all()
-        )
+        assert [
+            User(name="jack", id=7),
+            User(name="fred", id=9),
+        ] == sess.query(User).order_by(User.id).group_by(User).join(
+            "addresses"
+        ).having(
+            func.count(Address.id) < 2
+        ).all()
 
 
 class ExistsTest(QueryTest, AssertsCompiledSQL):
index 0ac0c172a98caa9c919d4a0ba5363ad6f569898c..fb55919184920d7dda046f3c1bbefefa7da01364 100644 (file)
@@ -2354,7 +2354,7 @@ class JoinConditionErrorTest(fixtures.TestBase):
 class TypeMatchTest(fixtures.MappedTest):
 
     """test errors raised when trying to add items
-        whose type is not handled by a relationship"""
+    whose type is not handled by a relationship"""
 
     @classmethod
     def define_tables(cls, metadata):
@@ -3079,7 +3079,9 @@ class ViewOnlySyncBackref(fixtures.MappedTest):
             return
 
         mapper(
-            A, self.tables.t1, properties={"bs": rel()},
+            A,
+            self.tables.t1,
+            properties={"bs": rel()},
         )
         mapper(B, self.tables.t2)
 
@@ -3623,9 +3625,7 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
 
 
 class FunctionAsPrimaryJoinTest(fixtures.DeclarativeMappedTest):
-    """test :ticket:`3831`
-
-    """
+    """test :ticket:`3831`"""
 
     __only_on__ = "sqlite"
 
index f4c17b482b3073f0a437651f136c25fdd97eb27c..f3e7bfcca280b2a8248279ee94a208430794bb87 100644 (file)
@@ -435,7 +435,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
index 825375d0607b26f4e078b3cd9c01718a3b973151..d882d8deedab90e9e94243cf0e72c84673886bfe 100644 (file)
@@ -433,7 +433,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
index 6e68b5899622d17fd25063845025c1859cf21505..4e4a7777ce95c6017656e301056744708500cbec 100644 (file)
@@ -3488,7 +3488,9 @@ class EnsurePKSortableTest(fixtures.MappedTest):
                 )
 
             assert_raises_message(
-                sa.exc.InvalidRequestError, message, s.flush,
+                sa.exc.InvalidRequestError,
+                message,
+                s.flush,
             )
         else:
             s.flush()
index 3f83ca967c33101d5a1e378f6026541ccae70505..55d491eb13f8ee30ac2fb2abf13918c3a93262d9 100644 (file)
@@ -459,16 +459,16 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def cross_schema_fk_reflection(self):
-        """target system must support reflection of inter-schema foreign keys
-        """
+        """target system must support reflection of inter-schema foreign
+        keys"""
         return only_on(["postgresql", "mysql", "mssql"])
 
     @property
     def implicit_default_schema(self):
         """target system has a strong concept of 'default' schema that can
-           be referred to implicitly.
+        be referred to implicitly.
 
-           basically, PostgreSQL.
+        basically, PostgreSQL.
 
         """
         return only_on(["postgresql"])
@@ -795,9 +795,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def symbol_names_w_double_quote(self):
-        """Target driver can create tables with a name like 'some " table'
-
-        """
+        """Target driver can create tables with a name like 'some " table'"""
 
         return skip_if(
             [no_support("oracle", "ORA-03001: unimplemented feature")]
@@ -805,7 +803,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def emulated_lastrowid(self):
-        """"target dialect retrieves cursor.lastrowid or an equivalent
+        """ "target dialect retrieves cursor.lastrowid or an equivalent
         after an insert() construct executes.
         """
         return fails_on_everything_except(
@@ -818,7 +816,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def dbapi_lastrowid(self):
-        """"target backend includes a 'lastrowid' accessor on the DBAPI
+        """ "target backend includes a 'lastrowid' accessor on the DBAPI
         cursor object.
 
         """
index 2b025309840c19bcbfaa271dc0f08fa866d738ac..20ab18f4247e29975275fc626c726e1e09065dbc 100644 (file)
@@ -559,9 +559,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         eq_(s.positiontup, ["a", "b", "c"])
 
     def test_nested_label_targeting(self):
-        """test nested anonymous label generation.
-
-        """
+        """test nested anonymous label generation."""
         s1 = table1.select()
         s2 = s1.alias()
         s3 = select([s2], use_labels=True)
@@ -2959,7 +2957,7 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase):
 
     def _test_binds_no_hash_collision(self):
         """test that construct_params doesn't corrupt dict
-            due to hash collisions"""
+        due to hash collisions"""
 
         total_params = 100000
 
@@ -4088,7 +4086,11 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
             {
                 "anotherid": (
                     "anotherid",
-                    (t1.c.anotherid, "anotherid", "anotherid",),
+                    (
+                        t1.c.anotherid,
+                        "anotherid",
+                        "anotherid",
+                    ),
                     t1.c.anotherid.type,
                 )
             },
index 5639ec35cbb96fbe8b4bae80151d5ad45e0e004b..179f751c7a8b1c0dfd3a6e02c00d096f3b4ab73e 100644 (file)
@@ -275,9 +275,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_recursive_union_alias_two(self):
-        """
-
-        """
+        """"""
 
         # I know, this is the PG VALUES keyword,
         # we're cheating here.  also yes we need the SELECT,
index f72f446798e695312ccfcdbcfe2f24b2c49bcbdc..7d813e8b234f0f62dfe753ac0fa97df3c4d10031 100644 (file)
@@ -250,7 +250,12 @@ class DefaultObjectTest(fixtures.TestBase):
             Column("boolcol1", sa.Boolean, default=True),
             Column("boolcol2", sa.Boolean, default=False),
             # python function which uses ExecutionContext
-            Column("col7", Integer, default=lambda: 5, onupdate=lambda: 10,),
+            Column(
+                "col7",
+                Integer,
+                default=lambda: 5,
+                onupdate=lambda: 10,
+            ),
             # python builtin
             Column(
                 "col8",
index d8d7c4ad9d63cc19f30d7c0d8afa86d2057f05c2..5e1fdbddf25ee10a2725c6230e7ab1dc4fed9a8e 100644 (file)
@@ -729,7 +729,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
                 "Column('foo', Integer(), table=None, primary_key=True, "
                 "nullable=False, onupdate=%s, default=%s, server_default=%s, "
                 "comment='foo')"
-                % (ColumnDefault(1), ColumnDefault(42), DefaultClause("42"),),
+                % (
+                    ColumnDefault(1),
+                    ColumnDefault(42),
+                    DefaultClause("42"),
+                ),
             ),
             (
                 Table("bar", MetaData(), Column("x", String)),
@@ -5155,7 +5159,8 @@ class CopyDialectOptionsTest(fixtures.TestBase):
     @classmethod
     def check_dialect_options_(cls, t):
         eq_(
-            t.dialect_kwargs["copydialectoptionstest_some_table_arg"], "a1",
+            t.dialect_kwargs["copydialectoptionstest_some_table_arg"],
+            "a1",
         )
         eq_(
             t.c.foo.dialect_kwargs["copydialectoptionstest_some_column_arg"],
@@ -5198,7 +5203,9 @@ class CopyDialectOptionsTest(fixtures.TestBase):
                 copydialectoptionstest_some_table_arg="a1",
             )
             Index(
-                "idx", t1.c.foo, copydialectoptionstest_some_index_arg="a4",
+                "idx",
+                t1.c.foo,
+                copydialectoptionstest_some_index_arg="a4",
             )
 
             self.check_dialect_options_(t1)
index e3224084dc9de310c179c7a641bed7e837137529..1a7411d873640cff4657830d370bd80e78923f72 100644 (file)
@@ -995,8 +995,7 @@ class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
 
 class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
 
-    """test interaction of and_()/or_() with boolean , null constants
-    """
+    """test interaction of and_()/or_() with boolean , null constants"""
 
     __dialect__ = default.DefaultDialect(supports_native_boolean=True)
 
index 41dbc4838b6cff665a7a6a8c0f67eb5ec22ea310..550c0ddcb931615893598a19577a29a086add199 100644 (file)
@@ -1476,7 +1476,8 @@ class PositionalTextTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.text1.insert(), [dict(a="a1", b="b1", c="c1", d="d1")],
+            cls.tables.text1.insert(),
+            [dict(a="a1", b="b1", c="c1", d="d1")],
         )
 
     def test_via_column(self):
index 9f16576a2f7b6faf63ff1e7a2659b72d8f9591c7..3df92a179048ebd80dc12d6f7af8b951154b30cb 100644 (file)
@@ -123,14 +123,14 @@ class LegacySequenceExecTest(fixtures.TestBase):
 
     def test_explicit_optional(self):
         """test dialect executes a Sequence, returns nextval, whether
-        or not "optional" is set """
+        or not "optional" is set"""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(s.execute(testing.db))
 
     def test_func_implicit_connectionless_execute(self):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence", metadata=MetaData(testing.db))
         self._assert_seq_result(s.next_value().execute().scalar())
@@ -177,21 +177,21 @@ class SequenceExecTest(fixtures.TestBase):
 
     def test_execute_optional(self, connection):
         """test dialect executes a Sequence, returns nextval, whether
-        or not "optional" is set """
+        or not "optional" is set"""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(connection.execute(s))
 
     def test_execute_next_value(self, connection):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence")
         self._assert_seq_result(connection.scalar(s.next_value()))
 
     def test_execute_optional_next_value(self, connection):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(connection.scalar(s.next_value()))
@@ -521,7 +521,8 @@ class SequenceAsServerDefaultTest(
         )
 
         eq_(
-            connection.execute("select id from t_seq_test_2").scalar(), 1,
+            connection.execute("select id from t_seq_test_2").scalar(),
+            1,
         )
 
     def test_default_core_server_only(self, connection):