]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
upgrade to black 20.8b1
authorMike Bayer <mike_mp@zzzcomputing.com>
Mon, 28 Sep 2020 18:08:59 +0000 (14:08 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Mon, 28 Sep 2020 19:17:26 +0000 (15:17 -0400)
It's better; the majority of these changes look more readable to me.
Also found some docstrings that had formatting / quoting issues.

Change-Id: I582a45fde3a5648b2f36bab96bad56881321899b

169 files changed:
.pre-commit-config.yaml
examples/asyncio/async_orm.py
examples/asyncio/basic.py
examples/asyncio/greenlet_orm.py
examples/dogpile_caching/caching_query.py
examples/versioned_rows/versioned_rows_w_versionid.py
examples/vertical/dictlike-polymorphic.py
lib/sqlalchemy/connectors/mxodbc.py
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mssql/mxodbc.py
lib/sqlalchemy/dialects/mysql/cymysql.py
lib/sqlalchemy/dialects/mysql/mariadb.py
lib/sqlalchemy/dialects/mysql/types.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/postgresql/asyncpg.py
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/dialects/postgresql/hstore.py
lib/sqlalchemy/dialects/postgresql/json.py
lib/sqlalchemy/dialects/postgresql/ranges.py
lib/sqlalchemy/dialects/sqlite/pysqlite.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/cursor.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/url.py
lib/sqlalchemy/event/api.py
lib/sqlalchemy/event/attr.py
lib/sqlalchemy/event/base.py
lib/sqlalchemy/event/registry.py
lib/sqlalchemy/exc.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/asyncio/engine.py
lib/sqlalchemy/ext/asyncio/result.py
lib/sqlalchemy/ext/asyncio/session.py
lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/orderinglist.py
lib/sqlalchemy/inspection.py
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/context.py
lib/sqlalchemy/orm/decl_api.py
lib/sqlalchemy/orm/decl_base.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/coercions.py
lib/sqlalchemy/sql/crud.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/dml.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/lambdas.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/sql/util.py
lib/sqlalchemy/testing/assertions.py
lib/sqlalchemy/testing/provision.py
lib/sqlalchemy/testing/requirements.py
lib/sqlalchemy/testing/suite/test_insert.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_results.py
lib/sqlalchemy/testing/suite/test_select.py
lib/sqlalchemy/testing/suite/test_sequence.py
lib/sqlalchemy/testing/suite/test_types.py
lib/sqlalchemy/testing/warnings.py
lib/sqlalchemy/util/_collections.py
lib/sqlalchemy/util/compat.py
lib/sqlalchemy/util/deprecations.py
lib/sqlalchemy/util/langhelpers.py
lib/sqlalchemy/util/queue.py
test/aaa_profiling/test_orm.py
test/aaa_profiling/test_resultset.py
test/base/test_result.py
test/base/test_warnings.py
test/dialect/mssql/test_compiler.py
test/dialect/mssql/test_engine.py
test/dialect/mssql/test_query.py
test/dialect/mssql/test_reflection.py
test/dialect/mssql/test_sequence.py
test/dialect/mssql/test_types.py
test/dialect/mysql/test_dialect.py
test/dialect/mysql/test_query.py
test/dialect/mysql/test_types.py
test/dialect/oracle/test_dialect.py
test/dialect/oracle/test_reflection.py
test/dialect/oracle/test_types.py
test/dialect/postgresql/test_compiler.py
test/dialect/postgresql/test_dialect.py
test/dialect/postgresql/test_reflection.py
test/dialect/postgresql/test_types.py
test/dialect/test_sqlite.py
test/engine/test_execute.py
test/engine/test_parseconnect.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/ext/asyncio/test_engine_py3k.py
test/ext/test_associationproxy.py
test/ext/test_baked.py
test/ext/test_horizontal_shard.py
test/orm/_fixtures.py
test/orm/declarative/test_inheritance.py
test/orm/inheritance/test_abc_inheritance.py
test/orm/inheritance/test_assorted_poly.py
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_productspec.py
test/orm/inheritance/test_relationship.py
test/orm/test_attributes.py
test/orm/test_bind.py
test/orm/test_cache_key.py
test/orm/test_cascade.py
test/orm/test_composites.py
test/orm/test_core_compilation.py
test/orm/test_cycles.py
test/orm/test_dataclasses_py3k.py
test/orm/test_defaults.py
test/orm/test_deferred.py
test/orm/test_deprecations.py
test/orm/test_eager_relations.py
test/orm/test_events.py
test/orm/test_expire.py
test/orm/test_froms.py
test/orm/test_joins.py
test/orm/test_merge.py
test/orm/test_naturalpks.py
test/orm/test_of_type.py
test/orm/test_query.py
test/orm/test_relationship_criteria.py
test/orm/test_relationships.py
test/orm/test_selectin_relations.py
test/orm/test_subquery_relations.py
test/orm/test_transaction.py
test/orm/test_unitofwork.py
test/orm/test_update_delete.py
test/requirements.py
test/sql/test_compare.py
test/sql/test_compiler.py
test/sql/test_cte.py
test/sql/test_defaults.py
test/sql/test_deprecations.py
test/sql/test_external_traversal.py
test/sql/test_functions.py
test/sql/test_identity_column.py
test/sql/test_insert_exec.py
test/sql/test_metadata.py
test/sql/test_operators.py
test/sql/test_query.py
test/sql/test_quote.py
test/sql/test_resultset.py
test/sql/test_returning.py
test/sql/test_roles.py
test/sql/test_selectable.py
test/sql/test_sequences.py
test/sql/test_types.py
test/sql/test_update.py
test/sql/test_values.py
tox.ini

index 00a097cf3d76bf6090515136d4c4d18908ec0dcd..ca602f0f0eb449d9fa3f016392ca565f0498e41c 100644 (file)
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 -   repo: https://github.com/python/black
-    rev: 19.10b0
+    rev: 20.8b1
     hooks:
     -   id: black
 
index b1054a239fd9bf770582af2a195091bf17a8d711..52df8bd2fc4d4a264788e8e3867a0e53def935b0 100644 (file)
@@ -38,7 +38,8 @@ async def async_main():
     """Main program function."""
 
     engine = create_async_engine(
-        "postgresql+asyncpg://scott:tiger@localhost/test", echo=True,
+        "postgresql+asyncpg://scott:tiger@localhost/test",
+        echo=True,
     )
 
     async with engine.begin() as conn:
index 05cdd8a05c9680099ae8f80ee33ed5db92dd25b8..efdb7e9e8734c4508ee10caebdc8e33ff408e3bd 100644 (file)
@@ -27,7 +27,8 @@ t1 = Table(
 async def async_main():
     # engine is an instance of AsyncEngine
     engine = create_async_engine(
-        "postgresql+asyncpg://scott:tiger@localhost/test", echo=True,
+        "postgresql+asyncpg://scott:tiger@localhost/test",
+        echo=True,
     )
 
     # conn is an instance of AsyncConnection
index e0b568c4b8ed90e93f6da81fe385a2a18836ee62..7429b6853b8d27a2de610f64295e042fb46adb73 100644 (file)
@@ -64,7 +64,8 @@ async def async_main():
     """Main program function."""
 
     engine = create_async_engine(
-        "postgresql+asyncpg://scott:tiger@localhost/test", echo=True,
+        "postgresql+asyncpg://scott:tiger@localhost/test",
+        echo=True,
     )
     async with engine.begin() as conn:
         await conn.run_sync(Base.metadata.drop_all)
index f994473612b8bed39938e5f974a558b7c33d3be6..d1199090c89b0411f4a98cb899efab343d0e737f 100644 (file)
@@ -146,7 +146,7 @@ class FromCache(UserDefinedOption):
 
 class RelationshipCache(FromCache):
     """Specifies that a Query as called within a "lazy load"
-       should load results from a cache."""
+    should load results from a cache."""
 
     propagate_to_loaders = True
 
index 7a1fe541974ccd63259b3c6d597972868fdad940..790d2ed14bdd402028a265d0e74f8c436e448b5c 100644 (file)
@@ -101,14 +101,17 @@ session.commit()
 e1.data = "e2"
 session.commit()
 
-assert session.query(
-    Example.id,
-    Example.version_id,
-    Example.is_current_version,
-    Example.calc_is_current_version,
-    Example.data,
-).order_by(Example.id, Example.version_id).all() == (
-    [(1, 1, False, False, "e1"), (1, 2, True, True, "e2")]
+assert (
+    session.query(
+        Example.id,
+        Example.version_id,
+        Example.is_current_version,
+        Example.calc_is_current_version,
+        Example.data,
+    )
+    .order_by(Example.id, Example.version_id)
+    .all()
+    == ([(1, 1, False, False, "e1"), (1, 2, True, True, "e2")])
 )
 
 # example 2, versioning with a parent
@@ -159,12 +162,15 @@ session.commit()
 assert p1.child_id == 1
 assert p1.child.version_id == 2
 
-assert session.query(
-    Child.id,
-    Child.version_id,
-    Child.is_current_version,
-    Child.calc_is_current_version,
-    Child.data,
-).order_by(Child.id, Child.version_id).all() == (
-    [(1, 1, False, False, "c1"), (1, 2, True, True, "c2")]
+assert (
+    session.query(
+        Child.id,
+        Child.version_id,
+        Child.is_current_version,
+        Child.calc_is_current_version,
+        Child.data,
+    )
+    .order_by(Child.id, Child.version_id)
+    .all()
+    == ([(1, 1, False, False, "c1"), (1, 2, True, True, "c2")])
 )
index 73d12ee4f2592db307fd29bffbe9ce1f938f3cc6..23a6e093d9c9bad3fd1750035f0079b490f40479 100644 (file)
@@ -67,9 +67,8 @@ class PolymorphicVerticalProperty(object):
 
     @value.comparator
     class value(PropComparator):
-        """A comparator for .value, builds a polymorphic comparison via CASE.
-
-        """
+        """A comparator for .value, builds a polymorphic comparison
+        via CASE."""
 
         def __init__(self, cls):
             self.cls = cls
index e243aba80f68bd372b361d7a87a7035b61f10c3c..e630f36e3de5ed03408a3696810139885bf4b823 100644 (file)
@@ -66,7 +66,7 @@ class MxODBCConnector(Connector):
 
     @classmethod
     def _load_mx_exceptions(cls):
-        """ Import mxODBC exception classes into the module namespace,
+        """Import mxODBC exception classes into the module namespace,
         as if they had been imported normally. This is done here
         to avoid requiring all SQLAlchemy users to install mxODBC.
         """
@@ -84,7 +84,7 @@ class MxODBCConnector(Connector):
         return connect
 
     def _error_handler(self):
-        """ Return a handler that adjusts mxODBC's raised Warnings to
+        """Return a handler that adjusts mxODBC's raised Warnings to
         emit Python standard warnings.
         """
         from mx.ODBC.Error import Warning as MxOdbcWarning
index 2cbdc19aacc24e25064143bcebf8e68d1def71b9..87ccc8427c8ff37adea9ab43904981210e9f4509 100644 (file)
@@ -1033,7 +1033,7 @@ class TIME(sqltypes.TIME):
                     self.__zero_date, value.time()
                 )
             elif isinstance(value, datetime.time):
-                """ issue #5339
+                """issue #5339
                 per: https://github.com/mkleehammer/pyodbc/wiki/Tips-and-Tricks-by-Database-Platform#time-columns
                 pass TIME value as string
                 """  # noqa
@@ -1260,9 +1260,7 @@ class SQL_VARIANT(sqltypes.TypeEngine):
 
 
 class TryCast(sql.elements.Cast):
-    """Represent a SQL Server TRY_CAST expression.
-
-    """
+    """Represent a SQL Server TRY_CAST expression."""
 
     __visit_name__ = "try_cast"
 
@@ -1579,8 +1577,12 @@ class MSExecutionContext(default.DefaultExecutionContext):
         elif (
             self.isinsert or self.isupdate or self.isdelete
         ) and self.compiled.returning:
-            self.cursor_fetch_strategy = _cursor.FullyBufferedCursorFetchStrategy(  # noqa
-                self.cursor, self.cursor.description, self.cursor.fetchall()
+            self.cursor_fetch_strategy = (
+                _cursor.FullyBufferedCursorFetchStrategy(
+                    self.cursor,
+                    self.cursor.description,
+                    self.cursor.fetchall(),
+                )
             )
 
         if self._enable_identity_insert:
@@ -1729,8 +1731,8 @@ class MSSQLCompiler(compiler.SQLCompiler):
         return text
 
     def limit_clause(self, select, **kw):
-        """ MSSQL 2012 supports OFFSET/FETCH operators
-            Use it instead subquery with row_number
+        """MSSQL 2012 supports OFFSET/FETCH operators
+        Use it instead subquery with row_number
 
         """
 
index 998153d7a77b504804b6c78bfc17726fefe758a0..b274c2a2b267c6c4d1901a755aa22e3e7c0dfa5b 100644 (file)
@@ -59,8 +59,7 @@ from ...connectors.mxodbc import MxODBCConnector
 
 
 class _MSNumeric_mxodbc(_MSNumeric_pyodbc):
-    """Include pyodbc's numeric processor.
-    """
+    """Include pyodbc's numeric processor."""
 
 
 class _MSDate_mxodbc(_MSDate):
index 2b45f5ddba27f0830a44533f9c2b22f5efe87bc0..f1d0aedaf99751232acf883887d95e10d9159ef3 100644 (file)
@@ -28,8 +28,7 @@ from ... import util
 
 class _cymysqlBIT(BIT):
     def result_processor(self, dialect, coltype):
-        """Convert a MySQL's 64 bit, variable length binary string to a long.
-        """
+        """Convert MySQL's 64 bit, variable length binary string to a long."""
 
         def process(value):
             if value is not None:
index c6cadcd603979063525a04dbe665c02e3620aa5f..0dbb579e815380e71e7defed7fa7742b39fc5116 100644 (file)
@@ -13,5 +13,10 @@ def loader(driver):
     driver_cls = getattr(driver_mod, driver).dialect
 
     return type(
-        "MariaDBDialect_%s" % driver, (MariaDBDialect, driver_cls,), {}
+        "MariaDBDialect_%s" % driver,
+        (
+            MariaDBDialect,
+            driver_cls,
+        ),
+        {},
     )
index 3b455cfb1fa373d676721185ea890eb1304d319c..59497500015401da341c41c476eb28bbc0293d2b 100644 (file)
@@ -440,9 +440,7 @@ class TIME(sqltypes.TIME):
 
 
 class TIMESTAMP(sqltypes.TIMESTAMP):
-    """MySQL TIMESTAMP type.
-
-    """
+    """MySQL TIMESTAMP type."""
 
     __visit_name__ = "TIMESTAMP"
 
@@ -467,9 +465,7 @@ class TIMESTAMP(sqltypes.TIMESTAMP):
 
 
 class DATETIME(sqltypes.DATETIME):
-    """MySQL DATETIME type.
-
-    """
+    """MySQL DATETIME type."""
 
     __visit_name__ = "DATETIME"
 
index 651a6e6736ea1ef2b6d99026df0f9fa4a46e2677..d1b69100f769134c551487641cd76db23687e919 100644 (file)
@@ -970,7 +970,11 @@ class OracleDialect_cx_oracle(OracleDialect):
             # allow all strings to come back natively as Unicode
             elif (
                 dialect.coerce_to_unicode
-                and default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR,)
+                and default_type
+                in (
+                    cx_Oracle.STRING,
+                    cx_Oracle.FIXED_CHAR,
+                )
                 and default_type is not cx_Oracle.CLOB
                 and default_type is not cx_Oracle.NCLOB
             ):
@@ -1018,7 +1022,9 @@ class OracleDialect_cx_oracle(OracleDialect):
                 cx_Oracle.BLOB,
             ):
                 return cursor.var(
-                    cx_Oracle.LONG_BINARY, size, cursor.arraysize,
+                    cx_Oracle.LONG_BINARY,
+                    size,
+                    cursor.arraysize,
                 )
 
         return output_type_handler
index 1f988153c5ea548b9f2b182b4a1225da76784c4b..8b20de2b6dc9f2adae1cbb151b768eb35c664222 100644 (file)
@@ -576,11 +576,13 @@ class AsyncAdapt_asyncpg_dbapi:
 
         if async_fallback:
             return AsyncAdaptFallback_asyncpg_connection(
-                self, await_fallback(self.asyncpg.connect(*arg, **kw)),
+                self,
+                await_fallback(self.asyncpg.connect(*arg, **kw)),
             )
         else:
             return AsyncAdapt_asyncpg_connection(
-                self, await_only(self.asyncpg.connect(*arg, **kw)),
+                self,
+                await_only(self.asyncpg.connect(*arg, **kw)),
             )
 
     class Error(Exception):
index 8786938668560c2ea0b8710c3b302c2edc28fcd3..5ed56db56044e8541a0803f8463d65339407477f 100644 (file)
@@ -1338,9 +1338,7 @@ class TIME(sqltypes.TIME):
 
 class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
 
-    """PostgreSQL INTERVAL type.
-
-    """
+    """PostgreSQL INTERVAL type."""
 
     __visit_name__ = "INTERVAL"
     native = True
@@ -1414,7 +1412,7 @@ class UUID(sqltypes.TypeEngine):
          as Python uuid objects, converting to/from string via the
          DBAPI.
 
-         """
+        """
         self.as_uuid = as_uuid
 
     def coerce_compared_value(self, op, value):
@@ -3026,7 +3024,9 @@ class PGDialect(default.DefaultDialect):
                     type_=sqltypes.Unicode,
                 ),
                 sql.bindparam(
-                    "schema", util.text_type(schema), type_=sqltypes.Unicode,
+                    "schema",
+                    util.text_type(schema),
+                    type_=sqltypes.Unicode,
                 ),
             )
         )
@@ -3200,7 +3200,9 @@ class PGDialect(default.DefaultDialect):
                 "n.nspname=:schema"
             ).bindparams(
                 sql.bindparam(
-                    "schema", util.text_type(schema), type_=sqltypes.Unicode,
+                    "schema",
+                    util.text_type(schema),
+                    type_=sqltypes.Unicode,
                 ),
             )
         )
index cb89f7c5f8de170fecea2cc2f67f8ff116488bf9..15ec2a585ea6c6c5f7f59888a544d4acb9ba516e 100644 (file)
@@ -160,13 +160,11 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
             return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
 
         def has_all(self, other):
-            """Boolean expression.  Test for presence of all keys in jsonb
-            """
+            """Boolean expression.  Test for presence of all keys in jsonb"""
             return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
 
         def has_any(self, other):
-            """Boolean expression.  Test for presence of any key in jsonb
-            """
+            """Boolean expression.  Test for presence of any key in jsonb"""
             return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
 
         def contains(self, other, **kwargs):
index 9ffe9cfe88b89e51ec9fd5b3d77a2c24abed864e..63e1656e06ca95e0571371cff749391a70106bb0 100644 (file)
@@ -204,7 +204,7 @@ class JSON(sqltypes.JSON):
 
          .. versionadded:: 1.1
 
-         """
+        """
         super(JSON, self).__init__(none_as_null=none_as_null)
         if astext_type is not None:
             self.astext_type = astext_type
@@ -300,13 +300,11 @@ class JSONB(JSON):
             return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
 
         def has_all(self, other):
-            """Boolean expression.  Test for presence of all keys in jsonb
-            """
+            """Boolean expression.  Test for presence of all keys in jsonb"""
             return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
 
         def has_any(self, other):
-            """Boolean expression.  Test for presence of any key in jsonb
-            """
+            """Boolean expression.  Test for presence of any key in jsonb"""
             return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
 
         def contains(self, other, **kwargs):
index a31d958ed93b9dc7baa82947ef3f3ac25a929bf9..ddc12c096d3dd61090493717837161417e6c26de 100644 (file)
@@ -100,48 +100,36 @@ class RangeOperators(object):
 
 
 class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL INT4RANGE type.
-
-    """
+    """Represent the PostgreSQL INT4RANGE type."""
 
     __visit_name__ = "INT4RANGE"
 
 
 class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL INT8RANGE type.
-
-    """
+    """Represent the PostgreSQL INT8RANGE type."""
 
     __visit_name__ = "INT8RANGE"
 
 
 class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL NUMRANGE type.
-
-    """
+    """Represent the PostgreSQL NUMRANGE type."""
 
     __visit_name__ = "NUMRANGE"
 
 
 class DATERANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL DATERANGE type.
-
-    """
+    """Represent the PostgreSQL DATERANGE type."""
 
     __visit_name__ = "DATERANGE"
 
 
 class TSRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL TSRANGE type.
-
-    """
+    """Represent the PostgreSQL TSRANGE type."""
 
     __visit_name__ = "TSRANGE"
 
 
 class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
-    """Represent the PostgreSQL TSTZRANGE type.
-
-    """
+    """Represent the PostgreSQL TSTZRANGE type."""
 
     __visit_name__ = "TSTZRANGE"
index a8b23492164b95f476c2018498839ea3fb51fcae..eb855016ecbb74ec0c8f4010dd523f77cf807dc6 100644 (file)
@@ -515,7 +515,9 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
                 dbapi_connection = connection
 
             dbapi_connection.create_function(
-                "regexp", 2, regexp,
+                "regexp",
+                2,
+                regexp,
             )
 
         fns = [set_regexp]
index e6215540d190c7151a33910419517a0441e82f94..9a6bdd7f36617635c9fe2ab4f9290b7b4b1bfac7 100644 (file)
@@ -67,9 +67,7 @@ class Connection(Connectable):
         _dispatch=None,
         _has_events=None,
     ):
-        """Construct a new Connection.
-
-        """
+        """Construct a new Connection."""
         self.engine = engine
         self.dialect = engine.dialect
         self.__branch_from = _branch_from
@@ -347,7 +345,7 @@ class Connection(Connectable):
         return c
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded:: 1.3
 
@@ -1234,7 +1232,11 @@ class Connection(Connectable):
 
         for fn in self.dispatch.before_execute:
             elem, event_multiparams, event_params = fn(
-                self, elem, event_multiparams, event_params, execution_options,
+                self,
+                elem,
+                event_multiparams,
+                event_params,
+                execution_options,
             )
 
         if event_multiparams:
@@ -2124,9 +2126,7 @@ class Transaction(object):
             assert not self.is_active
 
     def rollback(self):
-        """Roll back this :class:`.Transaction`.
-
-        """
+        """Roll back this :class:`.Transaction`."""
         try:
             self._do_rollback()
         finally:
@@ -2618,7 +2618,7 @@ class Engine(Connectable, log.Identified):
         return self._option_cls(self, opt)
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded: 1.3
 
index 43afa3628afb4bb27ce6c240d05347f7fa059e2c..6c4a756c9a87b6b8734b81fbf04ab56ca120f8eb 100644 (file)
@@ -1034,7 +1034,8 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
     @classmethod
     def create(cls, result):
         return BufferedRowCursorFetchStrategy(
-            result.cursor, result.context.execution_options,
+            result.cursor,
+            result.context.execution_options,
         )
 
     def _buffer_rows(self, result, dbapi_cursor):
@@ -1204,9 +1205,7 @@ _NO_RESULT_METADATA = _NoResultMetaData()
 
 
 class BaseCursorResult(object):
-    """Base class for database result objects.
-
-    """
+    """Base class for database result objects."""
 
     out_parameters = None
     _metadata = None
index e0e4a9a8333e359ab45a9de8a6d51ae5ecaf6fde..27de5aaafa8149d4849dc439cdb006c62d9e5886 100644 (file)
@@ -530,7 +530,7 @@ class Dialect(object):
         :param dbapi_connection: a DBAPI connection, typically
          proxied within a :class:`.ConnectionFairy`.
 
-         """
+        """
 
         raise NotImplementedError()
 
@@ -541,7 +541,7 @@ class Dialect(object):
         :param dbapi_connection: a DBAPI connection, typically
          proxied within a :class:`.ConnectionFairy`.
 
-         """
+        """
 
         raise NotImplementedError()
 
index 198b5e568f54ec54e23b68012c2e48690d0721c6..812f7ceeca727f1cbbb1a1a7a87afc1b7ceb3218 100644 (file)
@@ -230,8 +230,7 @@ class Inspector(object):
         return self.dialect.default_schema_name
 
     def get_schema_names(self):
-        """Return all schema names.
-        """
+        """Return all schema names."""
 
         if hasattr(self.dialect, "get_schema_names"):
             with self._operation_context() as conn:
index 56abca9a9f7be0c9dec809c8228b041c6f627887..8b9b413c4681cc768d3f1a7853ba8b96ef6d3055 100644 (file)
@@ -167,7 +167,10 @@ class SimpleResultMetaData(ResultMetaData):
 
         if extra:
             recs_names = [
-                ((name,) + extras, (index, name, extras),)
+                (
+                    (name,) + extras,
+                    (index, name, extras),
+                )
                 for index, (name, extras) in enumerate(zip(self._keys, extra))
             ]
         else:
@@ -407,7 +410,10 @@ class ResultInternal(InPlaceGenerative):
             rows = [
                 made_row
                 for made_row, sig_row in [
-                    (made_row, strategy(made_row) if strategy else made_row,)
+                    (
+                        made_row,
+                        strategy(made_row) if strategy else made_row,
+                    )
                     for made_row in made_rows
                 ]
                 if sig_row not in uniques and not uniques.add(sig_row)
@@ -543,7 +549,10 @@ class ResultInternal(InPlaceGenerative):
         return manyrows
 
     def _only_one_row(
-        self, raise_for_second_row, raise_for_none, scalar,
+        self,
+        raise_for_second_row,
+        raise_for_none,
+        scalar,
     ):
         onerow = self._fetchone_impl
 
@@ -1400,10 +1409,7 @@ class MappingResult(FilterResult):
 
     def columns(self, *col_expressions):
         # type: (*object) -> MappingResult
-        r"""Establish the columns that should be returned in each row.
-
-
-        """
+        r"""Establish the columns that should be returned in each row."""
         return self._column_slices(col_expressions)
 
     def partitions(self, size=None):
index 6d2f4aa244979cf6238384a0b1a67ee41244180c..58f59642cf320c67ec8549a04295884b3dfd1407 100644 (file)
@@ -140,7 +140,7 @@ class URL(
             :class:`_engine.URL`, use the :meth:`_engine.URL.set` and
             :meth:`_engine.URL.update_query` methods.
 
-    """
+        """
 
         return cls(
             cls._assert_str(drivername, "drivername"),
@@ -205,7 +205,9 @@ class URL(
 
         return util.immutabledict(
             {
-                _assert_str(key): _assert_value(value,)
+                _assert_str(key): _assert_value(
+                    value,
+                )
                 for key, value in dict_items
             }
         )
index b36c448ceb3194fafd15503ade8f70b98a50fb5c..cd09235c3b8fc63f6b312c6121cbbdfb46f2e29e 100644 (file)
@@ -196,8 +196,6 @@ def remove(target, identifier, fn):
 
 
 def contains(target, identifier, fn):
-    """Return True if the given target/ident/fn is set up to listen.
-
-    """
+    """Return True if the given target/ident/fn is set up to listen."""
 
     return _event_key(target, identifier, fn).contains()
index abb264f98f154c43afd1ea4ed9e80f022a8f8908..baa3cd28a6d2435fa9b967d39772849a138e0cd8 100644 (file)
@@ -379,7 +379,7 @@ class _ListenerCollection(_CompoundListener):
 
     def _update(self, other, only_propagate=True):
         """Populate from the listeners in another :class:`_Dispatch`
-            object."""
+        object."""
 
         existing_listeners = self.listeners
         existing_listener_set = set(existing_listeners)
index c78080738f9d261893e83e34395da46e4c99aa0f..daa6f9aeabbbdcb56415033f9ff17050d17d25bf 100644 (file)
@@ -142,7 +142,7 @@ class _Dispatch(object):
 
     def _update(self, other, only_propagate=True):
         """Populate from the listeners in another :class:`_Dispatch`
-            object."""
+        object."""
         for ls in other._event_descriptors:
             if isinstance(ls, _EmptyListener):
                 continue
index 144dd45dc6e127e46f4ea7a57ad84933ca4f8cfc..58680f3564047305849ce8db49eb1256fbff692f 100644 (file)
@@ -139,8 +139,7 @@ def _clear(owner, elements):
 
 
 class _EventKey(object):
-    """Represent :func:`.listen` arguments.
-    """
+    """Represent :func:`.listen` arguments."""
 
     __slots__ = (
         "target",
@@ -239,8 +238,7 @@ class _EventKey(object):
                 collection.remove(self.with_wrapper(listener_fn))
 
     def contains(self):
-        """Return True if this event key is registered to listen.
-        """
+        """Return True if this event key is registered to listen."""
         return self._key in _key_to_collection
 
     def base_listen(
index b80bf9b01149fbd0be7cbc08d40f992c18d7713e..7e4a3f53eb8e75455ffc140817bc16d38828d1b7 100644 (file)
@@ -35,7 +35,11 @@ class SQLAlchemyError(Exception):
         else:
             return (
                 "(Background on this error at: "
-                "http://sqlalche.me/e/%s/%s)" % (_version_token, self.code,)
+                "http://sqlalche.me/e/%s/%s)"
+                % (
+                    _version_token,
+                    self.code,
+                )
             )
 
     def _message(self, as_unicode=compat.py3k):
index 3ea77a952e1f8f33ff48fb7e9c9ee4662d25f579..a2c6b596f4bb25c208707ef03c7e56e2ff96dba2 100644 (file)
@@ -840,8 +840,7 @@ class AmbiguousAssociationProxyInstance(AssociationProxyInstance):
 
 
 class ObjectAssociationProxyInstance(AssociationProxyInstance):
-    """an :class:`.AssociationProxyInstance` that has an object as a target.
-    """
+    """an :class:`.AssociationProxyInstance` that has an object as a target."""
 
     _target_is_object = True
     _is_canonical = True
index 619cf85086318b51eba0ce36d03971dbc36fb07f..4a92fb1f2c1c556a0e05704aa0ec6fd174bdacee 100644 (file)
@@ -85,16 +85,12 @@ class AsyncConnection(StartableContext):
         return self.sync_connection
 
     def begin(self) -> "AsyncTransaction":
-        """Begin a transaction prior to autobegin occurring.
-
-        """
+        """Begin a transaction prior to autobegin occurring."""
         self._sync_connection()
         return AsyncTransaction(self)
 
     def begin_nested(self) -> "AsyncTransaction":
-        """Begin a nested transaction and return a transaction handle.
-
-        """
+        """Begin a nested transaction and return a transaction handle."""
         self._sync_connection()
         return AsyncTransaction(self, nested=True)
 
@@ -154,7 +150,10 @@ class AsyncConnection(StartableContext):
         conn = self._sync_connection()
 
         result = await greenlet_spawn(
-            conn.exec_driver_sql, statement, parameters, execution_options,
+            conn.exec_driver_sql,
+            statement,
+            parameters,
+            execution_options,
         )
         if result.context._is_server_side:
             raise async_exc.AsyncMethodRequired(
@@ -230,7 +229,10 @@ class AsyncConnection(StartableContext):
         conn = self._sync_connection()
 
         result = await greenlet_spawn(
-            conn._execute_20, statement, parameters, execution_options,
+            conn._execute_20,
+            statement,
+            parameters,
+            execution_options,
         )
         if result.context._is_server_side:
             raise async_exc.AsyncMethodRequired(
@@ -261,7 +263,7 @@ class AsyncConnection(StartableContext):
         return result.scalar()
 
     async def run_sync(self, fn: Callable, *arg, **kw) -> Any:
-        """"Invoke the given sync callable passing self as the first argument.
+        """Invoke the given sync callable passing self as the first argument.
 
         This method maintains the asyncio event loop all the way through
         to the database connection by running the given callable in a
@@ -418,9 +420,7 @@ class AsyncTransaction(StartableContext):
         await greenlet_spawn(self._sync_transaction().close)
 
     async def rollback(self):
-        """Roll back this :class:`.Transaction`.
-
-        """
+        """Roll back this :class:`.Transaction`."""
         await greenlet_spawn(self._sync_transaction().rollback)
 
     async def commit(self):
index 52b40acbabf0c028deb8c55636f43f51b7ca752e..7f8a707d5283e0ba97ea1a5fdcf155554b518ae7 100644 (file)
@@ -553,10 +553,7 @@ class AsyncMappingResult(FilterResult):
 
     def columns(self, *col_expressions):
         # type: (*object) -> AsyncMappingResult
-        r"""Establish the columns that should be returned in each row.
-
-
-        """
+        r"""Establish the columns that should be returned in each row."""
         return self._column_slices(col_expressions)
 
     async def partitions(self, size=None):
index 1673017808ad98d1e16d61ac2898257c539df411..cb06aa26d5f886c17987f0fc105e180b8409f68a 100644 (file)
@@ -269,9 +269,7 @@ class AsyncSessionTransaction(StartableContext):
         return self.sync_transaction
 
     async def rollback(self):
-        """Roll back this :class:`_asyncio.AsyncTransaction`.
-
-        """
+        """Roll back this :class:`_asyncio.AsyncTransaction`."""
         await greenlet_spawn(self._sync_transaction().rollback)
 
     async def commit(self):
index 2886773878fc5ecc77b40ca78a7222ff305c67be..8a2023e96fb768509999267f948d9f3eb8aa7b54 100644 (file)
@@ -173,8 +173,7 @@ class BakedQuery(object):
         return self._cache_key + (session._query_cls,)
 
     def _with_lazyload_options(self, options, effective_path, cache_path=None):
-        """Cloning version of _add_lazyload_options.
-        """
+        """Cloning version of _add_lazyload_options."""
         q = self._clone()
         q._add_lazyload_options(options, effective_path, cache_path=cache_path)
         return q
index 75bb028f0b95ebe13ccde31207d1aee5629f1e19..83562502ab8e82dd7275185013248c6600b98d01 100644 (file)
@@ -950,7 +950,7 @@ class hybrid_property(interfaces.InspectionAttrInfo):
 
             :ref:`hybrid_reuse_subclass`
 
-         """
+        """
         return self
 
     def getter(self, fget):
index 7b6b779977edb6e554d9976214136f9539edca03..03ea096e7429908d24893cf9e586aea037a88142 100644 (file)
@@ -376,7 +376,7 @@ class OrderingList(list):
 
 
 def _reconstitute(cls, dict_, items):
-    """ Reconstitute an :class:`.OrderingList`.
+    """Reconstitute an :class:`.OrderingList`.
 
     This is the adjoint to :meth:`.OrderingList.__reduce__`.  It is used for
     unpickling :class:`.OrderingList` objects.
index 270f189bef702efd298d62f23175611aba00e4ca..4d9ee30c44fcaded93a8a8e9e794878d124c2371 100644 (file)
@@ -54,7 +54,7 @@ def inspect(subject, raiseerr=True):
      :class:`sqlalchemy.exc.NoInspectionAvailable`
      is raised.  If ``False``, ``None`` is returned.
 
-     """
+    """
     type_ = type(subject)
     for cls in type_.__mro__:
         if cls in _registrars:
index 7f2c61a05c924e4a9ce4cb5ab26b798eebfdb35a..e36797d47fc135aab994ca6f2aae3e9d6436611a 100644 (file)
@@ -119,9 +119,7 @@ relationship = public_factory(RelationshipProperty, ".orm.relationship")
 
 @_sa_util.deprecated_20("relation", "Please use :func:`.relationship`.")
 def relation(*arg, **kw):
-    """A synonym for :func:`relationship`.
-
-    """
+    """A synonym for :func:`relationship`."""
 
     return relationship(*arg, **kw)
 
index 07b147f10f85dde904670f07b8ad4f5a5d5d2d54..c7ef97c6cd12ac9a35619a33cb47a5fab639770d 100644 (file)
@@ -968,9 +968,9 @@ class ScalarAttributeImpl(AttributeImpl):
 
 class ScalarObjectAttributeImpl(ScalarAttributeImpl):
     """represents a scalar-holding InstrumentedAttribute,
-       where the target object is also instrumented.
+    where the target object is also instrumented.
 
-       Adds events to delete/set operations.
+    Adds events to delete/set operations.
 
     """
 
@@ -1065,9 +1065,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
         check_old=None,
         pop=False,
     ):
-        """Set a value on the given InstanceState.
-
-        """
+        """Set a value on the given InstanceState."""
         if self.dispatch._active_history:
             old = self.get(
                 state,
index c24e04aa59bbb9018b5c94cf2164d7d702dfd601..5e9cf9cce3dd5e2af34078ad3ccfe0c7667db178 100644 (file)
@@ -1223,7 +1223,14 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
             # figure out the final "left" and "right" sides and create an
             # ORMJoin to add to our _from_obj tuple
             self._join_left_to_right(
-                left, right, onclause, prop, False, False, isouter, full,
+                left,
+                right,
+                onclause,
+                prop,
+                False,
+                False,
+                isouter,
+                full,
             )
 
     def _legacy_join(self, args):
@@ -1822,7 +1829,8 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
             self._mapper_loads_polymorphically_with(
                 right_mapper,
                 sql_util.ColumnAdapter(
-                    right_mapper.selectable, right_mapper._equivalent_columns,
+                    right_mapper.selectable,
+                    right_mapper._equivalent_columns,
                 ),
             )
         # if the onclause is a ClauseElement, adapt it with any
@@ -2538,7 +2546,11 @@ class _ORMColumnEntity(_ColumnEntity):
     )
 
     def __init__(
-        self, compile_state, column, parententity, parent_bundle=None,
+        self,
+        compile_state,
+        column,
+        parententity,
+        parent_bundle=None,
     ):
 
         annotations = column._annotations
index 70fffa295139918a404e48d6cb838b9b60da6e38..4d9766204e78ab77cde970e0d81fb42a0d00f33c 100644 (file)
@@ -375,7 +375,12 @@ def declarative_base(
         metadata=metadata,
         class_registry=class_registry,
         constructor=constructor,
-    ).generate_base(mapper=mapper, cls=cls, name=name, metaclass=metaclass,)
+    ).generate_base(
+        mapper=mapper,
+        cls=cls,
+        name=name,
+        metaclass=metaclass,
+    )
 
 
 class registry(object):
@@ -458,7 +463,11 @@ class registry(object):
         clsregistry.remove_class(cls.__name__, cls, self._class_registry)
 
     def generate_base(
-        self, mapper=None, cls=object, name="Base", metaclass=DeclarativeMeta,
+        self,
+        mapper=None,
+        cls=object,
+        name="Base",
+        metaclass=DeclarativeMeta,
     ):
         """Generate a declarative base class.
 
index 0e0c79b1f98dbbe16b3094f087a57e94087abc7b..0e89e729fb2e8a90e24073a306230d84b121e04c 100644 (file)
@@ -203,7 +203,11 @@ class _ImperativeMapperConfig(_MapperConfig):
     __slots__ = ("dict_", "local_table", "inherits")
 
     def __init__(
-        self, registry, cls_, table, mapper_kw,
+        self,
+        registry,
+        cls_,
+        table,
+        mapper_kw,
     ):
         super(_ImperativeMapperConfig, self).__init__(registry, cls_)
 
@@ -223,7 +227,8 @@ class _ImperativeMapperConfig(_MapperConfig):
         mapper_cls = mapper
 
         return self.set_cls_attribute(
-            "__mapper__", mapper_cls(self.cls, self.local_table, **mapper_kw),
+            "__mapper__",
+            mapper_cls(self.cls, self.local_table, **mapper_kw),
         )
 
     def _setup_inheritance(self, mapper_kw):
@@ -274,7 +279,12 @@ class _ClassScanMapperConfig(_MapperConfig):
     )
 
     def __init__(
-        self, registry, cls_, dict_, table, mapper_kw,
+        self,
+        registry,
+        cls_,
+        dict_,
+        table,
+        mapper_kw,
     ):
 
         super(_ClassScanMapperConfig, self).__init__(registry, cls_)
index c2efa24a192a49fdaaba578d12ac23f4608c9322..713891d9155a93aeb514606a242c0f256d3743a9 100644 (file)
@@ -26,7 +26,7 @@ from ..sql import expression
 
 class DescriptorProperty(MapperProperty):
     """:class:`.MapperProperty` which proxies access to a
-        user-defined descriptor."""
+    user-defined descriptor."""
 
     doc = None
 
index 48161a256fe8bb8639d48ad74b2352fbbe66499c..4426041e33495bcf404aac4d8141deef323dc9b2 100644 (file)
@@ -487,12 +487,15 @@ class AppenderQuery(Generative):
                 iterator = (
                     (item,)
                     for item in self.attr._get_collection_history(
-                        state, attributes.PASSIVE_NO_INITIALIZE,
+                        state,
+                        attributes.PASSIVE_NO_INITIALIZE,
                     ).added_items
                 )
 
                 row_metadata = _result.SimpleResultMetaData(
-                    (self.mapper.class_.__name__,), [], _unique_filters=[id],
+                    (self.mapper.class_.__name__,),
+                    [],
+                    _unique_filters=[id],
                 )
 
                 return _result.IteratorResult(row_metadata, iterator).scalars()
index a7dd1c5478659456a675c25d5407d96b4c262f2f..ecb704a04d3e62ac46d1ce4b997496a61ed9340b 100644 (file)
@@ -639,7 +639,7 @@ def _instance_processor(
     _polymorphic_from=None,
 ):
     """Produce a mapper level row processor callable
-       which processes rows into mapped instances."""
+    which processes rows into mapped instances."""
 
     # note that this method, most of which exists in a closure
     # called _instance(), resists being broken out, as
@@ -1253,9 +1253,7 @@ def _decorate_polymorphic_switch(
 
 
 class PostLoad(object):
-    """Track loaders and states for "post load" operations.
-
-    """
+    """Track loaders and states for "post load" operations."""
 
     __slots__ = "loaders", "states", "load_keys"
 
index 296ddf385d4077a3f8f068842346f7193dd72d3a..7b94bfa872b06c904a6e1cc57109cb0977c392f5 100644 (file)
@@ -1275,8 +1275,7 @@ class Mapper(
 
     @classmethod
     def _configure_all(cls):
-        """Class-level path to the :func:`.configure_mappers` call.
-        """
+        """Class-level path to the :func:`.configure_mappers` call."""
         configure_mappers()
 
     def dispose(self):
@@ -1799,7 +1798,7 @@ class Mapper(
     @util.preload_module("sqlalchemy.orm.descriptor_props")
     def _property_from_column(self, key, prop):
         """generate/update a :class:`.ColumnProperty` given a
-        :class:`_schema.Column` object. """
+        :class:`_schema.Column` object."""
         descriptor_props = util.preloaded.orm_descriptor_props
         # we were passed a Column or a list of Columns;
         # generate a properties.ColumnProperty
@@ -1983,8 +1982,7 @@ class Mapper(
         return key in self._props
 
     def get_property(self, key, _configure_mappers=True):
-        """return a MapperProperty associated with the given key.
-        """
+        """return a MapperProperty associated with the given key."""
 
         if _configure_mappers and Mapper._new_mappers:
             configure_mappers()
index d05381c1d25fe9bd8867da27b5a3aaad94fa3e06..fa126a279b75198aeb88830fa8f171b85ddbeabc 100644 (file)
@@ -1179,7 +1179,8 @@ def _emit_insert_statements(
                         c.returned_defaults_rows or (),
                     ):
                         for pk, col in zip(
-                            inserted_primary_key, mapper._pks_by_table[table],
+                            inserted_primary_key,
+                            mapper._pks_by_table[table],
                         ):
                             prop = mapper_rec._columntoproperty[col]
                             if state_dict.get(prop.key) is None:
@@ -2236,7 +2237,8 @@ class BulkORMUpdate(UpdateDMLState, BulkUDCompileState):
             session.identity_map[identity_key]
             for identity_key in [
                 target_mapper.identity_key_from_primary_key(
-                    list(primary_key), identity_token=identity_token,
+                    list(primary_key),
+                    identity_token=identity_token,
                 )
                 for primary_key, identity_token in [
                     (row[0:-1], row[-1]) for row in matched_rows
@@ -2337,7 +2339,8 @@ class BulkORMDelete(DeleteDMLState, BulkUDCompileState):
             # TODO: inline this and call remove_newly_deleted
             # once
             identity_key = target_mapper.identity_key_from_primary_key(
-                list(primary_key), identity_token=identity_token,
+                list(primary_key),
+                identity_token=identity_token,
             )
             if identity_key in session.identity_map:
                 session._remove_newly_deleted(
index e7bfc25b7b500b813247647c29e27b4df01532c4..0e7fd2fc314e34876adac82394e2991ebcdfeda1 100644 (file)
@@ -435,7 +435,10 @@ class Query(
         return stmt
 
     def subquery(
-        self, name=None, with_labels=False, reduce_columns=False,
+        self,
+        name=None,
+        with_labels=False,
+        reduce_columns=False,
     ):
         """Return the full SELECT statement represented by
         this :class:`_query.Query`, embedded within an
@@ -1496,7 +1499,7 @@ class Query(
         return fn(self)
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded:: 1.3
 
@@ -1508,7 +1511,7 @@ class Query(
 
     @_generative
     def execution_options(self, **kwargs):
-        """ Set non-SQL options which take effect during execution.
+        """Set non-SQL options which take effect during execution.
 
         Options allowed here include all of those accepted by
         :meth:`_engine.Connection.execution_options`, as well as a series
@@ -3030,7 +3033,9 @@ class Query(
 
         """
 
-        bulk_del = BulkDelete(self,)
+        bulk_del = BulkDelete(
+            self,
+        )
         if self.dispatch.before_compile_delete:
             for fn in self.dispatch.before_compile_delete:
                 new_query = fn(bulk_del.query, bulk_del)
index cd1502073e11c1169651e302dc4bbe9ebf6bcdd6..13611f2bb71b1516ace12d547d1c616eec4ee9e1 100644 (file)
@@ -3636,9 +3636,7 @@ class JoinCondition(object):
 
 
 class _ColInAnnotations(object):
-    """Seralizable object that tests for a name in c._annotations.
-
-    """
+    """Seralizable object that tests for a name in c._annotations."""
 
     __slots__ = ("name",)
 
index bebe015f74f28eb0198b17b121724e054799babb..e32e055103e989a7cba50a6c5b16889ccef2a6a9 100644 (file)
@@ -47,7 +47,7 @@ _sessions = weakref.WeakValueDictionary()
 
 def _state_session(state):
     """Given an :class:`.InstanceState`, return the :class:`.Session`
-        associated, if any.
+    associated, if any.
     """
     if state.session_id:
         try:
@@ -404,7 +404,11 @@ class SessionTransaction(object):
     _rollback_exception = None
 
     def __init__(
-        self, session, parent=None, nested=False, autobegin=False,
+        self,
+        session,
+        parent=None,
+        nested=False,
+        autobegin=False,
     ):
         self.session = session
         self._connections = {}
@@ -1865,7 +1869,8 @@ class Session(_SessionClassMethods):
             except sa_exc.NoInspectionAvailable as err:
                 if isinstance(mapper, type):
                     util.raise_(
-                        exc.UnmappedClassError(mapper), replace_context=err,
+                        exc.UnmappedClassError(mapper),
+                        replace_context=err,
                     )
                 else:
                     raise
@@ -2098,7 +2103,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._expire_state(state, attribute_names)
@@ -2204,7 +2210,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         self._expire_state(state, attribute_names)
 
@@ -2242,7 +2249,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         if state.session_id is not self.hash_key:
             raise sa_exc.InvalidRequestError(
@@ -2396,7 +2404,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._save_or_update_state(state)
@@ -2433,7 +2442,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
 
         self._delete_impl(state, instance, head=True)
@@ -2663,7 +2673,10 @@ class Session(_SessionClassMethods):
         if execution_options:
             statement = statement.execution_options(**execution_options)
         return db_load_fn(
-            self, statement, primary_key_identity, load_options=load_options,
+            self,
+            statement,
+            primary_key_identity,
+            load_options=load_options,
         )
 
     def merge(self, instance, load=True):
@@ -3060,7 +3073,8 @@ class Session(_SessionClassMethods):
             state = attributes.instance_state(instance)
         except exc.NO_STATE as err:
             util.raise_(
-                exc.UnmappedInstanceError(instance), replace_context=err,
+                exc.UnmappedInstanceError(instance),
+                replace_context=err,
             )
         return self._contains_state(state)
 
@@ -3159,7 +3173,8 @@ class Session(_SessionClassMethods):
 
                 except exc.NO_STATE as err:
                     util.raise_(
-                        exc.UnmappedInstanceError(o), replace_context=err,
+                        exc.UnmappedInstanceError(o),
+                        replace_context=err,
                     )
                 objset.add(state)
         else:
@@ -4062,7 +4077,8 @@ def object_session(instance):
         state = attributes.instance_state(instance)
     except exc.NO_STATE as err:
         util.raise_(
-            exc.UnmappedInstanceError(instance), replace_context=err,
+            exc.UnmappedInstanceError(instance),
+            replace_context=err,
         )
     else:
         return _state_session(state)
index 2332834977832eee5791b21a2bfb6f0fd388331d..b139d5933b5996d1ff27bb0c6a7575cfa2ed2ccb 100644 (file)
@@ -212,7 +212,7 @@ class InstanceState(interfaces.InspectionAttrInfo):
 
             :ref:`session_object_states`
 
-            """
+        """
         return self.key is not None and self._attached and not self._deleted
 
     @property
@@ -533,7 +533,7 @@ class InstanceState(interfaces.InspectionAttrInfo):
 
     def _reset(self, dict_, key):
         """Remove the given attribute and any
-           callables associated with it."""
+        callables associated with it."""
 
         old = dict_.pop(key, None)
         if old is not None and self.manager[key].impl.collection:
index 325bd4dc1e7570e3f2c3454c0b48785067260bc9..900691688e825430bd4f4d180def349d3fc76865 100644 (file)
@@ -1455,7 +1455,12 @@ class SubqueryLoader(PostLoader):
                 self._load()
 
     def _setup_query_from_rowproc(
-        self, context, path, entity, loadopt, adapter,
+        self,
+        context,
+        path,
+        entity,
+        loadopt,
+        adapter,
     ):
         compile_state = context.compile_state
         if (
@@ -1627,7 +1632,11 @@ class SubqueryLoader(PostLoader):
             return
 
         subq = self._setup_query_from_rowproc(
-            context, path, path[-1], loadopt, adapter,
+            context,
+            path,
+            path[-1],
+            loadopt,
+            adapter,
         )
 
         if subq is None:
@@ -1879,7 +1888,9 @@ class JoinedLoader(AbstractRelationshipLoader):
                     prop.mapper, None
                 )
         path.set(
-            target_attributes, "user_defined_eager_row_processor", adapter,
+            target_attributes,
+            "user_defined_eager_row_processor",
+            adapter,
         )
 
         return adapter
index 170e4487e5ec72a4b4000a7078eee2b0f492b425..f902014efb6627db3f40b12e18418bd811e950ec 100644 (file)
@@ -487,7 +487,8 @@ class AliasedClass(object):
 
         if alias is None:
             alias = mapper._with_polymorphic_selectable._anonymous_fromclause(
-                name=name, flat=flat,
+                name=name,
+                flat=flat,
             )
 
         self._aliased_insp = AliasedInsp(
index fe4c60a2d507137d14f506d3689dfb9fe25b70ea..b83b5525f0eaaadce31d7fb9ed413b96dde42100 100644 (file)
@@ -48,8 +48,8 @@ from .sql.schema import DefaultGenerator  # noqa
 from .sql.schema import FetchedValue  # noqa
 from .sql.schema import ForeignKey  # noqa
 from .sql.schema import ForeignKeyConstraint  # noqa
-from .sql.schema import Index  # noqa
 from .sql.schema import Identity  # noqa
+from .sql.schema import Index  # noqa
 from .sql.schema import MetaData  # noqa
 from .sql.schema import PrimaryKeyConstraint  # noqa
 from .sql.schema import SchemaItem  # noqa
index 67ee8c907135cc41574372a62f7f90659f73e3d0..f9b5ce7e19338d1f0420de1fe1a06b89bf4a3c9b 100644 (file)
@@ -558,10 +558,7 @@ class _MetaOptions(type):
 
 
 class Options(util.with_metaclass(_MetaOptions)):
-    """A cacheable option dictionary with defaults.
-
-
-    """
+    """A cacheable option dictionary with defaults."""
 
     def __init__(self, **kw):
         self.__dict__.update(kw)
@@ -635,7 +632,7 @@ class Options(util.with_metaclass(_MetaOptions)):
     def from_execution_options(
         cls, key, attrs, exec_options, statement_exec_options
     ):
-        """"process Options argument in terms of execution options.
+        """process Options argument in terms of execution options.
 
 
         e.g.::
@@ -706,9 +703,7 @@ class ExecutableOption(HasCopyInternals, HasCacheKey):
     __visit_name__ = "executable_option"
 
     def _clone(self):
-        """Create a shallow copy of this ExecutableOption.
-
-        """
+        """Create a shallow copy of this ExecutableOption."""
         c = self.__class__.__new__(self.__class__)
         c.__dict__ = dict(self.__dict__)
         return c
@@ -812,7 +807,7 @@ class Executable(Generative):
 
     @_generative
     def execution_options(self, **kw):
-        """ Set non-SQL options for the statement which take effect during
+        """Set non-SQL options for the statement which take effect during
         execution.
 
         Execution options can be set on a per-statement or
@@ -858,7 +853,7 @@ class Executable(Generative):
         self._execution_options = self._execution_options.union(kw)
 
     def get_execution_options(self):
-        """ Get the non-SQL options which will take effect during execution.
+        """Get the non-SQL options which will take effect during execution.
 
         .. versionadded:: 1.3
 
@@ -877,9 +872,7 @@ class Executable(Generative):
         ":class:`.Session`.",
     )
     def execute(self, *multiparams, **params):
-        """Compile and execute this :class:`.Executable`.
-
-        """
+        """Compile and execute this :class:`.Executable`."""
         e = self.bind
         if e is None:
             label = getattr(self, "description", self.__class__.__name__)
@@ -1388,18 +1381,18 @@ class DedupeColumnCollection(ColumnCollection):
 
     def replace(self, column):
         """add the given column to this collection, removing unaliased
-           versions of this column  as well as existing columns with the
-           same key.
+        versions of this column  as well as existing columns with the
+        same key.
 
-            e.g.::
+        e.g.::
 
-                t = Table('sometable', metadata, Column('col1', Integer))
-                t.columns.replace(Column('col1', Integer, key='columnone'))
+            t = Table('sometable', metadata, Column('col1', Integer))
+            t.columns.replace(Column('col1', Integer, key='columnone'))
 
-            will remove the original 'col1' from the collection, and add
-            the new column under the name 'columnname'.
+        will remove the original 'col1' from the collection, and add
+        the new column under the name 'columnname'.
 
-           Used by schema.Column to override columns during table reflection.
+        Used by schema.Column to override columns during table reflection.
 
         """
 
index 154564a081d0bb1b74881079ae1be4095bea65f1..558ced8bde886dc7e1dcf28851e656ec12e4a433 100644 (file)
@@ -37,9 +37,13 @@ def _is_literal(element):
 
     """
 
-    return not isinstance(
-        element, (Visitable, schema.SchemaEventTarget),
-    ) and not hasattr(element, "__clause_element__")
+    return (
+        not isinstance(
+            element,
+            (Visitable, schema.SchemaEventTarget),
+        )
+        and not hasattr(element, "__clause_element__")
+    )
 
 
 def _deep_is_literal(element):
index 986f63aad638aacb647914b3c3dfdb85b51abbac..1c68d6450c0a9e2fc329b757ef0a8686ab9cbb86 100644 (file)
@@ -719,7 +719,8 @@ def _append_param_update(
                 (
                     c,
                     compiler.preparer.format_column(
-                        c, use_table=include_table,
+                        c,
+                        use_table=include_table,
                     ),
                     compiler.process(c.onupdate.arg.self_group(), **kw),
                 )
@@ -733,7 +734,8 @@ def _append_param_update(
                 (
                     c,
                     compiler.preparer.format_column(
-                        c, use_table=include_table,
+                        c,
+                        use_table=include_table,
                     ),
                     _create_update_prefetch_bind_param(compiler, c, **kw),
                 )
index 67c11f6c767852f0af3b8325960d2681988f1879..5f3074cdc28b664dbb6c0e0566453ed052aa43a3 100644 (file)
@@ -985,7 +985,9 @@ class SchemaDropper(DDLBase):
 
 
 def sort_tables(
-    tables, skip_fn=None, extra_dependencies=None,
+    tables,
+    skip_fn=None,
+    extra_dependencies=None,
 ):
     """Sort a collection of :class:`_schema.Table` objects based on
     dependency.
index fd2efc6f913c50f5b5c9206b63f421ad37b6ca44..5ddc9ef82dbc135283de547d4d14bf89c2f13a91 100644 (file)
@@ -193,9 +193,7 @@ class UpdateBase(
     Executable,
     ClauseElement,
 ):
-    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
-
-    """
+    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""
 
     __visit_name__ = "update_base"
 
@@ -435,7 +433,7 @@ class UpdateBase(
         :param dialect_name: defaults to ``*``, if specified as the name
          of a particular dialect, will apply these hints only when
          that dialect is in use.
-         """
+        """
         if selectable is None:
             selectable = self.table
 
index 59f3fa86b73f2e9b4ec9f780fee45ad1c9518a9a..c8ae1e6b644f9448fa39f001d1016aab05074d74 100644 (file)
@@ -3526,7 +3526,10 @@ class BinaryExpression(ColumnElement):
         ("operator", InternalTraversal.dp_operator),
         ("negate", InternalTraversal.dp_operator),
         ("modifiers", InternalTraversal.dp_plain_dict),
-        ("type", InternalTraversal.dp_type,),  # affects JSON CAST operators
+        (
+            "type",
+            InternalTraversal.dp_type,
+        ),  # affects JSON CAST operators
     ]
 
     _is_implicitly_boolean = True
@@ -3638,8 +3641,8 @@ class Slice(ColumnElement):
 
 
 class IndexExpression(BinaryExpression):
-    """Represent the class of expressions that are like an "index" operation.
-    """
+    """Represent the class of expressions that are like an "index"
+    operation."""
 
     pass
 
index 7d52f97ee8abe8c679f97d4b6f227126e82200cd..676152781daecb504e3942f922e54e89b06fe994 100644 (file)
@@ -604,7 +604,11 @@ class AnalyzedCode(object):
         # create trackers to catch those.
 
         analyzed_function = AnalyzedFunction(
-            self, lambda_element, None, lambda_kw, fn,
+            self,
+            lambda_element,
+            None,
+            lambda_kw,
+            fn,
         )
 
         closure_trackers = self.closure_trackers
@@ -781,7 +785,12 @@ class AnalyzedFunction(object):
     )
 
     def __init__(
-        self, analyzed_code, lambda_element, apply_propagate_attrs, kw, fn,
+        self,
+        analyzed_code,
+        lambda_element,
+        apply_propagate_attrs,
+        kw,
+        fn,
     ):
         self.analyzed_code = analyzed_code
         self.fn = fn
index 496f8d9fb5c5741c08a9e2119b121b5656efd2d1..e96da0e249c60f4f0917829bca2a84539c7fcdaf 100644 (file)
@@ -830,9 +830,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
         ":meth:`_reflection.Inspector.has_table`.",
     )
     def exists(self, bind=None):
-        """Return True if this table exists.
-
-        """
+        """Return True if this table exists."""
 
         if bind is None:
             bind = _bind_or_error(self)
@@ -3634,10 +3632,14 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
             if col.autoincrement is True:
                 _validate_autoinc(col, True)
                 return col
-            elif col.autoincrement in (
-                "auto",
-                "ignore_fk",
-            ) and _validate_autoinc(col, False):
+            elif (
+                col.autoincrement
+                in (
+                    "auto",
+                    "ignore_fk",
+                )
+                and _validate_autoinc(col, False)
+            ):
                 return col
 
         else:
index e2edf20b583c0c5f26d6dbad391ea9f94602a16d..45d4f0b7f1c24c75ef81e1dc52814dc31d9b8b0d 100644 (file)
@@ -361,7 +361,10 @@ class String(Concatenable, TypeEngine):
         needs_isinstance = (
             needs_convert
             and dialect.returns_unicode_strings
-            in (String.RETURNS_CONDITIONAL, String.RETURNS_UNICODE,)
+            in (
+                String.RETURNS_CONDITIONAL,
+                String.RETURNS_UNICODE,
+            )
             and self._expect_unicode != "force_nocheck"
         )
         if needs_convert:
@@ -2286,7 +2289,7 @@ class JSON(Indexable, TypeEngine):
 
               :attr:`.types.JSON.NULL`
 
-         """
+        """
         self.none_as_null = none_as_null
 
     class JSONElementType(TypeEngine):
index 0da88dc54ec035ede1535dea3ef2349f084caa6c..614b70a419b9bb73b831458b9e81631dfd2acfe4 100644 (file)
@@ -1156,10 +1156,7 @@ class TypeDecorator(SchemaEventTarget, TypeEngine):
 
     @util.memoized_property
     def _has_literal_processor(self):
-        """memoized boolean, check if process_literal_param is implemented.
-
-
-        """
+        """memoized boolean, check if process_literal_param is implemented."""
 
         return (
             self.__class__.process_literal_param.__code__
index 264976cc86bc1146b04e2493c3b83f690028c072..96fa209fd7655bf8b5117e879703d9a899519b9b 100644 (file)
@@ -1011,8 +1011,7 @@ def _offset_or_limit_clause_asint_if_possible(clause):
 
 
 def _make_slice(limit_clause, offset_clause, start, stop):
-    """Compute LIMIT/OFFSET in terms of slice start/end
-    """
+    """Compute LIMIT/OFFSET in terms of slice start/end"""
 
     # for calculated limit/offset, try to do the addition of
     # values to offset in Python, however if a SQL clause is present
index c32b2749b2a54ce2e9443afb3a874a975e924e0a..af168cd85245dc7e90d5845a1b0fe033b608f9c7 100644 (file)
@@ -522,9 +522,12 @@ class ComparesTables(object):
             assert reflected_table.primary_key.columns[c.name] is not None
 
     def assert_types_base(self, c1, c2):
-        assert c1.type._compare_type_affinity(c2.type), (
-            "On column %r, type '%s' doesn't correspond to type '%s'"
-            % (c1.name, c1.type, c2.type)
+        assert c1.type._compare_type_affinity(
+            c2.type
+        ), "On column %r, type '%s' doesn't correspond to type '%s'" % (
+            c1.name,
+            c1.type,
+            c2.type,
         )
 
 
index 8bdad357c14ab8bf9b8b79cde53fc78f6e425850..18b856fb12f061d05a0d3a876b0c2b95eae18692 100644 (file)
@@ -166,7 +166,9 @@ def generate_driver_url(url, driver, query_str):
     # type: (URL, str, str) -> URL
     backend = url.get_backend_name()
 
-    new_url = url.set(drivername="%s+%s" % (backend, driver),)
+    new_url = url.set(
+        drivername="%s+%s" % (backend, driver),
+    )
     new_url = new_url.update_query_string(query_str)
 
     try:
@@ -214,8 +216,7 @@ def drop_db(cfg, eng, ident):
 
 @register.init
 def update_db_opts(db_url, db_opts):
-    """Set database options (db_opts) for a test database that we created.
-    """
+    """Set database options (db_opts) for a test database that we created."""
     pass
 
 
index 97413d32b9eb230ded63e98fc6d2dfeb5fa36798..b7f0d0f59cdc096b173acbf631217c83b37da52d 100644 (file)
@@ -67,7 +67,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def on_update_cascade(self):
-        """"target database must support ON UPDATE..CASCADE behavior in
+        """target database must support ON UPDATE..CASCADE behavior in
         foreign keys."""
 
         return exclusions.open()
@@ -388,7 +388,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def implements_get_lastrowid(self):
-        """"target dialect implements the executioncontext.get_lastrowid()
+        """target dialect implements the executioncontext.get_lastrowid()
         method without reliance on RETURNING.
 
         """
@@ -396,7 +396,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def emulated_lastrowid(self):
-        """"target dialect retrieves cursor.lastrowid, or fetches
+        """target dialect retrieves cursor.lastrowid, or fetches
         from a database-side function after an insert() construct executes,
         within the get_lastrowid() method.
 
@@ -408,7 +408,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def emulated_lastrowid_even_with_sequences(self):
-        """"target dialect retrieves cursor.lastrowid or an equivalent
+        """target dialect retrieves cursor.lastrowid or an equivalent
         after an insert() construct executes, even if the table has a
         Sequence on it.
 
@@ -417,7 +417,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def dbapi_lastrowid(self):
-        """"target platform includes a 'lastrowid' accessor on the DBAPI
+        """target platform includes a 'lastrowid' accessor on the DBAPI
         cursor object.
 
         """
@@ -438,17 +438,16 @@ class SuiteRequirements(Requirements):
 
     @property
     def cross_schema_fk_reflection(self):
-        """target system must support reflection of inter-schema foreign keys
-
-        """
+        """target system must support reflection of inter-schema
+        foreign keys"""
         return exclusions.closed()
 
     @property
     def implicit_default_schema(self):
         """target system has a strong concept of 'default' schema that can
-           be referred to implicitly.
+        be referred to implicitly.
 
-           basically, PostgreSQL.
+        basically, PostgreSQL.
 
         """
         return exclusions.closed()
@@ -535,8 +534,8 @@ class SuiteRequirements(Requirements):
 
     @property
     def view_reflection(self):
-        """target database must support inspection of the full CREATE VIEW definition.
-        """
+        """target database must support inspection of the full CREATE VIEW
+        definition."""
         return self.views
 
     @property
@@ -654,9 +653,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def symbol_names_w_double_quote(self):
-        """Target driver can create tables with a name like 'some " table'
-
-        """
+        """Target driver can create tables with a name like 'some " table'"""
         return exclusions.open()
 
     @property
@@ -804,7 +801,7 @@ class SuiteRequirements(Requirements):
 
     @property
     def json_array_indexes(self):
-        """"target platform supports numeric array indexes
+        """target platform supports numeric array indexes
         within a JSON structure"""
 
         return self.json_type
index 7a7eac02f7b7efcc7dd135d42a4118dc234f511c..da59d831f7e7ab9ef6da61b2ec9c0934e0a7fa41 100644 (file)
@@ -42,7 +42,11 @@ class LastrowidTest(fixtures.TablesTest):
     def _assert_round_trip(self, table, conn):
         row = conn.execute(table.select()).first()
         eq_(
-            row, (conn.dialect.default_sequence_base, "some data",),
+            row,
+            (
+                conn.dialect.default_sequence_base,
+                "some data",
+            ),
         )
 
     def test_autoincrement_on_insert(self, connection):
@@ -289,7 +293,11 @@ class ReturningTest(fixtures.TablesTest):
     def _assert_round_trip(self, table, conn):
         row = conn.execute(table.select()).first()
         eq_(
-            row, (conn.dialect.default_sequence_base, "some data",),
+            row,
+            (
+                conn.dialect.default_sequence_base,
+                "some data",
+            ),
         )
 
     @classmethod
index 3c10a45f62f274f07534c94a0b1482e26f8426d7..f728310d7c1acdcabd4cdfac25e2c163ab6b682f 100644 (file)
@@ -151,7 +151,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
             Column("related_id", Integer),
             sa.PrimaryKeyConstraint("id", name="pk quote ' one"),
             sa.Index("ix quote ' one", "name"),
-            sa.UniqueConstraint("data", name="uq quote' one",),
+            sa.UniqueConstraint(
+                "data",
+                name="uq quote' one",
+            ),
             sa.ForeignKeyConstraint(
                 ["id"], ["related.id"], name="fk quote ' one"
             ),
@@ -170,7 +173,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
                 Column("related_id", Integer),
                 sa.PrimaryKeyConstraint("id", name='pk quote " two'),
                 sa.Index('ix quote " two', "name"),
-                sa.UniqueConstraint("data", name='uq quote" two',),
+                sa.UniqueConstraint(
+                    "data",
+                    name='uq quote" two',
+                ),
                 sa.ForeignKeyConstraint(
                     ["id"], ["related.id"], name='fk quote " two'
                 ),
@@ -1039,7 +1045,8 @@ class ComponentReflectionTest(fixtures.TablesTest):
             "Skipped unsupported reflection of expression-based index t_idx"
         ):
             eq_(
-                insp.get_indexes("t"), expected,
+                insp.get_indexes("t"),
+                expected,
             )
 
     @testing.requires.index_reflects_included_columns
@@ -1098,7 +1105,8 @@ class ComponentReflectionTest(fixtures.TablesTest):
         if testing.requires.index_reflects_included_columns.enabled:
             expected[0]["include_columns"] = []
         eq_(
-            [idx for idx in indexes if idx["name"] == "user_tmp_ix"], expected,
+            [idx for idx in indexes if idx["name"] == "user_tmp_ix"],
+            expected,
         )
 
     @testing.requires.unique_constraint_reflection
@@ -1390,11 +1398,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_column(
-                data, "computed_virtual", "normal+2", False,
+                data,
+                "computed_virtual",
+                "normal+2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_column(
-                data, "computed_stored", "normal-42", True,
+                data,
+                "computed_stored",
+                "normal-42",
+                True,
             )
 
     @testing.requires.schemas
@@ -1414,11 +1428,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_column(
-                data, "computed_virtual", "normal/2", False,
+                data,
+                "computed_virtual",
+                "normal/2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_column(
-                data, "computed_stored", "normal*42", True,
+                data,
+                "computed_stored",
+                "normal*42",
+                True,
             )
 
 
index 1c1b20cf0133c7f96a21c26c3435599be23d31c3..9484d41d09c4639cbca7aa4ea86cd483dcee4b15 100644 (file)
@@ -408,7 +408,8 @@ class ServerSideCursorsTest(
             )
 
             eq_(
-                result.fetchmany(5), [(i, "data%d" % i) for i in range(1, 6)],
+                result.fetchmany(5),
+                [(i, "data%d" % i) for i in range(1, 6)],
             )
             eq_(
                 result.fetchmany(10),
index c199929a7263b55db3f42fed5151a59085418862..b0fb60c5f6399116476e52554fa47fe03d4f7cd7 100644 (file)
@@ -1058,7 +1058,12 @@ class IdentityColumnTest(fixtures.TablesTest):
             Column(
                 "id",
                 Integer,
-                Identity(increment=-5, start=0, minvalue=-1000, maxvalue=0,),
+                Identity(
+                    increment=-5,
+                    start=0,
+                    minvalue=-1000,
+                    maxvalue=0,
+                ),
                 primary_key=True,
             ),
             Column("desc", String(100)),
@@ -1067,13 +1072,16 @@ class IdentityColumnTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.tbl_a.insert(), [{"desc": "a"}, {"desc": "b"}],
+            cls.tables.tbl_a.insert(),
+            [{"desc": "a"}, {"desc": "b"}],
         )
         connection.execute(
-            cls.tables.tbl_b.insert(), [{"desc": "a"}, {"desc": "b"}],
+            cls.tables.tbl_b.insert(),
+            [{"desc": "a"}, {"desc": "b"}],
         )
         connection.execute(
-            cls.tables.tbl_b.insert(), [{"id": 42, "desc": "c"}],
+            cls.tables.tbl_b.insert(),
+            [{"id": 42, "desc": "c"}],
         )
 
     def test_select_all(self, connection):
@@ -1102,7 +1110,8 @@ class IdentityColumnTest(fixtures.TablesTest):
     def test_insert_always_error(self, connection):
         def fn():
             connection.execute(
-                self.tables.tbl_a.insert(), [{"id": 200, "desc": "a"}],
+                self.tables.tbl_a.insert(),
+                [{"id": 200, "desc": "a"}],
             )
 
         assert_raises((DatabaseError, ProgrammingError), fn)
@@ -1204,7 +1213,8 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
             tbl.select(tbl.c.col_a.is_distinct_from(tbl.c.col_b))
         ).fetchall()
         eq_(
-            len(result), expected_row_count_for_is,
+            len(result),
+            expected_row_count_for_is,
         )
 
         expected_row_count_for_isnot = (
@@ -1214,5 +1224,6 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
             tbl.select(tbl.c.col_a.isnot_distinct_from(tbl.c.col_b))
         ).fetchall()
         eq_(
-            len(result), expected_row_count_for_isnot,
+            len(result),
+            expected_row_count_for_isnot,
         )
index 5a1876bc5f4e8a041cfda0e0ba43c5274e3829fc..de970da53c96cf42ba82104aeed52836e8faaa6c 100644 (file)
@@ -24,7 +24,12 @@ class SequenceTest(fixtures.TablesTest):
         Table(
             "seq_pk",
             metadata,
-            Column("id", Integer, Sequence("tab_id_seq"), primary_key=True,),
+            Column(
+                "id",
+                Integer,
+                Sequence("tab_id_seq"),
+                primary_key=True,
+            ),
             Column("data", String(50)),
         )
 
@@ -109,17 +114,21 @@ class HasSequenceTest(fixtures.TablesTest):
                 "schema_seq", schema=config.test_schema, metadata=metadata
             )
         Table(
-            "user_id_table", metadata, Column("id", Integer, primary_key=True),
+            "user_id_table",
+            metadata,
+            Column("id", Integer, primary_key=True),
         )
 
     def test_has_sequence(self, connection):
         eq_(
-            inspect(connection).has_sequence("user_id_seq"), True,
+            inspect(connection).has_sequence("user_id_seq"),
+            True,
         )
 
     def test_has_sequence_other_object(self, connection):
         eq_(
-            inspect(connection).has_sequence("user_id_table"), False,
+            inspect(connection).has_sequence("user_id_table"),
+            False,
         )
 
     @testing.requires.schemas
@@ -133,7 +142,8 @@ class HasSequenceTest(fixtures.TablesTest):
 
     def test_has_sequence_neg(self, connection):
         eq_(
-            inspect(connection).has_sequence("some_sequence"), False,
+            inspect(connection).has_sequence("some_sequence"),
+            False,
         )
 
     @testing.requires.schemas
@@ -157,7 +167,8 @@ class HasSequenceTest(fixtures.TablesTest):
     @testing.requires.schemas
     def test_has_sequence_remote_not_in_default(self, connection):
         eq_(
-            inspect(connection).has_sequence("schema_seq"), False,
+            inspect(connection).has_sequence("schema_seq"),
+            False,
         )
 
     def test_get_sequence_names(self, connection):
@@ -194,5 +205,6 @@ class HasSequenceTestEmpty(fixtures.TestBase):
 
     def test_get_sequence_names_no_sequence(self, connection):
         eq_(
-            inspect(connection).get_sequence_names(), [],
+            inspect(connection).get_sequence_names(),
+            [],
         )
index 8c654370000b211c6a8f754557d89357610ede6f..da01aa484bd4b6d26951fbe3562021a41d453069 100644 (file)
@@ -621,9 +621,7 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
 
     @testing.requires.precision_numerics_enotation_large
     def test_enotation_decimal_large(self):
-        """test exceedingly large decimals.
-
-        """
+        """test exceedingly large decimals."""
 
         numbers = set(
             [
@@ -962,7 +960,8 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
 
         conn = connection
         conn.execute(
-            self.tables.data_table.insert(), {"name": "r1", "data": JSON.NULL},
+            self.tables.data_table.insert(),
+            {"name": "r1", "data": JSON.NULL},
         )
 
         eq_(
@@ -1158,13 +1157,18 @@ class JSONLegacyStringCastIndexTest(
         # "cannot extract array element from a non-array", which is
         # fixed in 9.4 but may exist in 9.3
         self._test_index_criteria(
-            and_(name == "r4", cast(col[1], String) == '"two"',), "r4",
+            and_(
+                name == "r4",
+                cast(col[1], String) == '"two"',
+            ),
+            "r4",
         )
 
     def test_string_cast_crit_mixed_path(self):
         col = self.tables.data_table.c["data"]
         self._test_index_criteria(
-            cast(col[("key3", 1, "six")], String) == '"seven"', "r3",
+            cast(col[("key3", 1, "six")], String) == '"seven"',
+            "r3",
         )
 
     def test_string_cast_crit_string_path(self):
@@ -1180,7 +1184,10 @@ class JSONLegacyStringCastIndexTest(
         col = self.tables.data_table.c["data"]
 
         self._test_index_criteria(
-            and_(name == "r6", cast(col["b"], String) == '"some value"',),
+            and_(
+                name == "r6",
+                cast(col["b"], String) == '"some value"',
+            ),
             "r6",
         )
 
index dbe22bb8df75bcdc51dd53373aeecd1352a97633..5704cf2a6e459be935b886f2069109916d981cfa 100644 (file)
@@ -119,7 +119,9 @@ def setup_filters():
         r"The Session.begin.subtransactions flag is deprecated",
     ]:
         warnings.filterwarnings(
-            "ignore", message=msg, category=sa_exc.RemovedIn20Warning,
+            "ignore",
+            message=msg,
+            category=sa_exc.RemovedIn20Warning,
         )
 
     try:
index 7c109b358e033bdd9506725c6f165c77a6fdc35b..b50d9885d40474788f4c899de3e8dce460e86f72 100644 (file)
@@ -133,10 +133,7 @@ class FacadeDict(ImmutableContainer, dict):
         return FacadeDict, (dict(self),)
 
     def _insert_item(self, key, value):
-        """insert an item into the dictionary directly.
-
-
-        """
+        """insert an item into the dictionary directly."""
         dict.__setitem__(self, key, value)
 
     def __repr__(self):
index e1d0e644443b8d9d036aabd7bbb36f6129a37b97..285f6c02163d38de286a0c0c108a07c542b17b95 100644 (file)
@@ -309,8 +309,7 @@ else:
 if py3k:
 
     def _formatannotation(annotation, base_module=None):
-        """vendored from python 3.7
-        """
+        """vendored from python 3.7"""
 
         if getattr(annotation, "__module__", None) == "typing":
             return repr(annotation).replace("typing.", "")
index eae4be768bef04f0fe6a0278e7af6e4bf9b400a8..83037bbff6e381e75cf9ca7a033e8d36a178204d 100644 (file)
@@ -131,7 +131,10 @@ def deprecated(
             warning = exc.RemovedIn20Warning
         version = "1.4"
     if add_deprecation_to_docstring:
-        header = ".. deprecated:: %s %s" % (version, (message or ""),)
+        header = ".. deprecated:: %s %s" % (
+            version,
+            (message or ""),
+        )
     else:
         header = None
 
index e8abf3130badf82efa2214c2abd98da847d59ff0..bbdd3381fe3d2b9709661601c8854209df685e89 100644 (file)
@@ -66,7 +66,8 @@ class safe_reraise(object):
             self._exc_info = None  # remove potential circular references
             if not self.warn_only:
                 compat.raise_(
-                    exc_value, with_traceback=exc_tb,
+                    exc_value,
+                    with_traceback=exc_tb,
                 )
         else:
             if not compat.py3k and self._exc_info and self._exc_info[1]:
@@ -731,10 +732,10 @@ def class_hierarchy(cls):
 
 def iterate_attributes(cls):
     """iterate all the keys and attributes associated
-       with a class, without using getattr().
+    with a class, without using getattr().
 
-       Does not use getattr() so that class-sensitive
-       descriptors (i.e. property.__get__()) are not called.
+    Does not use getattr() so that class-sensitive
+    descriptors (i.e. property.__get__()) are not called.
 
     """
     keys = dir(cls)
@@ -986,9 +987,7 @@ class HasMemoized(object):
 
     @classmethod
     def memoized_instancemethod(cls, fn):
-        """Decorate a method memoize its return value.
-
-        """
+        """Decorate a method memoize its return value."""
 
         def oneshot(self, *args, **kw):
             result = fn(self, *args, **kw)
@@ -1738,8 +1737,8 @@ def inject_param_text(doctext, inject_params):
 
 
 def repr_tuple_names(names):
-    """ Trims a list of strings from the middle and return a string of up to
-        four elements. Strings greater than 11 characters will be truncated"""
+    """Trims a list of strings from the middle and return a string of up to
+    four elements. Strings greater than 11 characters will be truncated"""
     if len(names) == 0:
         return None
     flag = len(names) <= 4
index 9447abeded74f1ceee19f472a474166c5e951327..3687dc8dc31f331cb9819a59872335c36e94708e 100644 (file)
@@ -220,7 +220,8 @@ class AsyncAdaptedQueue:
             return self._queue.put_nowait(item)
         except asyncio.queues.QueueFull as err:
             compat.raise_(
-                Full(), replace_context=err,
+                Full(),
+                replace_context=err,
             )
 
     def put(self, item, block=True, timeout=None):
@@ -236,7 +237,8 @@ class AsyncAdaptedQueue:
                 return self.await_(self._queue.put(item))
         except asyncio.queues.QueueFull as err:
             compat.raise_(
-                Full(), replace_context=err,
+                Full(),
+                replace_context=err,
             )
 
     def get_nowait(self):
@@ -244,7 +246,8 @@ class AsyncAdaptedQueue:
             return self._queue.get_nowait()
         except asyncio.queues.QueueEmpty as err:
             compat.raise_(
-                Empty(), replace_context=err,
+                Empty(),
+                replace_context=err,
             )
 
     def get(self, block=True, timeout=None):
@@ -259,5 +262,6 @@ class AsyncAdaptedQueue:
                 return self.await_(self._queue.get())
         except asyncio.queues.QueueEmpty as err:
             compat.raise_(
-                Empty(), replace_context=err,
+                Empty(),
+                replace_context=err,
             )
index 30a02472cfb686d8c7a2776eaf339af1781d0134..4bc2af93d3c7ed71141270788950a5f3b6f9d831 100644 (file)
@@ -891,7 +891,12 @@ class JoinedEagerLoadTest(NoCache, fixtures.MappedTest):
 
                 r.context.compiled.compile_state = compile_state
                 obj = ORMCompileState.orm_setup_cursor_result(
-                    sess, compile_state.statement, {}, exec_opts, {}, r,
+                    sess,
+                    compile_state.statement,
+                    {},
+                    exec_opts,
+                    {},
+                    r,
                 )
                 list(obj.unique())
                 sess.close()
index aea160c9e42e4c7205889dcc80998d04c3c77cc9..de3683430edb459e3671304550ca8d05b829e8a6 100644 (file)
@@ -131,7 +131,10 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
                 [row["field%d" % fnum] for fnum in range(NUM_FIELDS)]
 
     @testing.combinations(
-        (False, 0), (True, 1), (False, 1), (False, 2),
+        (False, 0),
+        (True, 1),
+        (False, 1),
+        (False, 2),
     )
     def test_one_or_none(self, one_or_first, rows_present):
         # TODO: this is not testing the ORM level "scalar_mapping"
index 0136b6e2966cade51c810468b849b6c49e106be9..a15bf1cfaee19927eaff8fee9846156c73c89528 100644 (file)
@@ -266,16 +266,43 @@ class ResultTest(fixtures.TestBase):
             object(),
         )
 
-        result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)])
+        result = self._fixture(
+            extras=[
+                (ex1a, ex1b),
+                (ex2,),
+                (
+                    ex3a,
+                    ex3b,
+                ),
+            ]
+        )
         eq_(
             result.columns(ex2, ex3b).columns(ex3a).all(),
             [(1,), (2,), (2,), (2,)],
         )
 
-        result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)])
+        result = self._fixture(
+            extras=[
+                (ex1a, ex1b),
+                (ex2,),
+                (
+                    ex3a,
+                    ex3b,
+                ),
+            ]
+        )
         eq_([row._mapping[ex1b] for row in result], [1, 2, 1, 4])
 
-        result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)])
+        result = self._fixture(
+            extras=[
+                (ex1a, ex1b),
+                (ex2,),
+                (
+                    ex3a,
+                    ex3b,
+                ),
+            ]
+        )
         eq_(
             [
                 dict(r)
@@ -906,7 +933,8 @@ class MergeResultTest(fixtures.TestBase):
 
         result = r1.merge(r2, r3, r4)
         eq_(
-            result.first(), (7, "u1"),
+            result.first(),
+            (7, "u1"),
         )
 
     def test_columns(self, merge_fixture):
@@ -1004,7 +1032,8 @@ class OnlyScalarsTest(fixtures.TestBase):
 
         r = r.columns(0).mappings()
         eq_(
-            list(r), [{"a": 1}, {"a": 2}, {"a": 1}, {"a": 1}, {"a": 4}],
+            list(r),
+            [{"a": 1}, {"a": 2}, {"a": 1}, {"a": 1}, {"a": 4}],
         )
 
     def test_scalar_mode_but_accessed_nonscalar_result(self, no_tuple_fixture):
@@ -1037,7 +1066,9 @@ class OnlyScalarsTest(fixtures.TestBase):
         )
 
         r = result.ChunkedIteratorResult(
-            metadata, no_tuple_fixture, source_supports_scalars=True,
+            metadata,
+            no_tuple_fixture,
+            source_supports_scalars=True,
         )
 
         r = r.unique()
@@ -1050,7 +1081,9 @@ class OnlyScalarsTest(fixtures.TestBase):
         )
 
         r = result.ChunkedIteratorResult(
-            metadata, no_tuple_fixture, source_supports_scalars=True,
+            metadata,
+            no_tuple_fixture,
+            source_supports_scalars=True,
         )
 
         r = r.unique()
@@ -1063,7 +1096,9 @@ class OnlyScalarsTest(fixtures.TestBase):
         )
 
         r = result.ChunkedIteratorResult(
-            metadata, no_tuple_fixture, source_supports_scalars=True,
+            metadata,
+            no_tuple_fixture,
+            source_supports_scalars=True,
         )
 
         r = r.scalars().unique()
index c8807df09a9500d043d1df54331b5f0322a192a5..0cbab7f28247ce194806efeee7557b34d686bee7 100644 (file)
@@ -15,7 +15,7 @@ class WarnDeprecatedLimitedTest(fixtures.TestBase):
             )
 
     def test_warn_deprecated_limited_cap(self):
-        """ warn_deprecated_limited() and warn_limited() use
+        """warn_deprecated_limited() and warn_limited() use
         _hash_limit_string
 
         actually just verifying that _hash_limit_string works as expected
index c3f12f9d998cd52393c1a6f67084354e358bda74..ec67a916c870e4e08310ad117d33fb15c678993f 100644 (file)
@@ -1373,7 +1373,9 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_identity_object_no_primary_key(self):
         metadata = MetaData()
         tbl = Table(
-            "test", metadata, Column("id", Integer, Identity(increment=42)),
+            "test",
+            metadata,
+            Column("id", Integer, Identity(increment=42)),
         )
         self.assert_compile(
             schema.CreateTable(tbl),
@@ -1397,7 +1399,12 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL):
         tbl = Table(
             "test",
             metadata,
-            Column("id", Integer, Identity(start=3), nullable=False,),
+            Column(
+                "id",
+                Integer,
+                Identity(start=3),
+                nullable=False,
+            ),
         )
         self.assert_compile(
             schema.CreateTable(tbl),
@@ -1425,7 +1432,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL):
             "test",
             metadata,
             Column("id", Integer, autoincrement=False, primary_key=True),
-            Column("x", Integer, Identity(start=3, increment=42),),
+            Column(
+                "x",
+                Integer,
+                Identity(start=3, increment=42),
+            ),
         )
         self.assert_compile(
             schema.CreateTable(tbl),
@@ -1461,7 +1472,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL):
                 Identity(start=3, increment=42),
                 autoincrement=True,
             ),
-            Column("id2", Integer, Identity(start=7, increment=2),),
+            Column(
+                "id2",
+                Integer,
+                Identity(start=7, increment=2),
+            ),
         )
         # this will be rejected by the database, just asserting this is what
         # the two autoincrements will do right now
@@ -1537,7 +1552,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_identity_object_no_options(self):
         metadata = MetaData()
-        tbl = Table("test", metadata, Column("id", Integer, Identity()),)
+        tbl = Table(
+            "test",
+            metadata,
+            Column("id", Integer, Identity()),
+        )
         self.assert_compile(
             schema.CreateTable(tbl),
             "CREATE TABLE test (id INTEGER NOT NULL IDENTITY)",
index a29d49c4ce8ab304848d89b9451169638dba6ee1..44445595893d145eebfa4b1f0a73e573619e7bb3 100644 (file)
@@ -487,7 +487,9 @@ class IsolationLevelDetectTest(fixtures.TestBase):
 
         result = []
 
-        def fail_on_exec(stmt,):
+        def fail_on_exec(
+            stmt,
+        ):
             if view is not None and view in stmt:
                 result.append(("SERIALIZABLE",))
             else:
@@ -540,7 +542,8 @@ class InvalidTransactionFalsePositiveTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.error_t.insert(), [{"error_code": "01002"}],
+            cls.tables.error_t.insert(),
+            [{"error_code": "01002"}],
         )
 
     def test_invalid_transaction_detection(self, connection):
index e37b388e878fda918dae4cc93f43aab064bb1636..d9dc033e1668349e63add06ac7575e2779d87a22 100644 (file)
@@ -150,9 +150,12 @@ class QueryUnicodeTest(fixtures.TestBase):
             )
         )
         r = connection.execute(t1.select()).first()
-        assert isinstance(r[1], util.text_type), (
-            "%s is %s instead of unicode, working on %s"
-            % (r[1], type(r[1]), meta.bind)
+        assert isinstance(
+            r[1], util.text_type
+        ), "%s is %s instead of unicode, working on %s" % (
+            r[1],
+            type(r[1]),
+            meta.bind,
         )
         eq_(r[1], util.ue("abc \xc3\xa9 def"))
 
index c7d012f5bf1252d92557178b10b886ab5b40555e..d33838b6a399a86273eecd0b4d939faa17d9e503 100644 (file)
@@ -288,7 +288,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         if not exists:
             with expect_raises(exc.NoSuchTableError):
                 Table(
-                    table_name, metadata, autoload_with=connection,
+                    table_name,
+                    metadata,
+                    autoload_with=connection,
                 )
         else:
             tmp_t = Table(table_name, metadata, autoload_with=connection)
@@ -296,7 +298,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
                 tmp_t.select().where(tmp_t.c.id == 2)
             ).fetchall()
             eq_(
-                result, [(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))],
+                result,
+                [(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))],
             )
 
     @testing.provide_metadata
@@ -309,7 +312,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
     )
     def test_has_table_temporary(self, connection, table_name, exists):
         if exists:
-            tt = Table(table_name, self.metadata, Column("id", Integer),)
+            tt = Table(
+                table_name,
+                self.metadata,
+                Column("id", Integer),
+            )
             tt.create(connection)
 
         found_it = testing.db.dialect.has_table(connection, table_name)
@@ -516,13 +523,11 @@ class ReflectHugeViewTest(fixtures.TestBase):
                 for i in range(self.col_num)
             ]
         )
-        self.view_str = view_str = (
-            "CREATE VIEW huge_named_view AS SELECT %s FROM base_table"
-            % (
-                ",".join(
-                    "long_named_column_number_%d" % i
-                    for i in range(self.col_num)
-                )
+        self.view_str = (
+            view_str
+        ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
+            ",".join(
+                "long_named_column_number_%d" % i for i in range(self.col_num)
             )
         )
         assert len(view_str) > 4000
index 2b1db0b413ec9661ecdd65f623dcc1c805847026..44a8f402072edcbf6f46dee59ab6656bb92b00ee 100644 (file)
@@ -31,7 +31,9 @@ class SequenceTest(fixtures.TablesTest):
             "bigint_seq_t",
             metadata,
             Column(
-                "id", BIGINT, default=Sequence("bigint_seq", start=3000000000),
+                "id",
+                BIGINT,
+                default=Sequence("bigint_seq", start=3000000000),
             ),
             Column("txt", String(50)),
         )
@@ -43,7 +45,9 @@ class SequenceTest(fixtures.TablesTest):
                 "id",
                 DECIMAL(10, 0),
                 default=Sequence(
-                    "decimal_seq", data_type=DECIMAL(10, 0), start=3000000000,
+                    "decimal_seq",
+                    data_type=DECIMAL(10, 0),
+                    start=3000000000,
                 ),
             ),
             Column("txt", String(50)),
index 34b026cf37158ef362b8a4c39302516f12a90b17..cf5b66d57f0dc8fc5a9e3a79247fbf4c90e30f4a 100644 (file)
@@ -789,7 +789,9 @@ class TypeRoundTripTest(
     @testing.metadata_fixture()
     def datetimeoffset_fixture(self, metadata):
         t = Table(
-            "test_dates", metadata, Column("adatetimeoffset", DATETIMEOFFSET),
+            "test_dates",
+            metadata,
+            Column("adatetimeoffset", DATETIMEOFFSET),
         )
 
         return t
@@ -886,7 +888,8 @@ class TypeRoundTripTest(
                 return
 
             conn.execute(
-                t.insert(), adatetimeoffset=dto_param_value,
+                t.insert(),
+                adatetimeoffset=dto_param_value,
             )
 
             row = conn.execute(t.select()).first()
index a555e539605612580ab40f2f41d15011f705c648..abd3a491ff1a8a1d533fa8e091cf0e3a6de954b3 100644 (file)
@@ -296,7 +296,12 @@ class ParseVersionTest(fixtures.TestBase):
 
     @testing.combinations(
         ((10, 2, 7), "10.2.7-MariaDB", (10, 2, 7), True),
-        ((10, 2, 7), "5.6.15.10.2.7-MariaDB", (5, 6, 15, 10, 2, 7), True,),
+        (
+            (10, 2, 7),
+            "5.6.15.10.2.7-MariaDB",
+            (5, 6, 15, 10, 2, 7),
+            True,
+        ),
         ((5, 0, 51, 24), "5.0.51a.24+lenny5", (5, 0, 51, 24), False),
         ((10, 2, 10), "10.2.10-MariaDB", (10, 2, 10), True),
         ((5, 7, 20), "5.7.20", (5, 7, 20), False),
index 15875dd542d55501d8577c1926024ae1fdd788cf..e23ff00d0de31d157984d654d741955980fba6f3 100644 (file)
@@ -29,7 +29,8 @@ class IdiosyncrasyTest(fixtures.TestBase):
     def test_is_boolean_symbols_despite_no_native(self, connection):
 
         is_(
-            connection.scalar(select(cast(true().is_(true()), Boolean))), True,
+            connection.scalar(select(cast(true().is_(true()), Boolean))),
+            True,
         )
 
         is_(
index 8983522c14b40c65e3aa060637d5eab84b484e38..f4621dce332596aedc8f0a3f204b7fe85efd6f7d 100644 (file)
@@ -491,7 +491,10 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
         t.create(connection)
         connection.execute(
             t.insert(),
-            dict(scale_value=45.768392065789, unscale_value=45.768392065789,),
+            dict(
+                scale_value=45.768392065789,
+                unscale_value=45.768392065789,
+            ),
         )
         result = connection.scalar(select(t.c.scale_value))
         eq_(result, decimal.Decimal("45.768392065789"))
@@ -1054,7 +1057,8 @@ class EnumSetTest(
             dict(e1="a", e2="a", e3="a", e4="'a'", e5="a,b"),
         )
         connection.execute(
-            set_table.insert(), dict(e1="b", e2="b", e3="b", e4="b", e5="a,b"),
+            set_table.insert(),
+            dict(e1="b", e2="b", e3="b", e4="b", e5="a,b"),
         )
 
         expected = [
@@ -1065,7 +1069,13 @@ class EnumSetTest(
                 set(["'a'"]),
                 set(["a", "b"]),
             ),
-            (set(["b"]), set(["b"]), set(["b"]), set(["b"]), set(["a", "b"]),),
+            (
+                set(["b"]),
+                set(["b"]),
+                set(["b"]),
+                set(["b"]),
+                set(["a", "b"]),
+            ),
         ]
         res = connection.execute(set_table.select()).fetchall()
 
@@ -1264,7 +1274,13 @@ class EnumSetTest(
             Column("e6", mysql.SET("", "a", retrieve_as_bitwise=True)),
             Column(
                 "e7",
-                mysql.SET("", "'a'", "b'b", "'", retrieve_as_bitwise=True,),
+                mysql.SET(
+                    "",
+                    "'a'",
+                    "b'b",
+                    "'",
+                    retrieve_as_bitwise=True,
+                ),
             ),
         )
 
index ab4bb1e08aafeebb33a35430ba3007e94ff2cd3d..cd0e11e588587c43defcb104616f526697647c03 100644 (file)
@@ -103,10 +103,22 @@ class EncodingErrorsTest(fixtures.TestBase):
         )
 
     _oracle_char_combinations = testing.combinations(
-        ("STRING", cx_Oracle_STRING,),
-        ("FIXED_CHAR", cx_Oracle_FIXED_CHAR,),
-        ("CLOB", cx_Oracle_CLOB,),
-        ("NCLOB", cx_Oracle_NCLOB,),
+        (
+            "STRING",
+            cx_Oracle_STRING,
+        ),
+        (
+            "FIXED_CHAR",
+            cx_Oracle_FIXED_CHAR,
+        ),
+        (
+            "CLOB",
+            cx_Oracle_CLOB,
+        ),
+        (
+            "NCLOB",
+            cx_Oracle_NCLOB,
+        ),
         argnames="cx_oracle_type",
         id_="ia",
     )
@@ -149,7 +161,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python2
     def test_encoding_errors_sqla_py2k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         ignore_dialect = cx_oracle.dialect(
             dbapi=cx_Oracle, encoding_errors="ignore"
@@ -167,7 +181,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python2
     def test_no_encoding_errors_sqla_py2k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
 
@@ -183,7 +199,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python3
     def test_encoding_errors_cx_oracle_py3k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         ignore_dialect = cx_oracle.dialect(
             dbapi=cx_Oracle, encoding_errors="ignore"
@@ -200,7 +218,10 @@ class EncodingErrorsTest(fixtures.TestBase):
             cursor.mock_calls,
             [
                 mock.call.var(
-                    mock.ANY, None, cursor.arraysize, encodingErrors="ignore",
+                    mock.ANY,
+                    None,
+                    cursor.arraysize,
+                    encodingErrors="ignore",
                 )
             ],
         )
@@ -208,7 +229,9 @@ class EncodingErrorsTest(fixtures.TestBase):
     @_oracle_char_combinations
     @testing.requires.python3
     def test_no_encoding_errors_cx_oracle_py3k(
-        self, cx_Oracle, cx_oracle_type,
+        self,
+        cx_Oracle,
+        cx_oracle_type,
     ):
         plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
 
index b9975f65ee53e14e4e24d256e65e7c96ca5e5761..d2780fa29c548e87e7609a3f4cad74b6369cb3eb 100644 (file)
@@ -217,7 +217,12 @@ drop synonym %(test_schema)s.local_table;
             ),
             {"text": "my table comment"},
         )
-        eq_(insp.get_table_comment("parent",), {"text": "my local comment"})
+        eq_(
+            insp.get_table_comment(
+                "parent",
+            ),
+            {"text": "my local comment"},
+        )
         eq_(
             insp.get_table_comment(
                 "parent", schema=testing.db.dialect.default_schema_name
index dbb380d8d39190b6c023c524545b3eca67e43735..0b000e89d4fa8f186f296f7a72987b3b9a357dd9 100644 (file)
@@ -992,7 +992,8 @@ class LOBFetchTest(fixtures.TablesTest):
         )
         with engine.connect() as conn:
             result = exec_sql(
-                conn, "select id, data, bindata from z_test order by id",
+                conn,
+                "select id, data, bindata from z_test order by id",
             )
             results = result.fetchall()
 
index 64e9451243e3c06494ac4e5b8a7622f833a76e76..eaa5597d06bb3487640a0e9c22b9eb8aa707e69f 100644 (file)
@@ -1267,7 +1267,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            c.any(5), "%(param_1)s = ANY (x)", checkparams={"param_1": 5},
+            c.any(5),
+            "%(param_1)s = ANY (x)",
+            checkparams={"param_1": 5},
         )
 
         self.assert_compile(
@@ -1277,7 +1279,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            c.all(5), "%(param_1)s = ALL (x)", checkparams={"param_1": 5},
+            c.all(5),
+            "%(param_1)s = ALL (x)",
+            checkparams={"param_1": 5},
         )
 
         self.assert_compile(
@@ -2300,8 +2304,7 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
 
 class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL):
 
-    """Tests for full text searching
-    """
+    """Tests for full text searching"""
 
     __dialect__ = postgresql.dialect()
 
@@ -2322,8 +2325,8 @@ class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def _raise_query(self, q):
         """
-            useful for debugging. just do...
-            self._raise_query(q)
+        useful for debugging. just do...
+        self._raise_query(q)
         """
         c = q.compile(dialect=postgresql.dialect())
         raise ValueError(c)
index 971d4f12f71af06dfd85272cc513c7c6cb8fe47c..43c5aea21afaddf704a3eeb7c2a8423dfb217cf0 100644 (file)
@@ -473,7 +473,8 @@ class ExecutemanyValuesInsertsTest(ExecuteManyMode, fixtures.TablesTest):
         assert t.c.id not in result.keys()
         assert not result._soft_closed
         assert isinstance(
-            result.cursor_strategy, _cursor.FullyBufferedCursorFetchStrategy,
+            result.cursor_strategy,
+            _cursor.FullyBufferedCursorFetchStrategy,
         )
         assert not result.cursor.closed
         assert not result.closed
@@ -1020,7 +1021,10 @@ $$ LANGUAGE plpgsql;
             eq_(
                 conn.scalar(
                     select(
-                        cast(literal(quoted_name("some_name", False)), String,)
+                        cast(
+                            literal(quoted_name("some_name", False)),
+                            String,
+                        )
                     )
                 ),
                 "some_name",
index b8de35f421ee3b7d58066d26e715fe828d77b41b..2c67957197cca7ee27a673af02abbcf9537044b1 100644 (file)
@@ -900,8 +900,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
 
     @testing.provide_metadata
     def test_index_reflection(self):
-        """ Reflecting expression-based indexes should warn
-        """
+        """Reflecting expression-based indexes should warn"""
 
         metadata = self.metadata
 
@@ -953,8 +952,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
 
     @testing.provide_metadata
     def test_index_reflection_partial(self, connection):
-        """Reflect the filter defintion on partial indexes
-        """
+        """Reflect the filter defintion on partial indexes"""
 
         metadata = self.metadata
 
index 5def5aa5b77234c3ffd33485b1c7731be7ee9294..509603e1be7ff54001344a3f9bb293d85f2aef8d 100644 (file)
@@ -1436,7 +1436,9 @@ class ArrayRoundTripTest(object):
 
         stmt = select(
             func.array_cat(
-                array([1, 2, 3]), array([4, 5, 6]), type_=self.ARRAY(Integer),
+                array([1, 2, 3]),
+                array([4, 5, 6]),
+                type_=self.ARRAY(Integer),
             )[2:5]
         )
         eq_(connection.execute(stmt).scalar(), [2, 3, 4, 5])
@@ -1879,7 +1881,10 @@ class ArrayEnum(fixtures.TestBase):
                 c = "ccc"
 
             tbl.append_column(
-                Column("pyenum_col", array_cls(enum_cls(MyEnum)),),
+                Column(
+                    "pyenum_col",
+                    array_cls(enum_cls(MyEnum)),
+                ),
             )
 
         self.metadata.create_all(connection)
@@ -1918,7 +1923,10 @@ class ArrayJSON(fixtures.TestBase):
             "json_table",
             self.metadata,
             Column("id", Integer, primary_key=True),
-            Column("json_col", array_cls(json_cls),),
+            Column(
+                "json_col",
+                array_cls(json_cls),
+            ),
         )
 
         self.metadata.create_all(connection)
@@ -2166,7 +2174,8 @@ class SpecialTypesTest(fixtures.TablesTest, ComparesTables):
         connection.execute(t.update(), data="'a' 'cat' 'fat' 'mat' 'sat'")
 
         eq_(
-            connection.scalar(select(t.c.data)), "'a' 'cat' 'fat' 'mat' 'sat'",
+            connection.scalar(select(t.c.data)),
+            "'a' 'cat' 'fat' 'mat' 'sat'",
         )
 
     @testing.provide_metadata
@@ -3227,7 +3236,8 @@ class JSONRoundTripTest(fixtures.TablesTest):
 
     def _test_insert_none_as_null(self, conn):
         conn.execute(
-            self.tables.data_table.insert(), {"name": "r1", "nulldata": None},
+            self.tables.data_table.insert(),
+            {"name": "r1", "nulldata": None},
         )
         self._assert_column_is_NULL(conn, column="nulldata")
 
index 339a7c479e262005102f0980a3cd8f9b36270a8d..4a8f6fd788c7bf8186858f9a0cb4a18a424e5449 100644 (file)
@@ -71,9 +71,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
     __only_on__ = "sqlite"
 
     def test_boolean(self):
-        """Test that the boolean only treats 1 as True
-
-        """
+        """Test that the boolean only treats 1 as True"""
 
         meta = MetaData(testing.db)
         t = Table(
@@ -629,7 +627,7 @@ class DialectTest(
         'true', 'false', and 'column' are undocumented reserved words
         when used as column identifiers (as of 3.5.1).  Covering them
         here to ensure they remain in place if the dialect's
-        reserved_words set is updated in the future. """
+        reserved_words set is updated in the future."""
 
         meta = MetaData(testing.db)
         t = Table(
@@ -681,7 +679,7 @@ class DialectTest(
 
     @testing.provide_metadata
     def test_quoted_identifiers_functional_two(self):
-        """"test the edgiest of edge cases, quoted table/col names
+        """ "test the edgiest of edge cases, quoted table/col names
         that start and end with quotes.
 
         SQLite claims to have fixed this in
@@ -1364,7 +1362,11 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
     def test_empty_insert_pk1(self, connection):
         self._test_empty_insert(
             connection,
-            Table("a", MetaData(), Column("id", Integer, primary_key=True),),
+            Table(
+                "a",
+                MetaData(),
+                Column("id", Integer, primary_key=True),
+            ),
         )
 
     def test_empty_insert_pk2(self, connection):
@@ -1468,7 +1470,10 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
         self._test_empty_insert(
             connection,
             Table(
-                "f", MetaData(), Column("x", Integer), Column("y", Integer),
+                "f",
+                MetaData(),
+                Column("x", Integer),
+                Column("y", Integer),
             ),
         )
 
index 73681ee51c2feab1cfa19cc3f70a0a0a9839f671..a1d6d2725ac6660b8ef52d62bfc66c6509ee9dc4 100644 (file)
@@ -988,7 +988,8 @@ class CompiledCacheTest(fixtures.TestBase):
             eq_(conn.scalar(stmt), 2)
 
         with config.db.connect().execution_options(
-            compiled_cache=cache, schema_translate_map={None: None},
+            compiled_cache=cache,
+            schema_translate_map={None: None},
         ) as conn:
             # should use default schema again even though statement
             # was compiled with test_schema in the map
@@ -1017,7 +1018,10 @@ class MockStrategyTest(fixtures.TestBase):
             "testtable",
             metadata,
             Column(
-                "pk", Integer, Sequence("testtable_pk_seq"), primary_key=True,
+                "pk",
+                Integer,
+                Sequence("testtable_pk_seq"),
+                primary_key=True,
             ),
         )
 
@@ -1700,7 +1704,11 @@ class EngineEventsTest(fixtures.TestBase):
 
             compiled = [
                 ("CREATE TABLE t1", {}, None),
-                ("INSERT INTO t1 (c1, c2)", {"c2": "some data", "c1": 5}, (),),
+                (
+                    "INSERT INTO t1 (c1, c2)",
+                    {"c2": "some data", "c1": 5},
+                    (),
+                ),
                 ("INSERT INTO t1 (c1, c2)", {"c1": 6}, ()),
                 ("select * from t1", {}, None),
                 ("DROP TABLE t1", {}, None),
@@ -1948,7 +1956,12 @@ class EngineEventsTest(fixtures.TestBase):
         t = Table(
             "t",
             self.metadata,
-            Column("x", Integer, Sequence("t_id_seq"), primary_key=True,),
+            Column(
+                "x",
+                Integer,
+                Sequence("t_id_seq"),
+                primary_key=True,
+            ),
             implicit_returning=False,
         )
         self.metadata.create_all(engine)
@@ -2601,7 +2614,9 @@ class HandleErrorTest(fixtures.TestBase):
             Mock(side_effect=tsa.exc.InvalidRequestError("duplicate col")),
         ):
             assert_raises(
-                tsa.exc.InvalidRequestError, conn.execute, text("select 1"),
+                tsa.exc.InvalidRequestError,
+                conn.execute,
+                text("select 1"),
             )
 
         # cursor is closed
@@ -2999,7 +3014,12 @@ class DialectEventTest(fixtures.TestBase):
             stmt = "insert into table foo"
             params = {"foo": "bar"}
             ctx = dialect.execution_ctx_cls._init_statement(
-                dialect, conn, conn.connection, {}, stmt, [params],
+                dialect,
+                conn,
+                conn.connection,
+                {},
+                stmt,
+                [params],
             )
 
             conn._cursor_execute(ctx.cursor, stmt, params, ctx)
index 99df6a1e92d2d268f5782c01ba95ecf8b07317e0..b39d5f8add8cc30e631d0d7e246ff477421b1f45 100644 (file)
@@ -172,7 +172,11 @@ class URLTest(fixtures.TestBase):
         is_false(url1 == url3)
 
     @testing.combinations(
-        "drivername", "username", "password", "host", "database",
+        "drivername",
+        "username",
+        "password",
+        "host",
+        "database",
     )
     def test_component_set(self, component):
         common_url = (
@@ -251,7 +255,9 @@ class URLTest(fixtures.TestBase):
         )
 
     @testing.combinations(
-        "username", "host", "database",
+        "username",
+        "host",
+        "database",
     )
     def test_only_str_constructor(self, argname):
         assert_raises_message(
@@ -263,7 +269,9 @@ class URLTest(fixtures.TestBase):
         )
 
     @testing.combinations(
-        "username", "host", "database",
+        "username",
+        "host",
+        "database",
     )
     def test_only_str_set(self, argname):
         u1 = url.URL.create("somedriver")
@@ -735,7 +743,10 @@ class TestRegNewDBAPI(fixtures.TestBase):
             "sqlite:///?plugin=engineplugin1&foo=bar&myplugin1_arg=bat"
             "&plugin=engineplugin2&myplugin2_arg=hoho"
         )
-        e = create_engine(url_str, logging_name="foob",)
+        e = create_engine(
+            url_str,
+            logging_name="foob",
+        )
         eq_(e.dialect.name, "sqlite")
         eq_(e.logging_name, "bar")
 
index cf984c6580065a92304d4e8d6f83f52a7e50c9fc..194de9a7d204b3a71b0ad5c114b6c6512bd073fd 100644 (file)
@@ -99,7 +99,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         self.assert_tables_equal(addresses, reflected_addresses)
 
     @testing.provide_metadata
-    def test_autoload_with_imply_autoload(self,):
+    def test_autoload_with_imply_autoload(
+        self,
+    ):
         meta = self.metadata
         t = Table(
             "t",
@@ -886,7 +888,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     def test_override_existing_fk(self):
         """test that you can override columns and specify new foreign
         keys to other reflected tables, on columns which *do* already
-        have that foreign key, and that the FK is not duped. """
+        have that foreign key, and that the FK is not duped."""
 
         meta = self.metadata
         Table(
@@ -2294,9 +2296,15 @@ class ComputedColumnTest(fixtures.ComputedReflectionFixtureTest):
         )
         if testing.requires.computed_columns_virtual.enabled:
             self.check_table_column(
-                table, "computed_virtual", "normal+2", False,
+                table,
+                "computed_virtual",
+                "normal+2",
+                False,
             )
         if testing.requires.computed_columns_stored.enabled:
             self.check_table_column(
-                table, "computed_stored", "normal-42", True,
+                table,
+                "computed_stored",
+                "normal-42",
+                True,
             )
index 4b6cb6e0c239a25a5304d021279088ae512f79d9..d0774e84641b706453b853a75dac01567a49eaea 100644 (file)
@@ -196,7 +196,8 @@ class TransactionTest(fixtures.TestBase):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 0,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                0,
             )
 
     def test_inactive_due_to_subtransaction_no_commit(self, local_connection):
@@ -1022,7 +1023,7 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
     """test the 'autocommit' flag on select() and text() objects.
 
     Requires PostgreSQL so that we may define a custom function which
-    modifies the database. """
+    modifies the database."""
 
     __only_on__ = "postgresql"
 
@@ -1469,7 +1470,8 @@ class ConnectionCharacteristicTest(fixtures.TestBase):
                 c1 = c1.execution_options(foo="new_foo")
 
                 eq_(
-                    engine.dialect.get_foo(c1.connection), "new_foo",
+                    engine.dialect.get_foo(c1.connection),
+                    "new_foo",
                 )
         # stays outside of transaction
         eq_(engine.dialect.get_foo(c1.connection), "new_foo")
@@ -1513,7 +1515,8 @@ class ConnectionCharacteristicTest(fixtures.TestBase):
 
         conn = eng.connect()
         eq_(
-            eng.dialect.get_foo(conn.connection), "new_value",
+            eng.dialect.get_foo(conn.connection),
+            "new_value",
         )
 
 
@@ -1719,7 +1722,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     @testing.requires.autocommit
@@ -1766,13 +1770,15 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
             assert not conn.in_transaction()
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
             conn.execute(users.insert(), {"user_id": 2, "user_name": "name 2"})
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
 
             assert conn.in_transaction()
@@ -1780,7 +1786,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
             assert not conn.in_transaction()
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     def test_rollback_on_close(self):
@@ -1865,7 +1872,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
             conn.rollback()
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     def test_rollback_no_begin(self):
@@ -1889,7 +1897,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
             conn.commit()
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     def test_no_double_begin(self):
@@ -1910,7 +1919,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 0,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                0,
             )
 
     def test_begin_block(self):
@@ -1921,7 +1931,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     @testing.requires.savepoints
@@ -1935,17 +1946,20 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
             conn.execute(users.insert(), {"user_id": 2, "user_name": "name2"})
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
             savepoint.rollback()
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     @testing.requires.savepoints
@@ -1959,17 +1973,20 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
             conn.execute(users.insert(), {"user_id": 2, "user_name": "name2"})
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
             savepoint.commit()
 
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
 
     @testing.requires.savepoints
@@ -1988,7 +2005,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 0,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                0,
             )
 
     @testing.requires.savepoints
@@ -2014,7 +2032,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 2,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                2,
             )
 
     @testing.requires.savepoints
@@ -2036,7 +2055,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 3,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                3,
             )
 
     @testing.requires.savepoints
@@ -2068,7 +2088,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 1,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                1,
             )
 
     @testing.requires.savepoints
@@ -2097,5 +2118,6 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest):
 
         with testing.db.connect() as conn:
             eq_(
-                conn.scalar(select(func.count(1)).select_from(users)), 0,
+                conn.scalar(select(func.count(1)).select_from(users)),
+                0,
             )
index a5d167c2e73f86222283318044102c7e41a59216..7c7d90e2175a97f22f842e873a1dff810b94f82b 100644 (file)
@@ -146,11 +146,16 @@ class AsyncEngineTest(EngineFixture):
     @async_test
     async def test_pool_exhausted(self, async_engine):
         engine = create_async_engine(
-            testing.db.url, pool_size=1, max_overflow=0, pool_timeout=0.1,
+            testing.db.url,
+            pool_size=1,
+            max_overflow=0,
+            pool_timeout=0.1,
         )
         async with engine.connect():
             await assert_raises_message_async(
-                asyncio.TimeoutError, "", engine.connect(),
+                asyncio.TimeoutError,
+                "",
+                engine.connect(),
             )
 
     @async_test
@@ -190,7 +195,8 @@ class AsyncResultTest(EngineFixture):
                 )
             elif filter_ == "scalars":
                 eq_(
-                    all_, ["name%d" % i for i in range(1, 20)],
+                    all_,
+                    ["name%d" % i for i in range(1, 20)],
                 )
             else:
                 eq_(all_, [(i, "name%d" % i) for i in range(1, 20)])
@@ -224,7 +230,8 @@ class AsyncResultTest(EngineFixture):
                 )
             elif filter_ == "scalars":
                 eq_(
-                    rows, ["name%d" % i for i in range(1, 20)],
+                    rows,
+                    ["name%d" % i for i in range(1, 20)],
                 )
             else:
                 eq_(rows, [(i, "name%d" % i) for i in range(1, 20)])
index 77d81b4f7b965d199896cc6eed19b6bab3c8ac98..3cb29c67dc5868250a7bbbf373a48e9341fea278 100644 (file)
@@ -3180,7 +3180,10 @@ class MultiOwnerTest(
         self._assert_raises_ambiguous(lambda: D.c_data == 5)
 
     def test_rel_expressions_not_available(self):
-        B, D, = self.classes("B", "D")
+        (
+            B,
+            D,
+        ) = self.classes("B", "D")
 
         self._assert_raises_ambiguous(lambda: D.c_data.any(B.id == 5))
 
index 6aa584017742e0b66d67528204513673b77ecd41..eff3ccdaef2c26d75abec2a8503d8194a22a34e2 100644 (file)
@@ -1022,7 +1022,9 @@ class CustomIntegrationTest(testing.AssertsCompiledSQL, BakedTest):
 
             if ckey is not None:
                 return get_value(
-                    ckey, CachingQuery.cache, orm_context.invoke_statement,
+                    ckey,
+                    CachingQuery.cache,
+                    orm_context.invoke_statement,
                 )
 
         return s1
@@ -1067,13 +1069,15 @@ class CustomIntegrationTest(testing.AssertsCompiledSQL, BakedTest):
         q = sess.query(User).filter(User.id == 7).set_cache_key("user7")
 
         eq_(
-            sess.execute(q).all(), [(User(id=7, addresses=[Address(id=1)]),)],
+            sess.execute(q).all(),
+            [(User(id=7, addresses=[Address(id=1)]),)],
         )
 
         eq_(list(q.cache), ["user7"])
 
         eq_(
-            sess.execute(q).all(), [(User(id=7, addresses=[Address(id=1)]),)],
+            sess.execute(q).all(),
+            [(User(id=7, addresses=[Address(id=1)]),)],
         )
 
     def test_use_w_baked(self):
index 455e26b1440eba17dba2546722f5f96ea9b6ee49..a8c17d7aca2548f8ccb74df4cd5328770c7471fb 100644 (file)
@@ -269,7 +269,8 @@ class ShardTest(object):
             )
         ).scalars()
         eq_(
-            {c.city for c in asia_and_europe}, {"Tokyo", "London", "Dublin"},
+            {c.city for c in asia_and_europe},
+            {"Tokyo", "London", "Dublin"},
         )
 
     def test_roundtrip(self):
@@ -287,18 +288,21 @@ class ShardTest(object):
             WeatherLocation.continent == "North America"
         )
         eq_(
-            {c.city for c in north_american_cities}, {"New York", "Toronto"},
+            {c.city for c in north_american_cities},
+            {"New York", "Toronto"},
         )
         asia_and_europe = sess.query(WeatherLocation).filter(
             WeatherLocation.continent.in_(["Europe", "Asia"])
         )
         eq_(
-            {c.city for c in asia_and_europe}, {"Tokyo", "London", "Dublin"},
+            {c.city for c in asia_and_europe},
+            {"Tokyo", "London", "Dublin"},
         )
 
         # inspect the shard token stored with each instance
         eq_(
-            {inspect(c).key[2] for c in asia_and_europe}, {"europe", "asia"},
+            {inspect(c).key[2] for c in asia_and_europe},
+            {"europe", "asia"},
         )
 
         eq_(
@@ -545,7 +549,9 @@ class ShardTest(object):
         sess.execute(
             update(Report)
             .filter(Report.temperature >= 80)
-            .values({"temperature": Report.temperature + 6},)
+            .values(
+                {"temperature": Report.temperature + 6},
+            )
             .execution_options(synchronize_session="evaluate")
         )
 
@@ -579,7 +585,9 @@ class ShardTest(object):
         # four shards
         sess.execute(
             update(Report)
-            .values({"temperature": Report.temperature + 6},)
+            .values(
+                {"temperature": Report.temperature + 6},
+            )
             .execution_options(synchronize_session="fetch")
         )
 
@@ -783,8 +791,7 @@ class MultipleDialectShardTest(ShardTest, fixtures.TestBase):
 
 
 class SelectinloadRegressionTest(fixtures.DeclarativeMappedTest):
-    """test #4175
-    """
+    """test #4175"""
 
     @classmethod
     def setup_classes(cls):
index 864174d96f8f7eb55d6ff1ec2b32eac5b414409b..56679a80015c78ada348a28bb8079c8efffcadda 100644 (file)
@@ -15,9 +15,7 @@ __all__ = ()
 
 
 class FixtureTest(fixtures.MappedTest):
-    """A MappedTest pre-configured with a common set of fixtures.
-
-    """
+    """A MappedTest pre-configured with a common set of fixtures."""
 
     run_define_tables = "once"
     run_setup_classes = "once"
index 290c83eae9f4904172d4ead17c189201f1282d2d..d8847ed40299dbe4c64f9f7d4b38bd07af8e06b6 100644 (file)
@@ -887,9 +887,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
         )
 
     def test_columns_single_inheritance_cascading_resolution_pk(self):
-        """An additional test for #4352 in terms of the requested use case.
-
-        """
+        """An additional test for #4352 in terms of the requested use case."""
 
         class TestBase(Base):
             __abstract__ = True
index 60c488be36904b96a04e891302546641c3dd07fc..bce554f30d7b8f6899d9857ace53ad7107576b31 100644 (file)
@@ -38,7 +38,10 @@ class ABCTest(fixtures.MappedTest):
         ta = ["a", metadata]
         ta.append(
             Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True,
+                "id",
+                Integer,
+                primary_key=True,
+                test_needs_autoincrement=True,
             )
         ),
         ta.append(Column("a_data", String(30)))
index 2767607cb0ad58fb67254d02b71eb2e3d447f512..ce8d76a53373776e88d81e58126ccb69f22da3f4 100644 (file)
@@ -656,7 +656,9 @@ class RelationshipTest4(fixtures.MappedTest):
 
         def go():
             testcar = session.get(
-                Car, car1.car_id, options=[joinedload("employee")],
+                Car,
+                car1.car_id,
+                options=[joinedload("employee")],
             )
             assert str(testcar.employee) == "Engineer E4, status X"
 
index 9db11d362aa5ec3bf4872d561203ded6b5484b67..af960625e7dbd0b96ee9382acccb2bc0b36a3ccb 100644 (file)
@@ -895,8 +895,7 @@ class PolymorphicAttributeManagementTest(fixtures.MappedTest):
         )
 
     def test_entirely_oob_assignment(self):
-        """test warn on an unknown polymorphic identity.
-        """
+        """test warn on an unknown polymorphic identity."""
         B = self.classes.B
 
         sess = Session()
@@ -2615,7 +2614,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
             eq_(s1.sub, "s1sub")
 
     def test_optimized_passes(self):
-        """"test that the 'optimized load' routine doesn't crash when
+        """ "test that the 'optimized load' routine doesn't crash when
         a column in the join condition is not available."""
 
         base, sub = self.tables.base, self.tables.sub
index 69a485d41d89b85f2cbba2e0b30e24ace1d95317..dd3ca4821caf530061c78b23b0e576f074c81641 100644 (file)
@@ -285,7 +285,8 @@ class _PolymorphicTestBase(object):
     def test_filter_on_subclass_one_future(self):
         sess = create_session(future=True)
         eq_(
-            sess.execute(select(Engineer)).scalar(), Engineer(name="dilbert"),
+            sess.execute(select(Engineer)).scalar(),
+            Engineer(name="dilbert"),
         )
 
     def test_filter_on_subclass_two(self):
@@ -1567,8 +1568,7 @@ class _PolymorphicTestBase(object):
             )
 
     def test_self_referential_two_point_five(self):
-        """Using two aliases, the above case works.
-        """
+        """Using two aliases, the above case works."""
         sess = create_session()
         palias = aliased(Person)
         palias2 = aliased(Person)
@@ -1613,7 +1613,8 @@ class _PolymorphicTestBase(object):
         stmt2 = select(pa1, pa2).order_by(pa1.person_id, pa2.person_id)
 
         eq_(
-            sess.execute(stmt2).unique().all(), expected,
+            sess.execute(stmt2).unique().all(),
+            expected,
         )
 
     def test_self_referential_two_point_five_future(self):
@@ -1641,7 +1642,8 @@ class _PolymorphicTestBase(object):
         stmt2 = select(pa1, pa2).order_by(pa1.person_id, pa2.person_id)
 
         eq_(
-            sess.execute(stmt2).unique().all(), expected,
+            sess.execute(stmt2).unique().all(),
+            expected,
         )
 
     def test_nesting_queries(self):
@@ -1755,7 +1757,8 @@ class _PolymorphicTestBase(object):
             ("vlad", "Elbonia, Inc."),
         ]
         eq_(
-            q(self, sess).all(), expected,
+            q(self, sess).all(),
+            expected,
         )
 
     def test_mixed_entities_two(self):
index 5fd2c5a6f63fa3b0fba8cc47c6906487fb9fe0d8..35c7565fb9d66e7eca638187e0f865215fd03931 100644 (file)
@@ -325,7 +325,7 @@ class InheritTest(fixtures.MappedTest):
         """this tests the RasterDocument being attached to the Assembly, but
         *not* the Document.  this means only a "sub-class" task, i.e.
         corresponding to an inheriting mapper but not the base mapper,
-        is created. """
+        is created."""
 
         product_mapper = mapper(
             Product,
index 8590949a771d6a9b34ff1ffd5af51a7365751a9c..03fd05bd5c944f38bb1f9e79c09cdcbea61277f9 100644 (file)
@@ -1408,9 +1408,7 @@ class SameNamedPropTwoPolymorphicSubClassesTest(fixtures.MappedTest):
 
 
 class SubClassToSubClassFromParentTest(fixtures.MappedTest):
-    """test #2617
-
-    """
+    """test #2617"""
 
     run_setup_classes = "once"
     run_setup_mappers = "once"
index 68548706e78ced1a49938f6dc0ee536fe6d9924d..8820aa6a459b0ab1b0c9d4b909c4be0157ed06fa 100644 (file)
@@ -378,7 +378,8 @@ class AttributesTest(fixtures.ORMTest):
             return b.name
 
         assert_raises(
-            orm_exc.UnmappedInstanceError, go,
+            orm_exc.UnmappedInstanceError,
+            go,
         )
 
     def test_del_scalar_nonobject(self):
@@ -597,7 +598,7 @@ class AttributesTest(fixtures.ORMTest):
 
     def test_lazytrackparent(self):
         """test that the "hasparent" flag works properly
-           when lazy loaders and backrefs are used
+        when lazy loaders and backrefs are used
 
         """
 
@@ -859,7 +860,7 @@ class AttributesTest(fixtures.ORMTest):
         """changeset: 1633 broke ability to use ORM to map classes with
         unusual descriptor attributes (for example, classes that inherit
         from ones implementing zope.interface.Interface). This is a
-        simple regression test to prevent that defect. """
+        simple regression test to prevent that defect."""
 
         class des(object):
             def __get__(self, instance, owner):
@@ -1111,7 +1112,7 @@ class UtilTest(fixtures.ORMTest):
 
     def test_set_commited_value_none_uselist(self):
         """test that set_committed_value->None to a uselist generates an
-        empty list """
+        empty list"""
 
         class Foo(object):
             pass
index 35735a79cae6571f1afcef1c6b6418d8b0883e89..fef827d833b6b935b644267a03e404394b277fc0 100644 (file)
@@ -203,7 +203,10 @@ class BindIntegrationTest(_fixtures.FixtureTest):
             },
             "e2",
         ),
-        (lambda User: {"clause": select(1).where(User.name == "ed")}, "e1",),
+        (
+            lambda User: {"clause": select(1).where(User.name == "ed")},
+            "e1",
+        ),
         (lambda: {"clause": select(1)}, "e3"),
         (lambda User: {"clause": Query([User])._statement_20()}, "e1"),
         (lambda: {"clause": Query([1])._statement_20()}, "e3"),
index 45a60a5cb92ff6848b69ab66bd475c4e6609e269..59d74701221f09950c2b931ed778812c1bf1a624 100644 (file)
@@ -492,7 +492,8 @@ class PolyCacheKeyTest(CacheKeyFixture, _poly_fixtures._Polymorphic):
             )
 
         self._run_cache_key_fixture(
-            lambda: stmt_20(one(), two(), three()), compare_values=True,
+            lambda: stmt_20(one(), two(), three()),
+            compare_values=True,
         )
 
 
index 5a139038b0b47a2eec13e7602d992fa4784bde40..6a916e28a818eb4e7d811b71165e5ce08c1e72e8 100644 (file)
@@ -3827,7 +3827,9 @@ class O2MConflictTest(fixtures.MappedTest):
 
 
 class PartialFlushTest(fixtures.MappedTest):
-    """test cascade behavior as it relates to object lists passed to flush().
+    """test cascade behavior as it relates to object lists passed
+    to flush().
+
     """
 
     @classmethod
index e084d90d9dc030c346d252b8e5ffd8725df690e4..a164034daa5307eabc6f160f8c2d7d92a51cf0e3 100644 (file)
@@ -747,7 +747,9 @@ class MappedSelectTest(fixtures.MappedTest):
 
         desc_values = (
             select(values, descriptions.c.d1, descriptions.c.d2)
-            .where(descriptions.c.id == values.c.description_id,)
+            .where(
+                descriptions.c.id == values.c.description_id,
+            )
             .alias("descriptions_values")
         )
 
index 506aca44d8aa5203cd82b81f83e4469442166508..12d3f7bfbbe622077517981a797cfdb1c1828d14 100644 (file)
@@ -307,7 +307,8 @@ class LoadersInSubqueriesTest(QueryTest, AssertsCompiledSQL):
             "FROM users) AS anon_1"
         )
         self.assert_compile(
-            stmt1._final_statement(legacy_query_style=False), expected,
+            stmt1._final_statement(legacy_query_style=False),
+            expected,
         )
 
         self.assert_compile(stmt2, expected)
@@ -334,7 +335,8 @@ class LoadersInSubqueriesTest(QueryTest, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            stmt1._final_statement(legacy_query_style=False), expected,
+            stmt1._final_statement(legacy_query_style=False),
+            expected,
         )
 
         self.assert_compile(stmt2, expected)
@@ -382,7 +384,9 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL):
                         "count",
                         column_property(
                             select(func.count(addresses.c.id))
-                            .where(users.c.id == addresses.c.user_id,)
+                            .where(
+                                users.c.id == addresses.c.user_id,
+                            )
                             .correlate(users)
                             .scalar_subquery()
                         ),
@@ -391,7 +395,15 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL):
             ),
         )
 
-        mapper(Address, addresses, properties={"user": relationship(User,)})
+        mapper(
+            Address,
+            addresses,
+            properties={
+                "user": relationship(
+                    User,
+                )
+            },
+        )
 
         return User, Address
 
@@ -405,10 +417,19 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL):
         )
 
         mapper(
-            User, users,
+            User,
+            users,
         )
 
-        mapper(Address, addresses, properties={"user": relationship(User,)})
+        mapper(
+            Address,
+            addresses,
+            properties={
+                "user": relationship(
+                    User,
+                )
+            },
+        )
 
         return User, Address
 
@@ -510,7 +531,7 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL):
 
     def test_column_properties_can_we_use(self, column_property_fixture):
         """test querying mappings that reference external columns or
-        selectables. """
+        selectables."""
 
         # User, Address = column_property_fixture
 
@@ -554,7 +575,12 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL):
         # col properties will retain anonymous labels, however will
         # adopt the .key within the subquery collection so they can
         # be addressed.
-        stmt = select(User.id, User.name, User.concat, User.count,)
+        stmt = select(
+            User.id,
+            User.name,
+            User.concat,
+            User.count,
+        )
 
         subq = stmt.subquery()
         # here, the subquery needs to export the columns that include
@@ -845,7 +871,8 @@ class ImplicitWithPolymorphicTest(
         self.assert_compile(stmt, expected)
 
         self.assert_compile(
-            q._final_statement(legacy_query_style=False), expected,
+            q._final_statement(legacy_query_style=False),
+            expected,
         )
 
     def test_select_where_baseclass(self):
@@ -886,7 +913,8 @@ class ImplicitWithPolymorphicTest(
         self.assert_compile(stmt, expected)
 
         self.assert_compile(
-            q._final_statement(legacy_query_style=False), expected,
+            q._final_statement(legacy_query_style=False),
+            expected,
         )
 
     def test_select_where_subclass(self):
@@ -978,7 +1006,8 @@ class ImplicitWithPolymorphicTest(
 
         self.assert_compile(stmt, expected)
         self.assert_compile(
-            q._final_statement(legacy_query_style=False), expected,
+            q._final_statement(legacy_query_style=False),
+            expected,
         )
 
 
index 5c61a6370ad998d8e9933f30e5ed8435d126f47f..47b5404c92e683d4396c75699dc1190eacefd2d2 100644 (file)
@@ -1199,10 +1199,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
 
 
 class SelfReferentialPostUpdateTest(fixtures.MappedTest):
-    """Post_update on a single self-referential mapper.
-
-
-    """
+    """Post_update on a single self-referential mapper."""
 
     @classmethod
     def define_tables(cls, metadata):
@@ -1539,8 +1536,8 @@ class SelfReferentialPostUpdateTest3(fixtures.MappedTest):
 
 
 class PostUpdateBatchingTest(fixtures.MappedTest):
-    """test that lots of post update cols batch together into a single UPDATE.
-    """
+    """test that lots of post update cols batch together into a single
+    UPDATE."""
 
     @classmethod
     def define_tables(cls, metadata):
index e0665b23fc2cf0b95ff58f3560e857a70332e131..d3f9530724afa2600e3de786d2674bcba4dbe13f 100644 (file)
@@ -249,7 +249,8 @@ class PlainDeclarativeDataclassesTest(DataclassesTest):
             name: Optional[str] = None
 
             __mapper_args__ = dict(
-                polymorphic_on=widgets.c.type, polymorphic_identity="normal",
+                polymorphic_on=widgets.c.type,
+                polymorphic_identity="normal",
             )
 
         @declarative
@@ -258,7 +259,9 @@ class PlainDeclarativeDataclassesTest(DataclassesTest):
 
             magic: bool = False
 
-            __mapper_args__ = dict(polymorphic_identity="special",)
+            __mapper_args__ = dict(
+                polymorphic_identity="special",
+            )
 
         @declarative
         @dataclasses.dataclass
index a2dc8cf453e6b51eff44ff6e6586085672bb8745..aa1f2b88d87c6915554fc7047a09bd05a543ee95 100644 (file)
@@ -330,7 +330,10 @@ class ComputedDefaultsOnUpdateTest(fixtures.MappedTest):
             True,
             testing.requires.computed_columns_on_update_returning,
         ),
-        ("noneagerload", False,),
+        (
+            "noneagerload",
+            False,
+        ),
         id_="ia",
     )
     def test_update_computed(self, eager):
@@ -485,10 +488,12 @@ class IdentityDefaultsOnUpdateTest(fixtures.MappedTest):
                 ],
                 [
                     CompiledSQL(
-                        "INSERT INTO test (foo) VALUES (:foo)", [{"foo": 5}],
+                        "INSERT INTO test (foo) VALUES (:foo)",
+                        [{"foo": 5}],
                     ),
                     CompiledSQL(
-                        "INSERT INTO test (foo) VALUES (:foo)", [{"foo": 10}],
+                        "INSERT INTO test (foo) VALUES (:foo)",
+                        [{"foo": 10}],
                     ),
                 ],
             )
index 0a452bb1eaef3e3b053be09b0f150a82211d42ea..b2a04b8fff6cb6f5f4945162d971871109cae213 100644 (file)
@@ -273,8 +273,8 @@ class DeferredTest(AssertsCompiledSQL, _fixtures.FixtureTest):
         self.sql_count_(0, go)
 
     def test_preserve_changes(self):
-        """A deferred load operation doesn't revert modifications on attributes
-        """
+        """A deferred load operation doesn't revert modifications on
+        attributes"""
 
         orders, Order = self.tables.orders, self.classes.Order
 
@@ -824,7 +824,7 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
 
     def test_locates_col(self):
         """changed in 1.0 - we don't search for deferred cols in the result
-        now.  """
+        now."""
 
         orders, Order = self.tables.orders, self.classes.Order
 
index bcba1f0315ae7e59e78223dcb4c4883df345e251..e04732440f9526aae9899968fcbb23a066556c45 100644 (file)
@@ -1700,7 +1700,13 @@ class SubqRelationsFromSelfTest(fixtures.DeclarativeMappedTest):
 
         s = Session(connection)
 
-        as_ = [A(id=i, cs=[C(), C()],) for i in range(1, 5)]
+        as_ = [
+            A(
+                id=i,
+                cs=[C(), C()],
+            )
+            for i in range(1, 5)
+        ]
 
         s.add_all(
             [
@@ -2358,7 +2364,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest):
 
     def test_selectload(self):
         """tests lazy loading with two relationships simultaneously,
-        from the same table, using aliases.  """
+        from the same table, using aliases."""
 
         users, orders, User, Address, Order, addresses = (
             self.tables.users,
@@ -2409,7 +2415,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest):
 
     def test_joinedload(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         users, orders, User, Address, Order, addresses = (
             self.tables.users,
index a699cfa63449f3329a2a327ce057a5731c28bd5c..57225d640680981eefc9b2b402574be10403453c 100644 (file)
@@ -222,7 +222,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
@@ -712,7 +712,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_double_w_ac(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         (
             users,
@@ -788,7 +788,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_double_w_ac_against_subquery(self):
         """Eager loading with two relationships simultaneously,
-            from the same table, using aliases."""
+        from the same table, using aliases."""
 
         (
             users,
@@ -1297,7 +1297,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         # against a select.   original issue from ticket #904
         sel = (
             sa.select(users, addresses.c.email_address)
-            .where(users.c.id == addresses.c.user_id,)
+            .where(
+                users.c.id == addresses.c.user_id,
+            )
             .alias("useralias")
         )
         mapper(
@@ -4867,7 +4869,9 @@ class SubqueryTest(fixtures.MappedTest):
             tag_score = tags_table.c.score1 * tags_table.c.score2
             user_score = sa.select(
                 sa.func.sum(tags_table.c.score1 * tags_table.c.score2)
-            ).where(tags_table.c.user_id == users_table.c.id,)
+            ).where(
+                tags_table.c.user_id == users_table.c.id,
+            )
 
             if labeled:
                 tag_score = tag_score.label(labelname)
index b6a4b41cb381f8e2d0e92c7121be7d518d137901..2851622414e410931e511f97c855b5c77c5caa9b 100644 (file)
@@ -83,7 +83,11 @@ class ORMExecuteTest(_RemoveListeners, _fixtures.FixtureTest):
                     ckey = orm_context.execution_options["cache_key"]
 
             if ckey is not None:
-                return get_value(ckey, cache, orm_context.invoke_statement,)
+                return get_value(
+                    ckey,
+                    cache,
+                    orm_context.invoke_statement,
+                )
 
         return maker()
 
@@ -947,7 +951,7 @@ class DeclarativeEventListenTest(
 
 class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
 
-    """"test event listeners against unmapped classes.
+    """ "test event listeners against unmapped classes.
 
     This incurs special logic.  Note if we ever do the "remove" case,
     it has to get all of these, too.
index ba2fce60bffc50b659cee83ff958d7adcac14700..7ccf2c1aee669af27ab2bc2e692ba4285ec93972 100644 (file)
@@ -1146,7 +1146,9 @@ class ExpireTest(_fixtures.FixtureTest):
         eq_(len(list(sess)), 9)
 
     def test_state_change_col_to_deferred(self):
-        """Behavioral test to verify the current activity of loader callables
+        """Behavioral test to verify the current activity of loader
+        callables
+
         """
 
         users, User = self.tables.users, self.classes.User
@@ -1194,7 +1196,9 @@ class ExpireTest(_fixtures.FixtureTest):
         assert "name" not in attributes.instance_state(u1).callables
 
     def test_state_deferred_to_col(self):
-        """Behavioral test to verify the current activity of loader callables
+        """Behavioral test to verify the current activity of
+        loader callables
+
         """
 
         users, User = self.tables.users, self.classes.User
@@ -1236,7 +1240,9 @@ class ExpireTest(_fixtures.FixtureTest):
         assert "name" not in attributes.instance_state(u1).callables
 
     def test_state_noload_to_lazy(self):
-        """Behavioral test to verify the current activity of loader callables
+        """Behavioral test to verify the current activity of
+        loader callables
+
         """
 
         users, Address, addresses, User = (
index 79ed3cad4afc707e6228d9894f8cdd5608804e6e..055f24b5c1ebc7c7b1822d0a25fed85312000d8b 100644 (file)
@@ -381,7 +381,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
         subq = select(User).filter(User.id.in_([8, 9])).subquery()
         q = create_session().query(aliased(User, subq))
         eq_(
-            [User(id=8), User(id=9)], q.all(),
+            [User(id=8), User(id=9)],
+            q.all(),
         )
 
         subq = select(User).order_by(User.id).slice(1, 3).subquery()
@@ -392,7 +393,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
         u = aliased(User, subq)
         q = create_session().query(u).order_by(u.id)
         eq_(
-            [User(id=8)], list(q[0:1]),
+            [User(id=8)],
+            list(q[0:1]),
         )
 
     def test_join(self):
@@ -433,7 +435,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
         aq = aliased(Address, subq)
         q = create_session().query(aq.user_id, subq.c.count)
         eq_(
-            q.all(), [(7, 1), (8, 3), (9, 1)],
+            q.all(),
+            [(7, 1), (8, 3), (9, 1)],
         )
 
         subq = select(Address.user_id, Address.id)
@@ -447,7 +450,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
         )
 
         eq_(
-            q.all(), [(7, 1), (8, 3), (9, 1)],
+            q.all(),
+            [(7, 1), (8, 3), (9, 1)],
         )
 
     def test_error_w_aliased_against_select(self):
@@ -559,7 +563,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
         )
 
         eq_(
-            q.all(), [("chuck", "ed"), ("fred", "ed")],
+            q.all(),
+            [("chuck", "ed"), ("fred", "ed")],
         )
 
         q = (
@@ -645,7 +650,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL):
 
         q3 = sess.query(q2)
         eq_(
-            q3.all(), [(7, 1), (8, 1), (9, 1), (10, 1)],
+            q3.all(),
+            [(7, 1), (8, 1), (9, 1), (10, 1)],
         )
 
         q3 = select(q2)
@@ -2183,7 +2189,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
             select(User, Address).from_statement(selectquery)
         )
         eq_(
-            list(result), expected,
+            list(result),
+            expected,
         )
         sess.expunge_all()
 
@@ -3442,14 +3449,24 @@ class ExternalColumnsTest(QueryTest):
                 "concat": column_property((users.c.id * 2)),
                 "count": column_property(
                     select(func.count(addresses.c.id))
-                    .where(users.c.id == addresses.c.user_id,)
+                    .where(
+                        users.c.id == addresses.c.user_id,
+                    )
                     .correlate(users)
                     .scalar_subquery()
                 ),
             },
         )
 
-        mapper(Address, addresses, properties={"user": relationship(User,)})
+        mapper(
+            Address,
+            addresses,
+            properties={
+                "user": relationship(
+                    User,
+                )
+            },
+        )
 
         sess = create_session()
 
@@ -3603,7 +3620,9 @@ class ExternalColumnsTest(QueryTest):
                 "concat": column_property((users.c.id * 2)),
                 "count": column_property(
                     select(func.count(addresses.c.id))
-                    .where(users.c.id == addresses.c.user_id,)
+                    .where(
+                        users.c.id == addresses.c.user_id,
+                    )
                     .correlate(users)
                     .scalar_subquery()
                 ),
index 02742da8f1fe7efec061489058336880ce9ecbd2..765111b1e844556f47a4b8232ab41022594491ee 100644 (file)
@@ -624,7 +624,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_4(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         oalias1 = aliased(Order)
@@ -640,7 +643,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_5(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         self.assert_compile(
@@ -682,7 +688,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
         )
 
     def test_single_prop_8(self):
-        Order, User, = (self.classes.Order, self.classes.User)
+        (
+            Order,
+            User,
+        ) = (self.classes.Order, self.classes.User)
 
         sess = create_session()
         # same as before using an aliased() for User as well
index f9d57bc062b41e11f6c99bb142e87f42176f56d8..57d3ce01d6931e2a3c69fc58c7287d8da0adb023 100644 (file)
@@ -803,7 +803,7 @@ class MergeTest(_fixtures.FixtureTest):
 
     def test_no_relationship_cascade(self):
         """test that merge doesn't interfere with a relationship()
-           target that specifically doesn't include 'merge' cascade.
+        target that specifically doesn't include 'merge' cascade.
         """
 
         Address, addresses, users, User = (
index 202ff9ab01c99a7deb66e7d27559e2eed9e9260f..87ec0d79d3e9daad5e4b5c464897d08f6e1ba62e 100644 (file)
@@ -266,10 +266,13 @@ class NaturalPKTest(fixtures.MappedTest):
             # test passive_updates=True; update user
             self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
-        assert User(
-            username="jack",
-            addresses=[Address(username="jack"), Address(username="jack")],
-        ) == sess.query(User).get("jack")
+        assert (
+            User(
+                username="jack",
+                addresses=[Address(username="jack"), Address(username="jack")],
+            )
+            == sess.query(User).get("jack")
+        )
 
         u1 = sess.query(User).get("jack")
         u1.addresses = []
@@ -1120,10 +1123,13 @@ class NonPKCascadeTest(fixtures.MappedTest):
             # test passive_updates=True; update user
             self.assert_sql_count(testing.db, go, 1)
         sess.expunge_all()
-        assert User(
-            username="jack",
-            addresses=[Address(username="jack"), Address(username="jack")],
-        ) == sess.query(User).get(u1.id)
+        assert (
+            User(
+                username="jack",
+                addresses=[Address(username="jack"), Address(username="jack")],
+            )
+            == sess.query(User).get(u1.id)
+        )
         sess.expunge_all()
 
         u1 = sess.query(User).get(u1.id)
index daac38dc238db216f3d004aa04996a63b6154156..e40e815aa35a54efb7b6bf497f12193ab5a1ae18 100644 (file)
@@ -775,7 +775,10 @@ class SubclassRelationshipTest(
         )
 
     def test_any_walias(self):
-        DataContainer, Job, = (self.classes.DataContainer, self.classes.Job)
+        (
+            DataContainer,
+            Job,
+        ) = (self.classes.DataContainer, self.classes.Job)
 
         Job_A = aliased(Job)
 
@@ -865,7 +868,10 @@ class SubclassRelationshipTest(
         )
 
     def test_join_walias(self):
-        DataContainer, Job, = (self.classes.DataContainer, self.classes.Job)
+        (
+            DataContainer,
+            Job,
+        ) = (self.classes.DataContainer, self.classes.Job)
 
         Job_A = aliased(Job)
 
index 31643e5ff5271b5316f3423df1488626f9698b74..fc6471d524c4ebf03febeced0cb5f846ad5e3e3b 100644 (file)
@@ -884,7 +884,9 @@ class GetTest(QueryTest):
 
         stmt = select(User).execution_options(populate_existing=True)
 
-        s.execute(stmt,).scalars().all()
+        s.execute(
+            stmt,
+        ).scalars().all()
 
         self.assert_(u not in s.dirty)
 
@@ -2779,15 +2781,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
 
         # test that the contents are not adapted by the aliased join
         ua = aliased(Address)
-        assert (
-            [User(id=7), User(id=8)]
-            == sess.query(User)
-            .join(ua, "addresses")
-            .filter(
-                ~User.addresses.any(Address.email_address == "fred@fred.com")
-            )
-            .all()
-        )
+        assert [User(id=7), User(id=8)] == sess.query(User).join(
+            ua, "addresses"
+        ).filter(
+            ~User.addresses.any(Address.email_address == "fred@fred.com")
+        ).all()
 
         assert [User(id=10)] == sess.query(User).outerjoin(
             ua, "addresses"
@@ -2801,15 +2799,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
         sess = create_session()
 
         # test that any() doesn't overcorrelate
-        assert (
-            [User(id=7), User(id=8)]
-            == sess.query(User)
-            .join("addresses")
-            .filter(
-                ~User.addresses.any(Address.email_address == "fred@fred.com")
-            )
-            .all()
-        )
+        assert [User(id=7), User(id=8)] == sess.query(User).join(
+            "addresses"
+        ).filter(
+            ~User.addresses.any(Address.email_address == "fred@fred.com")
+        ).all()
 
     def test_has(self):
         # see also HasAnyTest, a newer suite which tests these at the level of
@@ -2825,42 +2819,41 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
             Address.user.has(name="fred")
         ).all()
 
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4), Address(id=5)]
-            == sess.query(Address)
-            .filter(Address.user.has(User.name.like("%ed%")))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [
+            Address(id=2),
+            Address(id=3),
+            Address(id=4),
+            Address(id=5),
+        ] == sess.query(Address).filter(
+            Address.user.has(User.name.like("%ed%"))
+        ).order_by(
+            Address.id
+        ).all()
 
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).filter(Address.user.has(User.name.like("%ed%"), id=8)).order_by(
+            Address.id
+        ).all()
 
         # test has() doesn't overcorrelate
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .join("user")
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).join("user").filter(
+            Address.user.has(User.name.like("%ed%"), id=8)
+        ).order_by(
+            Address.id
+        ).all()
 
         # test has() doesn't get subquery contents adapted by aliased join
         ua = aliased(User)
-        assert (
-            [Address(id=2), Address(id=3), Address(id=4)]
-            == sess.query(Address)
-            .join(ua, "user")
-            .filter(Address.user.has(User.name.like("%ed%"), id=8))
-            .order_by(Address.id)
-            .all()
-        )
+        assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query(
+            Address
+        ).join(ua, "user").filter(
+            Address.user.has(User.name.like("%ed%"), id=8)
+        ).order_by(
+            Address.id
+        ).all()
 
         dingaling = sess.query(Dingaling).get(2)
         assert [User(id=9)] == sess.query(User).filter(
@@ -3392,7 +3385,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
 
     def test_union_literal_expressions_compile(self):
         """test that column expressions translate during
-            the _from_statement() portion of union(), others"""
+        the _from_statement() portion of union(), others"""
 
         User = self.classes.User
 
@@ -3586,25 +3579,20 @@ class AggregateTest(QueryTest):
         User, Address = self.classes.User, self.classes.Address
 
         sess = create_session()
-        assert (
-            [User(name="ed", id=8)]
-            == sess.query(User)
-            .order_by(User.id)
-            .group_by(User)
-            .join("addresses")
-            .having(func.count(Address.id) > 2)
-            .all()
-        )
+        assert [User(name="ed", id=8)] == sess.query(User).order_by(
+            User.id
+        ).group_by(User).join("addresses").having(
+            func.count(Address.id) > 2
+        ).all()
 
-        assert (
-            [User(name="jack", id=7), User(name="fred", id=9)]
-            == sess.query(User)
-            .order_by(User.id)
-            .group_by(User)
-            .join("addresses")
-            .having(func.count(Address.id) < 2)
-            .all()
-        )
+        assert [
+            User(name="jack", id=7),
+            User(name="fred", id=9),
+        ] == sess.query(User).order_by(User.id).group_by(User).join(
+            "addresses"
+        ).having(
+            func.count(Address.id) < 2
+        ).all()
 
 
 class ExistsTest(QueryTest, AssertsCompiledSQL):
@@ -3668,7 +3656,8 @@ class CountTest(QueryTest):
         s = create_session()
 
         eq_(
-            s.execute(select(func.count()).select_from(User)).scalar(), 4,
+            s.execute(select(func.count()).select_from(User)).scalar(),
+            4,
         )
 
         eq_(
@@ -3741,17 +3730,20 @@ class CountTest(QueryTest):
 
         stmt = select(User, Address).join(Address, true()).limit(2)
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 2,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            2,
         )
 
         stmt = select(User, Address).join(Address, true()).limit(100)
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 20,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            20,
         )
 
         stmt = select(User, Address).join(Address).limit(100)
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 5,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            5,
         )
 
     def test_cols(self):
@@ -3785,33 +3777,39 @@ class CountTest(QueryTest):
 
         stmt = select(func.count(distinct(User.name)))
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 1,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            1,
         )
 
         stmt = select(func.count(distinct(User.name))).distinct()
 
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 1,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            1,
         )
 
         stmt = select(User.name)
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 4,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            4,
         )
 
         stmt = select(User.name, Address).join(Address, true())
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 20,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            20,
         )
 
         stmt = select(Address.user_id)
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 5,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            5,
         )
 
         stmt = stmt.distinct()
         eq_(
-            s.scalar(select(func.count()).select_from(stmt.subquery())), 3,
+            s.scalar(select(func.count()).select_from(stmt.subquery())),
+            3,
         )
 
 
@@ -4145,7 +4143,10 @@ class DistinctTest(QueryTest, AssertsCompiledSQL):
             .order_by(User.id, User.name, Address.email_address)
         )
         q2 = sess.query(
-            User.id, User.name.label("foo"), Address.id, Address.email_address,
+            User.id,
+            User.name.label("foo"),
+            Address.id,
+            Address.email_address,
         )
 
         self.assert_compile(
@@ -4169,7 +4170,11 @@ class DistinctTest(QueryTest, AssertsCompiledSQL):
         sess = create_session()
 
         q = (
-            sess.query(User.id, User.name.label("foo"), Address.id,)
+            sess.query(
+                User.id,
+                User.name.label("foo"),
+                Address.id,
+            )
             .distinct(Address.email_address)
             .order_by(User.id, User.name)
         )
index ccee396a32f317bbf98d80264b64999e2d009151..1c7eb2e619ea18537b9611b23771bfe45f6d139d 100644 (file)
@@ -76,7 +76,8 @@ class _Fixtures(_fixtures.FixtureTest):
             pass
 
         mapper(
-            UserWFoob, users,
+            UserWFoob,
+            users,
         )
         return HasFoob, UserWFoob
 
@@ -226,7 +227,10 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL):
             s.execute(stmt).all()
 
         asserter.assert_(
-            CompiledSQL("SELECT users.id, users.name FROM users", [],),
+            CompiledSQL(
+                "SELECT users.id, users.name FROM users",
+                [],
+            ),
             CompiledSQL(
                 "SELECT addresses.user_id AS addresses_user_id, addresses.id "
                 "AS addresses_id, addresses.email_address "
@@ -259,7 +263,8 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL):
 
         asserter.assert_(
             CompiledSQL(
-                "SELECT users.id, users.name FROM users ORDER BY users.id", [],
+                "SELECT users.id, users.name FROM users ORDER BY users.id",
+                [],
             ),
             CompiledSQL(
                 "SELECT addresses.id AS addresses_id, "
@@ -540,7 +545,8 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL):
             .outerjoin(User.addresses)
             .options(
                 with_loader_criteria(
-                    Address, ~Address.email_address.like("ed@%"),
+                    Address,
+                    ~Address.email_address.like("ed@%"),
                 )
             )
             .order_by(User.id)
index eaa1751f9655f54028a83830d6809bf44bc82b31..9a91197ed659bc1ee67f1054fac11b72730f0828 100644 (file)
@@ -2489,7 +2489,7 @@ class JoinConditionErrorTest(fixtures.TestBase):
 class TypeMatchTest(fixtures.MappedTest):
 
     """test errors raised when trying to add items
-        whose type is not handled by a relationship"""
+    whose type is not handled by a relationship"""
 
     @classmethod
     def define_tables(cls, metadata):
@@ -3197,7 +3197,9 @@ class ViewOnlySyncBackref(fixtures.MappedTest):
             return
 
         mapper(
-            A, self.tables.t1, properties={"bs": rel()},
+            A,
+            self.tables.t1,
+            properties={"bs": rel()},
         )
         mapper(B, self.tables.t2)
 
@@ -3724,9 +3726,7 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
 
 
 class FunctionAsPrimaryJoinTest(fixtures.DeclarativeMappedTest):
-    """test :ticket:`3831`
-
-    """
+    """test :ticket:`3831`"""
 
     __only_on__ = "sqlite"
 
index 5da1e1a4b9dc759349fa615a1b8c40e2356aeaaa..c75942564720564aff1320b8f3df1a85630fb12b 100644 (file)
@@ -464,7 +464,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
index 8ea79151b75c9a4c0841b65e6db4bca42bcad0ae..280a4355ffd8e6e71807f0406c5eda4d38ed2390 100644 (file)
@@ -495,7 +495,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
     def test_orderby_related(self):
         """A regular mapper select on a single table can
-            order by a relationship to a second table"""
+        order by a relationship to a second table"""
 
         Address, addresses, users, User = (
             self.classes.Address,
@@ -3301,7 +3301,13 @@ class FromSubqTest(fixtures.DeclarativeMappedTest):
 
         s = Session(connection)
 
-        as_ = [A(id=i, cs=[C(), C()],) for i in range(1, 5)]
+        as_ = [
+            A(
+                id=i,
+                cs=[C(), C()],
+            )
+            for i in range(1, 5)
+        ]
 
         s.add_all(
             [
index 6218829452a4339900a22ac007c181bc6e340737..497693de282f0674fc4755a91a6aa45ac6e85d77 100644 (file)
@@ -2787,13 +2787,15 @@ class FutureJoinIntoAnExternalTransactionTest(
 
 
 class NonFutureJoinIntoAnExternalTransactionTest(
-    NewStyleJoinIntoAnExternalTransactionTest, fixtures.TestBase,
+    NewStyleJoinIntoAnExternalTransactionTest,
+    fixtures.TestBase,
 ):
     pass
 
 
 class LegacyJoinIntoAnExternalTransactionTest(
-    JoinIntoAnExternalTransactionFixture, fixtures.TestBase,
+    JoinIntoAnExternalTransactionFixture,
+    fixtures.TestBase,
 ):
     def setup_session(self):
         # begin a non-ORM transaction
index 3ca75cdb2d5b0f71420be774af1279c454a84c1d..ee76d7a247af5004513a424d1292822855caeaae 100644 (file)
@@ -3520,7 +3520,9 @@ class EnsurePKSortableTest(fixtures.MappedTest):
                 )
 
             assert_raises_message(
-                sa.exc.InvalidRequestError, message, s.flush,
+                sa.exc.InvalidRequestError,
+                message,
+                s.flush,
             )
         else:
             s.flush()
index ac290257d1db0b0087a924cbcd488c6f41662861..01eb7279bd0c1e39d336b6db75f8e5cf3422ebe9 100644 (file)
@@ -1172,7 +1172,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
         stmt = (
             update(User)
             .filter(User.id == 15)
-            .ordered_values(("name", "foob"), ("age", 123),)
+            .ordered_values(
+                ("name", "foob"),
+                ("age", 123),
+            )
         )
         result = session.execute(stmt)
         cols = [
index 28f82c3b89c236fef4dc35e07448c51b1c948e74..291a115fe3eb2ffb9f4591be4bd460e2b17cf519 100644 (file)
@@ -532,16 +532,16 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def cross_schema_fk_reflection(self):
-        """target system must support reflection of inter-schema foreign keys
-        """
+        """target system must support reflection of inter-schema foreign
+        keys"""
         return only_on(["postgresql", "mysql", "mariadb", "mssql"])
 
     @property
     def implicit_default_schema(self):
         """target system has a strong concept of 'default' schema that can
-           be referred to implicitly.
+        be referred to implicitly.
 
-           basically, PostgreSQL.
+        basically, PostgreSQL.
 
         """
         return only_on(["postgresql"])
@@ -857,9 +857,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def symbol_names_w_double_quote(self):
-        """Target driver can create tables with a name like 'some " table'
-
-        """
+        """Target driver can create tables with a name like 'some " table'"""
 
         return skip_if(
             [no_support("oracle", "ORA-03001: unimplemented feature")]
@@ -867,7 +865,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def emulated_lastrowid(self):
-        """"target dialect retrieves cursor.lastrowid or an equivalent
+        """ "target dialect retrieves cursor.lastrowid or an equivalent
         after an insert() construct executes.
         """
         return fails_on_everything_except(
@@ -881,7 +879,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def emulated_lastrowid_even_with_sequences(self):
-        """"target dialect retrieves cursor.lastrowid or an equivalent
+        """ "target dialect retrieves cursor.lastrowid or an equivalent
         after an insert() construct executes, even if the table has a
         Sequence on it.
         """
@@ -899,7 +897,7 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def dbapi_lastrowid(self):
-        """"target backend includes a 'lastrowid' accessor on the DBAPI
+        """ "target backend includes a 'lastrowid' accessor on the DBAPI
         cursor object.
 
         """
index 098606a91d7a85581b2265ce2f1b7010cd1693ad..257013ac483f6bfb2868cf79704fd8a881a87e29 100644 (file)
@@ -333,7 +333,11 @@ class CoreFixtures(object):
                 (table_a.c.b == 10, 20),
                 (table_a.c.a == 9, 12),
             ),
-            case((table_a.c.a == 5, 10), (table_a.c.a == 10, 20), else_=30,),
+            case(
+                (table_a.c.a == 5, 10),
+                (table_a.c.a == 10, 20),
+                else_=30,
+            ),
             case({"wendy": "W", "jack": "J"}, value=table_a.c.a, else_="E"),
             case({"wendy": "W", "jack": "J"}, value=table_a.c.b, else_="E"),
             case({"wendy_w": "W", "jack": "J"}, value=table_a.c.a, else_="E"),
@@ -1006,7 +1010,8 @@ class CacheKeyTest(CacheKeyFixture, CoreFixtures, fixtures.TestBase):
             )
 
         self._run_cache_key_fixture(
-            fixture, True,
+            fixture,
+            True,
         )
 
     def test_bindparam_subclass_nocache(self):
index a6118c03fb500941aff39088a14b75e97b1cf452..7fd4e683b7fe883261d438eafc12299d75be3a43 100644 (file)
@@ -447,7 +447,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
         # this is native_boolean=False for default dialect
         self.assert_compile(
-            select(not_(True)).apply_labels(), "SELECT :param_1 = 0 AS anon_1",
+            select(not_(True)).apply_labels(),
+            "SELECT :param_1 = 0 AS anon_1",
         )
 
         self.assert_compile(
@@ -727,7 +728,11 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         foo_bar__id = foo_bar.c.id._annotate({"some_orm_thing": True})
 
         stmt = select(
-            foo.c.bar_id, foo_bar.c.id, foo_bar.c.id, foo_bar__id, foo_bar__id,
+            foo.c.bar_id,
+            foo_bar.c.id,
+            foo_bar.c.id,
+            foo_bar__id,
+            foo_bar__id,
         ).apply_labels()
 
         self.assert_compile(
@@ -752,9 +757,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_nested_label_targeting(self):
-        """test nested anonymous label generation.
-
-        """
+        """test nested anonymous label generation."""
         s1 = table1.select()
         s2 = s1.alias()
         s3 = select(s2).apply_labels()
@@ -1491,7 +1494,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_order_by_nulls(self):
         self.assert_compile(
             table2.select().order_by(
-                table2.c.otherid, table2.c.othername.desc().nullsfirst(),
+                table2.c.otherid,
+                table2.c.othername.desc().nullsfirst(),
             ),
             "SELECT myothertable.otherid, myothertable.othername FROM "
             "myothertable ORDER BY myothertable.otherid, "
@@ -1500,7 +1504,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
         self.assert_compile(
             table2.select().order_by(
-                table2.c.otherid, table2.c.othername.desc().nullslast(),
+                table2.c.otherid,
+                table2.c.othername.desc().nullslast(),
             ),
             "SELECT myothertable.otherid, myothertable.othername FROM "
             "myothertable ORDER BY myothertable.otherid, "
@@ -1519,7 +1524,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
         self.assert_compile(
             table2.select().order_by(
-                table2.c.otherid.nullsfirst(), table2.c.othername.desc(),
+                table2.c.otherid.nullsfirst(),
+                table2.c.othername.desc(),
             ),
             "SELECT myothertable.otherid, myothertable.othername FROM "
             "myothertable ORDER BY myothertable.otherid NULLS FIRST, "
@@ -2068,7 +2074,10 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
             "Can't resolve label reference for ORDER BY / GROUP BY / "
             "DISTINCT etc. Textual "
             "SQL expression 'noname'",
-            union(select(table1.c.myid, table1.c.name), select(table2),)
+            union(
+                select(table1.c.myid, table1.c.name),
+                select(table2),
+            )
             .order_by("noname")
             .compile,
         )
@@ -3159,7 +3168,7 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase):
 
     def _test_binds_no_hash_collision(self):
         """test that construct_params doesn't corrupt dict
-            due to hash collisions"""
+        due to hash collisions"""
 
         total_params = 100000
 
@@ -3468,7 +3477,12 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase):
         compiled = stmt_adapted.compile(cache_key=cache_key)
 
         # params set up as 5
-        eq_(compiled.construct_params(params={},), {"myid_1": 5})
+        eq_(
+            compiled.construct_params(
+                params={},
+            ),
+            {"myid_1": 5},
+        )
 
         # also works w the original cache key
         eq_(
@@ -3529,7 +3543,8 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase):
         compiled = modified_stmt.compile(cache_key=cache_key)
 
         eq_(
-            compiled.construct_params(params={}), {"myid_1": 10, "myid_2": 12},
+            compiled.construct_params(params={}),
+            {"myid_1": 10, "myid_2": 12},
         )
 
         # make a new statement doing the same thing and make sure
index 410f49f2a5c3a7315124d06cfbdd714869f6bb14..4ebfdc7acf0ea1257f4e0a9372b0ee43dfe2a3fd 100644 (file)
@@ -261,9 +261,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_recursive_union_alias_two(self):
-        """
-
-        """
+        """"""
 
         # I know, this is the PG VALUES keyword,
         # we're cheating here.  also yes we need the SELECT,
@@ -773,7 +771,10 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
 
         s2 = (
             select(
-                orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order,
+                orders.c.order == "y",
+                s1a.c.order,
+                orders.c.order,
+                s1.c.order,
             )
             .where(orders.c.order == "z")
             .cte("regional_sales_2")
@@ -815,7 +816,10 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
 
         s2 = (
             select(
-                orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order,
+                orders.c.order == "y",
+                s1a.c.order,
+                orders.c.order,
+                s1.c.order,
             )
             .where(orders.c.order == "z")
             .cte("regional_sales_2")
index 6cb1c38412d577e95bf91d82c373711afe7b56fb..2750568d878a61a61c01dc464b51bc8b09f1cc2a 100644 (file)
@@ -250,7 +250,12 @@ class DefaultObjectTest(fixtures.TestBase):
             Column("boolcol1", sa.Boolean, default=True),
             Column("boolcol2", sa.Boolean, default=False),
             # python function which uses ExecutionContext
-            Column("col7", Integer, default=lambda: 5, onupdate=lambda: 10,),
+            Column(
+                "col7",
+                Integer,
+                default=lambda: 5,
+                onupdate=lambda: 10,
+            ),
             # python builtin
             Column(
                 "col8",
@@ -1277,11 +1282,13 @@ class SpecialTypePKTest(fixtures.TestBase):
                 eq_(r.inserted_primary_key, (None,))
             else:
                 eq_(
-                    r.inserted_primary_key, (expected_result,),
+                    r.inserted_primary_key,
+                    (expected_result,),
                 )
 
             eq_(
-                conn.execute(t.select()).first(), (expected_result, 5),
+                conn.execute(t.select()).first(),
+                (expected_result, 5),
             )
 
     def test_plain(self):
index d078b36b8eb9b47b73df575903d407bb0f03d86a..f418eab6b90cb1b3f7faccb1120834ec44002d65 100644 (file)
@@ -545,7 +545,11 @@ class SelectableTest(fixtures.TestBase, AssertsCompiledSQL):
             r"The \"whens\" argument to case\(\) is now passed"
         ):
             stmt = select(t1).where(
-                case(whens={t1.c.q == 5: "foo"}, else_="bat",) != "bat"
+                case(
+                    whens={t1.c.q == 5: "foo"},
+                    else_="bat",
+                )
+                != "bat"
             )
 
         self.assert_compile(
@@ -1607,7 +1611,8 @@ class PositionalTextTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.text1.insert(), [dict(a="a1", b="b1", c="c1", d="d1")],
+            cls.tables.text1.insert(),
+            [dict(a="a1", b="b1", c="c1", d="d1")],
         )
 
     def test_anon_aliased_overlapping(self, connection):
@@ -1756,7 +1761,8 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
             stmt = table.insert(values={}, inline=True)
 
         self.assert_compile(
-            stmt, "INSERT INTO sometable (foo) VALUES (foobar())",
+            stmt,
+            "INSERT INTO sometable (foo) VALUES (foobar())",
         )
 
         with testing.expect_deprecated_20(
@@ -1765,7 +1771,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
             stmt = table.insert(inline=True)
 
         self.assert_compile(
-            stmt, "INSERT INTO sometable (foo) VALUES (foobar())", params={},
+            stmt,
+            "INSERT INTO sometable (foo) VALUES (foobar())",
+            params={},
         )
 
     def test_update_inline_kw_defaults(self):
@@ -1808,7 +1816,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_update_whereclause(self):
         table1 = table(
-            "mytable", Column("myid", Integer), Column("name", String(30)),
+            "mytable",
+            Column("myid", Integer),
+            Column("name", String(30)),
         )
 
         with testing.expect_deprecated_20(
@@ -1823,7 +1833,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
 
     def test_update_values(self):
         table1 = table(
-            "mytable", Column("myid", Integer), Column("name", String(30)),
+            "mytable",
+            Column("myid", Integer),
+            Column("name", String(30)),
         )
 
         with testing.expect_deprecated_20(
@@ -1835,7 +1847,10 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
             )
 
     def test_delete_whereclause(self):
-        table1 = table("mytable", Column("myid", Integer),)
+        table1 = table(
+            "mytable",
+            Column("myid", Integer),
+        )
 
         with testing.expect_deprecated_20(
             "The delete.whereclause parameter will be "
index 6b07ebba961bc302bdacb3e0020f5b8cbe4271ba..4edc9d0258306dbf9784c42826a57c40dd2da053 100644 (file)
@@ -702,7 +702,9 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
 
         subq = subq.alias("subq")
         s = select(t1.c.col1, subq.c.col1).select_from(
-            t1, subq, t1.join(subq, t1.c.col1 == subq.c.col2),
+            t1,
+            subq,
+            t1.join(subq, t1.c.col1 == subq.c.col2),
         )
         s5 = CloningVisitor().traverse(s)
         eq_(str(s), str(s5))
@@ -2190,7 +2192,8 @@ class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL):
         compile_state = i._compile_state_factory(i, None)
 
         self._compare_param_dict(
-            compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7},
+            compile_state._dict_parameters,
+            {"col1": 5, "col2": 6, "col3": 7},
         )
 
     def test_kw_and_dict_simultaneously_single(self):
@@ -2211,7 +2214,8 @@ class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL):
         i = i.values([(5, 6, 7), (8, 9, 10)])
         compile_state = i._compile_state_factory(i, None)
         eq_(
-            compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7},
+            compile_state._dict_parameters,
+            {"col1": 5, "col2": 6, "col3": 7},
         )
         eq_(compile_state._has_multi_parameters, True)
         eq_(
index 3c6140b81c8ef47cbff89790ab84d20bb2810b27..f9a8f998ed26dc5437ad58b16aa3a2adb67e7d2c 100644 (file)
@@ -1008,7 +1008,8 @@ class ExecuteTest(fixtures.TestBase):
         connection.execute(t2.insert())
         connection.execute(t2.insert().values(value=func.length("one")))
         connection.execute(
-            t2.insert().values(value=func.length("asfda") + -19), stuff="hi",
+            t2.insert().values(value=func.length("asfda") + -19),
+            stuff="hi",
         )
 
         res = sorted(connection.execute(select(t2.c.value, t2.c.stuff)))
index becb62159b0a83ec48959743e800c77062309550..2564022c2fc3c95311245f29661cdcb52a7fca1b 100644 (file)
@@ -57,7 +57,10 @@ class _IdentityDDLFixture(testing.AssertsCompiledSQL):
             dict(always=False, cache=1000, order=True),
             "BY DEFAULT AS IDENTITY (CACHE 1000 ORDER)",
         ),
-        (dict(order=True), "BY DEFAULT AS IDENTITY (ORDER)",),
+        (
+            dict(order=True),
+            "BY DEFAULT AS IDENTITY (ORDER)",
+        ),
     )
     def test_create_ddl(self, identity_args, text):
 
@@ -153,10 +156,15 @@ class NotSupportingIdentityDDL(testing.AssertsCompiledSQL, fixtures.TestBase):
             MetaData(),
             Column("foo", Integer(), Identity("always", start=3)),
         )
-        t2 = Table("foo_table", MetaData(), Column("foo", Integer()),)
+        t2 = Table(
+            "foo_table",
+            MetaData(),
+            Column("foo", Integer()),
+        )
         exp = CreateTable(t2).compile(dialect=testing.db.dialect)
         self.assert_compile(
-            CreateTable(t), re.sub(r"[\n\t]", "", str(exp)),
+            CreateTable(t),
+            re.sub(r"[\n\t]", "", str(exp)),
         )
 
 
@@ -169,7 +177,9 @@ class IdentityTest(fixtures.TestBase):
 
         def fn(**kwargs):
             Table(
-                "t", MetaData(), Column("y", Integer, Identity(), **kwargs),
+                "t",
+                MetaData(),
+                Column("y", Integer, Identity(), **kwargs),
             )
 
         assert_raises_message(ArgumentError, text, fn, server_default="42")
index 45a8bccf538e905716be0e4b3b423deca54bad55..198ff48c08040141c241e1d1e3b1f865bfc96cd8 100644 (file)
@@ -231,7 +231,10 @@ class InsertExecTest(fixtures.TablesTest):
                 "t4",
                 metadata,
                 Column(
-                    "id", Integer, Sequence("t4_id_seq"), primary_key=True,
+                    "id",
+                    Integer,
+                    Sequence("t4_id_seq"),
+                    primary_key=True,
                 ),
                 Column("foo", String(30)),
             ),
@@ -387,7 +390,12 @@ class TableInsertTest(fixtures.TablesTest):
         Table(
             "foo",
             metadata,
-            Column("id", Integer, Sequence("t_id_seq"), primary_key=True,),
+            Column(
+                "id",
+                Integer,
+                Sequence("t_id_seq"),
+                primary_key=True,
+            ),
             Column("data", String(50)),
             Column("x", Integer),
         )
@@ -397,7 +405,11 @@ class TableInsertTest(fixtures.TablesTest):
             metadata,
             # note this will have full AUTO INCREMENT on MariaDB
             # whereas "foo" will not due to sequence support
-            Column("id", Integer, primary_key=True,),
+            Column(
+                "id",
+                Integer,
+                primary_key=True,
+            ),
             Column("data", String(50)),
             Column("x", Integer),
         )
index 9999bdc31a32603082727d8fa83bdfbecd71f6e7..ebcde3c6310a99adca3a5f1f1b0da5e4aea6ce08 100644 (file)
@@ -731,7 +731,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
                 "Column('foo', Integer(), table=None, primary_key=True, "
                 "nullable=False, onupdate=%s, default=%s, server_default=%s, "
                 "comment='foo')"
-                % (ColumnDefault(1), ColumnDefault(42), DefaultClause("42"),),
+                % (
+                    ColumnDefault(1),
+                    ColumnDefault(42),
+                    DefaultClause("42"),
+                ),
             ),
             (
                 Table("bar", MetaData(), Column("x", String)),
@@ -5243,7 +5247,8 @@ class CopyDialectOptionsTest(fixtures.TestBase):
     @classmethod
     def check_dialect_options_(cls, t):
         eq_(
-            t.dialect_kwargs["copydialectoptionstest_some_table_arg"], "a1",
+            t.dialect_kwargs["copydialectoptionstest_some_table_arg"],
+            "a1",
         )
         eq_(
             t.c.foo.dialect_kwargs["copydialectoptionstest_some_column_arg"],
@@ -5286,7 +5291,9 @@ class CopyDialectOptionsTest(fixtures.TestBase):
                 copydialectoptionstest_some_table_arg="a1",
             )
             Index(
-                "idx", t1.c.foo, copydialectoptionstest_some_index_arg="a4",
+                "idx",
+                t1.c.foo,
+                copydialectoptionstest_some_index_arg="a4",
             )
 
             self.check_dialect_options_(t1)
index 3eb0c449f792f7f6512af8acd866c8c8788b39d8..2f92738594248424efdd792a2e15a67836d39a1f 100644 (file)
@@ -1072,8 +1072,7 @@ class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
 
 class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
 
-    """test interaction of and_()/or_() with boolean , null constants
-    """
+    """test interaction of and_()/or_() with boolean , null constants"""
 
     __dialect__ = default.DefaultDialect(supports_native_boolean=True)
 
@@ -1851,7 +1850,8 @@ class InTest(fixtures.TestBase, testing.AssertsCompiledSQL):
             )
             .select_from(
                 self.table1.join(
-                    self.table2, self.table1.c.myid == self.table2.c.otherid,
+                    self.table2,
+                    self.table1.c.myid == self.table2.c.otherid,
                 )
             )
             .order_by(self.table1.c.myid),
index 9b3ededcd76d538ddc1b3f0889874bdb49c66e42..9f66a2ef594c11fb28c49a0c292979cc12b1fc2c 100644 (file)
@@ -157,7 +157,8 @@ class QueryTest(fixtures.TestBase):
         eq_(connection.execute(select(or_(true, false))).scalar(), True)
         eq_(connection.execute(select(or_(false, false))).scalar(), False)
         eq_(
-            connection.execute(select(not_(or_(false, false)))).scalar(), True,
+            connection.execute(select(not_(or_(false, false)))).scalar(),
+            True,
         )
 
         row = connection.execute(
@@ -174,7 +175,8 @@ class QueryTest(fixtures.TestBase):
 
     def test_select_tuple(self, connection):
         connection.execute(
-            users.insert(), {"user_id": 1, "user_name": "apples"},
+            users.insert(),
+            {"user_id": 1, "user_name": "apples"},
         )
 
         assert_raises_message(
@@ -351,7 +353,8 @@ class QueryTest(fixtures.TestBase):
                 return "INT_%d" % value
 
         eq_(
-            connection.scalar(select(cast("INT_5", type_=MyInteger))), "INT_5",
+            connection.scalar(select(cast("INT_5", type_=MyInteger))),
+            "INT_5",
         )
         eq_(
             connection.scalar(
@@ -1213,7 +1216,8 @@ class CompoundTest(fixtures.TestBase):
     @testing.fails_on("sqlite", "FIXME: unknown")
     def test_union_all(self, connection):
         e = union_all(
-            select(t1.c.col3), union(select(t1.c.col3), select(t1.c.col3)),
+            select(t1.c.col3),
+            union(select(t1.c.col3), select(t1.c.col3)),
         )
 
         wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)]
@@ -1734,35 +1738,45 @@ class JoinTest(fixtures.TestBase):
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
-                .where(t1.c.name == "t1 #10",)
+                .where(
+                    t1.c.name == "t1 #10",
+                )
                 .select_from((t1.join(t2).outerjoin(t3, criteria)))
             )
             self.assertRows(expr, [(10, 20, 30)])
 
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
-                .where(t2.c.name == "t2 #20",)
+                .where(
+                    t2.c.name == "t2 #20",
+                )
                 .select_from((t1.join(t2).outerjoin(t3, criteria)))
             )
             self.assertRows(expr, [(10, 20, 30)])
 
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
-                .where(t3.c.name == "t3 #30",)
+                .where(
+                    t3.c.name == "t3 #30",
+                )
                 .select_from((t1.join(t2).outerjoin(t3, criteria)))
             )
             self.assertRows(expr, [(10, 20, 30)])
 
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
-                .where(and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"),)
+                .where(
+                    and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"),
+                )
                 .select_from((t1.join(t2).outerjoin(t3, criteria)))
             )
             self.assertRows(expr, [(10, 20, 30)])
 
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
-                .where(and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"),)
+                .where(
+                    and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"),
+                )
                 .select_from((t1.join(t2).outerjoin(t3, criteria)))
             )
             self.assertRows(expr, [(10, 20, 30)])
index 504ed40646681a3a90dba0fdc97b34d5174367e0..2dee9bc09d5fa5d792a0664a3a7588364ac4e982 100644 (file)
@@ -825,7 +825,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
 
         t2 = Table("t2", m, Column("x", Integer), quote=True)
         self.assert_compile(
-            select(t2.c.x).apply_labels(), 'SELECT "t2".x AS "t2_x" FROM "t2"',
+            select(t2.c.x).apply_labels(),
+            'SELECT "t2".x AS "t2_x" FROM "t2"',
         )
 
 
index 67f347ad3b5dea3d09d159940bb70ddf15d0e476..7d626654170a0407001ad57983ca62d12b68b591 100644 (file)
@@ -1474,7 +1474,9 @@ class KeyTargetingTest(fixtures.TablesTest):
             Column("team_id", metadata, ForeignKey("teams.id")),
         )
         Table(
-            "teams", metadata, Column("id", Integer, primary_key=True),
+            "teams",
+            metadata,
+            Column("id", Integer, primary_key=True),
         )
 
     @classmethod
@@ -1847,7 +1849,8 @@ class KeyTargetingTest(fixtures.TablesTest):
         # this has _result_columns structure that is not ordered
         # the same as the cursor.description.
         return text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
-            keyed2_b=CHAR, keyed2_a=CHAR,
+            keyed2_b=CHAR,
+            keyed2_a=CHAR,
         )
 
     def _adapt_result_columns_fixture_seven(self):
@@ -1970,7 +1973,8 @@ class PositionalTextTest(fixtures.TablesTest):
     @classmethod
     def insert_data(cls, connection):
         connection.execute(
-            cls.tables.text1.insert(), [dict(a="a1", b="b1", c="c1", d="d1")],
+            cls.tables.text1.insert(),
+            [dict(a="a1", b="b1", c="c1", d="d1")],
         )
 
     def test_via_column(self, connection):
@@ -2589,7 +2593,8 @@ class MergeCursorResultTest(fixtures.TablesTest):
 
         result = r1.merge(r2, r3, r4)
         eq_(
-            result.first(), (7, "u1"),
+            result.first(),
+            (7, "u1"),
         )
         for r in [r1, r2, r3, r4]:
             assert r.closed
index 26d4969c87c85729acbd72eda973bbb31b19cab4..601bd62730cd808c7ed323c28deb80442a08e86f 100644 (file)
@@ -260,7 +260,12 @@ class SequenceReturningTest(fixtures.TestBase):
         table = Table(
             "tables",
             meta,
-            Column("id", Integer, seq, primary_key=True,),
+            Column(
+                "id",
+                Integer,
+                seq,
+                primary_key=True,
+            ),
             Column("data", String(50)),
         )
         with testing.db.connect() as conn:
index 4feba97aec5c6ef147d419b1a4047051b8bac653..8759bbb22fc252540f8c2423902ea3a9c9de72fe 100644 (file)
@@ -150,14 +150,16 @@ class RoleTest(fixtures.TestBase):
             "implicitly coercing SELECT object to scalar subquery"
         ):
             expect(
-                roles.LabeledColumnExprRole, select(column("q")),
+                roles.LabeledColumnExprRole,
+                select(column("q")),
             )
 
         with testing.expect_warnings(
             "implicitly coercing SELECT object to scalar subquery"
         ):
             expect(
-                roles.LabeledColumnExprRole, select(column("q")).alias(),
+                roles.LabeledColumnExprRole,
+                select(column("q")).alias(),
             )
 
     def test_statement_no_text_coercion(self):
index d09fe76e1b15b431ac7535a595ea57391399f4ea..b98fbd3d07d8fbb152a06a6452548b66b37648e0 100644 (file)
@@ -2898,7 +2898,8 @@ class WithLabelsTest(fixtures.TestBase):
     def test_labels_overlap_label(self):
         sel = self._labels_overlap().apply_labels()
         eq_(
-            list(sel.selected_columns.keys()), ["t_x_id", "t_x_id_1"],
+            list(sel.selected_columns.keys()),
+            ["t_x_id", "t_x_id_1"],
         )
         eq_(
             list(sel.subquery().c.keys()),
@@ -2941,10 +2942,12 @@ class WithLabelsTest(fixtures.TestBase):
     def test_keylabels_overlap_labels_dont_label(self):
         sel = self._keylabels_overlap_labels_dont().apply_labels()
         eq_(
-            list(sel.selected_columns.keys()), ["t_x_id", "t_x_b_1"],
+            list(sel.selected_columns.keys()),
+            ["t_x_id", "t_x_b_1"],
         )
         eq_(
-            list(sel.subquery().c.keys()), ["t_x_id", "t_x_b_1"],
+            list(sel.subquery().c.keys()),
+            ["t_x_id", "t_x_b_1"],
         )
         self._assert_result_keys(sel, ["t_a", "t_x_b"])
         self._assert_subq_result_keys(sel, ["t_a", "t_x_b"])
@@ -2965,7 +2968,8 @@ class WithLabelsTest(fixtures.TestBase):
     def test_keylabels_overlap_labels_overlap_label(self):
         sel = self._keylabels_overlap_labels_overlap().apply_labels()
         eq_(
-            list(sel.selected_columns.keys()), ["t_x_a", "t_x_id_1"],
+            list(sel.selected_columns.keys()),
+            ["t_x_a", "t_x_id_1"],
         )
 
         # deduping for different cols but same label
index 243ccfbab4a24de9c0cbff1c6e0ab5b05124533b..e609a8a91618a13caecc93c71e777a86de4d1ff8 100644 (file)
@@ -124,14 +124,14 @@ class LegacySequenceExecTest(fixtures.TestBase):
 
     def test_explicit_optional(self):
         """test dialect executes a Sequence, returns nextval, whether
-        or not "optional" is set """
+        or not "optional" is set"""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(s.execute(testing.db))
 
     def test_func_implicit_connectionless_execute(self):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence", metadata=MetaData(testing.db))
         self._assert_seq_result(s.next_value().execute().scalar())
@@ -178,21 +178,21 @@ class SequenceExecTest(fixtures.TestBase):
 
     def test_execute_optional(self, connection):
         """test dialect executes a Sequence, returns nextval, whether
-        or not "optional" is set """
+        or not "optional" is set"""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(connection.execute(s))
 
     def test_execute_next_value(self, connection):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence")
         self._assert_seq_result(connection.scalar(s.next_value()))
 
     def test_execute_optional_next_value(self, connection):
         """test func.next_value().execute()/.scalar() works
-        with connectionless execution. """
+        with connectionless execution."""
 
         s = Sequence("my_sequence", optional=True)
         self._assert_seq_result(connection.scalar(s.next_value()))
@@ -225,7 +225,11 @@ class SequenceExecTest(fixtures.TestBase):
         """test can use next_value() in values() of _ValuesBase"""
 
         metadata = self.metadata
-        t1 = Table("t", metadata, Column("x", Integer),)
+        t1 = Table(
+            "t",
+            metadata,
+            Column("x", Integer),
+        )
         t1.create(testing.db)
         s = Sequence("my_sequence")
         connection.execute(t1.insert().values(x=s.next_value()))
@@ -263,7 +267,15 @@ class SequenceExecTest(fixtures.TestBase):
 
         metadata = self.metadata
         s = Sequence("my_sequence")
-        t1 = Table("t", metadata, Column("x", Integer, primary_key=True,),)
+        t1 = Table(
+            "t",
+            metadata,
+            Column(
+                "x",
+                Integer,
+                primary_key=True,
+            ),
+        )
         t1.create(testing.db)
 
         e = engines.testing_engine(options={"implicit_returning": True})
@@ -424,7 +436,11 @@ class TableBoundSequenceTest(fixtures.TablesTest):
         Table(
             "Manager",
             metadata,
-            Column("obj_id", Integer, Sequence("obj_id_seq"),),
+            Column(
+                "obj_id",
+                Integer,
+                Sequence("obj_id_seq"),
+            ),
             Column("name", String(128)),
             Column(
                 "id",
@@ -477,10 +493,26 @@ class TableBoundSequenceTest(fixtures.TablesTest):
                     conn.execute(sometable.select().order_by(sometable.c.id))
                 ),
                 [
-                    (dsb, "somename", dsb,),
-                    (dsb + 1, "someother", dsb + 1,),
-                    (dsb + 2, "name3", dsb + 2,),
-                    (dsb + 3, "name4", dsb + 3,),
+                    (
+                        dsb,
+                        "somename",
+                        dsb,
+                    ),
+                    (
+                        dsb + 1,
+                        "someother",
+                        dsb + 1,
+                    ),
+                    (
+                        dsb + 2,
+                        "name3",
+                        dsb + 2,
+                    ),
+                    (
+                        dsb + 3,
+                        "name4",
+                        dsb + 3,
+                    ),
                 ],
             )
 
index 5464750dbf0f12118a4427f5c9f1346b2177c4a5..efa622b135a468ed381b00036d52354b6af76704 100644 (file)
@@ -3236,7 +3236,8 @@ class BooleanTest(
             )
 
             eq_(
-                conn.scalar(select(boolean_table.c.unconstrained_value)), True,
+                conn.scalar(select(boolean_table.c.unconstrained_value)),
+                True,
             )
 
     def test_bind_processor_coercion_native_true(self):
index 8be5868dbf720bc4c6dcc8524d7509ed3dbeb6eb..201e6c64fe146889f15817db42800ca45c47377f 100644 (file)
@@ -461,7 +461,9 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
         self.assert_compile(
             update(table1)
             .where(table1.c.name == bindparam("crit"))
-            .values({table1.c.name: "hi"},),
+            .values(
+                {table1.c.name: "hi"},
+            ),
             "UPDATE mytable SET name=:name WHERE mytable.name = :crit",
             params={"crit": "notthere"},
             checkparams={"crit": "notthere", "name": "hi"},
@@ -473,7 +475,9 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
         self.assert_compile(
             update(table1)
             .where(table1.c.myid == 12)
-            .values({table1.c.name: table1.c.myid},),
+            .values(
+                {table1.c.name: table1.c.myid},
+            ),
             "UPDATE mytable "
             "SET name=mytable.myid, description=:description "
             "WHERE mytable.myid = :myid_1",
index 3b0544278a8c3085a8fa32258dbd4fdf7539e468..1e4f2244290e1b1041aef52d0e498122e064a58b 100644 (file)
@@ -117,7 +117,8 @@ class ValuesTest(fixtures.TablesTest, AssertsCompiledSQL):
     def test_with_join_unnamed(self):
         people = self.tables.people
         values = Values(
-            column("column1", Integer), column("column2", Integer),
+            column("column1", Integer),
+            column("column2", Integer),
         ).data([(1, 1), (2, 1), (3, 2), (3, 3)])
         stmt = select(people, values).select_from(
             people.join(values, values.c.column2 == people.c.people_id)
diff --git a/tox.ini b/tox.ini
index 71e1828a708417277803750c16c29b5376cb27f9..d319979fe9c5e6abbb7a448a8b51df1c6c6e384f 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -111,7 +111,7 @@ deps=
       # in case it requires a version pin
       pydocstyle
       pygments
-      black==19.10b0
+      black==20.8b1
 commands =
      flake8 ./lib/ ./test/ ./examples/ setup.py doc/build/conf.py {posargs}
      black --check .