From: Mike Bayer Date: Mon, 28 Sep 2020 18:08:59 +0000 (-0400) Subject: upgrade to black 20.8b1 X-Git-Tag: rel_1_4_0b1~68 X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=c3f102c9fe9811fd5286628cc6aafa5fbc324621;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git upgrade to black 20.8b1 It's better, the majority of these changes look more readable to me. also found some docstrings that had formatting / quoting issues. Change-Id: I582a45fde3a5648b2f36bab96bad56881321899b --- diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 00a097cf3d..ca602f0f0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/python/black - rev: 19.10b0 + rev: 20.8b1 hooks: - id: black diff --git a/examples/asyncio/async_orm.py b/examples/asyncio/async_orm.py index b1054a239f..52df8bd2fc 100644 --- a/examples/asyncio/async_orm.py +++ b/examples/asyncio/async_orm.py @@ -38,7 +38,8 @@ async def async_main(): """Main program function.""" engine = create_async_engine( - "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, + "postgresql+asyncpg://scott:tiger@localhost/test", + echo=True, ) async with engine.begin() as conn: diff --git a/examples/asyncio/basic.py b/examples/asyncio/basic.py index 05cdd8a05c..efdb7e9e87 100644 --- a/examples/asyncio/basic.py +++ b/examples/asyncio/basic.py @@ -27,7 +27,8 @@ t1 = Table( async def async_main(): # engine is an instance of AsyncEngine engine = create_async_engine( - "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, + "postgresql+asyncpg://scott:tiger@localhost/test", + echo=True, ) # conn is an instance of AsyncConnection diff --git a/examples/asyncio/greenlet_orm.py b/examples/asyncio/greenlet_orm.py index e0b568c4b8..7429b6853b 100644 --- a/examples/asyncio/greenlet_orm.py +++ b/examples/asyncio/greenlet_orm.py @@ -64,7 +64,8 @@ async def async_main(): """Main program function.""" engine = create_async_engine( - "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, + "postgresql+asyncpg://scott:tiger@localhost/test", + echo=True, ) async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py index f994473612..d1199090c8 100644 --- a/examples/dogpile_caching/caching_query.py +++ b/examples/dogpile_caching/caching_query.py @@ -146,7 +146,7 @@ class FromCache(UserDefinedOption): class RelationshipCache(FromCache): """Specifies that a Query as called within a "lazy load" - should load results from a cache.""" + should load results from a cache.""" propagate_to_loaders = True diff --git a/examples/versioned_rows/versioned_rows_w_versionid.py b/examples/versioned_rows/versioned_rows_w_versionid.py index 7a1fe54197..790d2ed14b 100644 --- a/examples/versioned_rows/versioned_rows_w_versionid.py +++ b/examples/versioned_rows/versioned_rows_w_versionid.py @@ -101,14 +101,17 @@ session.commit() e1.data = "e2" session.commit() -assert session.query( - Example.id, - Example.version_id, - Example.is_current_version, - Example.calc_is_current_version, - Example.data, -).order_by(Example.id, Example.version_id).all() == ( - [(1, 1, False, False, "e1"), (1, 2, True, True, "e2")] +assert ( + session.query( + Example.id, + Example.version_id, + Example.is_current_version, + Example.calc_is_current_version, + Example.data, + ) + .order_by(Example.id, Example.version_id) + .all() + == ([(1, 1, False, False, "e1"), (1, 2, 
True, True, "e2")]) ) # example 2, versioning with a parent @@ -159,12 +162,15 @@ session.commit() assert p1.child_id == 1 assert p1.child.version_id == 2 -assert session.query( - Child.id, - Child.version_id, - Child.is_current_version, - Child.calc_is_current_version, - Child.data, -).order_by(Child.id, Child.version_id).all() == ( - [(1, 1, False, False, "c1"), (1, 2, True, True, "c2")] +assert ( + session.query( + Child.id, + Child.version_id, + Child.is_current_version, + Child.calc_is_current_version, + Child.data, + ) + .order_by(Child.id, Child.version_id) + .all() + == ([(1, 1, False, False, "c1"), (1, 2, True, True, "c2")]) ) diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py index 73d12ee4f2..23a6e093d9 100644 --- a/examples/vertical/dictlike-polymorphic.py +++ b/examples/vertical/dictlike-polymorphic.py @@ -67,9 +67,8 @@ class PolymorphicVerticalProperty(object): @value.comparator class value(PropComparator): - """A comparator for .value, builds a polymorphic comparison via CASE. - - """ + """A comparator for .value, builds a polymorphic comparison + via CASE.""" def __init__(self, cls): self.cls = cls diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py index e243aba80f..e630f36e3d 100644 --- a/lib/sqlalchemy/connectors/mxodbc.py +++ b/lib/sqlalchemy/connectors/mxodbc.py @@ -66,7 +66,7 @@ class MxODBCConnector(Connector): @classmethod def _load_mx_exceptions(cls): - """ Import mxODBC exception classes into the module namespace, + """Import mxODBC exception classes into the module namespace, as if they had been imported normally. This is done here to avoid requiring all SQLAlchemy users to install mxODBC. """ @@ -84,7 +84,7 @@ class MxODBCConnector(Connector): return connect def _error_handler(self): - """ Return a handler that adjusts mxODBC's raised Warnings to + """Return a handler that adjusts mxODBC's raised Warnings to emit Python standard warnings. """ from mx.ODBC.Error import Warning as MxOdbcWarning diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 2cbdc19aac..87ccc8427c 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1033,7 +1033,7 @@ class TIME(sqltypes.TIME): self.__zero_date, value.time() ) elif isinstance(value, datetime.time): - """ issue #5339 + """issue #5339 per: https://github.com/mkleehammer/pyodbc/wiki/Tips-and-Tricks-by-Database-Platform#time-columns pass TIME value as string """ # noqa @@ -1260,9 +1260,7 @@ class SQL_VARIANT(sqltypes.TypeEngine): class TryCast(sql.elements.Cast): - """Represent a SQL Server TRY_CAST expression. 
- - """ + """Represent a SQL Server TRY_CAST expression.""" __visit_name__ = "try_cast" @@ -1579,8 +1577,12 @@ class MSExecutionContext(default.DefaultExecutionContext): elif ( self.isinsert or self.isupdate or self.isdelete ) and self.compiled.returning: - self.cursor_fetch_strategy = _cursor.FullyBufferedCursorFetchStrategy( # noqa - self.cursor, self.cursor.description, self.cursor.fetchall() + self.cursor_fetch_strategy = ( + _cursor.FullyBufferedCursorFetchStrategy( + self.cursor, + self.cursor.description, + self.cursor.fetchall(), + ) ) if self._enable_identity_insert: @@ -1729,8 +1731,8 @@ class MSSQLCompiler(compiler.SQLCompiler): return text def limit_clause(self, select, **kw): - """ MSSQL 2012 supports OFFSET/FETCH operators - Use it instead subquery with row_number + """MSSQL 2012 supports OFFSET/FETCH operators + Use it instead subquery with row_number """ diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py index 998153d7a7..b274c2a2b2 100644 --- a/lib/sqlalchemy/dialects/mssql/mxodbc.py +++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py @@ -59,8 +59,7 @@ from ...connectors.mxodbc import MxODBCConnector class _MSNumeric_mxodbc(_MSNumeric_pyodbc): - """Include pyodbc's numeric processor. - """ + """Include pyodbc's numeric processor.""" class _MSDate_mxodbc(_MSDate): diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index 2b45f5ddba..f1d0aedaf9 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -28,8 +28,7 @@ from ... import util class _cymysqlBIT(BIT): def result_processor(self, dialect, coltype): - """Convert a MySQL's 64 bit, variable length binary string to a long. - """ + """Convert MySQL's 64 bit, variable length binary string to a long.""" def process(value): if value is not None: diff --git a/lib/sqlalchemy/dialects/mysql/mariadb.py b/lib/sqlalchemy/dialects/mysql/mariadb.py index c6cadcd603..0dbb579e81 100644 --- a/lib/sqlalchemy/dialects/mysql/mariadb.py +++ b/lib/sqlalchemy/dialects/mysql/mariadb.py @@ -13,5 +13,10 @@ def loader(driver): driver_cls = getattr(driver_mod, driver).dialect return type( - "MariaDBDialect_%s" % driver, (MariaDBDialect, driver_cls,), {} + "MariaDBDialect_%s" % driver, + ( + MariaDBDialect, + driver_cls, + ), + {}, ) diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py index 3b455cfb1f..5949750001 100644 --- a/lib/sqlalchemy/dialects/mysql/types.py +++ b/lib/sqlalchemy/dialects/mysql/types.py @@ -440,9 +440,7 @@ class TIME(sqltypes.TIME): class TIMESTAMP(sqltypes.TIMESTAMP): - """MySQL TIMESTAMP type. - - """ + """MySQL TIMESTAMP type.""" __visit_name__ = "TIMESTAMP" @@ -467,9 +465,7 @@ class TIMESTAMP(sqltypes.TIMESTAMP): class DATETIME(sqltypes.DATETIME): - """MySQL DATETIME type. 
- - """ + """MySQL DATETIME type.""" __visit_name__ = "DATETIME" diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 651a6e6736..d1b69100f7 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -970,7 +970,11 @@ class OracleDialect_cx_oracle(OracleDialect): # allow all strings to come back natively as Unicode elif ( dialect.coerce_to_unicode - and default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR,) + and default_type + in ( + cx_Oracle.STRING, + cx_Oracle.FIXED_CHAR, + ) and default_type is not cx_Oracle.CLOB and default_type is not cx_Oracle.NCLOB ): @@ -1018,7 +1022,9 @@ class OracleDialect_cx_oracle(OracleDialect): cx_Oracle.BLOB, ): return cursor.var( - cx_Oracle.LONG_BINARY, size, cursor.arraysize, + cx_Oracle.LONG_BINARY, + size, + cursor.arraysize, ) return output_type_handler diff --git a/lib/sqlalchemy/dialects/postgresql/asyncpg.py b/lib/sqlalchemy/dialects/postgresql/asyncpg.py index 1f988153c5..8b20de2b6d 100644 --- a/lib/sqlalchemy/dialects/postgresql/asyncpg.py +++ b/lib/sqlalchemy/dialects/postgresql/asyncpg.py @@ -576,11 +576,13 @@ class AsyncAdapt_asyncpg_dbapi: if async_fallback: return AsyncAdaptFallback_asyncpg_connection( - self, await_fallback(self.asyncpg.connect(*arg, **kw)), + self, + await_fallback(self.asyncpg.connect(*arg, **kw)), ) else: return AsyncAdapt_asyncpg_connection( - self, await_only(self.asyncpg.connect(*arg, **kw)), + self, + await_only(self.asyncpg.connect(*arg, **kw)), ) class Error(Exception): diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 8786938668..5ed56db560 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1338,9 +1338,7 @@ class TIME(sqltypes.TIME): class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): - """PostgreSQL INTERVAL type. - - """ + """PostgreSQL INTERVAL type.""" __visit_name__ = "INTERVAL" native = True @@ -1414,7 +1412,7 @@ class UUID(sqltypes.TypeEngine): as Python uuid objects, converting to/from string via the DBAPI. - """ + """ self.as_uuid = as_uuid def coerce_compared_value(self, op, value): @@ -3026,7 +3024,9 @@ class PGDialect(default.DefaultDialect): type_=sqltypes.Unicode, ), sql.bindparam( - "schema", util.text_type(schema), type_=sqltypes.Unicode, + "schema", + util.text_type(schema), + type_=sqltypes.Unicode, ), ) ) @@ -3200,7 +3200,9 @@ class PGDialect(default.DefaultDialect): "n.nspname=:schema" ).bindparams( sql.bindparam( - "schema", util.text_type(schema), type_=sqltypes.Unicode, + "schema", + util.text_type(schema), + type_=sqltypes.Unicode, ), ) ) diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index cb89f7c5f8..15ec2a585e 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -160,13 +160,11 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) def has_all(self, other): - """Boolean expression. Test for presence of all keys in jsonb - """ + """Boolean expression. Test for presence of all keys in jsonb""" return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) def has_any(self, other): - """Boolean expression. Test for presence of any key in jsonb - """ + """Boolean expression. 
Test for presence of any key in jsonb""" return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) def contains(self, other, **kwargs): diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 9ffe9cfe88..63e1656e06 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -204,7 +204,7 @@ class JSON(sqltypes.JSON): .. versionadded:: 1.1 - """ + """ super(JSON, self).__init__(none_as_null=none_as_null) if astext_type is not None: self.astext_type = astext_type @@ -300,13 +300,11 @@ class JSONB(JSON): return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean) def has_all(self, other): - """Boolean expression. Test for presence of all keys in jsonb - """ + """Boolean expression. Test for presence of all keys in jsonb""" return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean) def has_any(self, other): - """Boolean expression. Test for presence of any key in jsonb - """ + """Boolean expression. Test for presence of any key in jsonb""" return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean) def contains(self, other, **kwargs): diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index a31d958ed9..ddc12c096d 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -100,48 +100,36 @@ class RangeOperators(object): class INT4RANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL INT4RANGE type. - - """ + """Represent the PostgreSQL INT4RANGE type.""" __visit_name__ = "INT4RANGE" class INT8RANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL INT8RANGE type. - - """ + """Represent the PostgreSQL INT8RANGE type.""" __visit_name__ = "INT8RANGE" class NUMRANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL NUMRANGE type. - - """ + """Represent the PostgreSQL NUMRANGE type.""" __visit_name__ = "NUMRANGE" class DATERANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL DATERANGE type. - - """ + """Represent the PostgreSQL DATERANGE type.""" __visit_name__ = "DATERANGE" class TSRANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL TSRANGE type. - - """ + """Represent the PostgreSQL TSRANGE type.""" __visit_name__ = "TSRANGE" class TSTZRANGE(RangeOperators, sqltypes.TypeEngine): - """Represent the PostgreSQL TSTZRANGE type. - - """ + """Represent the PostgreSQL TSTZRANGE type.""" __visit_name__ = "TSTZRANGE" diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index a8b2349216..eb855016ec 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -515,7 +515,9 @@ class SQLiteDialect_pysqlite(SQLiteDialect): dbapi_connection = connection dbapi_connection.create_function( - "regexp", 2, regexp, + "regexp", + 2, + regexp, ) fns = [set_regexp] diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index e6215540d1..9a6bdd7f36 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -67,9 +67,7 @@ class Connection(Connectable): _dispatch=None, _has_events=None, ): - """Construct a new Connection. 
- - """ + """Construct a new Connection.""" self.engine = engine self.dialect = engine.dialect self.__branch_from = _branch_from @@ -347,7 +345,7 @@ class Connection(Connectable): return c def get_execution_options(self): - """ Get the non-SQL options which will take effect during execution. + """Get the non-SQL options which will take effect during execution. .. versionadded:: 1.3 @@ -1234,7 +1232,11 @@ class Connection(Connectable): for fn in self.dispatch.before_execute: elem, event_multiparams, event_params = fn( - self, elem, event_multiparams, event_params, execution_options, + self, + elem, + event_multiparams, + event_params, + execution_options, ) if event_multiparams: @@ -2124,9 +2126,7 @@ class Transaction(object): assert not self.is_active def rollback(self): - """Roll back this :class:`.Transaction`. - - """ + """Roll back this :class:`.Transaction`.""" try: self._do_rollback() finally: @@ -2618,7 +2618,7 @@ class Engine(Connectable, log.Identified): return self._option_cls(self, opt) def get_execution_options(self): - """ Get the non-SQL options which will take effect during execution. + """Get the non-SQL options which will take effect during execution. .. versionadded: 1.3 diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py index 43afa3628a..6c4a756c9a 100644 --- a/lib/sqlalchemy/engine/cursor.py +++ b/lib/sqlalchemy/engine/cursor.py @@ -1034,7 +1034,8 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy): @classmethod def create(cls, result): return BufferedRowCursorFetchStrategy( - result.cursor, result.context.execution_options, + result.cursor, + result.context.execution_options, ) def _buffer_rows(self, result, dbapi_cursor): @@ -1204,9 +1205,7 @@ _NO_RESULT_METADATA = _NoResultMetaData() class BaseCursorResult(object): - """Base class for database result objects. - - """ + """Base class for database result objects.""" out_parameters = None _metadata = None diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index e0e4a9a833..27de5aaafa 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -530,7 +530,7 @@ class Dialect(object): :param dbapi_connection: a DBAPI connection, typically proxied within a :class:`.ConnectionFairy`. - """ + """ raise NotImplementedError() @@ -541,7 +541,7 @@ class Dialect(object): :param dbapi_connection: a DBAPI connection, typically proxied within a :class:`.ConnectionFairy`. - """ + """ raise NotImplementedError() diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 198b5e568f..812f7ceeca 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -230,8 +230,7 @@ class Inspector(object): return self.dialect.default_schema_name def get_schema_names(self): - """Return all schema names. 
- """ + """Return all schema names.""" if hasattr(self.dialect, "get_schema_names"): with self._operation_context() as conn: diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 56abca9a9f..8b9b413c46 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -167,7 +167,10 @@ class SimpleResultMetaData(ResultMetaData): if extra: recs_names = [ - ((name,) + extras, (index, name, extras),) + ( + (name,) + extras, + (index, name, extras), + ) for index, (name, extras) in enumerate(zip(self._keys, extra)) ] else: @@ -407,7 +410,10 @@ class ResultInternal(InPlaceGenerative): rows = [ made_row for made_row, sig_row in [ - (made_row, strategy(made_row) if strategy else made_row,) + ( + made_row, + strategy(made_row) if strategy else made_row, + ) for made_row in made_rows ] if sig_row not in uniques and not uniques.add(sig_row) @@ -543,7 +549,10 @@ class ResultInternal(InPlaceGenerative): return manyrows def _only_one_row( - self, raise_for_second_row, raise_for_none, scalar, + self, + raise_for_second_row, + raise_for_none, + scalar, ): onerow = self._fetchone_impl @@ -1400,10 +1409,7 @@ class MappingResult(FilterResult): def columns(self, *col_expressions): # type: (*object) -> MappingResult - r"""Establish the columns that should be returned in each row. - - - """ + r"""Establish the columns that should be returned in each row.""" return self._column_slices(col_expressions) def partitions(self, size=None): diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 6d2f4aa244..58f59642cf 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -140,7 +140,7 @@ class URL( :class:`_engine.URL`, use the :meth:`_engine.URL.set` and :meth:`_engine.URL.update_query` methods. - """ + """ return cls( cls._assert_str(drivername, "drivername"), @@ -205,7 +205,9 @@ class URL( return util.immutabledict( { - _assert_str(key): _assert_value(value,) + _assert_str(key): _assert_value( + value, + ) for key, value in dict_items } ) diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index b36c448ceb..cd09235c3b 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -196,8 +196,6 @@ def remove(target, identifier, fn): def contains(target, identifier, fn): - """Return True if the given target/ident/fn is set up to listen. 
- - """ + """Return True if the given target/ident/fn is set up to listen.""" return _event_key(target, identifier, fn).contains() diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index abb264f98f..baa3cd28a6 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -379,7 +379,7 @@ class _ListenerCollection(_CompoundListener): def _update(self, other, only_propagate=True): """Populate from the listeners in another :class:`_Dispatch` - object.""" + object.""" existing_listeners = self.listeners existing_listener_set = set(existing_listeners) diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index c78080738f..daa6f9aeab 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -142,7 +142,7 @@ class _Dispatch(object): def _update(self, other, only_propagate=True): """Populate from the listeners in another :class:`_Dispatch` - object.""" + object.""" for ls in other._event_descriptors: if isinstance(ls, _EmptyListener): continue diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 144dd45dc6..58680f3564 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -139,8 +139,7 @@ def _clear(owner, elements): class _EventKey(object): - """Represent :func:`.listen` arguments. - """ + """Represent :func:`.listen` arguments.""" __slots__ = ( "target", @@ -239,8 +238,7 @@ class _EventKey(object): collection.remove(self.with_wrapper(listener_fn)) def contains(self): - """Return True if this event key is registered to listen. - """ + """Return True if this event key is registered to listen.""" return self._key in _key_to_collection def base_listen( diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index b80bf9b011..7e4a3f53eb 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -35,7 +35,11 @@ class SQLAlchemyError(Exception): else: return ( "(Background on this error at: " - "http://sqlalche.me/e/%s/%s)" % (_version_token, self.code,) + "http://sqlalche.me/e/%s/%s)" + % ( + _version_token, + self.code, + ) ) def _message(self, as_unicode=compat.py3k): diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 3ea77a952e..a2c6b596f4 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -840,8 +840,7 @@ class AmbiguousAssociationProxyInstance(AssociationProxyInstance): class ObjectAssociationProxyInstance(AssociationProxyInstance): - """an :class:`.AssociationProxyInstance` that has an object as a target. - """ + """an :class:`.AssociationProxyInstance` that has an object as a target.""" _target_is_object = True _is_canonical = True diff --git a/lib/sqlalchemy/ext/asyncio/engine.py b/lib/sqlalchemy/ext/asyncio/engine.py index 619cf85086..4a92fb1f2c 100644 --- a/lib/sqlalchemy/ext/asyncio/engine.py +++ b/lib/sqlalchemy/ext/asyncio/engine.py @@ -85,16 +85,12 @@ class AsyncConnection(StartableContext): return self.sync_connection def begin(self) -> "AsyncTransaction": - """Begin a transaction prior to autobegin occurring. - - """ + """Begin a transaction prior to autobegin occurring.""" self._sync_connection() return AsyncTransaction(self) def begin_nested(self) -> "AsyncTransaction": - """Begin a nested transaction and return a transaction handle. 
- - """ + """Begin a nested transaction and return a transaction handle.""" self._sync_connection() return AsyncTransaction(self, nested=True) @@ -154,7 +150,10 @@ class AsyncConnection(StartableContext): conn = self._sync_connection() result = await greenlet_spawn( - conn.exec_driver_sql, statement, parameters, execution_options, + conn.exec_driver_sql, + statement, + parameters, + execution_options, ) if result.context._is_server_side: raise async_exc.AsyncMethodRequired( @@ -230,7 +229,10 @@ class AsyncConnection(StartableContext): conn = self._sync_connection() result = await greenlet_spawn( - conn._execute_20, statement, parameters, execution_options, + conn._execute_20, + statement, + parameters, + execution_options, ) if result.context._is_server_side: raise async_exc.AsyncMethodRequired( @@ -261,7 +263,7 @@ class AsyncConnection(StartableContext): return result.scalar() async def run_sync(self, fn: Callable, *arg, **kw) -> Any: - """"Invoke the given sync callable passing self as the first argument. + """Invoke the given sync callable passing self as the first argument. This method maintains the asyncio event loop all the way through to the database connection by running the given callable in a @@ -418,9 +420,7 @@ class AsyncTransaction(StartableContext): await greenlet_spawn(self._sync_transaction().close) async def rollback(self): - """Roll back this :class:`.Transaction`. - - """ + """Roll back this :class:`.Transaction`.""" await greenlet_spawn(self._sync_transaction().rollback) async def commit(self): diff --git a/lib/sqlalchemy/ext/asyncio/result.py b/lib/sqlalchemy/ext/asyncio/result.py index 52b40acbab..7f8a707d52 100644 --- a/lib/sqlalchemy/ext/asyncio/result.py +++ b/lib/sqlalchemy/ext/asyncio/result.py @@ -553,10 +553,7 @@ class AsyncMappingResult(FilterResult): def columns(self, *col_expressions): # type: (*object) -> AsyncMappingResult - r"""Establish the columns that should be returned in each row. - - - """ + r"""Establish the columns that should be returned in each row.""" return self._column_slices(col_expressions) async def partitions(self, size=None): diff --git a/lib/sqlalchemy/ext/asyncio/session.py b/lib/sqlalchemy/ext/asyncio/session.py index 1673017808..cb06aa26d5 100644 --- a/lib/sqlalchemy/ext/asyncio/session.py +++ b/lib/sqlalchemy/ext/asyncio/session.py @@ -269,9 +269,7 @@ class AsyncSessionTransaction(StartableContext): return self.sync_transaction async def rollback(self): - """Roll back this :class:`_asyncio.AsyncTransaction`. - - """ + """Roll back this :class:`_asyncio.AsyncTransaction`.""" await greenlet_spawn(self._sync_transaction().rollback) async def commit(self): diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 2886773878..8a2023e96f 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -173,8 +173,7 @@ class BakedQuery(object): return self._cache_key + (session._query_cls,) def _with_lazyload_options(self, options, effective_path, cache_path=None): - """Cloning version of _add_lazyload_options. 
- """ + """Cloning version of _add_lazyload_options.""" q = self._clone() q._add_lazyload_options(options, effective_path, cache_path=cache_path) return q diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 75bb028f0b..83562502ab 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -950,7 +950,7 @@ class hybrid_property(interfaces.InspectionAttrInfo): :ref:`hybrid_reuse_subclass` - """ + """ return self def getter(self, fget): diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index 7b6b779977..03ea096e74 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -376,7 +376,7 @@ class OrderingList(list): def _reconstitute(cls, dict_, items): - """ Reconstitute an :class:`.OrderingList`. + """Reconstitute an :class:`.OrderingList`. This is the adjoint to :meth:`.OrderingList.__reduce__`. It is used for unpickling :class:`.OrderingList` objects. diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index 270f189bef..4d9ee30c44 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -54,7 +54,7 @@ def inspect(subject, raiseerr=True): :class:`sqlalchemy.exc.NoInspectionAvailable` is raised. If ``False``, ``None`` is returned. - """ + """ type_ = type(subject) for cls in type_.__mro__: if cls in _registrars: diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 7f2c61a05c..e36797d47f 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -119,9 +119,7 @@ relationship = public_factory(RelationshipProperty, ".orm.relationship") @_sa_util.deprecated_20("relation", "Please use :func:`.relationship`.") def relation(*arg, **kw): - """A synonym for :func:`relationship`. - - """ + """A synonym for :func:`relationship`.""" return relationship(*arg, **kw) diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 07b147f10f..c7ef97c6cd 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -968,9 +968,9 @@ class ScalarAttributeImpl(AttributeImpl): class ScalarObjectAttributeImpl(ScalarAttributeImpl): """represents a scalar-holding InstrumentedAttribute, - where the target object is also instrumented. + where the target object is also instrumented. - Adds events to delete/set operations. + Adds events to delete/set operations. """ @@ -1065,9 +1065,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): check_old=None, pop=False, ): - """Set a value on the given InstanceState. 
- - """ + """Set a value on the given InstanceState.""" if self.dispatch._active_history: old = self.get( state, diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py index c24e04aa59..5e9cf9cce3 100644 --- a/lib/sqlalchemy/orm/context.py +++ b/lib/sqlalchemy/orm/context.py @@ -1223,7 +1223,14 @@ class ORMSelectCompileState(ORMCompileState, SelectState): # figure out the final "left" and "right" sides and create an # ORMJoin to add to our _from_obj tuple self._join_left_to_right( - left, right, onclause, prop, False, False, isouter, full, + left, + right, + onclause, + prop, + False, + False, + isouter, + full, ) def _legacy_join(self, args): @@ -1822,7 +1829,8 @@ class ORMSelectCompileState(ORMCompileState, SelectState): self._mapper_loads_polymorphically_with( right_mapper, sql_util.ColumnAdapter( - right_mapper.selectable, right_mapper._equivalent_columns, + right_mapper.selectable, + right_mapper._equivalent_columns, ), ) # if the onclause is a ClauseElement, adapt it with any @@ -2538,7 +2546,11 @@ class _ORMColumnEntity(_ColumnEntity): ) def __init__( - self, compile_state, column, parententity, parent_bundle=None, + self, + compile_state, + column, + parententity, + parent_bundle=None, ): annotations = column._annotations diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py index 70fffa2951..4d9766204e 100644 --- a/lib/sqlalchemy/orm/decl_api.py +++ b/lib/sqlalchemy/orm/decl_api.py @@ -375,7 +375,12 @@ def declarative_base( metadata=metadata, class_registry=class_registry, constructor=constructor, - ).generate_base(mapper=mapper, cls=cls, name=name, metaclass=metaclass,) + ).generate_base( + mapper=mapper, + cls=cls, + name=name, + metaclass=metaclass, + ) class registry(object): @@ -458,7 +463,11 @@ class registry(object): clsregistry.remove_class(cls.__name__, cls, self._class_registry) def generate_base( - self, mapper=None, cls=object, name="Base", metaclass=DeclarativeMeta, + self, + mapper=None, + cls=object, + name="Base", + metaclass=DeclarativeMeta, ): """Generate a declarative base class. 
diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index 0e0c79b1f9..0e89e729fb 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -203,7 +203,11 @@ class _ImperativeMapperConfig(_MapperConfig): __slots__ = ("dict_", "local_table", "inherits") def __init__( - self, registry, cls_, table, mapper_kw, + self, + registry, + cls_, + table, + mapper_kw, ): super(_ImperativeMapperConfig, self).__init__(registry, cls_) @@ -223,7 +227,8 @@ class _ImperativeMapperConfig(_MapperConfig): mapper_cls = mapper return self.set_cls_attribute( - "__mapper__", mapper_cls(self.cls, self.local_table, **mapper_kw), + "__mapper__", + mapper_cls(self.cls, self.local_table, **mapper_kw), ) def _setup_inheritance(self, mapper_kw): @@ -274,7 +279,12 @@ class _ClassScanMapperConfig(_MapperConfig): ) def __init__( - self, registry, cls_, dict_, table, mapper_kw, + self, + registry, + cls_, + dict_, + table, + mapper_kw, ): super(_ClassScanMapperConfig, self).__init__(registry, cls_) diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index c2efa24a19..713891d915 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -26,7 +26,7 @@ from ..sql import expression class DescriptorProperty(MapperProperty): """:class:`.MapperProperty` which proxies access to a - user-defined descriptor.""" + user-defined descriptor.""" doc = None diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 48161a256f..4426041e33 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -487,12 +487,15 @@ class AppenderQuery(Generative): iterator = ( (item,) for item in self.attr._get_collection_history( - state, attributes.PASSIVE_NO_INITIALIZE, + state, + attributes.PASSIVE_NO_INITIALIZE, ).added_items ) row_metadata = _result.SimpleResultMetaData( - (self.mapper.class_.__name__,), [], _unique_filters=[id], + (self.mapper.class_.__name__,), + [], + _unique_filters=[id], ) return _result.IteratorResult(row_metadata, iterator).scalars() diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index a7dd1c5478..ecb704a04d 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -639,7 +639,7 @@ def _instance_processor( _polymorphic_from=None, ): """Produce a mapper level row processor callable - which processes rows into mapped instances.""" + which processes rows into mapped instances.""" # note that this method, most of which exists in a closure # called _instance(), resists being broken out, as @@ -1253,9 +1253,7 @@ def _decorate_polymorphic_switch( class PostLoad(object): - """Track loaders and states for "post load" operations. - - """ + """Track loaders and states for "post load" operations.""" __slots__ = "loaders", "states", "load_keys" diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 296ddf385d..7b94bfa872 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1275,8 +1275,7 @@ class Mapper( @classmethod def _configure_all(cls): - """Class-level path to the :func:`.configure_mappers` call. - """ + """Class-level path to the :func:`.configure_mappers` call.""" configure_mappers() def dispose(self): @@ -1799,7 +1798,7 @@ class Mapper( @util.preload_module("sqlalchemy.orm.descriptor_props") def _property_from_column(self, key, prop): """generate/update a :class:`.ColumnProperty` given a - :class:`_schema.Column` object. 
""" + :class:`_schema.Column` object.""" descriptor_props = util.preloaded.orm_descriptor_props # we were passed a Column or a list of Columns; # generate a properties.ColumnProperty @@ -1983,8 +1982,7 @@ class Mapper( return key in self._props def get_property(self, key, _configure_mappers=True): - """return a MapperProperty associated with the given key. - """ + """return a MapperProperty associated with the given key.""" if _configure_mappers and Mapper._new_mappers: configure_mappers() diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index d05381c1d2..fa126a279b 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1179,7 +1179,8 @@ def _emit_insert_statements( c.returned_defaults_rows or (), ): for pk, col in zip( - inserted_primary_key, mapper._pks_by_table[table], + inserted_primary_key, + mapper._pks_by_table[table], ): prop = mapper_rec._columntoproperty[col] if state_dict.get(prop.key) is None: @@ -2236,7 +2237,8 @@ class BulkORMUpdate(UpdateDMLState, BulkUDCompileState): session.identity_map[identity_key] for identity_key in [ target_mapper.identity_key_from_primary_key( - list(primary_key), identity_token=identity_token, + list(primary_key), + identity_token=identity_token, ) for primary_key, identity_token in [ (row[0:-1], row[-1]) for row in matched_rows @@ -2337,7 +2339,8 @@ class BulkORMDelete(DeleteDMLState, BulkUDCompileState): # TODO: inline this and call remove_newly_deleted # once identity_key = target_mapper.identity_key_from_primary_key( - list(primary_key), identity_token=identity_token, + list(primary_key), + identity_token=identity_token, ) if identity_key in session.identity_map: session._remove_newly_deleted( diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index e7bfc25b7b..0e7fd2fc31 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -435,7 +435,10 @@ class Query( return stmt def subquery( - self, name=None, with_labels=False, reduce_columns=False, + self, + name=None, + with_labels=False, + reduce_columns=False, ): """Return the full SELECT statement represented by this :class:`_query.Query`, embedded within an @@ -1496,7 +1499,7 @@ class Query( return fn(self) def get_execution_options(self): - """ Get the non-SQL options which will take effect during execution. + """Get the non-SQL options which will take effect during execution. .. versionadded:: 1.3 @@ -1508,7 +1511,7 @@ class Query( @_generative def execution_options(self, **kwargs): - """ Set non-SQL options which take effect during execution. + """Set non-SQL options which take effect during execution. Options allowed here include all of those accepted by :meth:`_engine.Connection.execution_options`, as well as a series @@ -3030,7 +3033,9 @@ class Query( """ - bulk_del = BulkDelete(self,) + bulk_del = BulkDelete( + self, + ) if self.dispatch.before_compile_delete: for fn in self.dispatch.before_compile_delete: new_query = fn(bulk_del.query, bulk_del) diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index cd1502073e..13611f2bb7 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -3636,9 +3636,7 @@ class JoinCondition(object): class _ColInAnnotations(object): - """Seralizable object that tests for a name in c._annotations. 
- - """ + """Seralizable object that tests for a name in c._annotations.""" __slots__ = ("name",) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index bebe015f74..e32e055103 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -47,7 +47,7 @@ _sessions = weakref.WeakValueDictionary() def _state_session(state): """Given an :class:`.InstanceState`, return the :class:`.Session` - associated, if any. + associated, if any. """ if state.session_id: try: @@ -404,7 +404,11 @@ class SessionTransaction(object): _rollback_exception = None def __init__( - self, session, parent=None, nested=False, autobegin=False, + self, + session, + parent=None, + nested=False, + autobegin=False, ): self.session = session self._connections = {} @@ -1865,7 +1869,8 @@ class Session(_SessionClassMethods): except sa_exc.NoInspectionAvailable as err: if isinstance(mapper, type): util.raise_( - exc.UnmappedClassError(mapper), replace_context=err, + exc.UnmappedClassError(mapper), + replace_context=err, ) else: raise @@ -2098,7 +2103,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) self._expire_state(state, attribute_names) @@ -2204,7 +2210,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) self._expire_state(state, attribute_names) @@ -2242,7 +2249,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) if state.session_id is not self.hash_key: raise sa_exc.InvalidRequestError( @@ -2396,7 +2404,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) self._save_or_update_state(state) @@ -2433,7 +2442,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) self._delete_impl(state, instance, head=True) @@ -2663,7 +2673,10 @@ class Session(_SessionClassMethods): if execution_options: statement = statement.execution_options(**execution_options) return db_load_fn( - self, statement, primary_key_identity, load_options=load_options, + self, + statement, + primary_key_identity, + load_options=load_options, ) def merge(self, instance, load=True): @@ -3060,7 +3073,8 @@ class Session(_SessionClassMethods): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) return self._contains_state(state) @@ -3159,7 +3173,8 @@ class Session(_SessionClassMethods): except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(o), replace_context=err, + exc.UnmappedInstanceError(o), + replace_context=err, ) objset.add(state) else: @@ -4062,7 +4077,8 @@ def 
object_session(instance): state = attributes.instance_state(instance) except exc.NO_STATE as err: util.raise_( - exc.UnmappedInstanceError(instance), replace_context=err, + exc.UnmappedInstanceError(instance), + replace_context=err, ) else: return _state_session(state) diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 2332834977..b139d5933b 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -212,7 +212,7 @@ class InstanceState(interfaces.InspectionAttrInfo): :ref:`session_object_states` - """ + """ return self.key is not None and self._attached and not self._deleted @property @@ -533,7 +533,7 @@ class InstanceState(interfaces.InspectionAttrInfo): def _reset(self, dict_, key): """Remove the given attribute and any - callables associated with it.""" + callables associated with it.""" old = dict_.pop(key, None) if old is not None and self.manager[key].impl.collection: diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 325bd4dc1e..900691688e 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1455,7 +1455,12 @@ class SubqueryLoader(PostLoader): self._load() def _setup_query_from_rowproc( - self, context, path, entity, loadopt, adapter, + self, + context, + path, + entity, + loadopt, + adapter, ): compile_state = context.compile_state if ( @@ -1627,7 +1632,11 @@ class SubqueryLoader(PostLoader): return subq = self._setup_query_from_rowproc( - context, path, path[-1], loadopt, adapter, + context, + path, + path[-1], + loadopt, + adapter, ) if subq is None: @@ -1879,7 +1888,9 @@ class JoinedLoader(AbstractRelationshipLoader): prop.mapper, None ) path.set( - target_attributes, "user_defined_eager_row_processor", adapter, + target_attributes, + "user_defined_eager_row_processor", + adapter, ) return adapter diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 170e4487e5..f902014efb 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -487,7 +487,8 @@ class AliasedClass(object): if alias is None: alias = mapper._with_polymorphic_selectable._anonymous_fromclause( - name=name, flat=flat, + name=name, + flat=flat, ) self._aliased_insp = AliasedInsp( diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index fe4c60a2d5..b83b5525f0 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -48,8 +48,8 @@ from .sql.schema import DefaultGenerator # noqa from .sql.schema import FetchedValue # noqa from .sql.schema import ForeignKey # noqa from .sql.schema import ForeignKeyConstraint # noqa -from .sql.schema import Index # noqa from .sql.schema import Identity # noqa +from .sql.schema import Index # noqa from .sql.schema import MetaData # noqa from .sql.schema import PrimaryKeyConstraint # noqa from .sql.schema import SchemaItem # noqa diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 67ee8c9071..f9b5ce7e19 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -558,10 +558,7 @@ class _MetaOptions(type): class Options(util.with_metaclass(_MetaOptions)): - """A cacheable option dictionary with defaults. - - - """ + """A cacheable option dictionary with defaults.""" def __init__(self, **kw): self.__dict__.update(kw) @@ -635,7 +632,7 @@ class Options(util.with_metaclass(_MetaOptions)): def from_execution_options( cls, key, attrs, exec_options, statement_exec_options ): - """"process Options argument in terms of execution options. 
+ """process Options argument in terms of execution options. e.g.:: @@ -706,9 +703,7 @@ class ExecutableOption(HasCopyInternals, HasCacheKey): __visit_name__ = "executable_option" def _clone(self): - """Create a shallow copy of this ExecutableOption. - - """ + """Create a shallow copy of this ExecutableOption.""" c = self.__class__.__new__(self.__class__) c.__dict__ = dict(self.__dict__) return c @@ -812,7 +807,7 @@ class Executable(Generative): @_generative def execution_options(self, **kw): - """ Set non-SQL options for the statement which take effect during + """Set non-SQL options for the statement which take effect during execution. Execution options can be set on a per-statement or @@ -858,7 +853,7 @@ class Executable(Generative): self._execution_options = self._execution_options.union(kw) def get_execution_options(self): - """ Get the non-SQL options which will take effect during execution. + """Get the non-SQL options which will take effect during execution. .. versionadded:: 1.3 @@ -877,9 +872,7 @@ class Executable(Generative): ":class:`.Session`.", ) def execute(self, *multiparams, **params): - """Compile and execute this :class:`.Executable`. - - """ + """Compile and execute this :class:`.Executable`.""" e = self.bind if e is None: label = getattr(self, "description", self.__class__.__name__) @@ -1388,18 +1381,18 @@ class DedupeColumnCollection(ColumnCollection): def replace(self, column): """add the given column to this collection, removing unaliased - versions of this column as well as existing columns with the - same key. + versions of this column as well as existing columns with the + same key. - e.g.:: + e.g.:: - t = Table('sometable', metadata, Column('col1', Integer)) - t.columns.replace(Column('col1', Integer, key='columnone')) + t = Table('sometable', metadata, Column('col1', Integer)) + t.columns.replace(Column('col1', Integer, key='columnone')) - will remove the original 'col1' from the collection, and add - the new column under the name 'columnname'. + will remove the original 'col1' from the collection, and add + the new column under the name 'columnname'. - Used by schema.Column to override columns during table reflection. + Used by schema.Column to override columns during table reflection. 
""" diff --git a/lib/sqlalchemy/sql/coercions.py b/lib/sqlalchemy/sql/coercions.py index 154564a081..558ced8bde 100644 --- a/lib/sqlalchemy/sql/coercions.py +++ b/lib/sqlalchemy/sql/coercions.py @@ -37,9 +37,13 @@ def _is_literal(element): """ - return not isinstance( - element, (Visitable, schema.SchemaEventTarget), - ) and not hasattr(element, "__clause_element__") + return ( + not isinstance( + element, + (Visitable, schema.SchemaEventTarget), + ) + and not hasattr(element, "__clause_element__") + ) def _deep_is_literal(element): diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index 986f63aad6..1c68d6450c 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -719,7 +719,8 @@ def _append_param_update( ( c, compiler.preparer.format_column( - c, use_table=include_table, + c, + use_table=include_table, ), compiler.process(c.onupdate.arg.self_group(), **kw), ) @@ -733,7 +734,8 @@ def _append_param_update( ( c, compiler.preparer.format_column( - c, use_table=include_table, + c, + use_table=include_table, ), _create_update_prefetch_bind_param(compiler, c, **kw), ) diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 67c11f6c76..5f3074cdc2 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -985,7 +985,9 @@ class SchemaDropper(DDLBase): def sort_tables( - tables, skip_fn=None, extra_dependencies=None, + tables, + skip_fn=None, + extra_dependencies=None, ): """Sort a collection of :class:`_schema.Table` objects based on dependency. diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index fd2efc6f91..5ddc9ef82d 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -193,9 +193,7 @@ class UpdateBase( Executable, ClauseElement, ): - """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements. - - """ + """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.""" __visit_name__ = "update_base" @@ -435,7 +433,7 @@ class UpdateBase( :param dialect_name: defaults to ``*``, if specified as the name of a particular dialect, will apply these hints only when that dialect is in use. - """ + """ if selectable is None: selectable = self.table diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 59f3fa86b7..c8ae1e6b64 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3526,7 +3526,10 @@ class BinaryExpression(ColumnElement): ("operator", InternalTraversal.dp_operator), ("negate", InternalTraversal.dp_operator), ("modifiers", InternalTraversal.dp_plain_dict), - ("type", InternalTraversal.dp_type,), # affects JSON CAST operators + ( + "type", + InternalTraversal.dp_type, + ), # affects JSON CAST operators ] _is_implicitly_boolean = True @@ -3638,8 +3641,8 @@ class Slice(ColumnElement): class IndexExpression(BinaryExpression): - """Represent the class of expressions that are like an "index" operation. - """ + """Represent the class of expressions that are like an "index" + operation.""" pass diff --git a/lib/sqlalchemy/sql/lambdas.py b/lib/sqlalchemy/sql/lambdas.py index 7d52f97ee8..676152781d 100644 --- a/lib/sqlalchemy/sql/lambdas.py +++ b/lib/sqlalchemy/sql/lambdas.py @@ -604,7 +604,11 @@ class AnalyzedCode(object): # create trackers to catch those. 
analyzed_function = AnalyzedFunction( - self, lambda_element, None, lambda_kw, fn, + self, + lambda_element, + None, + lambda_kw, + fn, ) closure_trackers = self.closure_trackers @@ -781,7 +785,12 @@ class AnalyzedFunction(object): ) def __init__( - self, analyzed_code, lambda_element, apply_propagate_attrs, kw, fn, + self, + analyzed_code, + lambda_element, + apply_propagate_attrs, + kw, + fn, ): self.analyzed_code = analyzed_code self.fn = fn diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 496f8d9fb5..e96da0e249 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -830,9 +830,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause): ":meth:`_reflection.Inspector.has_table`.", ) def exists(self, bind=None): - """Return True if this table exists. - - """ + """Return True if this table exists.""" if bind is None: bind = _bind_or_error(self) @@ -3634,10 +3632,14 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): if col.autoincrement is True: _validate_autoinc(col, True) return col - elif col.autoincrement in ( - "auto", - "ignore_fk", - ) and _validate_autoinc(col, False): + elif ( + col.autoincrement + in ( + "auto", + "ignore_fk", + ) + and _validate_autoinc(col, False) + ): return col else: diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index e2edf20b58..45d4f0b7f1 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -361,7 +361,10 @@ class String(Concatenable, TypeEngine): needs_isinstance = ( needs_convert and dialect.returns_unicode_strings - in (String.RETURNS_CONDITIONAL, String.RETURNS_UNICODE,) + in ( + String.RETURNS_CONDITIONAL, + String.RETURNS_UNICODE, + ) and self._expect_unicode != "force_nocheck" ) if needs_convert: @@ -2286,7 +2289,7 @@ class JSON(Indexable, TypeEngine): :attr:`.types.JSON.NULL` - """ + """ self.none_as_null = none_as_null class JSONElementType(TypeEngine): diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 0da88dc54e..614b70a419 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1156,10 +1156,7 @@ class TypeDecorator(SchemaEventTarget, TypeEngine): @util.memoized_property def _has_literal_processor(self): - """memoized boolean, check if process_literal_param is implemented. 
- - - """ + """memoized boolean, check if process_literal_param is implemented.""" return ( self.__class__.process_literal_param.__code__ diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 264976cc86..96fa209fd7 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -1011,8 +1011,7 @@ def _offset_or_limit_clause_asint_if_possible(clause): def _make_slice(limit_clause, offset_clause, start, stop): - """Compute LIMIT/OFFSET in terms of slice start/end - """ + """Compute LIMIT/OFFSET in terms of slice start/end""" # for calculated limit/offset, try to do the addition of # values to offset in Python, however if a SQL clause is present diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index c32b2749b2..af168cd852 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -522,9 +522,12 @@ class ComparesTables(object): assert reflected_table.primary_key.columns[c.name] is not None def assert_types_base(self, c1, c2): - assert c1.type._compare_type_affinity(c2.type), ( - "On column %r, type '%s' doesn't correspond to type '%s'" - % (c1.name, c1.type, c2.type) + assert c1.type._compare_type_affinity( + c2.type + ), "On column %r, type '%s' doesn't correspond to type '%s'" % ( + c1.name, + c1.type, + c2.type, ) diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index 8bdad357c1..18b856fb12 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -166,7 +166,9 @@ def generate_driver_url(url, driver, query_str): # type: (URL, str, str) -> URL backend = url.get_backend_name() - new_url = url.set(drivername="%s+%s" % (backend, driver),) + new_url = url.set( + drivername="%s+%s" % (backend, driver), + ) new_url = new_url.update_query_string(query_str) try: @@ -214,8 +216,7 @@ def drop_db(cfg, eng, ident): @register.init def update_db_opts(db_url, db_opts): - """Set database options (db_opts) for a test database that we created. - """ + """Set database options (db_opts) for a test database that we created.""" pass diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 97413d32b9..b7f0d0f59c 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -67,7 +67,7 @@ class SuiteRequirements(Requirements): @property def on_update_cascade(self): - """"target database must support ON UPDATE..CASCADE behavior in + """target database must support ON UPDATE..CASCADE behavior in foreign keys.""" return exclusions.open() @@ -388,7 +388,7 @@ class SuiteRequirements(Requirements): @property def implements_get_lastrowid(self): - """"target dialect implements the executioncontext.get_lastrowid() + """target dialect implements the executioncontext.get_lastrowid() method without reliance on RETURNING. """ @@ -396,7 +396,7 @@ class SuiteRequirements(Requirements): @property def emulated_lastrowid(self): - """"target dialect retrieves cursor.lastrowid, or fetches + """target dialect retrieves cursor.lastrowid, or fetches from a database-side function after an insert() construct executes, within the get_lastrowid() method. @@ -408,7 +408,7 @@ class SuiteRequirements(Requirements): @property def emulated_lastrowid_even_with_sequences(self): - """"target dialect retrieves cursor.lastrowid or an equivalent + """target dialect retrieves cursor.lastrowid or an equivalent after an insert() construct executes, even if the table has a Sequence on it. 
@@ -417,7 +417,7 @@ class SuiteRequirements(Requirements): @property def dbapi_lastrowid(self): - """"target platform includes a 'lastrowid' accessor on the DBAPI + """target platform includes a 'lastrowid' accessor on the DBAPI cursor object. """ @@ -438,17 +438,16 @@ class SuiteRequirements(Requirements): @property def cross_schema_fk_reflection(self): - """target system must support reflection of inter-schema foreign keys - - """ + """target system must support reflection of inter-schema + foreign keys""" return exclusions.closed() @property def implicit_default_schema(self): """target system has a strong concept of 'default' schema that can - be referred to implicitly. + be referred to implicitly. - basically, PostgreSQL. + basically, PostgreSQL. """ return exclusions.closed() @@ -535,8 +534,8 @@ class SuiteRequirements(Requirements): @property def view_reflection(self): - """target database must support inspection of the full CREATE VIEW definition. - """ + """target database must support inspection of the full CREATE VIEW + definition.""" return self.views @property @@ -654,9 +653,7 @@ class SuiteRequirements(Requirements): @property def symbol_names_w_double_quote(self): - """Target driver can create tables with a name like 'some " table' - - """ + """Target driver can create tables with a name like 'some " table'""" return exclusions.open() @property @@ -804,7 +801,7 @@ class SuiteRequirements(Requirements): @property def json_array_indexes(self): - """"target platform supports numeric array indexes + """target platform supports numeric array indexes within a JSON structure""" return self.json_type diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 7a7eac02f7..da59d831f7 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -42,7 +42,11 @@ class LastrowidTest(fixtures.TablesTest): def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() eq_( - row, (conn.dialect.default_sequence_base, "some data",), + row, + ( + conn.dialect.default_sequence_base, + "some data", + ), ) def test_autoincrement_on_insert(self, connection): @@ -289,7 +293,11 @@ class ReturningTest(fixtures.TablesTest): def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() eq_( - row, (conn.dialect.default_sequence_base, "some data",), + row, + ( + conn.dialect.default_sequence_base, + "some data", + ), ) @classmethod diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 3c10a45f62..f728310d7c 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -151,7 +151,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest): Column("related_id", Integer), sa.PrimaryKeyConstraint("id", name="pk quote ' one"), sa.Index("ix quote ' one", "name"), - sa.UniqueConstraint("data", name="uq quote' one",), + sa.UniqueConstraint( + "data", + name="uq quote' one", + ), sa.ForeignKeyConstraint( ["id"], ["related.id"], name="fk quote ' one" ), @@ -170,7 +173,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest): Column("related_id", Integer), sa.PrimaryKeyConstraint("id", name='pk quote " two'), sa.Index('ix quote " two', "name"), - sa.UniqueConstraint("data", name='uq quote" two',), + sa.UniqueConstraint( + "data", + name='uq quote" two', + ), sa.ForeignKeyConstraint( ["id"], ["related.id"], name='fk quote " two' ), @@ -1039,7 +1045,8 @@ class 
ComponentReflectionTest(fixtures.TablesTest): "Skipped unsupported reflection of expression-based index t_idx" ): eq_( - insp.get_indexes("t"), expected, + insp.get_indexes("t"), + expected, ) @testing.requires.index_reflects_included_columns @@ -1098,7 +1105,8 @@ class ComponentReflectionTest(fixtures.TablesTest): if testing.requires.index_reflects_included_columns.enabled: expected[0]["include_columns"] = [] eq_( - [idx for idx in indexes if idx["name"] == "user_tmp_ix"], expected, + [idx for idx in indexes if idx["name"] == "user_tmp_ix"], + expected, ) @testing.requires.unique_constraint_reflection @@ -1390,11 +1398,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest): ) if testing.requires.computed_columns_virtual.enabled: self.check_column( - data, "computed_virtual", "normal+2", False, + data, + "computed_virtual", + "normal+2", + False, ) if testing.requires.computed_columns_stored.enabled: self.check_column( - data, "computed_stored", "normal-42", True, + data, + "computed_stored", + "normal-42", + True, ) @testing.requires.schemas @@ -1414,11 +1428,17 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest): ) if testing.requires.computed_columns_virtual.enabled: self.check_column( - data, "computed_virtual", "normal/2", False, + data, + "computed_virtual", + "normal/2", + False, ) if testing.requires.computed_columns_stored.enabled: self.check_column( - data, "computed_stored", "normal*42", True, + data, + "computed_stored", + "normal*42", + True, ) diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index 1c1b20cf01..9484d41d09 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -408,7 +408,8 @@ class ServerSideCursorsTest( ) eq_( - result.fetchmany(5), [(i, "data%d" % i) for i in range(1, 6)], + result.fetchmany(5), + [(i, "data%d" % i) for i in range(1, 6)], ) eq_( result.fetchmany(10), diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index c199929a72..b0fb60c5f6 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -1058,7 +1058,12 @@ class IdentityColumnTest(fixtures.TablesTest): Column( "id", Integer, - Identity(increment=-5, start=0, minvalue=-1000, maxvalue=0,), + Identity( + increment=-5, + start=0, + minvalue=-1000, + maxvalue=0, + ), primary_key=True, ), Column("desc", String(100)), @@ -1067,13 +1072,16 @@ class IdentityColumnTest(fixtures.TablesTest): @classmethod def insert_data(cls, connection): connection.execute( - cls.tables.tbl_a.insert(), [{"desc": "a"}, {"desc": "b"}], + cls.tables.tbl_a.insert(), + [{"desc": "a"}, {"desc": "b"}], ) connection.execute( - cls.tables.tbl_b.insert(), [{"desc": "a"}, {"desc": "b"}], + cls.tables.tbl_b.insert(), + [{"desc": "a"}, {"desc": "b"}], ) connection.execute( - cls.tables.tbl_b.insert(), [{"id": 42, "desc": "c"}], + cls.tables.tbl_b.insert(), + [{"id": 42, "desc": "c"}], ) def test_select_all(self, connection): @@ -1102,7 +1110,8 @@ class IdentityColumnTest(fixtures.TablesTest): def test_insert_always_error(self, connection): def fn(): connection.execute( - self.tables.tbl_a.insert(), [{"id": 200, "desc": "a"}], + self.tables.tbl_a.insert(), + [{"id": 200, "desc": "a"}], ) assert_raises((DatabaseError, ProgrammingError), fn) @@ -1204,7 +1213,8 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest): tbl.select(tbl.c.col_a.is_distinct_from(tbl.c.col_b)) ).fetchall() 
eq_( - len(result), expected_row_count_for_is, + len(result), + expected_row_count_for_is, ) expected_row_count_for_isnot = ( @@ -1214,5 +1224,6 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest): tbl.select(tbl.c.col_a.isnot_distinct_from(tbl.c.col_b)) ).fetchall() eq_( - len(result), expected_row_count_for_isnot, + len(result), + expected_row_count_for_isnot, ) diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index 5a1876bc5f..de970da53c 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -24,7 +24,12 @@ class SequenceTest(fixtures.TablesTest): Table( "seq_pk", metadata, - Column("id", Integer, Sequence("tab_id_seq"), primary_key=True,), + Column( + "id", + Integer, + Sequence("tab_id_seq"), + primary_key=True, + ), Column("data", String(50)), ) @@ -109,17 +114,21 @@ class HasSequenceTest(fixtures.TablesTest): "schema_seq", schema=config.test_schema, metadata=metadata ) Table( - "user_id_table", metadata, Column("id", Integer, primary_key=True), + "user_id_table", + metadata, + Column("id", Integer, primary_key=True), ) def test_has_sequence(self, connection): eq_( - inspect(connection).has_sequence("user_id_seq"), True, + inspect(connection).has_sequence("user_id_seq"), + True, ) def test_has_sequence_other_object(self, connection): eq_( - inspect(connection).has_sequence("user_id_table"), False, + inspect(connection).has_sequence("user_id_table"), + False, ) @testing.requires.schemas @@ -133,7 +142,8 @@ class HasSequenceTest(fixtures.TablesTest): def test_has_sequence_neg(self, connection): eq_( - inspect(connection).has_sequence("some_sequence"), False, + inspect(connection).has_sequence("some_sequence"), + False, ) @testing.requires.schemas @@ -157,7 +167,8 @@ class HasSequenceTest(fixtures.TablesTest): @testing.requires.schemas def test_has_sequence_remote_not_in_default(self, connection): eq_( - inspect(connection).has_sequence("schema_seq"), False, + inspect(connection).has_sequence("schema_seq"), + False, ) def test_get_sequence_names(self, connection): @@ -194,5 +205,6 @@ class HasSequenceTestEmpty(fixtures.TestBase): def test_get_sequence_names_no_sequence(self, connection): eq_( - inspect(connection).get_sequence_names(), [], + inspect(connection).get_sequence_names(), + [], ) diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 8c65437000..da01aa484b 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -621,9 +621,7 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase): @testing.requires.precision_numerics_enotation_large def test_enotation_decimal_large(self): - """test exceedingly large decimals. 
- - """ + """test exceedingly large decimals.""" numbers = set( [ @@ -962,7 +960,8 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest): conn = connection conn.execute( - self.tables.data_table.insert(), {"name": "r1", "data": JSON.NULL}, + self.tables.data_table.insert(), + {"name": "r1", "data": JSON.NULL}, ) eq_( @@ -1158,13 +1157,18 @@ class JSONLegacyStringCastIndexTest( # "cannot extract array element from a non-array", which is # fixed in 9.4 but may exist in 9.3 self._test_index_criteria( - and_(name == "r4", cast(col[1], String) == '"two"',), "r4", + and_( + name == "r4", + cast(col[1], String) == '"two"', + ), + "r4", ) def test_string_cast_crit_mixed_path(self): col = self.tables.data_table.c["data"] self._test_index_criteria( - cast(col[("key3", 1, "six")], String) == '"seven"', "r3", + cast(col[("key3", 1, "six")], String) == '"seven"', + "r3", ) def test_string_cast_crit_string_path(self): @@ -1180,7 +1184,10 @@ class JSONLegacyStringCastIndexTest( col = self.tables.data_table.c["data"] self._test_index_criteria( - and_(name == "r6", cast(col["b"], String) == '"some value"',), + and_( + name == "r6", + cast(col["b"], String) == '"some value"', + ), "r6", ) diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index dbe22bb8df..5704cf2a6e 100644 --- a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -119,7 +119,9 @@ def setup_filters(): r"The Session.begin.subtransactions flag is deprecated", ]: warnings.filterwarnings( - "ignore", message=msg, category=sa_exc.RemovedIn20Warning, + "ignore", + message=msg, + category=sa_exc.RemovedIn20Warning, ) try: diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 7c109b358e..b50d9885d4 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -133,10 +133,7 @@ class FacadeDict(ImmutableContainer, dict): return FacadeDict, (dict(self),) def _insert_item(self, key, value): - """insert an item into the dictionary directly. - - - """ + """insert an item into the dictionary directly.""" dict.__setitem__(self, key, value) def __repr__(self): diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index e1d0e64444..285f6c0216 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -309,8 +309,7 @@ else: if py3k: def _formatannotation(annotation, base_module=None): - """vendored from python 3.7 - """ + """vendored from python 3.7""" if getattr(annotation, "__module__", None) == "typing": return repr(annotation).replace("typing.", "") diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index eae4be768b..83037bbff6 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -131,7 +131,10 @@ def deprecated( warning = exc.RemovedIn20Warning version = "1.4" if add_deprecation_to_docstring: - header = ".. deprecated:: %s %s" % (version, (message or ""),) + header = ".. 
deprecated:: %s %s" % ( + version, + (message or ""), + ) else: header = None diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index e8abf3130b..bbdd3381fe 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -66,7 +66,8 @@ class safe_reraise(object): self._exc_info = None # remove potential circular references if not self.warn_only: compat.raise_( - exc_value, with_traceback=exc_tb, + exc_value, + with_traceback=exc_tb, ) else: if not compat.py3k and self._exc_info and self._exc_info[1]: @@ -731,10 +732,10 @@ def class_hierarchy(cls): def iterate_attributes(cls): """iterate all the keys and attributes associated - with a class, without using getattr(). + with a class, without using getattr(). - Does not use getattr() so that class-sensitive - descriptors (i.e. property.__get__()) are not called. + Does not use getattr() so that class-sensitive + descriptors (i.e. property.__get__()) are not called. """ keys = dir(cls) @@ -986,9 +987,7 @@ class HasMemoized(object): @classmethod def memoized_instancemethod(cls, fn): - """Decorate a method memoize its return value. - - """ + """Decorate a method memoize its return value.""" def oneshot(self, *args, **kw): result = fn(self, *args, **kw) @@ -1738,8 +1737,8 @@ def inject_param_text(doctext, inject_params): def repr_tuple_names(names): - """ Trims a list of strings from the middle and return a string of up to - four elements. Strings greater than 11 characters will be truncated""" + """Trims a list of strings from the middle and return a string of up to + four elements. Strings greater than 11 characters will be truncated""" if len(names) == 0: return None flag = len(names) <= 4 diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py index 9447abeded..3687dc8dc3 100644 --- a/lib/sqlalchemy/util/queue.py +++ b/lib/sqlalchemy/util/queue.py @@ -220,7 +220,8 @@ class AsyncAdaptedQueue: return self._queue.put_nowait(item) except asyncio.queues.QueueFull as err: compat.raise_( - Full(), replace_context=err, + Full(), + replace_context=err, ) def put(self, item, block=True, timeout=None): @@ -236,7 +237,8 @@ class AsyncAdaptedQueue: return self.await_(self._queue.put(item)) except asyncio.queues.QueueFull as err: compat.raise_( - Full(), replace_context=err, + Full(), + replace_context=err, ) def get_nowait(self): @@ -244,7 +246,8 @@ class AsyncAdaptedQueue: return self._queue.get_nowait() except asyncio.queues.QueueEmpty as err: compat.raise_( - Empty(), replace_context=err, + Empty(), + replace_context=err, ) def get(self, block=True, timeout=None): @@ -259,5 +262,6 @@ class AsyncAdaptedQueue: return self.await_(self._queue.get()) except asyncio.queues.QueueEmpty as err: compat.raise_( - Empty(), replace_context=err, + Empty(), + replace_context=err, ) diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py index 30a02472cf..4bc2af93d3 100644 --- a/test/aaa_profiling/test_orm.py +++ b/test/aaa_profiling/test_orm.py @@ -891,7 +891,12 @@ class JoinedEagerLoadTest(NoCache, fixtures.MappedTest): r.context.compiled.compile_state = compile_state obj = ORMCompileState.orm_setup_cursor_result( - sess, compile_state.statement, {}, exec_opts, {}, r, + sess, + compile_state.statement, + {}, + exec_opts, + {}, + r, ) list(obj.unique()) sess.close() diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py index aea160c9e4..de3683430e 100644 --- a/test/aaa_profiling/test_resultset.py +++ 
b/test/aaa_profiling/test_resultset.py @@ -131,7 +131,10 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults): [row["field%d" % fnum] for fnum in range(NUM_FIELDS)] @testing.combinations( - (False, 0), (True, 1), (False, 1), (False, 2), + (False, 0), + (True, 1), + (False, 1), + (False, 2), ) def test_one_or_none(self, one_or_first, rows_present): # TODO: this is not testing the ORM level "scalar_mapping" diff --git a/test/base/test_result.py b/test/base/test_result.py index 0136b6e296..a15bf1cfae 100644 --- a/test/base/test_result.py +++ b/test/base/test_result.py @@ -266,16 +266,43 @@ class ResultTest(fixtures.TestBase): object(), ) - result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)]) + result = self._fixture( + extras=[ + (ex1a, ex1b), + (ex2,), + ( + ex3a, + ex3b, + ), + ] + ) eq_( result.columns(ex2, ex3b).columns(ex3a).all(), [(1,), (2,), (2,), (2,)], ) - result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)]) + result = self._fixture( + extras=[ + (ex1a, ex1b), + (ex2,), + ( + ex3a, + ex3b, + ), + ] + ) eq_([row._mapping[ex1b] for row in result], [1, 2, 1, 4]) - result = self._fixture(extras=[(ex1a, ex1b), (ex2,), (ex3a, ex3b,)]) + result = self._fixture( + extras=[ + (ex1a, ex1b), + (ex2,), + ( + ex3a, + ex3b, + ), + ] + ) eq_( [ dict(r) @@ -906,7 +933,8 @@ class MergeResultTest(fixtures.TestBase): result = r1.merge(r2, r3, r4) eq_( - result.first(), (7, "u1"), + result.first(), + (7, "u1"), ) def test_columns(self, merge_fixture): @@ -1004,7 +1032,8 @@ class OnlyScalarsTest(fixtures.TestBase): r = r.columns(0).mappings() eq_( - list(r), [{"a": 1}, {"a": 2}, {"a": 1}, {"a": 1}, {"a": 4}], + list(r), + [{"a": 1}, {"a": 2}, {"a": 1}, {"a": 1}, {"a": 4}], ) def test_scalar_mode_but_accessed_nonscalar_result(self, no_tuple_fixture): @@ -1037,7 +1066,9 @@ class OnlyScalarsTest(fixtures.TestBase): ) r = result.ChunkedIteratorResult( - metadata, no_tuple_fixture, source_supports_scalars=True, + metadata, + no_tuple_fixture, + source_supports_scalars=True, ) r = r.unique() @@ -1050,7 +1081,9 @@ class OnlyScalarsTest(fixtures.TestBase): ) r = result.ChunkedIteratorResult( - metadata, no_tuple_fixture, source_supports_scalars=True, + metadata, + no_tuple_fixture, + source_supports_scalars=True, ) r = r.unique() @@ -1063,7 +1096,9 @@ class OnlyScalarsTest(fixtures.TestBase): ) r = result.ChunkedIteratorResult( - metadata, no_tuple_fixture, source_supports_scalars=True, + metadata, + no_tuple_fixture, + source_supports_scalars=True, ) r = r.scalars().unique() diff --git a/test/base/test_warnings.py b/test/base/test_warnings.py index c8807df09a..0cbab7f282 100644 --- a/test/base/test_warnings.py +++ b/test/base/test_warnings.py @@ -15,7 +15,7 @@ class WarnDeprecatedLimitedTest(fixtures.TestBase): ) def test_warn_deprecated_limited_cap(self): - """ warn_deprecated_limited() and warn_limited() use + """warn_deprecated_limited() and warn_limited() use _hash_limit_string actually just verifying that _hash_limit_string works as expected diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index c3f12f9d99..ec67a916c8 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -1373,7 +1373,9 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL): def test_identity_object_no_primary_key(self): metadata = MetaData() tbl = Table( - "test", metadata, Column("id", Integer, Identity(increment=42)), + "test", + metadata, + Column("id", Integer, Identity(increment=42)), ) 
self.assert_compile( schema.CreateTable(tbl), @@ -1397,7 +1399,12 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL): tbl = Table( "test", metadata, - Column("id", Integer, Identity(start=3), nullable=False,), + Column( + "id", + Integer, + Identity(start=3), + nullable=False, + ), ) self.assert_compile( schema.CreateTable(tbl), @@ -1425,7 +1432,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL): "test", metadata, Column("id", Integer, autoincrement=False, primary_key=True), - Column("x", Integer, Identity(start=3, increment=42),), + Column( + "x", + Integer, + Identity(start=3, increment=42), + ), ) self.assert_compile( schema.CreateTable(tbl), @@ -1461,7 +1472,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL): Identity(start=3, increment=42), autoincrement=True, ), - Column("id2", Integer, Identity(start=7, increment=2),), + Column( + "id2", + Integer, + Identity(start=7, increment=2), + ), ) # this will be rejected by the database, just asserting this is what # the two autoincrements will do right now @@ -1537,7 +1552,11 @@ class CompileIdentityTest(fixtures.TestBase, AssertsCompiledSQL): def test_identity_object_no_options(self): metadata = MetaData() - tbl = Table("test", metadata, Column("id", Integer, Identity()),) + tbl = Table( + "test", + metadata, + Column("id", Integer, Identity()), + ) self.assert_compile( schema.CreateTable(tbl), "CREATE TABLE test (id INTEGER NOT NULL IDENTITY)", diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py index a29d49c4ce..4444559589 100644 --- a/test/dialect/mssql/test_engine.py +++ b/test/dialect/mssql/test_engine.py @@ -487,7 +487,9 @@ class IsolationLevelDetectTest(fixtures.TestBase): result = [] - def fail_on_exec(stmt,): + def fail_on_exec( + stmt, + ): if view is not None and view in stmt: result.append(("SERIALIZABLE",)) else: @@ -540,7 +542,8 @@ class InvalidTransactionFalsePositiveTest(fixtures.TablesTest): @classmethod def insert_data(cls, connection): connection.execute( - cls.tables.error_t.insert(), [{"error_code": "01002"}], + cls.tables.error_t.insert(), + [{"error_code": "01002"}], ) def test_invalid_transaction_detection(self, connection): diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py index e37b388e87..d9dc033e16 100644 --- a/test/dialect/mssql/test_query.py +++ b/test/dialect/mssql/test_query.py @@ -150,9 +150,12 @@ class QueryUnicodeTest(fixtures.TestBase): ) ) r = connection.execute(t1.select()).first() - assert isinstance(r[1], util.text_type), ( - "%s is %s instead of unicode, working on %s" - % (r[1], type(r[1]), meta.bind) + assert isinstance( + r[1], util.text_type + ), "%s is %s instead of unicode, working on %s" % ( + r[1], + type(r[1]), + meta.bind, ) eq_(r[1], util.ue("abc \xc3\xa9 def")) diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py index c7d012f5bf..d33838b6a3 100644 --- a/test/dialect/mssql/test_reflection.py +++ b/test/dialect/mssql/test_reflection.py @@ -288,7 +288,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL): if not exists: with expect_raises(exc.NoSuchTableError): Table( - table_name, metadata, autoload_with=connection, + table_name, + metadata, + autoload_with=connection, ) else: tmp_t = Table(table_name, metadata, autoload_with=connection) @@ -296,7 +298,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL): tmp_t.select().where(tmp_t.c.id == 2) ).fetchall() eq_( - result, [(2, 
"bar", datetime.datetime(2020, 2, 2, 2, 2, 2))], + result, + [(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))], ) @testing.provide_metadata @@ -309,7 +312,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL): ) def test_has_table_temporary(self, connection, table_name, exists): if exists: - tt = Table(table_name, self.metadata, Column("id", Integer),) + tt = Table( + table_name, + self.metadata, + Column("id", Integer), + ) tt.create(connection) found_it = testing.db.dialect.has_table(connection, table_name) @@ -516,13 +523,11 @@ class ReflectHugeViewTest(fixtures.TestBase): for i in range(self.col_num) ] ) - self.view_str = view_str = ( - "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" - % ( - ",".join( - "long_named_column_number_%d" % i - for i in range(self.col_num) - ) + self.view_str = ( + view_str + ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % ( + ",".join( + "long_named_column_number_%d" % i for i in range(self.col_num) ) ) assert len(view_str) > 4000 diff --git a/test/dialect/mssql/test_sequence.py b/test/dialect/mssql/test_sequence.py index 2b1db0b413..44a8f40207 100644 --- a/test/dialect/mssql/test_sequence.py +++ b/test/dialect/mssql/test_sequence.py @@ -31,7 +31,9 @@ class SequenceTest(fixtures.TablesTest): "bigint_seq_t", metadata, Column( - "id", BIGINT, default=Sequence("bigint_seq", start=3000000000), + "id", + BIGINT, + default=Sequence("bigint_seq", start=3000000000), ), Column("txt", String(50)), ) @@ -43,7 +45,9 @@ class SequenceTest(fixtures.TablesTest): "id", DECIMAL(10, 0), default=Sequence( - "decimal_seq", data_type=DECIMAL(10, 0), start=3000000000, + "decimal_seq", + data_type=DECIMAL(10, 0), + start=3000000000, ), ), Column("txt", String(50)), diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py index 34b026cf37..cf5b66d57f 100644 --- a/test/dialect/mssql/test_types.py +++ b/test/dialect/mssql/test_types.py @@ -789,7 +789,9 @@ class TypeRoundTripTest( @testing.metadata_fixture() def datetimeoffset_fixture(self, metadata): t = Table( - "test_dates", metadata, Column("adatetimeoffset", DATETIMEOFFSET), + "test_dates", + metadata, + Column("adatetimeoffset", DATETIMEOFFSET), ) return t @@ -886,7 +888,8 @@ class TypeRoundTripTest( return conn.execute( - t.insert(), adatetimeoffset=dto_param_value, + t.insert(), + adatetimeoffset=dto_param_value, ) row = conn.execute(t.select()).first() diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py index a555e53960..abd3a491ff 100644 --- a/test/dialect/mysql/test_dialect.py +++ b/test/dialect/mysql/test_dialect.py @@ -296,7 +296,12 @@ class ParseVersionTest(fixtures.TestBase): @testing.combinations( ((10, 2, 7), "10.2.7-MariaDB", (10, 2, 7), True), - ((10, 2, 7), "5.6.15.10.2.7-MariaDB", (5, 6, 15, 10, 2, 7), True,), + ( + (10, 2, 7), + "5.6.15.10.2.7-MariaDB", + (5, 6, 15, 10, 2, 7), + True, + ), ((5, 0, 51, 24), "5.0.51a.24+lenny5", (5, 0, 51, 24), False), ((10, 2, 10), "10.2.10-MariaDB", (10, 2, 10), True), ((5, 7, 20), "5.7.20", (5, 7, 20), False), diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py index 15875dd542..e23ff00d0d 100644 --- a/test/dialect/mysql/test_query.py +++ b/test/dialect/mysql/test_query.py @@ -29,7 +29,8 @@ class IdiosyncrasyTest(fixtures.TestBase): def test_is_boolean_symbols_despite_no_native(self, connection): is_( - connection.scalar(select(cast(true().is_(true()), Boolean))), True, + connection.scalar(select(cast(true().is_(true()), Boolean))), + 
True, ) is_( diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py index 8983522c14..f4621dce33 100644 --- a/test/dialect/mysql/test_types.py +++ b/test/dialect/mysql/test_types.py @@ -491,7 +491,10 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults): t.create(connection) connection.execute( t.insert(), - dict(scale_value=45.768392065789, unscale_value=45.768392065789,), + dict( + scale_value=45.768392065789, + unscale_value=45.768392065789, + ), ) result = connection.scalar(select(t.c.scale_value)) eq_(result, decimal.Decimal("45.768392065789")) @@ -1054,7 +1057,8 @@ class EnumSetTest( dict(e1="a", e2="a", e3="a", e4="'a'", e5="a,b"), ) connection.execute( - set_table.insert(), dict(e1="b", e2="b", e3="b", e4="b", e5="a,b"), + set_table.insert(), + dict(e1="b", e2="b", e3="b", e4="b", e5="a,b"), ) expected = [ @@ -1065,7 +1069,13 @@ class EnumSetTest( set(["'a'"]), set(["a", "b"]), ), - (set(["b"]), set(["b"]), set(["b"]), set(["b"]), set(["a", "b"]),), + ( + set(["b"]), + set(["b"]), + set(["b"]), + set(["b"]), + set(["a", "b"]), + ), ] res = connection.execute(set_table.select()).fetchall() @@ -1264,7 +1274,13 @@ class EnumSetTest( Column("e6", mysql.SET("", "a", retrieve_as_bitwise=True)), Column( "e7", - mysql.SET("", "'a'", "b'b", "'", retrieve_as_bitwise=True,), + mysql.SET( + "", + "'a'", + "b'b", + "'", + retrieve_as_bitwise=True, + ), ), ) diff --git a/test/dialect/oracle/test_dialect.py b/test/dialect/oracle/test_dialect.py index ab4bb1e08a..cd0e11e588 100644 --- a/test/dialect/oracle/test_dialect.py +++ b/test/dialect/oracle/test_dialect.py @@ -103,10 +103,22 @@ class EncodingErrorsTest(fixtures.TestBase): ) _oracle_char_combinations = testing.combinations( - ("STRING", cx_Oracle_STRING,), - ("FIXED_CHAR", cx_Oracle_FIXED_CHAR,), - ("CLOB", cx_Oracle_CLOB,), - ("NCLOB", cx_Oracle_NCLOB,), + ( + "STRING", + cx_Oracle_STRING, + ), + ( + "FIXED_CHAR", + cx_Oracle_FIXED_CHAR, + ), + ( + "CLOB", + cx_Oracle_CLOB, + ), + ( + "NCLOB", + cx_Oracle_NCLOB, + ), argnames="cx_oracle_type", id_="ia", ) @@ -149,7 +161,9 @@ class EncodingErrorsTest(fixtures.TestBase): @_oracle_char_combinations @testing.requires.python2 def test_encoding_errors_sqla_py2k( - self, cx_Oracle, cx_oracle_type, + self, + cx_Oracle, + cx_oracle_type, ): ignore_dialect = cx_oracle.dialect( dbapi=cx_Oracle, encoding_errors="ignore" @@ -167,7 +181,9 @@ class EncodingErrorsTest(fixtures.TestBase): @_oracle_char_combinations @testing.requires.python2 def test_no_encoding_errors_sqla_py2k( - self, cx_Oracle, cx_oracle_type, + self, + cx_Oracle, + cx_oracle_type, ): plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle) @@ -183,7 +199,9 @@ class EncodingErrorsTest(fixtures.TestBase): @_oracle_char_combinations @testing.requires.python3 def test_encoding_errors_cx_oracle_py3k( - self, cx_Oracle, cx_oracle_type, + self, + cx_Oracle, + cx_oracle_type, ): ignore_dialect = cx_oracle.dialect( dbapi=cx_Oracle, encoding_errors="ignore" @@ -200,7 +218,10 @@ class EncodingErrorsTest(fixtures.TestBase): cursor.mock_calls, [ mock.call.var( - mock.ANY, None, cursor.arraysize, encodingErrors="ignore", + mock.ANY, + None, + cursor.arraysize, + encodingErrors="ignore", ) ], ) @@ -208,7 +229,9 @@ class EncodingErrorsTest(fixtures.TestBase): @_oracle_char_combinations @testing.requires.python3 def test_no_encoding_errors_cx_oracle_py3k( - self, cx_Oracle, cx_oracle_type, + self, + cx_Oracle, + cx_oracle_type, ): plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle) diff --git 
a/test/dialect/oracle/test_reflection.py b/test/dialect/oracle/test_reflection.py index b9975f65ee..d2780fa29c 100644 --- a/test/dialect/oracle/test_reflection.py +++ b/test/dialect/oracle/test_reflection.py @@ -217,7 +217,12 @@ drop synonym %(test_schema)s.local_table; ), {"text": "my table comment"}, ) - eq_(insp.get_table_comment("parent",), {"text": "my local comment"}) + eq_( + insp.get_table_comment( + "parent", + ), + {"text": "my local comment"}, + ) eq_( insp.get_table_comment( "parent", schema=testing.db.dialect.default_schema_name diff --git a/test/dialect/oracle/test_types.py b/test/dialect/oracle/test_types.py index dbb380d8d3..0b000e89d4 100644 --- a/test/dialect/oracle/test_types.py +++ b/test/dialect/oracle/test_types.py @@ -992,7 +992,8 @@ class LOBFetchTest(fixtures.TablesTest): ) with engine.connect() as conn: result = exec_sql( - conn, "select id, data, bindata from z_test order by id", + conn, + "select id, data, bindata from z_test order by id", ) results = result.fetchall() diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 64e9451243..eaa5597d06 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -1267,7 +1267,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): ) self.assert_compile( - c.any(5), "%(param_1)s = ANY (x)", checkparams={"param_1": 5}, + c.any(5), + "%(param_1)s = ANY (x)", + checkparams={"param_1": 5}, ) self.assert_compile( @@ -1277,7 +1279,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): ) self.assert_compile( - c.all(5), "%(param_1)s = ALL (x)", checkparams={"param_1": 5}, + c.all(5), + "%(param_1)s = ALL (x)", + checkparams={"param_1": 5}, ) self.assert_compile( @@ -2300,8 +2304,7 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL): class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL): - """Tests for full text searching - """ + """Tests for full text searching""" __dialect__ = postgresql.dialect() @@ -2322,8 +2325,8 @@ class FullTextSearchTest(fixtures.TestBase, AssertsCompiledSQL): def _raise_query(self, q): """ - useful for debugging. just do... - self._raise_query(q) + useful for debugging. just do... 
+ self._raise_query(q) """ c = q.compile(dialect=postgresql.dialect()) raise ValueError(c) diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 971d4f12f7..43c5aea21a 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -473,7 +473,8 @@ class ExecutemanyValuesInsertsTest(ExecuteManyMode, fixtures.TablesTest): assert t.c.id not in result.keys() assert not result._soft_closed assert isinstance( - result.cursor_strategy, _cursor.FullyBufferedCursorFetchStrategy, + result.cursor_strategy, + _cursor.FullyBufferedCursorFetchStrategy, ) assert not result.cursor.closed assert not result.closed @@ -1020,7 +1021,10 @@ $$ LANGUAGE plpgsql; eq_( conn.scalar( select( - cast(literal(quoted_name("some_name", False)), String,) + cast( + literal(quoted_name("some_name", False)), + String, + ) ) ), "some_name", diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index b8de35f421..2c67957197 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -900,8 +900,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase): @testing.provide_metadata def test_index_reflection(self): - """ Reflecting expression-based indexes should warn - """ + """Reflecting expression-based indexes should warn""" metadata = self.metadata @@ -953,8 +952,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase): @testing.provide_metadata def test_index_reflection_partial(self, connection): - """Reflect the filter defintion on partial indexes - """ + """Reflect the filter defintion on partial indexes""" metadata = self.metadata diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 5def5aa5b7..509603e1be 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -1436,7 +1436,9 @@ class ArrayRoundTripTest(object): stmt = select( func.array_cat( - array([1, 2, 3]), array([4, 5, 6]), type_=self.ARRAY(Integer), + array([1, 2, 3]), + array([4, 5, 6]), + type_=self.ARRAY(Integer), )[2:5] ) eq_(connection.execute(stmt).scalar(), [2, 3, 4, 5]) @@ -1879,7 +1881,10 @@ class ArrayEnum(fixtures.TestBase): c = "ccc" tbl.append_column( - Column("pyenum_col", array_cls(enum_cls(MyEnum)),), + Column( + "pyenum_col", + array_cls(enum_cls(MyEnum)), + ), ) self.metadata.create_all(connection) @@ -1918,7 +1923,10 @@ class ArrayJSON(fixtures.TestBase): "json_table", self.metadata, Column("id", Integer, primary_key=True), - Column("json_col", array_cls(json_cls),), + Column( + "json_col", + array_cls(json_cls), + ), ) self.metadata.create_all(connection) @@ -2166,7 +2174,8 @@ class SpecialTypesTest(fixtures.TablesTest, ComparesTables): connection.execute(t.update(), data="'a' 'cat' 'fat' 'mat' 'sat'") eq_( - connection.scalar(select(t.c.data)), "'a' 'cat' 'fat' 'mat' 'sat'", + connection.scalar(select(t.c.data)), + "'a' 'cat' 'fat' 'mat' 'sat'", ) @testing.provide_metadata @@ -3227,7 +3236,8 @@ class JSONRoundTripTest(fixtures.TablesTest): def _test_insert_none_as_null(self, conn): conn.execute( - self.tables.data_table.insert(), {"name": "r1", "nulldata": None}, + self.tables.data_table.insert(), + {"name": "r1", "nulldata": None}, ) self._assert_column_is_NULL(conn, column="nulldata") diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 339a7c479e..4a8f6fd788 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -71,9 
+71,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): __only_on__ = "sqlite" def test_boolean(self): - """Test that the boolean only treats 1 as True - - """ + """Test that the boolean only treats 1 as True""" meta = MetaData(testing.db) t = Table( @@ -629,7 +627,7 @@ class DialectTest( 'true', 'false', and 'column' are undocumented reserved words when used as column identifiers (as of 3.5.1). Covering them here to ensure they remain in place if the dialect's - reserved_words set is updated in the future. """ + reserved_words set is updated in the future.""" meta = MetaData(testing.db) t = Table( @@ -681,7 +679,7 @@ class DialectTest( @testing.provide_metadata def test_quoted_identifiers_functional_two(self): - """"test the edgiest of edge cases, quoted table/col names + """ "test the edgiest of edge cases, quoted table/col names that start and end with quotes. SQLite claims to have fixed this in @@ -1364,7 +1362,11 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): def test_empty_insert_pk1(self, connection): self._test_empty_insert( connection, - Table("a", MetaData(), Column("id", Integer, primary_key=True),), + Table( + "a", + MetaData(), + Column("id", Integer, primary_key=True), + ), ) def test_empty_insert_pk2(self, connection): @@ -1468,7 +1470,10 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): self._test_empty_insert( connection, Table( - "f", MetaData(), Column("x", Integer), Column("y", Integer), + "f", + MetaData(), + Column("x", Integer), + Column("y", Integer), ), ) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 73681ee51c..a1d6d2725a 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -988,7 +988,8 @@ class CompiledCacheTest(fixtures.TestBase): eq_(conn.scalar(stmt), 2) with config.db.connect().execution_options( - compiled_cache=cache, schema_translate_map={None: None}, + compiled_cache=cache, + schema_translate_map={None: None}, ) as conn: # should use default schema again even though statement # was compiled with test_schema in the map @@ -1017,7 +1018,10 @@ class MockStrategyTest(fixtures.TestBase): "testtable", metadata, Column( - "pk", Integer, Sequence("testtable_pk_seq"), primary_key=True, + "pk", + Integer, + Sequence("testtable_pk_seq"), + primary_key=True, ), ) @@ -1700,7 +1704,11 @@ class EngineEventsTest(fixtures.TestBase): compiled = [ ("CREATE TABLE t1", {}, None), - ("INSERT INTO t1 (c1, c2)", {"c2": "some data", "c1": 5}, (),), + ( + "INSERT INTO t1 (c1, c2)", + {"c2": "some data", "c1": 5}, + (), + ), ("INSERT INTO t1 (c1, c2)", {"c1": 6}, ()), ("select * from t1", {}, None), ("DROP TABLE t1", {}, None), @@ -1948,7 +1956,12 @@ class EngineEventsTest(fixtures.TestBase): t = Table( "t", self.metadata, - Column("x", Integer, Sequence("t_id_seq"), primary_key=True,), + Column( + "x", + Integer, + Sequence("t_id_seq"), + primary_key=True, + ), implicit_returning=False, ) self.metadata.create_all(engine) @@ -2601,7 +2614,9 @@ class HandleErrorTest(fixtures.TestBase): Mock(side_effect=tsa.exc.InvalidRequestError("duplicate col")), ): assert_raises( - tsa.exc.InvalidRequestError, conn.execute, text("select 1"), + tsa.exc.InvalidRequestError, + conn.execute, + text("select 1"), ) # cursor is closed @@ -2999,7 +3014,12 @@ class DialectEventTest(fixtures.TestBase): stmt = "insert into table foo" params = {"foo": "bar"} ctx = dialect.execution_ctx_cls._init_statement( - dialect, conn, conn.connection, {}, stmt, [params], + dialect, + conn, + conn.connection, + {}, + 
stmt, + [params], ) conn._cursor_execute(ctx.cursor, stmt, params, ctx) diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 99df6a1e92..b39d5f8add 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -172,7 +172,11 @@ class URLTest(fixtures.TestBase): is_false(url1 == url3) @testing.combinations( - "drivername", "username", "password", "host", "database", + "drivername", + "username", + "password", + "host", + "database", ) def test_component_set(self, component): common_url = ( @@ -251,7 +255,9 @@ class URLTest(fixtures.TestBase): ) @testing.combinations( - "username", "host", "database", + "username", + "host", + "database", ) def test_only_str_constructor(self, argname): assert_raises_message( @@ -263,7 +269,9 @@ class URLTest(fixtures.TestBase): ) @testing.combinations( - "username", "host", "database", + "username", + "host", + "database", ) def test_only_str_set(self, argname): u1 = url.URL.create("somedriver") @@ -735,7 +743,10 @@ class TestRegNewDBAPI(fixtures.TestBase): "sqlite:///?plugin=engineplugin1&foo=bar&myplugin1_arg=bat" "&plugin=engineplugin2&myplugin2_arg=hoho" ) - e = create_engine(url_str, logging_name="foob",) + e = create_engine( + url_str, + logging_name="foob", + ) eq_(e.dialect.name, "sqlite") eq_(e.logging_name, "bar") diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index cf984c6580..194de9a7d2 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -99,7 +99,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): self.assert_tables_equal(addresses, reflected_addresses) @testing.provide_metadata - def test_autoload_with_imply_autoload(self,): + def test_autoload_with_imply_autoload( + self, + ): meta = self.metadata t = Table( "t", @@ -886,7 +888,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): def test_override_existing_fk(self): """test that you can override columns and specify new foreign keys to other reflected tables, on columns which *do* already - have that foreign key, and that the FK is not duped. """ + have that foreign key, and that the FK is not duped.""" meta = self.metadata Table( @@ -2294,9 +2296,15 @@ class ComputedColumnTest(fixtures.ComputedReflectionFixtureTest): ) if testing.requires.computed_columns_virtual.enabled: self.check_table_column( - table, "computed_virtual", "normal+2", False, + table, + "computed_virtual", + "normal+2", + False, ) if testing.requires.computed_columns_stored.enabled: self.check_table_column( - table, "computed_stored", "normal-42", True, + table, + "computed_stored", + "normal-42", + True, ) diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 4b6cb6e0c2..d0774e8464 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -196,7 +196,8 @@ class TransactionTest(fixtures.TestBase): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 0, + conn.scalar(select(func.count(1)).select_from(users)), + 0, ) def test_inactive_due_to_subtransaction_no_commit(self, local_connection): @@ -1022,7 +1023,7 @@ class ExplicitAutoCommitTest(fixtures.TestBase): """test the 'autocommit' flag on select() and text() objects. Requires PostgreSQL so that we may define a custom function which - modifies the database. 
""" + modifies the database.""" __only_on__ = "postgresql" @@ -1469,7 +1470,8 @@ class ConnectionCharacteristicTest(fixtures.TestBase): c1 = c1.execution_options(foo="new_foo") eq_( - engine.dialect.get_foo(c1.connection), "new_foo", + engine.dialect.get_foo(c1.connection), + "new_foo", ) # stays outside of transaction eq_(engine.dialect.get_foo(c1.connection), "new_foo") @@ -1513,7 +1515,8 @@ class ConnectionCharacteristicTest(fixtures.TestBase): conn = eng.connect() eq_( - eng.dialect.get_foo(conn.connection), "new_value", + eng.dialect.get_foo(conn.connection), + "new_value", ) @@ -1719,7 +1722,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) @testing.requires.autocommit @@ -1766,13 +1770,15 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): assert not conn.in_transaction() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) conn.execute(users.insert(), {"user_id": 2, "user_name": "name 2"}) eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) assert conn.in_transaction() @@ -1780,7 +1786,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): assert not conn.in_transaction() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) def test_rollback_on_close(self): @@ -1865,7 +1872,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): conn.rollback() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) def test_rollback_no_begin(self): @@ -1889,7 +1897,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): conn.commit() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) def test_no_double_begin(self): @@ -1910,7 +1919,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 0, + conn.scalar(select(func.count(1)).select_from(users)), + 0, ) def test_begin_block(self): @@ -1921,7 +1931,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) @testing.requires.savepoints @@ -1935,17 +1946,20 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): conn.execute(users.insert(), {"user_id": 2, "user_name": "name2"}) eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) savepoint.rollback() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) @testing.requires.savepoints @@ -1959,17 +1973,20 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): 
conn.execute(users.insert(), {"user_id": 2, "user_name": "name2"}) eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) savepoint.commit() eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) @testing.requires.savepoints @@ -1988,7 +2005,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 0, + conn.scalar(select(func.count(1)).select_from(users)), + 0, ) @testing.requires.savepoints @@ -2014,7 +2032,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 2, + conn.scalar(select(func.count(1)).select_from(users)), + 2, ) @testing.requires.savepoints @@ -2036,7 +2055,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 3, + conn.scalar(select(func.count(1)).select_from(users)), + 3, ) @testing.requires.savepoints @@ -2068,7 +2088,8 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 1, + conn.scalar(select(func.count(1)).select_from(users)), + 1, ) @testing.requires.savepoints @@ -2097,5 +2118,6 @@ class FutureTransactionTest(fixtures.FutureEngineMixin, fixtures.TablesTest): with testing.db.connect() as conn: eq_( - conn.scalar(select(func.count(1)).select_from(users)), 0, + conn.scalar(select(func.count(1)).select_from(users)), + 0, ) diff --git a/test/ext/asyncio/test_engine_py3k.py b/test/ext/asyncio/test_engine_py3k.py index a5d167c2e7..7c7d90e217 100644 --- a/test/ext/asyncio/test_engine_py3k.py +++ b/test/ext/asyncio/test_engine_py3k.py @@ -146,11 +146,16 @@ class AsyncEngineTest(EngineFixture): @async_test async def test_pool_exhausted(self, async_engine): engine = create_async_engine( - testing.db.url, pool_size=1, max_overflow=0, pool_timeout=0.1, + testing.db.url, + pool_size=1, + max_overflow=0, + pool_timeout=0.1, ) async with engine.connect(): await assert_raises_message_async( - asyncio.TimeoutError, "", engine.connect(), + asyncio.TimeoutError, + "", + engine.connect(), ) @async_test @@ -190,7 +195,8 @@ class AsyncResultTest(EngineFixture): ) elif filter_ == "scalars": eq_( - all_, ["name%d" % i for i in range(1, 20)], + all_, + ["name%d" % i for i in range(1, 20)], ) else: eq_(all_, [(i, "name%d" % i) for i in range(1, 20)]) @@ -224,7 +230,8 @@ class AsyncResultTest(EngineFixture): ) elif filter_ == "scalars": eq_( - rows, ["name%d" % i for i in range(1, 20)], + rows, + ["name%d" % i for i in range(1, 20)], ) else: eq_(rows, [(i, "name%d" % i) for i in range(1, 20)]) diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py index 77d81b4f7b..3cb29c67dc 100644 --- a/test/ext/test_associationproxy.py +++ b/test/ext/test_associationproxy.py @@ -3180,7 +3180,10 @@ class MultiOwnerTest( self._assert_raises_ambiguous(lambda: D.c_data == 5) def test_rel_expressions_not_available(self): - B, D, = self.classes("B", "D") + ( + B, + D, + ) = 
self.classes("B", "D") self._assert_raises_ambiguous(lambda: D.c_data.any(B.id == 5)) diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py index 6aa5840177..eff3ccdaef 100644 --- a/test/ext/test_baked.py +++ b/test/ext/test_baked.py @@ -1022,7 +1022,9 @@ class CustomIntegrationTest(testing.AssertsCompiledSQL, BakedTest): if ckey is not None: return get_value( - ckey, CachingQuery.cache, orm_context.invoke_statement, + ckey, + CachingQuery.cache, + orm_context.invoke_statement, ) return s1 @@ -1067,13 +1069,15 @@ class CustomIntegrationTest(testing.AssertsCompiledSQL, BakedTest): q = sess.query(User).filter(User.id == 7).set_cache_key("user7") eq_( - sess.execute(q).all(), [(User(id=7, addresses=[Address(id=1)]),)], + sess.execute(q).all(), + [(User(id=7, addresses=[Address(id=1)]),)], ) eq_(list(q.cache), ["user7"]) eq_( - sess.execute(q).all(), [(User(id=7, addresses=[Address(id=1)]),)], + sess.execute(q).all(), + [(User(id=7, addresses=[Address(id=1)]),)], ) def test_use_w_baked(self): diff --git a/test/ext/test_horizontal_shard.py b/test/ext/test_horizontal_shard.py index 455e26b144..a8c17d7aca 100644 --- a/test/ext/test_horizontal_shard.py +++ b/test/ext/test_horizontal_shard.py @@ -269,7 +269,8 @@ class ShardTest(object): ) ).scalars() eq_( - {c.city for c in asia_and_europe}, {"Tokyo", "London", "Dublin"}, + {c.city for c in asia_and_europe}, + {"Tokyo", "London", "Dublin"}, ) def test_roundtrip(self): @@ -287,18 +288,21 @@ class ShardTest(object): WeatherLocation.continent == "North America" ) eq_( - {c.city for c in north_american_cities}, {"New York", "Toronto"}, + {c.city for c in north_american_cities}, + {"New York", "Toronto"}, ) asia_and_europe = sess.query(WeatherLocation).filter( WeatherLocation.continent.in_(["Europe", "Asia"]) ) eq_( - {c.city for c in asia_and_europe}, {"Tokyo", "London", "Dublin"}, + {c.city for c in asia_and_europe}, + {"Tokyo", "London", "Dublin"}, ) # inspect the shard token stored with each instance eq_( - {inspect(c).key[2] for c in asia_and_europe}, {"europe", "asia"}, + {inspect(c).key[2] for c in asia_and_europe}, + {"europe", "asia"}, ) eq_( @@ -545,7 +549,9 @@ class ShardTest(object): sess.execute( update(Report) .filter(Report.temperature >= 80) - .values({"temperature": Report.temperature + 6},) + .values( + {"temperature": Report.temperature + 6}, + ) .execution_options(synchronize_session="evaluate") ) @@ -579,7 +585,9 @@ class ShardTest(object): # four shards sess.execute( update(Report) - .values({"temperature": Report.temperature + 6},) + .values( + {"temperature": Report.temperature + 6}, + ) .execution_options(synchronize_session="fetch") ) @@ -783,8 +791,7 @@ class MultipleDialectShardTest(ShardTest, fixtures.TestBase): class SelectinloadRegressionTest(fixtures.DeclarativeMappedTest): - """test #4175 - """ + """test #4175""" @classmethod def setup_classes(cls): diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py index 864174d96f..56679a8001 100644 --- a/test/orm/_fixtures.py +++ b/test/orm/_fixtures.py @@ -15,9 +15,7 @@ __all__ = () class FixtureTest(fixtures.MappedTest): - """A MappedTest pre-configured with a common set of fixtures. 
- - """ + """A MappedTest pre-configured with a common set of fixtures.""" run_define_tables = "once" run_setup_classes = "once" diff --git a/test/orm/declarative/test_inheritance.py b/test/orm/declarative/test_inheritance.py index 290c83eae9..d8847ed402 100644 --- a/test/orm/declarative/test_inheritance.py +++ b/test/orm/declarative/test_inheritance.py @@ -887,9 +887,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): ) def test_columns_single_inheritance_cascading_resolution_pk(self): - """An additional test for #4352 in terms of the requested use case. - - """ + """An additional test for #4352 in terms of the requested use case.""" class TestBase(Base): __abstract__ = True diff --git a/test/orm/inheritance/test_abc_inheritance.py b/test/orm/inheritance/test_abc_inheritance.py index 60c488be36..bce554f30d 100644 --- a/test/orm/inheritance/test_abc_inheritance.py +++ b/test/orm/inheritance/test_abc_inheritance.py @@ -38,7 +38,10 @@ class ABCTest(fixtures.MappedTest): ta = ["a", metadata] ta.append( Column( - "id", Integer, primary_key=True, test_needs_autoincrement=True, + "id", + Integer, + primary_key=True, + test_needs_autoincrement=True, ) ), ta.append(Column("a_data", String(30))) diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index 2767607cb0..ce8d76a533 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -656,7 +656,9 @@ class RelationshipTest4(fixtures.MappedTest): def go(): testcar = session.get( - Car, car1.car_id, options=[joinedload("employee")], + Car, + car1.car_id, + options=[joinedload("employee")], ) assert str(testcar.employee) == "Engineer E4, status X" diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index 9db11d362a..af960625e7 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -895,8 +895,7 @@ class PolymorphicAttributeManagementTest(fixtures.MappedTest): ) def test_entirely_oob_assignment(self): - """test warn on an unknown polymorphic identity. - """ + """test warn on an unknown polymorphic identity.""" B = self.classes.B sess = Session() @@ -2615,7 +2614,7 @@ class OptimizedLoadTest(fixtures.MappedTest): eq_(s1.sub, "s1sub") def test_optimized_passes(self): - """"test that the 'optimized load' routine doesn't crash when + """ "test that the 'optimized load' routine doesn't crash when a column in the join condition is not available.""" base, sub = self.tables.base, self.tables.sub diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py index 69a485d41d..dd3ca4821c 100644 --- a/test/orm/inheritance/test_polymorphic_rel.py +++ b/test/orm/inheritance/test_polymorphic_rel.py @@ -285,7 +285,8 @@ class _PolymorphicTestBase(object): def test_filter_on_subclass_one_future(self): sess = create_session(future=True) eq_( - sess.execute(select(Engineer)).scalar(), Engineer(name="dilbert"), + sess.execute(select(Engineer)).scalar(), + Engineer(name="dilbert"), ) def test_filter_on_subclass_two(self): @@ -1567,8 +1568,7 @@ class _PolymorphicTestBase(object): ) def test_self_referential_two_point_five(self): - """Using two aliases, the above case works. 
- """ + """Using two aliases, the above case works.""" sess = create_session() palias = aliased(Person) palias2 = aliased(Person) @@ -1613,7 +1613,8 @@ class _PolymorphicTestBase(object): stmt2 = select(pa1, pa2).order_by(pa1.person_id, pa2.person_id) eq_( - sess.execute(stmt2).unique().all(), expected, + sess.execute(stmt2).unique().all(), + expected, ) def test_self_referential_two_point_five_future(self): @@ -1641,7 +1642,8 @@ class _PolymorphicTestBase(object): stmt2 = select(pa1, pa2).order_by(pa1.person_id, pa2.person_id) eq_( - sess.execute(stmt2).unique().all(), expected, + sess.execute(stmt2).unique().all(), + expected, ) def test_nesting_queries(self): @@ -1755,7 +1757,8 @@ class _PolymorphicTestBase(object): ("vlad", "Elbonia, Inc."), ] eq_( - q(self, sess).all(), expected, + q(self, sess).all(), + expected, ) def test_mixed_entities_two(self): diff --git a/test/orm/inheritance/test_productspec.py b/test/orm/inheritance/test_productspec.py index 5fd2c5a6f6..35c7565fb9 100644 --- a/test/orm/inheritance/test_productspec.py +++ b/test/orm/inheritance/test_productspec.py @@ -325,7 +325,7 @@ class InheritTest(fixtures.MappedTest): """this tests the RasterDocument being attached to the Assembly, but *not* the Document. this means only a "sub-class" task, i.e. corresponding to an inheriting mapper but not the base mapper, - is created. """ + is created.""" product_mapper = mapper( Product, diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py index 8590949a77..03fd05bd5c 100644 --- a/test/orm/inheritance/test_relationship.py +++ b/test/orm/inheritance/test_relationship.py @@ -1408,9 +1408,7 @@ class SameNamedPropTwoPolymorphicSubClassesTest(fixtures.MappedTest): class SubClassToSubClassFromParentTest(fixtures.MappedTest): - """test #2617 - - """ + """test #2617""" run_setup_classes = "once" run_setup_mappers = "once" diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index 68548706e7..8820aa6a45 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -378,7 +378,8 @@ class AttributesTest(fixtures.ORMTest): return b.name assert_raises( - orm_exc.UnmappedInstanceError, go, + orm_exc.UnmappedInstanceError, + go, ) def test_del_scalar_nonobject(self): @@ -597,7 +598,7 @@ class AttributesTest(fixtures.ORMTest): def test_lazytrackparent(self): """test that the "hasparent" flag works properly - when lazy loaders and backrefs are used + when lazy loaders and backrefs are used """ @@ -859,7 +860,7 @@ class AttributesTest(fixtures.ORMTest): """changeset: 1633 broke ability to use ORM to map classes with unusual descriptor attributes (for example, classes that inherit from ones implementing zope.interface.Interface). This is a - simple regression test to prevent that defect. 
""" + simple regression test to prevent that defect.""" class des(object): def __get__(self, instance, owner): @@ -1111,7 +1112,7 @@ class UtilTest(fixtures.ORMTest): def test_set_commited_value_none_uselist(self): """test that set_committed_value->None to a uselist generates an - empty list """ + empty list""" class Foo(object): pass diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py index 35735a79ca..fef827d833 100644 --- a/test/orm/test_bind.py +++ b/test/orm/test_bind.py @@ -203,7 +203,10 @@ class BindIntegrationTest(_fixtures.FixtureTest): }, "e2", ), - (lambda User: {"clause": select(1).where(User.name == "ed")}, "e1",), + ( + lambda User: {"clause": select(1).where(User.name == "ed")}, + "e1", + ), (lambda: {"clause": select(1)}, "e3"), (lambda User: {"clause": Query([User])._statement_20()}, "e1"), (lambda: {"clause": Query([1])._statement_20()}, "e3"), diff --git a/test/orm/test_cache_key.py b/test/orm/test_cache_key.py index 45a60a5cb9..59d7470122 100644 --- a/test/orm/test_cache_key.py +++ b/test/orm/test_cache_key.py @@ -492,7 +492,8 @@ class PolyCacheKeyTest(CacheKeyFixture, _poly_fixtures._Polymorphic): ) self._run_cache_key_fixture( - lambda: stmt_20(one(), two(), three()), compare_values=True, + lambda: stmt_20(one(), two(), three()), + compare_values=True, ) diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py index 5a139038b0..6a916e28a8 100644 --- a/test/orm/test_cascade.py +++ b/test/orm/test_cascade.py @@ -3827,7 +3827,9 @@ class O2MConflictTest(fixtures.MappedTest): class PartialFlushTest(fixtures.MappedTest): - """test cascade behavior as it relates to object lists passed to flush(). + """test cascade behavior as it relates to object lists passed + to flush(). + """ @classmethod diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py index e084d90d9d..a164034daa 100644 --- a/test/orm/test_composites.py +++ b/test/orm/test_composites.py @@ -747,7 +747,9 @@ class MappedSelectTest(fixtures.MappedTest): desc_values = ( select(values, descriptions.c.d1, descriptions.c.d2) - .where(descriptions.c.id == values.c.description_id,) + .where( + descriptions.c.id == values.c.description_id, + ) .alias("descriptions_values") ) diff --git a/test/orm/test_core_compilation.py b/test/orm/test_core_compilation.py index 506aca44d8..12d3f7bfbb 100644 --- a/test/orm/test_core_compilation.py +++ b/test/orm/test_core_compilation.py @@ -307,7 +307,8 @@ class LoadersInSubqueriesTest(QueryTest, AssertsCompiledSQL): "FROM users) AS anon_1" ) self.assert_compile( - stmt1._final_statement(legacy_query_style=False), expected, + stmt1._final_statement(legacy_query_style=False), + expected, ) self.assert_compile(stmt2, expected) @@ -334,7 +335,8 @@ class LoadersInSubqueriesTest(QueryTest, AssertsCompiledSQL): ) self.assert_compile( - stmt1._final_statement(legacy_query_style=False), expected, + stmt1._final_statement(legacy_query_style=False), + expected, ) self.assert_compile(stmt2, expected) @@ -382,7 +384,9 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL): "count", column_property( select(func.count(addresses.c.id)) - .where(users.c.id == addresses.c.user_id,) + .where( + users.c.id == addresses.c.user_id, + ) .correlate(users) .scalar_subquery() ), @@ -391,7 +395,15 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL): ), ) - mapper(Address, addresses, properties={"user": relationship(User,)}) + mapper( + Address, + addresses, + properties={ + "user": relationship( + User, + ) + }, + ) return User, Address @@ -405,10 +417,19 @@ class 
ExtraColsTest(QueryTest, AssertsCompiledSQL): ) mapper( - User, users, + User, + users, ) - mapper(Address, addresses, properties={"user": relationship(User,)}) + mapper( + Address, + addresses, + properties={ + "user": relationship( + User, + ) + }, + ) return User, Address @@ -510,7 +531,7 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL): def test_column_properties_can_we_use(self, column_property_fixture): """test querying mappings that reference external columns or - selectables. """ + selectables.""" # User, Address = column_property_fixture @@ -554,7 +575,12 @@ class ExtraColsTest(QueryTest, AssertsCompiledSQL): # col properties will retain anonymous labels, however will # adopt the .key within the subquery collection so they can # be addressed. - stmt = select(User.id, User.name, User.concat, User.count,) + stmt = select( + User.id, + User.name, + User.concat, + User.count, + ) subq = stmt.subquery() # here, the subquery needs to export the columns that include @@ -845,7 +871,8 @@ class ImplicitWithPolymorphicTest( self.assert_compile(stmt, expected) self.assert_compile( - q._final_statement(legacy_query_style=False), expected, + q._final_statement(legacy_query_style=False), + expected, ) def test_select_where_baseclass(self): @@ -886,7 +913,8 @@ class ImplicitWithPolymorphicTest( self.assert_compile(stmt, expected) self.assert_compile( - q._final_statement(legacy_query_style=False), expected, + q._final_statement(legacy_query_style=False), + expected, ) def test_select_where_subclass(self): @@ -978,7 +1006,8 @@ class ImplicitWithPolymorphicTest( self.assert_compile(stmt, expected) self.assert_compile( - q._final_statement(legacy_query_style=False), expected, + q._final_statement(legacy_query_style=False), + expected, ) diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py index 5c61a6370a..47b5404c92 100644 --- a/test/orm/test_cycles.py +++ b/test/orm/test_cycles.py @@ -1199,10 +1199,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest): class SelfReferentialPostUpdateTest(fixtures.MappedTest): - """Post_update on a single self-referential mapper. - - - """ + """Post_update on a single self-referential mapper.""" @classmethod def define_tables(cls, metadata): @@ -1539,8 +1536,8 @@ class SelfReferentialPostUpdateTest3(fixtures.MappedTest): class PostUpdateBatchingTest(fixtures.MappedTest): - """test that lots of post update cols batch together into a single UPDATE. 
- """ + """test that lots of post update cols batch together into a single + UPDATE.""" @classmethod def define_tables(cls, metadata): diff --git a/test/orm/test_dataclasses_py3k.py b/test/orm/test_dataclasses_py3k.py index e0665b23fc..d3f9530724 100644 --- a/test/orm/test_dataclasses_py3k.py +++ b/test/orm/test_dataclasses_py3k.py @@ -249,7 +249,8 @@ class PlainDeclarativeDataclassesTest(DataclassesTest): name: Optional[str] = None __mapper_args__ = dict( - polymorphic_on=widgets.c.type, polymorphic_identity="normal", + polymorphic_on=widgets.c.type, + polymorphic_identity="normal", ) @declarative @@ -258,7 +259,9 @@ class PlainDeclarativeDataclassesTest(DataclassesTest): magic: bool = False - __mapper_args__ = dict(polymorphic_identity="special",) + __mapper_args__ = dict( + polymorphic_identity="special", + ) @declarative @dataclasses.dataclass diff --git a/test/orm/test_defaults.py b/test/orm/test_defaults.py index a2dc8cf453..aa1f2b88d8 100644 --- a/test/orm/test_defaults.py +++ b/test/orm/test_defaults.py @@ -330,7 +330,10 @@ class ComputedDefaultsOnUpdateTest(fixtures.MappedTest): True, testing.requires.computed_columns_on_update_returning, ), - ("noneagerload", False,), + ( + "noneagerload", + False, + ), id_="ia", ) def test_update_computed(self, eager): @@ -485,10 +488,12 @@ class IdentityDefaultsOnUpdateTest(fixtures.MappedTest): ], [ CompiledSQL( - "INSERT INTO test (foo) VALUES (:foo)", [{"foo": 5}], + "INSERT INTO test (foo) VALUES (:foo)", + [{"foo": 5}], ), CompiledSQL( - "INSERT INTO test (foo) VALUES (:foo)", [{"foo": 10}], + "INSERT INTO test (foo) VALUES (:foo)", + [{"foo": 10}], ), ], ) diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py index 0a452bb1ea..b2a04b8fff 100644 --- a/test/orm/test_deferred.py +++ b/test/orm/test_deferred.py @@ -273,8 +273,8 @@ class DeferredTest(AssertsCompiledSQL, _fixtures.FixtureTest): self.sql_count_(0, go) def test_preserve_changes(self): - """A deferred load operation doesn't revert modifications on attributes - """ + """A deferred load operation doesn't revert modifications on + attributes""" orders, Order = self.tables.orders, self.classes.Order @@ -824,7 +824,7 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest): def test_locates_col(self): """changed in 1.0 - we don't search for deferred cols in the result - now. """ + now.""" orders, Order = self.tables.orders, self.classes.Order diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py index bcba1f0315..e04732440f 100644 --- a/test/orm/test_deprecations.py +++ b/test/orm/test_deprecations.py @@ -1700,7 +1700,13 @@ class SubqRelationsFromSelfTest(fixtures.DeclarativeMappedTest): s = Session(connection) - as_ = [A(id=i, cs=[C(), C()],) for i in range(1, 5)] + as_ = [ + A( + id=i, + cs=[C(), C()], + ) + for i in range(1, 5) + ] s.add_all( [ @@ -2358,7 +2364,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest): def test_selectload(self): """tests lazy loading with two relationships simultaneously, - from the same table, using aliases. 
""" + from the same table, using aliases.""" users, orders, User, Address, Order, addresses = ( self.tables.users, @@ -2409,7 +2415,7 @@ class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest): def test_joinedload(self): """Eager loading with two relationships simultaneously, - from the same table, using aliases.""" + from the same table, using aliases.""" users, orders, User, Address, Order, addresses = ( self.tables.users, diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index a699cfa634..57225d6406 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -222,7 +222,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_orderby_related(self): """A regular mapper select on a single table can - order by a relationship to a second table""" + order by a relationship to a second table""" Address, addresses, users, User = ( self.classes.Address, @@ -712,7 +712,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_double_w_ac(self): """Eager loading with two relationships simultaneously, - from the same table, using aliases.""" + from the same table, using aliases.""" ( users, @@ -788,7 +788,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_double_w_ac_against_subquery(self): """Eager loading with two relationships simultaneously, - from the same table, using aliases.""" + from the same table, using aliases.""" ( users, @@ -1297,7 +1297,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): # against a select. original issue from ticket #904 sel = ( sa.select(users, addresses.c.email_address) - .where(users.c.id == addresses.c.user_id,) + .where( + users.c.id == addresses.c.user_id, + ) .alias("useralias") ) mapper( @@ -4867,7 +4869,9 @@ class SubqueryTest(fixtures.MappedTest): tag_score = tags_table.c.score1 * tags_table.c.score2 user_score = sa.select( sa.func.sum(tags_table.c.score1 * tags_table.c.score2) - ).where(tags_table.c.user_id == users_table.c.id,) + ).where( + tags_table.c.user_id == users_table.c.id, + ) if labeled: tag_score = tag_score.label(labelname) diff --git a/test/orm/test_events.py b/test/orm/test_events.py index b6a4b41cb3..2851622414 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -83,7 +83,11 @@ class ORMExecuteTest(_RemoveListeners, _fixtures.FixtureTest): ckey = orm_context.execution_options["cache_key"] if ckey is not None: - return get_value(ckey, cache, orm_context.invoke_statement,) + return get_value( + ckey, + cache, + orm_context.invoke_statement, + ) return maker() @@ -947,7 +951,7 @@ class DeclarativeEventListenTest( class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): - """"test event listeners against unmapped classes. + """ "test event listeners against unmapped classes. This incurs special logic. Note if we ever do the "remove" case, it has to get all of these, too. 
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py index ba2fce60bf..7ccf2c1aee 100644 --- a/test/orm/test_expire.py +++ b/test/orm/test_expire.py @@ -1146,7 +1146,9 @@ class ExpireTest(_fixtures.FixtureTest): eq_(len(list(sess)), 9) def test_state_change_col_to_deferred(self): - """Behavioral test to verify the current activity of loader callables + """Behavioral test to verify the current activity of loader + callables + """ users, User = self.tables.users, self.classes.User @@ -1194,7 +1196,9 @@ class ExpireTest(_fixtures.FixtureTest): assert "name" not in attributes.instance_state(u1).callables def test_state_deferred_to_col(self): - """Behavioral test to verify the current activity of loader callables + """Behavioral test to verify the current activity of + loader callables + """ users, User = self.tables.users, self.classes.User @@ -1236,7 +1240,9 @@ class ExpireTest(_fixtures.FixtureTest): assert "name" not in attributes.instance_state(u1).callables def test_state_noload_to_lazy(self): - """Behavioral test to verify the current activity of loader callables + """Behavioral test to verify the current activity of + loader callables + """ users, Address, addresses, User = ( diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index 79ed3cad4a..055f24b5c1 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -381,7 +381,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): subq = select(User).filter(User.id.in_([8, 9])).subquery() q = create_session().query(aliased(User, subq)) eq_( - [User(id=8), User(id=9)], q.all(), + [User(id=8), User(id=9)], + q.all(), ) subq = select(User).order_by(User.id).slice(1, 3).subquery() @@ -392,7 +393,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): u = aliased(User, subq) q = create_session().query(u).order_by(u.id) eq_( - [User(id=8)], list(q[0:1]), + [User(id=8)], + list(q[0:1]), ) def test_join(self): @@ -433,7 +435,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): aq = aliased(Address, subq) q = create_session().query(aq.user_id, subq.c.count) eq_( - q.all(), [(7, 1), (8, 3), (9, 1)], + q.all(), + [(7, 1), (8, 3), (9, 1)], ) subq = select(Address.user_id, Address.id) @@ -447,7 +450,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): ) eq_( - q.all(), [(7, 1), (8, 3), (9, 1)], + q.all(), + [(7, 1), (8, 3), (9, 1)], ) def test_error_w_aliased_against_select(self): @@ -559,7 +563,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): ) eq_( - q.all(), [("chuck", "ed"), ("fred", "ed")], + q.all(), + [("chuck", "ed"), ("fred", "ed")], ) q = ( @@ -645,7 +650,8 @@ class EntityFromSubqueryTest(QueryTest, AssertsCompiledSQL): q3 = sess.query(q2) eq_( - q3.all(), [(7, 1), (8, 1), (9, 1), (10, 1)], + q3.all(), + [(7, 1), (8, 1), (9, 1), (10, 1)], ) q3 = select(q2) @@ -2183,7 +2189,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): select(User, Address).from_statement(selectquery) ) eq_( - list(result), expected, + list(result), + expected, ) sess.expunge_all() @@ -3442,14 +3449,24 @@ class ExternalColumnsTest(QueryTest): "concat": column_property((users.c.id * 2)), "count": column_property( select(func.count(addresses.c.id)) - .where(users.c.id == addresses.c.user_id,) + .where( + users.c.id == addresses.c.user_id, + ) .correlate(users) .scalar_subquery() ), }, ) - mapper(Address, addresses, properties={"user": relationship(User,)}) + mapper( + Address, + addresses, + properties={ + "user": relationship( + User, + ) + }, + ) sess = create_session() 
@@ -3603,7 +3620,9 @@ class ExternalColumnsTest(QueryTest): "concat": column_property((users.c.id * 2)), "count": column_property( select(func.count(addresses.c.id)) - .where(users.c.id == addresses.c.user_id,) + .where( + users.c.id == addresses.c.user_id, + ) .correlate(users) .scalar_subquery() ), diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py index 02742da8f1..765111b1e8 100644 --- a/test/orm/test_joins.py +++ b/test/orm/test_joins.py @@ -624,7 +624,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL): ) def test_single_prop_4(self): - Order, User, = (self.classes.Order, self.classes.User) + ( + Order, + User, + ) = (self.classes.Order, self.classes.User) sess = create_session() oalias1 = aliased(Order) @@ -640,7 +643,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL): ) def test_single_prop_5(self): - Order, User, = (self.classes.Order, self.classes.User) + ( + Order, + User, + ) = (self.classes.Order, self.classes.User) sess = create_session() self.assert_compile( @@ -682,7 +688,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL): ) def test_single_prop_8(self): - Order, User, = (self.classes.Order, self.classes.User) + ( + Order, + User, + ) = (self.classes.Order, self.classes.User) sess = create_session() # same as before using an aliased() for User as well diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py index f9d57bc062..57d3ce01d6 100644 --- a/test/orm/test_merge.py +++ b/test/orm/test_merge.py @@ -803,7 +803,7 @@ class MergeTest(_fixtures.FixtureTest): def test_no_relationship_cascade(self): """test that merge doesn't interfere with a relationship() - target that specifically doesn't include 'merge' cascade. + target that specifically doesn't include 'merge' cascade. """ Address, addresses, users, User = ( diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py index 202ff9ab01..87ec0d79d3 100644 --- a/test/orm/test_naturalpks.py +++ b/test/orm/test_naturalpks.py @@ -266,10 +266,13 @@ class NaturalPKTest(fixtures.MappedTest): # test passive_updates=True; update user self.assert_sql_count(testing.db, go, 1) sess.expunge_all() - assert User( - username="jack", - addresses=[Address(username="jack"), Address(username="jack")], - ) == sess.query(User).get("jack") + assert ( + User( + username="jack", + addresses=[Address(username="jack"), Address(username="jack")], + ) + == sess.query(User).get("jack") + ) u1 = sess.query(User).get("jack") u1.addresses = [] @@ -1120,10 +1123,13 @@ class NonPKCascadeTest(fixtures.MappedTest): # test passive_updates=True; update user self.assert_sql_count(testing.db, go, 1) sess.expunge_all() - assert User( - username="jack", - addresses=[Address(username="jack"), Address(username="jack")], - ) == sess.query(User).get(u1.id) + assert ( + User( + username="jack", + addresses=[Address(username="jack"), Address(username="jack")], + ) + == sess.query(User).get(u1.id) + ) sess.expunge_all() u1 = sess.query(User).get(u1.id) diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py index daac38dc23..e40e815aa3 100644 --- a/test/orm/test_of_type.py +++ b/test/orm/test_of_type.py @@ -775,7 +775,10 @@ class SubclassRelationshipTest( ) def test_any_walias(self): - DataContainer, Job, = (self.classes.DataContainer, self.classes.Job) + ( + DataContainer, + Job, + ) = (self.classes.DataContainer, self.classes.Job) Job_A = aliased(Job) @@ -865,7 +868,10 @@ class SubclassRelationshipTest( ) def test_join_walias(self): - DataContainer, Job, = (self.classes.DataContainer, self.classes.Job) + ( + DataContainer, + Job, + ) 
= (self.classes.DataContainer, self.classes.Job) Job_A = aliased(Job) diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 31643e5ff5..fc6471d524 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -884,7 +884,9 @@ class GetTest(QueryTest): stmt = select(User).execution_options(populate_existing=True) - s.execute(stmt,).scalars().all() + s.execute( + stmt, + ).scalars().all() self.assert_(u not in s.dirty) @@ -2779,15 +2781,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL): # test that the contents are not adapted by the aliased join ua = aliased(Address) - assert ( - [User(id=7), User(id=8)] - == sess.query(User) - .join(ua, "addresses") - .filter( - ~User.addresses.any(Address.email_address == "fred@fred.com") - ) - .all() - ) + assert [User(id=7), User(id=8)] == sess.query(User).join( + ua, "addresses" + ).filter( + ~User.addresses.any(Address.email_address == "fred@fred.com") + ).all() assert [User(id=10)] == sess.query(User).outerjoin( ua, "addresses" @@ -2801,15 +2799,11 @@ class FilterTest(QueryTest, AssertsCompiledSQL): sess = create_session() # test that any() doesn't overcorrelate - assert ( - [User(id=7), User(id=8)] - == sess.query(User) - .join("addresses") - .filter( - ~User.addresses.any(Address.email_address == "fred@fred.com") - ) - .all() - ) + assert [User(id=7), User(id=8)] == sess.query(User).join( + "addresses" + ).filter( + ~User.addresses.any(Address.email_address == "fred@fred.com") + ).all() def test_has(self): # see also HasAnyTest, a newer suite which tests these at the level of @@ -2825,42 +2819,41 @@ class FilterTest(QueryTest, AssertsCompiledSQL): Address.user.has(name="fred") ).all() - assert ( - [Address(id=2), Address(id=3), Address(id=4), Address(id=5)] - == sess.query(Address) - .filter(Address.user.has(User.name.like("%ed%"))) - .order_by(Address.id) - .all() - ) + assert [ + Address(id=2), + Address(id=3), + Address(id=4), + Address(id=5), + ] == sess.query(Address).filter( + Address.user.has(User.name.like("%ed%")) + ).order_by( + Address.id + ).all() - assert ( - [Address(id=2), Address(id=3), Address(id=4)] - == sess.query(Address) - .filter(Address.user.has(User.name.like("%ed%"), id=8)) - .order_by(Address.id) - .all() - ) + assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query( + Address + ).filter(Address.user.has(User.name.like("%ed%"), id=8)).order_by( + Address.id + ).all() # test has() doesn't overcorrelate - assert ( - [Address(id=2), Address(id=3), Address(id=4)] - == sess.query(Address) - .join("user") - .filter(Address.user.has(User.name.like("%ed%"), id=8)) - .order_by(Address.id) - .all() - ) + assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query( + Address + ).join("user").filter( + Address.user.has(User.name.like("%ed%"), id=8) + ).order_by( + Address.id + ).all() # test has() doesn't get subquery contents adapted by aliased join ua = aliased(User) - assert ( - [Address(id=2), Address(id=3), Address(id=4)] - == sess.query(Address) - .join(ua, "user") - .filter(Address.user.has(User.name.like("%ed%"), id=8)) - .order_by(Address.id) - .all() - ) + assert [Address(id=2), Address(id=3), Address(id=4)] == sess.query( + Address + ).join(ua, "user").filter( + Address.user.has(User.name.like("%ed%"), id=8) + ).order_by( + Address.id + ).all() dingaling = sess.query(Dingaling).get(2) assert [User(id=9)] == sess.query(User).filter( @@ -3392,7 +3385,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): def test_union_literal_expressions_compile(self): """test that column expressions 
translate during - the _from_statement() portion of union(), others""" + the _from_statement() portion of union(), others""" User = self.classes.User @@ -3586,25 +3579,20 @@ class AggregateTest(QueryTest): User, Address = self.classes.User, self.classes.Address sess = create_session() - assert ( - [User(name="ed", id=8)] - == sess.query(User) - .order_by(User.id) - .group_by(User) - .join("addresses") - .having(func.count(Address.id) > 2) - .all() - ) + assert [User(name="ed", id=8)] == sess.query(User).order_by( + User.id + ).group_by(User).join("addresses").having( + func.count(Address.id) > 2 + ).all() - assert ( - [User(name="jack", id=7), User(name="fred", id=9)] - == sess.query(User) - .order_by(User.id) - .group_by(User) - .join("addresses") - .having(func.count(Address.id) < 2) - .all() - ) + assert [ + User(name="jack", id=7), + User(name="fred", id=9), + ] == sess.query(User).order_by(User.id).group_by(User).join( + "addresses" + ).having( + func.count(Address.id) < 2 + ).all() class ExistsTest(QueryTest, AssertsCompiledSQL): @@ -3668,7 +3656,8 @@ class CountTest(QueryTest): s = create_session() eq_( - s.execute(select(func.count()).select_from(User)).scalar(), 4, + s.execute(select(func.count()).select_from(User)).scalar(), + 4, ) eq_( @@ -3741,17 +3730,20 @@ class CountTest(QueryTest): stmt = select(User, Address).join(Address, true()).limit(2) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 2, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 2, ) stmt = select(User, Address).join(Address, true()).limit(100) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 20, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 20, ) stmt = select(User, Address).join(Address).limit(100) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 5, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 5, ) def test_cols(self): @@ -3785,33 +3777,39 @@ class CountTest(QueryTest): stmt = select(func.count(distinct(User.name))) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 1, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 1, ) stmt = select(func.count(distinct(User.name))).distinct() eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 1, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 1, ) stmt = select(User.name) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 4, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 4, ) stmt = select(User.name, Address).join(Address, true()) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 20, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 20, ) stmt = select(Address.user_id) eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 5, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 5, ) stmt = stmt.distinct() eq_( - s.scalar(select(func.count()).select_from(stmt.subquery())), 3, + s.scalar(select(func.count()).select_from(stmt.subquery())), + 3, ) @@ -4145,7 +4143,10 @@ class DistinctTest(QueryTest, AssertsCompiledSQL): .order_by(User.id, User.name, Address.email_address) ) q2 = sess.query( - User.id, User.name.label("foo"), Address.id, Address.email_address, + User.id, + User.name.label("foo"), + Address.id, + Address.email_address, ) self.assert_compile( @@ -4169,7 +4170,11 @@ class DistinctTest(QueryTest, AssertsCompiledSQL): sess = create_session() q = ( - sess.query(User.id, User.name.label("foo"), 
Address.id,) + sess.query( + User.id, + User.name.label("foo"), + Address.id, + ) .distinct(Address.email_address) .order_by(User.id, User.name) ) diff --git a/test/orm/test_relationship_criteria.py b/test/orm/test_relationship_criteria.py index ccee396a32..1c7eb2e619 100644 --- a/test/orm/test_relationship_criteria.py +++ b/test/orm/test_relationship_criteria.py @@ -76,7 +76,8 @@ class _Fixtures(_fixtures.FixtureTest): pass mapper( - UserWFoob, users, + UserWFoob, + users, ) return HasFoob, UserWFoob @@ -226,7 +227,10 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL): s.execute(stmt).all() asserter.assert_( - CompiledSQL("SELECT users.id, users.name FROM users", [],), + CompiledSQL( + "SELECT users.id, users.name FROM users", + [], + ), CompiledSQL( "SELECT addresses.user_id AS addresses_user_id, addresses.id " "AS addresses_id, addresses.email_address " @@ -259,7 +263,8 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL): asserter.assert_( CompiledSQL( - "SELECT users.id, users.name FROM users ORDER BY users.id", [], + "SELECT users.id, users.name FROM users ORDER BY users.id", + [], ), CompiledSQL( "SELECT addresses.id AS addresses_id, " @@ -540,7 +545,8 @@ class LoaderCriteriaTest(_Fixtures, testing.AssertsCompiledSQL): .outerjoin(User.addresses) .options( with_loader_criteria( - Address, ~Address.email_address.like("ed@%"), + Address, + ~Address.email_address.like("ed@%"), ) ) .order_by(User.id) diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index eaa1751f96..9a91197ed6 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -2489,7 +2489,7 @@ class JoinConditionErrorTest(fixtures.TestBase): class TypeMatchTest(fixtures.MappedTest): """test errors raised when trying to add items - whose type is not handled by a relationship""" + whose type is not handled by a relationship""" @classmethod def define_tables(cls, metadata): @@ -3197,7 +3197,9 @@ class ViewOnlySyncBackref(fixtures.MappedTest): return mapper( - A, self.tables.t1, properties={"bs": rel()}, + A, + self.tables.t1, + properties={"bs": rel()}, ) mapper(B, self.tables.t2) @@ -3724,9 +3726,7 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest): class FunctionAsPrimaryJoinTest(fixtures.DeclarativeMappedTest): - """test :ticket:`3831` - - """ + """test :ticket:`3831`""" __only_on__ = "sqlite" diff --git a/test/orm/test_selectin_relations.py b/test/orm/test_selectin_relations.py index 5da1e1a4b9..c759425647 100644 --- a/test/orm/test_selectin_relations.py +++ b/test/orm/test_selectin_relations.py @@ -464,7 +464,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_orderby_related(self): """A regular mapper select on a single table can - order by a relationship to a second table""" + order by a relationship to a second table""" Address, addresses, users, User = ( self.classes.Address, diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py index 8ea79151b7..280a4355ff 100644 --- a/test/orm/test_subquery_relations.py +++ b/test/orm/test_subquery_relations.py @@ -495,7 +495,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_orderby_related(self): """A regular mapper select on a single table can - order by a relationship to a second table""" + order by a relationship to a second table""" Address, addresses, users, User = ( self.classes.Address, @@ -3301,7 +3301,13 @@ class FromSubqTest(fixtures.DeclarativeMappedTest): s = Session(connection) 
- as_ = [A(id=i, cs=[C(), C()],) for i in range(1, 5)] + as_ = [ + A( + id=i, + cs=[C(), C()], + ) + for i in range(1, 5) + ] s.add_all( [ diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py index 6218829452..497693de28 100644 --- a/test/orm/test_transaction.py +++ b/test/orm/test_transaction.py @@ -2787,13 +2787,15 @@ class FutureJoinIntoAnExternalTransactionTest( class NonFutureJoinIntoAnExternalTransactionTest( - NewStyleJoinIntoAnExternalTransactionTest, fixtures.TestBase, + NewStyleJoinIntoAnExternalTransactionTest, + fixtures.TestBase, ): pass class LegacyJoinIntoAnExternalTransactionTest( - JoinIntoAnExternalTransactionFixture, fixtures.TestBase, + JoinIntoAnExternalTransactionFixture, + fixtures.TestBase, ): def setup_session(self): # begin a non-ORM transaction diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index 3ca75cdb2d..ee76d7a247 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -3520,7 +3520,9 @@ class EnsurePKSortableTest(fixtures.MappedTest): ) assert_raises_message( - sa.exc.InvalidRequestError, message, s.flush, + sa.exc.InvalidRequestError, + message, + s.flush, ) else: s.flush() diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py index ac290257d1..01eb7279bd 100644 --- a/test/orm/test_update_delete.py +++ b/test/orm/test_update_delete.py @@ -1172,7 +1172,10 @@ class UpdateDeleteTest(fixtures.MappedTest): stmt = ( update(User) .filter(User.id == 15) - .ordered_values(("name", "foob"), ("age", 123),) + .ordered_values( + ("name", "foob"), + ("age", 123), + ) ) result = session.execute(stmt) cols = [ diff --git a/test/requirements.py b/test/requirements.py index 28f82c3b89..291a115fe3 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -532,16 +532,16 @@ class DefaultRequirements(SuiteRequirements): @property def cross_schema_fk_reflection(self): - """target system must support reflection of inter-schema foreign keys - """ + """target system must support reflection of inter-schema foreign + keys""" return only_on(["postgresql", "mysql", "mariadb", "mssql"]) @property def implicit_default_schema(self): """target system has a strong concept of 'default' schema that can - be referred to implicitly. + be referred to implicitly. - basically, PostgreSQL. + basically, PostgreSQL. """ return only_on(["postgresql"]) @@ -857,9 +857,7 @@ class DefaultRequirements(SuiteRequirements): @property def symbol_names_w_double_quote(self): - """Target driver can create tables with a name like 'some " table' - - """ + """Target driver can create tables with a name like 'some " table'""" return skip_if( [no_support("oracle", "ORA-03001: unimplemented feature")] @@ -867,7 +865,7 @@ class DefaultRequirements(SuiteRequirements): @property def emulated_lastrowid(self): - """"target dialect retrieves cursor.lastrowid or an equivalent + """ "target dialect retrieves cursor.lastrowid or an equivalent after an insert() construct executes. """ return fails_on_everything_except( @@ -881,7 +879,7 @@ class DefaultRequirements(SuiteRequirements): @property def emulated_lastrowid_even_with_sequences(self): - """"target dialect retrieves cursor.lastrowid or an equivalent + """ "target dialect retrieves cursor.lastrowid or an equivalent after an insert() construct executes, even if the table has a Sequence on it. 
""" @@ -899,7 +897,7 @@ class DefaultRequirements(SuiteRequirements): @property def dbapi_lastrowid(self): - """"target backend includes a 'lastrowid' accessor on the DBAPI + """ "target backend includes a 'lastrowid' accessor on the DBAPI cursor object. """ diff --git a/test/sql/test_compare.py b/test/sql/test_compare.py index 098606a91d..257013ac48 100644 --- a/test/sql/test_compare.py +++ b/test/sql/test_compare.py @@ -333,7 +333,11 @@ class CoreFixtures(object): (table_a.c.b == 10, 20), (table_a.c.a == 9, 12), ), - case((table_a.c.a == 5, 10), (table_a.c.a == 10, 20), else_=30,), + case( + (table_a.c.a == 5, 10), + (table_a.c.a == 10, 20), + else_=30, + ), case({"wendy": "W", "jack": "J"}, value=table_a.c.a, else_="E"), case({"wendy": "W", "jack": "J"}, value=table_a.c.b, else_="E"), case({"wendy_w": "W", "jack": "J"}, value=table_a.c.a, else_="E"), @@ -1006,7 +1010,8 @@ class CacheKeyTest(CacheKeyFixture, CoreFixtures, fixtures.TestBase): ) self._run_cache_key_fixture( - fixture, True, + fixture, + True, ) def test_bindparam_subclass_nocache(self): diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index a6118c03fb..7fd4e683b7 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -447,7 +447,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): # this is native_boolean=False for default dialect self.assert_compile( - select(not_(True)).apply_labels(), "SELECT :param_1 = 0 AS anon_1", + select(not_(True)).apply_labels(), + "SELECT :param_1 = 0 AS anon_1", ) self.assert_compile( @@ -727,7 +728,11 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): foo_bar__id = foo_bar.c.id._annotate({"some_orm_thing": True}) stmt = select( - foo.c.bar_id, foo_bar.c.id, foo_bar.c.id, foo_bar__id, foo_bar__id, + foo.c.bar_id, + foo_bar.c.id, + foo_bar.c.id, + foo_bar__id, + foo_bar__id, ).apply_labels() self.assert_compile( @@ -752,9 +757,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ) def test_nested_label_targeting(self): - """test nested anonymous label generation. - - """ + """test nested anonymous label generation.""" s1 = table1.select() s2 = s1.alias() s3 = select(s2).apply_labels() @@ -1491,7 +1494,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): def test_order_by_nulls(self): self.assert_compile( table2.select().order_by( - table2.c.otherid, table2.c.othername.desc().nullsfirst(), + table2.c.otherid, + table2.c.othername.desc().nullsfirst(), ), "SELECT myothertable.otherid, myothertable.othername FROM " "myothertable ORDER BY myothertable.otherid, " @@ -1500,7 +1504,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile( table2.select().order_by( - table2.c.otherid, table2.c.othername.desc().nullslast(), + table2.c.otherid, + table2.c.othername.desc().nullslast(), ), "SELECT myothertable.otherid, myothertable.othername FROM " "myothertable ORDER BY myothertable.otherid, " @@ -1519,7 +1524,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile( table2.select().order_by( - table2.c.otherid.nullsfirst(), table2.c.othername.desc(), + table2.c.otherid.nullsfirst(), + table2.c.othername.desc(), ), "SELECT myothertable.otherid, myothertable.othername FROM " "myothertable ORDER BY myothertable.otherid NULLS FIRST, " @@ -2068,7 +2074,10 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "Can't resolve label reference for ORDER BY / GROUP BY / " "DISTINCT etc. 
Textual " "SQL expression 'noname'", - union(select(table1.c.myid, table1.c.name), select(table2),) + union( + select(table1.c.myid, table1.c.name), + select(table2), + ) .order_by("noname") .compile, ) @@ -3159,7 +3168,7 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase): def _test_binds_no_hash_collision(self): """test that construct_params doesn't corrupt dict - due to hash collisions""" + due to hash collisions""" total_params = 100000 @@ -3468,7 +3477,12 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase): compiled = stmt_adapted.compile(cache_key=cache_key) # params set up as 5 - eq_(compiled.construct_params(params={},), {"myid_1": 5}) + eq_( + compiled.construct_params( + params={}, + ), + {"myid_1": 5}, + ) # also works w the original cache key eq_( @@ -3529,7 +3543,8 @@ class BindParameterTest(AssertsCompiledSQL, fixtures.TestBase): compiled = modified_stmt.compile(cache_key=cache_key) eq_( - compiled.construct_params(params={}), {"myid_1": 10, "myid_2": 12}, + compiled.construct_params(params={}), + {"myid_1": 10, "myid_2": 12}, ) # make a new statement doing the same thing and make sure diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 410f49f2a5..4ebfdc7acf 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -261,9 +261,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): ) def test_recursive_union_alias_two(self): - """ - - """ + """""" # I know, this is the PG VALUES keyword, # we're cheating here. also yes we need the SELECT, @@ -773,7 +771,10 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): s2 = ( select( - orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order, + orders.c.order == "y", + s1a.c.order, + orders.c.order, + s1.c.order, ) .where(orders.c.order == "z") .cte("regional_sales_2") @@ -815,7 +816,10 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): s2 = ( select( - orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order, + orders.c.order == "y", + s1a.c.order, + orders.c.order, + s1.c.order, ) .where(orders.c.order == "z") .cte("regional_sales_2") diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index 6cb1c38412..2750568d87 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -250,7 +250,12 @@ class DefaultObjectTest(fixtures.TestBase): Column("boolcol1", sa.Boolean, default=True), Column("boolcol2", sa.Boolean, default=False), # python function which uses ExecutionContext - Column("col7", Integer, default=lambda: 5, onupdate=lambda: 10,), + Column( + "col7", + Integer, + default=lambda: 5, + onupdate=lambda: 10, + ), # python builtin Column( "col8", @@ -1277,11 +1282,13 @@ class SpecialTypePKTest(fixtures.TestBase): eq_(r.inserted_primary_key, (None,)) else: eq_( - r.inserted_primary_key, (expected_result,), + r.inserted_primary_key, + (expected_result,), ) eq_( - conn.execute(t.select()).first(), (expected_result, 5), + conn.execute(t.select()).first(), + (expected_result, 5), ) def test_plain(self): diff --git a/test/sql/test_deprecations.py b/test/sql/test_deprecations.py index d078b36b8e..f418eab6b9 100644 --- a/test/sql/test_deprecations.py +++ b/test/sql/test_deprecations.py @@ -545,7 +545,11 @@ class SelectableTest(fixtures.TestBase, AssertsCompiledSQL): r"The \"whens\" argument to case\(\) is now passed" ): stmt = select(t1).where( - case(whens={t1.c.q == 5: "foo"}, else_="bat",) != "bat" + case( + whens={t1.c.q == 5: "foo"}, + else_="bat", + ) + != "bat" ) self.assert_compile( @@ -1607,7 +1611,8 @@ class 
PositionalTextTest(fixtures.TablesTest): @classmethod def insert_data(cls, connection): connection.execute( - cls.tables.text1.insert(), [dict(a="a1", b="b1", c="c1", d="d1")], + cls.tables.text1.insert(), + [dict(a="a1", b="b1", c="c1", d="d1")], ) def test_anon_aliased_overlapping(self, connection): @@ -1756,7 +1761,8 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): stmt = table.insert(values={}, inline=True) self.assert_compile( - stmt, "INSERT INTO sometable (foo) VALUES (foobar())", + stmt, + "INSERT INTO sometable (foo) VALUES (foobar())", ) with testing.expect_deprecated_20( @@ -1765,7 +1771,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): stmt = table.insert(inline=True) self.assert_compile( - stmt, "INSERT INTO sometable (foo) VALUES (foobar())", params={}, + stmt, + "INSERT INTO sometable (foo) VALUES (foobar())", + params={}, ) def test_update_inline_kw_defaults(self): @@ -1808,7 +1816,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): def test_update_whereclause(self): table1 = table( - "mytable", Column("myid", Integer), Column("name", String(30)), + "mytable", + Column("myid", Integer), + Column("name", String(30)), ) with testing.expect_deprecated_20( @@ -1823,7 +1833,9 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): def test_update_values(self): table1 = table( - "mytable", Column("myid", Integer), Column("name", String(30)), + "mytable", + Column("myid", Integer), + Column("name", String(30)), ) with testing.expect_deprecated_20( @@ -1835,7 +1847,10 @@ class DMLTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): ) def test_delete_whereclause(self): - table1 = table("mytable", Column("myid", Integer),) + table1 = table( + "mytable", + Column("myid", Integer), + ) with testing.expect_deprecated_20( "The delete.whereclause parameter will be " diff --git a/test/sql/test_external_traversal.py b/test/sql/test_external_traversal.py index 6b07ebba96..4edc9d0258 100644 --- a/test/sql/test_external_traversal.py +++ b/test/sql/test_external_traversal.py @@ -702,7 +702,9 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): subq = subq.alias("subq") s = select(t1.c.col1, subq.c.col1).select_from( - t1, subq, t1.join(subq, t1.c.col1 == subq.c.col2), + t1, + subq, + t1.join(subq, t1.c.col1 == subq.c.col2), ) s5 = CloningVisitor().traverse(s) eq_(str(s), str(s5)) @@ -2190,7 +2192,8 @@ class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL): compile_state = i._compile_state_factory(i, None) self._compare_param_dict( - compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7}, + compile_state._dict_parameters, + {"col1": 5, "col2": 6, "col3": 7}, ) def test_kw_and_dict_simultaneously_single(self): @@ -2211,7 +2214,8 @@ class ValuesBaseTest(fixtures.TestBase, AssertsCompiledSQL): i = i.values([(5, 6, 7), (8, 9, 10)]) compile_state = i._compile_state_factory(i, None) eq_( - compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7}, + compile_state._dict_parameters, + {"col1": 5, "col2": 6, "col3": 7}, ) eq_(compile_state._has_multi_parameters, True) eq_( diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index 3c6140b81c..f9a8f998ed 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -1008,7 +1008,8 @@ class ExecuteTest(fixtures.TestBase): connection.execute(t2.insert()) connection.execute(t2.insert().values(value=func.length("one"))) connection.execute( - 
t2.insert().values(value=func.length("asfda") + -19), stuff="hi", + t2.insert().values(value=func.length("asfda") + -19), + stuff="hi", ) res = sorted(connection.execute(select(t2.c.value, t2.c.stuff))) diff --git a/test/sql/test_identity_column.py b/test/sql/test_identity_column.py index becb62159b..2564022c2f 100644 --- a/test/sql/test_identity_column.py +++ b/test/sql/test_identity_column.py @@ -57,7 +57,10 @@ class _IdentityDDLFixture(testing.AssertsCompiledSQL): dict(always=False, cache=1000, order=True), "BY DEFAULT AS IDENTITY (CACHE 1000 ORDER)", ), - (dict(order=True), "BY DEFAULT AS IDENTITY (ORDER)",), + ( + dict(order=True), + "BY DEFAULT AS IDENTITY (ORDER)", + ), ) def test_create_ddl(self, identity_args, text): @@ -153,10 +156,15 @@ class NotSupportingIdentityDDL(testing.AssertsCompiledSQL, fixtures.TestBase): MetaData(), Column("foo", Integer(), Identity("always", start=3)), ) - t2 = Table("foo_table", MetaData(), Column("foo", Integer()),) + t2 = Table( + "foo_table", + MetaData(), + Column("foo", Integer()), + ) exp = CreateTable(t2).compile(dialect=testing.db.dialect) self.assert_compile( - CreateTable(t), re.sub(r"[\n\t]", "", str(exp)), + CreateTable(t), + re.sub(r"[\n\t]", "", str(exp)), ) @@ -169,7 +177,9 @@ class IdentityTest(fixtures.TestBase): def fn(**kwargs): Table( - "t", MetaData(), Column("y", Integer, Identity(), **kwargs), + "t", + MetaData(), + Column("y", Integer, Identity(), **kwargs), ) assert_raises_message(ArgumentError, text, fn, server_default="42") diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py index 45a8bccf53..198ff48c08 100644 --- a/test/sql/test_insert_exec.py +++ b/test/sql/test_insert_exec.py @@ -231,7 +231,10 @@ class InsertExecTest(fixtures.TablesTest): "t4", metadata, Column( - "id", Integer, Sequence("t4_id_seq"), primary_key=True, + "id", + Integer, + Sequence("t4_id_seq"), + primary_key=True, ), Column("foo", String(30)), ), @@ -387,7 +390,12 @@ class TableInsertTest(fixtures.TablesTest): Table( "foo", metadata, - Column("id", Integer, Sequence("t_id_seq"), primary_key=True,), + Column( + "id", + Integer, + Sequence("t_id_seq"), + primary_key=True, + ), Column("data", String(50)), Column("x", Integer), ) @@ -397,7 +405,11 @@ class TableInsertTest(fixtures.TablesTest): metadata, # note this will have full AUTO INCREMENT on MariaDB # whereas "foo" will not due to sequence support - Column("id", Integer, primary_key=True,), + Column( + "id", + Integer, + primary_key=True, + ), Column("data", String(50)), Column("x", Integer), ) diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 9999bdc31a..ebcde3c631 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -731,7 +731,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): "Column('foo', Integer(), table=None, primary_key=True, " "nullable=False, onupdate=%s, default=%s, server_default=%s, " "comment='foo')" - % (ColumnDefault(1), ColumnDefault(42), DefaultClause("42"),), + % ( + ColumnDefault(1), + ColumnDefault(42), + DefaultClause("42"), + ), ), ( Table("bar", MetaData(), Column("x", String)), @@ -5243,7 +5247,8 @@ class CopyDialectOptionsTest(fixtures.TestBase): @classmethod def check_dialect_options_(cls, t): eq_( - t.dialect_kwargs["copydialectoptionstest_some_table_arg"], "a1", + t.dialect_kwargs["copydialectoptionstest_some_table_arg"], + "a1", ) eq_( t.c.foo.dialect_kwargs["copydialectoptionstest_some_column_arg"], @@ -5286,7 +5291,9 @@ class CopyDialectOptionsTest(fixtures.TestBase): 
copydialectoptionstest_some_table_arg="a1", ) Index( - "idx", t1.c.foo, copydialectoptionstest_some_index_arg="a4", + "idx", + t1.c.foo, + copydialectoptionstest_some_index_arg="a4", ) self.check_dialect_options_(t1) diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index 3eb0c449f7..2f92738594 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -1072,8 +1072,7 @@ class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL): class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL): - """test interaction of and_()/or_() with boolean , null constants - """ + """test interaction of and_()/or_() with boolean , null constants""" __dialect__ = default.DefaultDialect(supports_native_boolean=True) @@ -1851,7 +1850,8 @@ class InTest(fixtures.TestBase, testing.AssertsCompiledSQL): ) .select_from( self.table1.join( - self.table2, self.table1.c.myid == self.table2.c.otherid, + self.table2, + self.table1.c.myid == self.table2.c.otherid, ) ) .order_by(self.table1.c.myid), diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 9b3ededcd7..9f66a2ef59 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -157,7 +157,8 @@ class QueryTest(fixtures.TestBase): eq_(connection.execute(select(or_(true, false))).scalar(), True) eq_(connection.execute(select(or_(false, false))).scalar(), False) eq_( - connection.execute(select(not_(or_(false, false)))).scalar(), True, + connection.execute(select(not_(or_(false, false)))).scalar(), + True, ) row = connection.execute( @@ -174,7 +175,8 @@ class QueryTest(fixtures.TestBase): def test_select_tuple(self, connection): connection.execute( - users.insert(), {"user_id": 1, "user_name": "apples"}, + users.insert(), + {"user_id": 1, "user_name": "apples"}, ) assert_raises_message( @@ -351,7 +353,8 @@ class QueryTest(fixtures.TestBase): return "INT_%d" % value eq_( - connection.scalar(select(cast("INT_5", type_=MyInteger))), "INT_5", + connection.scalar(select(cast("INT_5", type_=MyInteger))), + "INT_5", ) eq_( connection.scalar( @@ -1213,7 +1216,8 @@ class CompoundTest(fixtures.TestBase): @testing.fails_on("sqlite", "FIXME: unknown") def test_union_all(self, connection): e = union_all( - select(t1.c.col3), union(select(t1.c.col3), select(t1.c.col3)), + select(t1.c.col3), + union(select(t1.c.col3), select(t1.c.col3)), ) wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)] @@ -1734,35 +1738,45 @@ class JoinTest(fixtures.TestBase): for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id): expr = ( select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id) - .where(t1.c.name == "t1 #10",) + .where( + t1.c.name == "t1 #10", + ) .select_from((t1.join(t2).outerjoin(t3, criteria))) ) self.assertRows(expr, [(10, 20, 30)]) expr = ( select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id) - .where(t2.c.name == "t2 #20",) + .where( + t2.c.name == "t2 #20", + ) .select_from((t1.join(t2).outerjoin(t3, criteria))) ) self.assertRows(expr, [(10, 20, 30)]) expr = ( select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id) - .where(t3.c.name == "t3 #30",) + .where( + t3.c.name == "t3 #30", + ) .select_from((t1.join(t2).outerjoin(t3, criteria))) ) self.assertRows(expr, [(10, 20, 30)]) expr = ( select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id) - .where(and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"),) + .where( + and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"), + ) .select_from((t1.join(t2).outerjoin(t3, criteria))) ) self.assertRows(expr, [(10, 20, 30)]) expr = ( select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id) - 
.where(and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"),) + .where( + and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"), + ) .select_from((t1.join(t2).outerjoin(t3, criteria))) ) self.assertRows(expr, [(10, 20, 30)]) diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index 504ed40646..2dee9bc09d 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -825,7 +825,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): t2 = Table("t2", m, Column("x", Integer), quote=True) self.assert_compile( - select(t2.c.x).apply_labels(), 'SELECT "t2".x AS "t2_x" FROM "t2"', + select(t2.c.x).apply_labels(), + 'SELECT "t2".x AS "t2_x" FROM "t2"', ) diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index 67f347ad3b..7d62665417 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -1474,7 +1474,9 @@ class KeyTargetingTest(fixtures.TablesTest): Column("team_id", metadata, ForeignKey("teams.id")), ) Table( - "teams", metadata, Column("id", Integer, primary_key=True), + "teams", + metadata, + Column("id", Integer, primary_key=True), ) @classmethod @@ -1847,7 +1849,8 @@ class KeyTargetingTest(fixtures.TablesTest): # this has _result_columns structure that is not ordered # the same as the cursor.description. return text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns( - keyed2_b=CHAR, keyed2_a=CHAR, + keyed2_b=CHAR, + keyed2_a=CHAR, ) def _adapt_result_columns_fixture_seven(self): @@ -1970,7 +1973,8 @@ class PositionalTextTest(fixtures.TablesTest): @classmethod def insert_data(cls, connection): connection.execute( - cls.tables.text1.insert(), [dict(a="a1", b="b1", c="c1", d="d1")], + cls.tables.text1.insert(), + [dict(a="a1", b="b1", c="c1", d="d1")], ) def test_via_column(self, connection): @@ -2589,7 +2593,8 @@ class MergeCursorResultTest(fixtures.TablesTest): result = r1.merge(r2, r3, r4) eq_( - result.first(), (7, "u1"), + result.first(), + (7, "u1"), ) for r in [r1, r2, r3, r4]: assert r.closed diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py index 26d4969c87..601bd62730 100644 --- a/test/sql/test_returning.py +++ b/test/sql/test_returning.py @@ -260,7 +260,12 @@ class SequenceReturningTest(fixtures.TestBase): table = Table( "tables", meta, - Column("id", Integer, seq, primary_key=True,), + Column( + "id", + Integer, + seq, + primary_key=True, + ), Column("data", String(50)), ) with testing.db.connect() as conn: diff --git a/test/sql/test_roles.py b/test/sql/test_roles.py index 4feba97aec..8759bbb22f 100644 --- a/test/sql/test_roles.py +++ b/test/sql/test_roles.py @@ -150,14 +150,16 @@ class RoleTest(fixtures.TestBase): "implicitly coercing SELECT object to scalar subquery" ): expect( - roles.LabeledColumnExprRole, select(column("q")), + roles.LabeledColumnExprRole, + select(column("q")), ) with testing.expect_warnings( "implicitly coercing SELECT object to scalar subquery" ): expect( - roles.LabeledColumnExprRole, select(column("q")).alias(), + roles.LabeledColumnExprRole, + select(column("q")).alias(), ) def test_statement_no_text_coercion(self): diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index d09fe76e1b..b98fbd3d07 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -2898,7 +2898,8 @@ class WithLabelsTest(fixtures.TestBase): def test_labels_overlap_label(self): sel = self._labels_overlap().apply_labels() eq_( - list(sel.selected_columns.keys()), ["t_x_id", "t_x_id_1"], + list(sel.selected_columns.keys()), + ["t_x_id", "t_x_id_1"], ) eq_( 
list(sel.subquery().c.keys()), @@ -2941,10 +2942,12 @@ class WithLabelsTest(fixtures.TestBase): def test_keylabels_overlap_labels_dont_label(self): sel = self._keylabels_overlap_labels_dont().apply_labels() eq_( - list(sel.selected_columns.keys()), ["t_x_id", "t_x_b_1"], + list(sel.selected_columns.keys()), + ["t_x_id", "t_x_b_1"], ) eq_( - list(sel.subquery().c.keys()), ["t_x_id", "t_x_b_1"], + list(sel.subquery().c.keys()), + ["t_x_id", "t_x_b_1"], ) self._assert_result_keys(sel, ["t_a", "t_x_b"]) self._assert_subq_result_keys(sel, ["t_a", "t_x_b"]) @@ -2965,7 +2968,8 @@ class WithLabelsTest(fixtures.TestBase): def test_keylabels_overlap_labels_overlap_label(self): sel = self._keylabels_overlap_labels_overlap().apply_labels() eq_( - list(sel.selected_columns.keys()), ["t_x_a", "t_x_id_1"], + list(sel.selected_columns.keys()), + ["t_x_a", "t_x_id_1"], ) # deduping for different cols but same label diff --git a/test/sql/test_sequences.py b/test/sql/test_sequences.py index 243ccfbab4..e609a8a916 100644 --- a/test/sql/test_sequences.py +++ b/test/sql/test_sequences.py @@ -124,14 +124,14 @@ class LegacySequenceExecTest(fixtures.TestBase): def test_explicit_optional(self): """test dialect executes a Sequence, returns nextval, whether - or not "optional" is set """ + or not "optional" is set""" s = Sequence("my_sequence", optional=True) self._assert_seq_result(s.execute(testing.db)) def test_func_implicit_connectionless_execute(self): """test func.next_value().execute()/.scalar() works - with connectionless execution. """ + with connectionless execution.""" s = Sequence("my_sequence", metadata=MetaData(testing.db)) self._assert_seq_result(s.next_value().execute().scalar()) @@ -178,21 +178,21 @@ class SequenceExecTest(fixtures.TestBase): def test_execute_optional(self, connection): """test dialect executes a Sequence, returns nextval, whether - or not "optional" is set """ + or not "optional" is set""" s = Sequence("my_sequence", optional=True) self._assert_seq_result(connection.execute(s)) def test_execute_next_value(self, connection): """test func.next_value().execute()/.scalar() works - with connectionless execution. """ + with connectionless execution.""" s = Sequence("my_sequence") self._assert_seq_result(connection.scalar(s.next_value())) def test_execute_optional_next_value(self, connection): """test func.next_value().execute()/.scalar() works - with connectionless execution. 
""" + with connectionless execution.""" s = Sequence("my_sequence", optional=True) self._assert_seq_result(connection.scalar(s.next_value())) @@ -225,7 +225,11 @@ class SequenceExecTest(fixtures.TestBase): """test can use next_value() in values() of _ValuesBase""" metadata = self.metadata - t1 = Table("t", metadata, Column("x", Integer),) + t1 = Table( + "t", + metadata, + Column("x", Integer), + ) t1.create(testing.db) s = Sequence("my_sequence") connection.execute(t1.insert().values(x=s.next_value())) @@ -263,7 +267,15 @@ class SequenceExecTest(fixtures.TestBase): metadata = self.metadata s = Sequence("my_sequence") - t1 = Table("t", metadata, Column("x", Integer, primary_key=True,),) + t1 = Table( + "t", + metadata, + Column( + "x", + Integer, + primary_key=True, + ), + ) t1.create(testing.db) e = engines.testing_engine(options={"implicit_returning": True}) @@ -424,7 +436,11 @@ class TableBoundSequenceTest(fixtures.TablesTest): Table( "Manager", metadata, - Column("obj_id", Integer, Sequence("obj_id_seq"),), + Column( + "obj_id", + Integer, + Sequence("obj_id_seq"), + ), Column("name", String(128)), Column( "id", @@ -477,10 +493,26 @@ class TableBoundSequenceTest(fixtures.TablesTest): conn.execute(sometable.select().order_by(sometable.c.id)) ), [ - (dsb, "somename", dsb,), - (dsb + 1, "someother", dsb + 1,), - (dsb + 2, "name3", dsb + 2,), - (dsb + 3, "name4", dsb + 3,), + ( + dsb, + "somename", + dsb, + ), + ( + dsb + 1, + "someother", + dsb + 1, + ), + ( + dsb + 2, + "name3", + dsb + 2, + ), + ( + dsb + 3, + "name4", + dsb + 3, + ), ], ) diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 5464750dbf..efa622b135 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -3236,7 +3236,8 @@ class BooleanTest( ) eq_( - conn.scalar(select(boolean_table.c.unconstrained_value)), True, + conn.scalar(select(boolean_table.c.unconstrained_value)), + True, ) def test_bind_processor_coercion_native_true(self): diff --git a/test/sql/test_update.py b/test/sql/test_update.py index 8be5868dbf..201e6c64fe 100644 --- a/test/sql/test_update.py +++ b/test/sql/test_update.py @@ -461,7 +461,9 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): self.assert_compile( update(table1) .where(table1.c.name == bindparam("crit")) - .values({table1.c.name: "hi"},), + .values( + {table1.c.name: "hi"}, + ), "UPDATE mytable SET name=:name WHERE mytable.name = :crit", params={"crit": "notthere"}, checkparams={"crit": "notthere", "name": "hi"}, @@ -473,7 +475,9 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): self.assert_compile( update(table1) .where(table1.c.myid == 12) - .values({table1.c.name: table1.c.myid},), + .values( + {table1.c.name: table1.c.myid}, + ), "UPDATE mytable " "SET name=mytable.myid, description=:description " "WHERE mytable.myid = :myid_1", diff --git a/test/sql/test_values.py b/test/sql/test_values.py index 3b0544278a..1e4f224429 100644 --- a/test/sql/test_values.py +++ b/test/sql/test_values.py @@ -117,7 +117,8 @@ class ValuesTest(fixtures.TablesTest, AssertsCompiledSQL): def test_with_join_unnamed(self): people = self.tables.people values = Values( - column("column1", Integer), column("column2", Integer), + column("column1", Integer), + column("column2", Integer), ).data([(1, 1), (2, 1), (3, 2), (3, 3)]) stmt = select(people, values).select_from( people.join(values, values.c.column2 == people.c.people_id) diff --git a/tox.ini b/tox.ini index 71e1828a70..d319979fe9 100644 --- a/tox.ini +++ b/tox.ini @@ -111,7 
+111,7 @@ deps= # in case it requires a version pin pydocstyle pygments - black==19.10b0 + black==20.8b1 commands = flake8 ./lib/ ./test/ ./examples/ setup.py doc/build/conf.py {posargs} black --check .
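
Most hunks in this diff are the mechanical result of black 20.8b1's "magic trailing comma" rule: a call or literal that already ends in a trailing comma stays expanded, one element per line, even when it would fit within the line limit, while the docstring hunks appear to come from 20.8b1's new docstring normalization (trailing whitespace trimmed, closing quotes moved onto the final text line). The sketch below is an editorial illustration of the trailing-comma rule only, using a hypothetical configure() stub; it is not code from the repository or part of the commit.

# Editorial illustration (not part of the commit): a minimal sketch of the
# black 20.8b1 "magic trailing comma" behavior that produces most of the
# hunks above. The stub name `configure` and its parameters are invented
# for demonstration purposes only.


def configure(url, echo=False):
    """Stand-in for any factory call reformatted in this diff."""
    return url, echo


# Without a trailing comma, black 20.8b1 keeps a call on one line when it fits.
kept_on_one_line = configure("sqlite://", echo=True)

# With a pre-existing trailing comma, black 20.8b1 keeps the call exploded,
# one argument per line -- the shape most call sites above were rewritten to.
kept_exploded = configure(
    "sqlite://",
    echo=True,
)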