git.ipfire.org Git - thirdparty/sqlalchemy/alembic.git/commitdiff
Update black
author     Federico Caselli <cfederico87@gmail.com>
           Thu, 8 Jun 2023 19:42:44 +0000 (21:42 +0200)
committer  Federico Caselli <cfederico87@gmail.com>
           Thu, 8 Jun 2023 19:42:44 +0000 (21:42 +0200)
Change-Id: I836b9a322819c07ec202fdcca4fd52818b994bff
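
This commit bumps black from 22.3.0 to 23.3.0 in both .pre-commit-config.yaml and
tox.ini; the Python hunks that follow are the resulting reformat, almost entirely
the removal of blank lines that 22.x allowed at the start of an indented block and
before class docstrings, plus one trailing comma in tools/write_pyi.py. A minimal
illustration of the style change (hypothetical snippet, not taken from the repository):

    # Accepted by black 22.3.0: a blank line directly after the signature.
    def greet_before(name: str) -> str:

        return f"hello, {name}"


    # black 23.3.0 removes the blank line at the top of the block.
    def greet_after(name: str) -> str:
        return f"hello, {name}"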

42 files changed:
.pre-commit-config.yaml
alembic/autogenerate/api.py
alembic/autogenerate/compare.py
alembic/autogenerate/render.py
alembic/command.py
alembic/config.py
alembic/ddl/impl.py
alembic/ddl/mssql.py
alembic/ddl/mysql.py
alembic/ddl/postgresql.py
alembic/ddl/sqlite.py
alembic/operations/batch.py
alembic/operations/ops.py
alembic/operations/toimpl.py
alembic/runtime/environment.py
alembic/runtime/migration.py
alembic/script/base.py
alembic/script/revision.py
alembic/script/write_hooks.py
alembic/testing/assertions.py
alembic/testing/env.py
alembic/testing/fixtures.py
alembic/testing/suite/_autogen_fixtures.py
alembic/util/langhelpers.py
alembic/util/pyfiles.py
alembic/util/sqla_compat.py
tests/test_autogen_diffs.py
tests/test_autogen_indexes.py
tests/test_autogen_render.py
tests/test_batch.py
tests/test_command.py
tests/test_external_dialect.py
tests/test_offline_environment.py
tests/test_op.py
tests/test_post_write.py
tests/test_postgresql.py
tests/test_revision.py
tests/test_script_consumption.py
tests/test_sqlite.py
tests/test_version_traversal.py
tools/write_pyi.py
tox.ini

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c4397b96a950ef2fd2e4898da8d23e1cce0add46..8d68141e0379763fba3bf8bb41e8d70aa2d4d2fb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,19 +2,19 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 -   repo: https://github.com/python/black
-    rev: 22.3.0
+    rev: 23.3.0
     hooks:
     -   id: black
 
 -   repo: https://github.com/sqlalchemyorg/zimports
-    rev: v0.4.0
+    rev: v0.6.0
     hooks:
     -   id: zimports
         args:
             - --keep-unused-type-checking
 
 -   repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
+    rev: 6.0.0
     hooks:
     -   id: flake8
         additional_dependencies:
diff --git a/alembic/autogenerate/api.py b/alembic/autogenerate/api.py
index c4ec5c1c8b56d206aeda25c4ba89b2675bb7d82d..064bca9fdeb7903ff20193fa65af136431bd56c6 100644
--- a/alembic/autogenerate/api.py
+++ b/alembic/autogenerate/api.py
@@ -331,7 +331,6 @@ class AutogenContext:
         opts: Optional[dict] = None,
         autogenerate: bool = True,
     ) -> None:
-
         if (
             autogenerate
             and migration_context is not None
@@ -431,7 +430,6 @@ class AutogenContext:
                     parent_names["schema_qualified_table_name"] = table_name
 
         for fn in self._name_filters:
-
             if not fn(name, type_, parent_names):
                 return False
         else:
diff --git a/alembic/autogenerate/compare.py b/alembic/autogenerate/compare.py
index 031d683baa5b42cc2f5644f2bb4f790ffc972beb..db32a6a4f368d9275b3394a5982ebd543aa19052 100644
--- a/alembic/autogenerate/compare.py
+++ b/alembic/autogenerate/compare.py
@@ -66,7 +66,6 @@ comparators = util.Dispatcher(uselist=True)
 def _produce_net_changes(
     autogen_context: AutogenContext, upgrade_ops: UpgradeOps
 ) -> None:
-
     connection = autogen_context.connection
     assert connection is not None
     include_schemas = autogen_context.opts.get("include_schemas", False)
@@ -145,7 +144,6 @@ def _compare_tables(
     upgrade_ops: UpgradeOps,
     autogen_context: AutogenContext,
 ) -> None:
-
     default_schema = inspector.bind.dialect.default_schema_name
 
     # tables coming from the connection will not have "schema"
@@ -214,7 +212,6 @@ def _compare_tables(
             )
             sqla_compat._reflect_table(inspector, t)
         if autogen_context.run_object_filters(t, tname, "table", True, None):
-
             modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
 
             comparators.dispatch("table")(
@@ -255,7 +252,6 @@ def _compare_tables(
         if autogen_context.run_object_filters(
             metadata_table, tname, "table", False, conn_table
         ):
-
             modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
             with _compare_columns(
                 s,
@@ -266,7 +262,6 @@ def _compare_tables(
                 autogen_context,
                 inspector,
             ):
-
                 comparators.dispatch("table")(
                     autogen_context,
                     modify_table_ops,
@@ -534,7 +529,6 @@ def _compare_indexes_and_uniques(
     conn_table: Optional[Table],
     metadata_table: Optional[Table],
 ) -> None:
-
     inspector = autogen_context.inspector
     is_create_table = conn_table is None
     is_drop_table = metadata_table is None
@@ -913,7 +907,6 @@ def _correct_for_uq_duplicates_uix(
                 _uq_constraint_sig(uqs_dupe_indexes[overlap]).sig
                 not in unnamed_metadata_uqs
             ):
-
                 conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
         elif overlap not in metadata_ix_names:
             conn_indexes.discard(conn_ix_names[overlap])
@@ -929,7 +922,6 @@ def _compare_nullable(
     conn_col: Column[Any],
     metadata_col: Column[Any],
 ) -> None:
-
     metadata_col_nullable = metadata_col.nullable
     conn_col_nullable = conn_col.nullable
     alter_column_op.existing_nullable = conn_col_nullable
@@ -971,7 +963,6 @@ def _setup_autoincrement(
     conn_col: Column[Any],
     metadata_col: Column[Any],
 ) -> None:
-
     if metadata_col.table._autoincrement_column is metadata_col:
         alter_column_op.kw["autoincrement"] = True
     elif metadata_col.autoincrement is True:
@@ -990,7 +981,6 @@ def _compare_type(
     conn_col: Column[Any],
     metadata_col: Column[Any],
 ) -> None:
-
     conn_type = conn_col.type
     alter_column_op.existing_type = conn_type
     metadata_type = metadata_col.type
@@ -1027,7 +1017,6 @@ def _compare_type(
 def _render_server_default_for_compare(
     metadata_default: Optional[Any], autogen_context: AutogenContext
 ) -> Optional[str]:
-
     if isinstance(metadata_default, sa_schema.DefaultClause):
         if isinstance(metadata_default.arg, str):
             metadata_default = metadata_default.arg
@@ -1129,7 +1118,6 @@ def _compare_server_default(
     conn_col: Column[Any],
     metadata_col: Column[Any],
 ) -> Optional[bool]:
-
     metadata_default = metadata_col.server_default
     conn_col_default = conn_col.server_default
     if conn_col_default is None and metadata_default is None:
@@ -1218,7 +1206,6 @@ def _compare_column_comment(
     conn_col: Column[Any],
     metadata_col: Column[Any],
 ) -> Optional[Literal[False]]:
-
     assert autogen_context.dialect is not None
     if not autogen_context.dialect.supports_comments:
         return None
@@ -1246,7 +1233,6 @@ def _compare_foreign_keys(
     conn_table: Optional[Table],
     metadata_table: Optional[Table],
 ) -> None:
-
     # if we're doing CREATE TABLE, all FKs are created
     # inline within the table def
     if conn_table is None or metadata_table is None:
@@ -1366,7 +1352,6 @@ def _compare_table_comment(
     conn_table: Optional[Table],
     metadata_table: Optional[Table],
 ) -> None:
-
     assert autogen_context.dialect is not None
     if not autogen_context.dialect.supports_comments:
         return
diff --git a/alembic/autogenerate/render.py b/alembic/autogenerate/render.py
index 00d1d2fe56772a857311e4f326528512ff672987..215af8ce5e53504c1d8f1d5cda0cdc5cae546bd6 100644
--- a/alembic/autogenerate/render.py
+++ b/alembic/autogenerate/render.py
@@ -94,7 +94,6 @@ def _render_cmd_body(
     op_container: ops.OpContainer,
     autogen_context: AutogenContext,
 ) -> str:
-
     buf = StringIO()
     printer = PythonPrinter(buf)
 
@@ -165,7 +164,6 @@ def _render_modify_table(
 def _render_create_table_comment(
     autogen_context: AutogenContext, op: ops.CreateTableCommentOp
 ) -> str:
-
     templ = (
         "{prefix}create_table_comment(\n"
         "{indent}'{tname}',\n"
@@ -190,7 +188,6 @@ def _render_create_table_comment(
 def _render_drop_table_comment(
     autogen_context: AutogenContext, op: ops.DropTableCommentOp
 ) -> str:
-
     templ = (
         "{prefix}drop_table_comment(\n"
         "{indent}'{tname}',\n"
@@ -362,7 +359,6 @@ def _add_unique_constraint(
 def _add_fk_constraint(
     autogen_context: AutogenContext, op: ops.CreateForeignKeyOp
 ) -> str:
-
     args = [repr(_render_gen_name(autogen_context, op.constraint_name))]
     if not autogen_context._has_batch:
         args.append(repr(_ident(op.source_table)))
@@ -411,7 +407,6 @@ def _add_check_constraint(constraint, autogen_context):
 def _drop_constraint(
     autogen_context: AutogenContext, op: ops.DropConstraintOp
 ) -> str:
-
     if autogen_context._has_batch:
         template = "%(prefix)sdrop_constraint" "(%(name)r, type_=%(type)r)"
     else:
@@ -432,7 +427,6 @@ def _drop_constraint(
 
 @renderers.dispatch_for(ops.AddColumnOp)
 def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str:
-
     schema, tname, column = op.schema, op.table_name, op.column
     if autogen_context._has_batch:
         template = "%(prefix)sadd_column(%(column)s)"
@@ -452,7 +446,6 @@ def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str:
 
 @renderers.dispatch_for(ops.DropColumnOp)
 def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
-
     schema, tname, column_name = op.schema, op.table_name, op.column_name
 
     if autogen_context._has_batch:
@@ -476,7 +469,6 @@ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
 def _alter_column(
     autogen_context: AutogenContext, op: ops.AlterColumnOp
 ) -> str:
-
     tname = op.table_name
     cname = op.column_name
     server_default = op.modify_server_default
@@ -566,7 +558,6 @@ def _render_potential_expr(
     is_server_default: bool = False,
 ) -> str:
     if isinstance(value, sql.ClauseElement):
-
         if wrap_in_text:
             template = "%(prefix)stext(%(sql)r)"
         else:
@@ -675,7 +666,6 @@ def _render_column(
     opts: List[Tuple[str, Any]] = []
 
     if column.server_default:
-
         rendered = _render_server_default(  # type:ignore[assignment]
             column.server_default, autogen_context
         )
@@ -984,7 +974,6 @@ def _fk_colspec(
 def _populate_render_fk_opts(
     constraint: ForeignKeyConstraint, opts: List[Tuple[str, str]]
 ) -> None:
-
     if constraint.onupdate:
         opts.append(("onupdate", repr(constraint.onupdate)))
     if constraint.ondelete:
diff --git a/alembic/command.py b/alembic/command.py
index a015be398fc6f0440aa79c9d84be6e7da71329b7..f5e1ee06783d370682114de7ef0ce3319b2a50ae 100644
--- a/alembic/command.py
+++ b/alembic/command.py
@@ -494,7 +494,6 @@ def history(
         for sc in script.walk_revisions(
             base=base or "base", head=head or "heads"
         ):
-
             if indicate_current:
                 sc._db_current_indicator = sc.revision in currents
 
diff --git a/alembic/config.py b/alembic/config.py
index 1577ce9facb0b0df28bc40e58d1c54e76cb4f146..d01173cbdcdae16613eb8f62f2f7d1d1acbe9ae2 100644
--- a/alembic/config.py
+++ b/alembic/config.py
@@ -24,7 +24,6 @@ from .util import compat
 
 
 class Config:
-
     r"""Represent an Alembic configuration.
 
     Within an ``env.py`` script, this is available
@@ -563,7 +562,6 @@ class CommandLine:
                 and fn.__name__[0] != "_"
                 and fn.__module__ == "alembic.command"
             ):
-
                 spec = compat.inspect_getfullargspec(fn)
                 if spec[3] is not None:
                     positional = spec[0][1 : -len(spec[3])]
diff --git a/alembic/ddl/impl.py b/alembic/ddl/impl.py
index 03f134d584fc641da9387666f337e7be5a71e690..726f16867b8e14f2a33963524657e7f31937c043 100644
--- a/alembic/ddl/impl.py
+++ b/alembic/ddl/impl.py
@@ -637,7 +637,6 @@ class DefaultImpl(metaclass=ImplMeta):
         return False
 
     def _compare_identity_default(self, metadata_identity, inspector_identity):
-
         # ignored contains the attributes that were not considered
         # because assumed to their default values in the db.
         diff, ignored = _compare_identity_options(
diff --git a/alembic/ddl/mssql.py b/alembic/ddl/mssql.py
index 10c1a6b986c063062a8c428dbc707c2c48ec5726..56dd12c35ea3f640faeba284a99bb120c7464791 100644
--- a/alembic/ddl/mssql.py
+++ b/alembic/ddl/mssql.py
@@ -98,7 +98,6 @@ class MSSQLImpl(DefaultImpl):
         existing_nullable: Optional[bool] = None,
         **kw: Any,
     ) -> None:
-
         if nullable is not None:
             if type_ is not None:
                 # the NULL/NOT NULL alter will handle
@@ -231,9 +230,7 @@ class MSSQLImpl(DefaultImpl):
         rendered_metadata_default,
         rendered_inspector_default,
     ):
-
         if rendered_metadata_default is not None:
-
             rendered_metadata_default = re.sub(
                 r"[\(\) \"\']", "", rendered_metadata_default
             )
diff --git a/alembic/ddl/mysql.py b/alembic/ddl/mysql.py
index 5e66f53823bc29f5a028b682d532a2a5deea8cdd..32ced498b13017d823e5d3ab25d7faca659d5c23 100644
--- a/alembic/ddl/mysql.py
+++ b/alembic/ddl/mysql.py
@@ -240,7 +240,6 @@ class MySQLImpl(DefaultImpl):
         metadata_unique_constraints,
         metadata_indexes,
     ):
-
         # TODO: if SQLA 1.0, make use of "duplicates_index"
         # metadata
         removed = set()
diff --git a/alembic/ddl/postgresql.py b/alembic/ddl/postgresql.py
index e3ada90827af2b1230b99ba6edd4b8a462fceb14..c2d31062156423b1a71839b13003b1774f5fa331 100644
--- a/alembic/ddl/postgresql.py
+++ b/alembic/ddl/postgresql.py
@@ -90,7 +90,6 @@ class PostgresqlImpl(DefaultImpl):
         self._exec(CreateIndex(index))
 
     def prep_table_for_batch(self, batch_impl, table):
-
         for constraint in table.constraints:
             if (
                 constraint.name is not None
@@ -157,7 +156,6 @@ class PostgresqlImpl(DefaultImpl):
         existing_autoincrement: Optional[bool] = None,
         **kw: Any,
     ) -> None:
-
         using = kw.pop("postgresql_using", None)
 
         if using is not None and type_ is None:
@@ -239,7 +237,6 @@ class PostgresqlImpl(DefaultImpl):
         metadata_unique_constraints,
         metadata_indexes,
     ):
-
         doubled_constraints = {
             index
             for index in conn_indexes
@@ -638,7 +635,6 @@ def _render_inline_exclude_constraint(
 
 
 def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str:
-
     imports = autogen_context.imports
     if imports is not None:
         imports.add("from sqlalchemy.dialects import postgresql")
diff --git a/alembic/ddl/sqlite.py b/alembic/ddl/sqlite.py
index 67a1c2845984f996eb7522bac48d8d78f1dd8719..c6186c60a91892dedeb1eeecf3c3d0337ba9c9a8 100644
--- a/alembic/ddl/sqlite.py
+++ b/alembic/ddl/sqlite.py
@@ -100,7 +100,6 @@ class SQLiteImpl(DefaultImpl):
         rendered_metadata_default: Optional[str],
         rendered_inspector_default: Optional[str],
     ) -> bool:
-
         if rendered_metadata_default is not None:
             rendered_metadata_default = re.sub(
                 r"^\((.+)\)$", r"\1", rendered_metadata_default
@@ -193,7 +192,6 @@ class SQLiteImpl(DefaultImpl):
         metadata_unique_constraints,
         metadata_indexes,
     ):
-
         self._skip_functional_indexes(metadata_indexes, conn_indexes)
 
 
diff --git a/alembic/operations/batch.py b/alembic/operations/batch.py
index 5b6b54775fb1a3d1ff15415b7697e4476c091031..e4413dd3372932f1fdecd8f6885ad0a972664388 100644
--- a/alembic/operations/batch.py
+++ b/alembic/operations/batch.py
@@ -337,7 +337,6 @@ class ApplyBatchImpl:
         for const in (
             list(self.named_constraints.values()) + self.unnamed_constraints
         ):
-
             const_columns = {c.key for c in _columns_for_constraint(const)}
 
             if not const_columns.issubset(self.column_transfers):
diff --git a/alembic/operations/ops.py b/alembic/operations/ops.py
index ae929fae53d51b3db878537e11ceff1f4864ca60..4d9001212ccc56150b6cc948efd6bdcade9450ab 100644
--- a/alembic/operations/ops.py
+++ b/alembic/operations/ops.py
@@ -176,7 +176,6 @@ class DropConstraintOp(MigrateOperation):
         )
 
     def to_constraint(self) -> Constraint:
-
         if self._reverse is not None:
             constraint = self._reverse.to_constraint()
             constraint.name = self.constraint_name
@@ -397,7 +396,6 @@ class CreateUniqueConstraintOp(AddConstraintOp):
     def from_constraint(
         cls, constraint: Constraint
     ) -> CreateUniqueConstraintOp:
-
         constraint_table = sqla_compat._table_for_constraint(constraint)
 
         uq_constraint = cast("UniqueConstraint", constraint)
@@ -535,7 +533,6 @@ class CreateForeignKeyOp(AddConstraintOp):
 
     @classmethod
     def from_constraint(cls, constraint: Constraint) -> CreateForeignKeyOp:
-
         fk_constraint = cast("ForeignKeyConstraint", constraint)
         kw: dict = {}
         if fk_constraint.onupdate:
@@ -1758,7 +1755,6 @@ class AlterColumnOp(AlterTableOp):
             return False
 
     def reverse(self) -> AlterColumnOp:
-
         kw = self.kw.copy()
         kw["existing_type"] = self.existing_type
         kw["existing_nullable"] = self.existing_nullable
diff --git a/alembic/operations/toimpl.py b/alembic/operations/toimpl.py
index add142de37368870e2b2be382185e21187d98842..72229c6c7c70f4d8004ff7775331b16277a943cd 100644
--- a/alembic/operations/toimpl.py
+++ b/alembic/operations/toimpl.py
@@ -14,7 +14,6 @@ if TYPE_CHECKING:
 def alter_column(
     operations: "Operations", operation: "ops.AlterColumnOp"
 ) -> None:
-
     compiler = operations.impl.dialect.statement_compiler(
         operations.impl.dialect, None
     )
diff --git a/alembic/runtime/environment.py b/alembic/runtime/environment.py
index acd5cd1ebb2b2502ad93a676531bddc2c8bb05eb..e5dfa59e33b55e1477378ebc2ade8ce37a24f882 100644
--- a/alembic/runtime/environment.py
+++ b/alembic/runtime/environment.py
@@ -25,7 +25,6 @@ from .. import util
 from ..operations import Operations
 
 if TYPE_CHECKING:
-
     from sqlalchemy.engine import URL
     from sqlalchemy.engine.base import Connection
     from sqlalchemy.sql.elements import ClauseElement
diff --git a/alembic/runtime/migration.py b/alembic/runtime/migration.py
index 1715e8af9edcf330eba732a520d96e78e2008f39..2c10ad599a5233cc5ed494a07ad7a4dbc35772be 100644
--- a/alembic/runtime/migration.py
+++ b/alembic/runtime/migration.py
@@ -522,7 +522,6 @@ class MigrationContext:
             if start_from_rev == "base":
                 start_from_rev = None
             elif start_from_rev is not None and self.script:
-
                 start_from_rev = [
                     cast("Script", self.script.get_revision(sfr)).revision
                     for sfr in util.to_list(start_from_rev)
@@ -614,7 +613,6 @@ class MigrationContext:
         assert self._migrations_fn is not None
         for step in self._migrations_fn(heads, self):
             with self.begin_transaction(_per_migration=True):
-
                 if self.as_sql and not head_maintainer.heads:
                     # for offline mode, include a CREATE TABLE from
                     # the base
@@ -733,7 +731,6 @@ class MigrationContext:
         rendered_metadata_default: Optional[str],
         rendered_column_default: Optional[str],
     ) -> bool:
-
         if self._user_compare_server_default is False:
             return False
 
@@ -1000,7 +997,6 @@ class MigrationInfo:
 
 
 class MigrationStep:
-
     from_revisions_no_deps: Tuple[str, ...]
     to_revisions_no_deps: Tuple[str, ...]
     is_upgrade: bool
diff --git a/alembic/script/base.py b/alembic/script/base.py
index cf929c978d2d6da06ad59e25310c8fbb6a192943..9894b4c3318c7d02857d0eee6b5abfbd5f119a00 100644
--- a/alembic/script/base.py
+++ b/alembic/script/base.py
@@ -479,7 +479,6 @@ class ScriptDirectory:
             multiple_heads="Multiple heads are present; please specify a "
             "single target revision"
         ):
-
             heads_revs = self.get_revisions(heads)
 
             steps = []
@@ -502,7 +501,6 @@ class ScriptDirectory:
             dests = self.get_revisions(revision) or [None]
 
             for dest in dests:
-
                 if dest is None:
                     # dest is 'base'.  Return a "delete branch" migration
                     # for all applicable heads.
diff --git a/alembic/script/revision.py b/alembic/script/revision.py
index 39152969f067bb92acc174bbf758140db296ef89..fe9ff616d6258a7714048606d9923e5f6879afe5 100644
--- a/alembic/script/revision.py
+++ b/alembic/script/revision.py
@@ -336,7 +336,6 @@ class RevisionMap:
                     and not parent._is_real_branch_point
                     and not parent.is_merge_point
                 ):
-
                     parent.branch_labels.update(revision.branch_labels)
                     if parent.down_revision:
                         parent = map_[parent.down_revision]
@@ -813,7 +812,6 @@ class RevisionMap:
         omit_immediate_dependencies: bool = False,
         include_dependencies: bool = True,
     ) -> Iterator[Any]:
-
         if omit_immediate_dependencies:
 
             def fn(rev):
@@ -843,7 +841,6 @@ class RevisionMap:
         check: bool = False,
         include_dependencies: bool = True,
     ) -> Iterator[Revision]:
-
         if include_dependencies:
 
             def fn(rev):
@@ -945,7 +942,6 @@ class RevisionMap:
 
         current_candidate_idx = 0
         while current_heads:
-
             candidate = current_heads[current_candidate_idx]
 
             for check_head_index, ancestors in enumerate(ancestors_by_idx):
diff --git a/alembic/script/write_hooks.py b/alembic/script/write_hooks.py
index d37555d7979a0a3520a8e662004e71083a34ea22..0e9ec40a0a2e70069c739b4f9f928dcc762338ed 100644
--- a/alembic/script/write_hooks.py
+++ b/alembic/script/write_hooks.py
@@ -114,7 +114,6 @@ def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]:
 def console_scripts(
     path: str, options: dict, ignore_output: bool = False
 ) -> None:
-
     try:
         entrypoint_name = options["entrypoint"]
     except KeyError as ke:
diff --git a/alembic/testing/assertions.py b/alembic/testing/assertions.py
index 1c24066b808f57a51f57b3933e506dad4cefa9fc..ec9593b713656d7c9a4097c32b5d84b0b570069f 100644
--- a/alembic/testing/assertions.py
+++ b/alembic/testing/assertions.py
@@ -64,7 +64,6 @@ def assert_raises_message_context_ok(
 def _assert_raises(
     except_cls, callable_, args, kwargs, msg=None, check_context=False
 ):
-
     with _expect_raises(except_cls, msg, check_context) as ec:
         callable_(*args, **kwargs)
     return ec.error
@@ -104,7 +103,6 @@ def expect_raises_message(except_cls, msg, check_context=True):
 
 
 def eq_ignore_whitespace(a, b, msg=None):
-
     a = re.sub(r"^\s+?|\n", "", a)
     a = re.sub(r" {2,}", " ", a)
     b = re.sub(r"^\s+?|\n", "", b)
@@ -120,7 +118,6 @@ def _get_dialect(name):
     if name is None or name == "default":
         return default.DefaultDialect()
     else:
-
         d = sqla_compat._create_url(name).get_dialect()()
 
         if name == "postgresql":
diff --git a/alembic/testing/env.py b/alembic/testing/env.py
index 79a4980f49b2e5f330885733cc2a6d4fefa19881..5df7ef8227dd89ac97aaf5115e9d85339223a14d 100644
--- a/alembic/testing/env.py
+++ b/alembic/testing/env.py
@@ -22,10 +22,8 @@ def _get_staging_directory():
 
 
 def staging_env(create=True, template="generic", sourceless=False):
-
     cfg = _testing_config()
     if create:
-
         path = os.path.join(_get_staging_directory(), "scripts")
         assert not os.path.exists(path), (
             "staging directory %s already exists; poor cleanup?" % path
@@ -284,7 +282,6 @@ def write_script(
 
 
 def make_sourceless(path, style):
-
     import py_compile
 
     py_compile.compile(path)
diff --git a/alembic/testing/fixtures.py b/alembic/testing/fixtures.py
index 65f3a0a9ad952fb859cf62fbb60b2c6e6fe5a95b..4b83a745f3f5dbafbd0a0c073635122f888e797b 100644
--- a/alembic/testing/fixtures.py
+++ b/alembic/testing/fixtures.py
@@ -134,7 +134,6 @@ def op_fixture(
     literal_binds=False,
     native_boolean=None,
 ):
-
     opts = {}
     if naming_convention:
         opts["target_metadata"] = MetaData(naming_convention=naming_convention)
@@ -217,7 +216,6 @@ def op_fixture(
 
 
 class AlterColRoundTripFixture:
-
     # since these tests are about syntax, use more recent SQLAlchemy as some of
     # the type / server default compare logic might not work on older
     # SQLAlchemy versions as seems to be the case for SQLAlchemy 1.1 on Oracle
diff --git a/alembic/testing/suite/_autogen_fixtures.py b/alembic/testing/suite/_autogen_fixtures.py
index e09fbfe58d0b525e695b5b787f3a1bf6cd61bddd..d838ebef1068b5cc38a4a18f1b16b6cd00876581 100644
--- a/alembic/testing/suite/_autogen_fixtures.py
+++ b/alembic/testing/suite/_autogen_fixtures.py
@@ -279,7 +279,6 @@ class AutogenFixtureTest(_ComparesFKs):
         return_ops=False,
         max_identifier_length=None,
     ):
-
         if max_identifier_length:
             dialect = self.bind.dialect
             existing_length = dialect.max_identifier_length
diff --git a/alembic/util/langhelpers.py b/alembic/util/langhelpers.py
index f62bc1933a6150fcd01f41f3dc8207c64eb3c7b6..34d48bc6c77262f0119f8a5d3bf88211aee88f16 100644
--- a/alembic/util/langhelpers.py
+++ b/alembic/util/langhelpers.py
@@ -243,7 +243,6 @@ class Dispatcher:
         return decorate
 
     def dispatch(self, obj: Any, qualifier: str = "default") -> Any:
-
         if isinstance(obj, str):
             targets: Sequence = [obj]
         elif isinstance(obj, type):
diff --git a/alembic/util/pyfiles.py b/alembic/util/pyfiles.py
index 7535004767a537a794a8db2977713147e2fbd310..e7576731e124a972157b3bdc377a662672784111 100644
--- a/alembic/util/pyfiles.py
+++ b/alembic/util/pyfiles.py
@@ -49,7 +49,6 @@ def coerce_resource_to_filename(fname: str) -> str:
 
     """
     if not os.path.isabs(fname) and ":" in fname:
-
         tokens = fname.split(":")
 
         # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename  # noqa E501
diff --git a/alembic/util/sqla_compat.py b/alembic/util/sqla_compat.py
index 376448ac34b1e7d550cc7d3adfe194a26d94f765..d356abcd001d76b2d6e39fad369e5d1813fe592d 100644
--- a/alembic/util/sqla_compat.py
+++ b/alembic/util/sqla_compat.py
@@ -524,7 +524,6 @@ def _get_constraint_final_name(
             constraint, _alembic_quote=False
         )
     else:
-
         # prior to SQLAlchemy 1.4, work around quoting logic to get at the
         # final compiled name without quotes.
         if hasattr(constraint.name, "quote"):
diff --git a/tests/test_autogen_diffs.py b/tests/test_autogen_diffs.py
index 70ea10a4badd650a8e47dc7538f14db701c5d41e..ebba04bbc3bfd63c1868f435264b964eb979a227 100644
--- a/tests/test_autogen_diffs.py
+++ b/tests/test_autogen_diffs.py
@@ -238,7 +238,6 @@ class AutogenDefaultSchemaTest(AutogenFixtureTest, TestBase):
     __backend__ = True
 
     def test_uses_explcit_schema_in_default_one(self):
-
         default_schema = self.bind.dialect.default_schema_name
 
         m1 = MetaData()
@@ -251,7 +250,6 @@ class AutogenDefaultSchemaTest(AutogenFixtureTest, TestBase):
         eq_(diffs, [])
 
     def test_uses_explcit_schema_in_default_two(self):
-
         default_schema = self.bind.dialect.default_schema_name
 
         m1 = MetaData()
@@ -268,7 +266,6 @@ class AutogenDefaultSchemaTest(AutogenFixtureTest, TestBase):
         eq_(diffs[0][1].c.keys(), ["y"])
 
     def test_uses_explcit_schema_in_default_three(self):
-
         default_schema = self.bind.dialect.default_schema_name
 
         m1 = MetaData()
@@ -300,7 +297,6 @@ class AutogenDefaultSchemaIsNoneTest(AutogenFixtureTest, TestBase):
         eq_(self.bind.dialect.default_schema_name, None)
 
     def test_no_default_schema(self):
-
         m1 = MetaData()
         m2 = MetaData()
 
@@ -854,7 +850,6 @@ class CompareTypeSpecificityTest(TestBase):
     def test_compare_type(
         self, impl_fixture, inspected_type, metadata_type, expected
     ):
-
         is_(
             impl_fixture.compare_type(
                 Column("x", inspected_type), Column("x", metadata_type)
@@ -1440,7 +1435,6 @@ class AutogenKeyTest(AutogenTest, TestBase):
     symbols = ["someothertable", "sometable"]
 
     def test_autogen(self):
-
         uo = ops.UpgradeOps(ops=[])
 
         ctx = self.autogen_context
diff --git a/tests/test_autogen_indexes.py b/tests/test_autogen_indexes.py
index abefd66245e9e1d6666db5d883b6931ea4622455..9ec33d0f46a0a6a60de8adcf6bbc57e8244000cf 100644
--- a/tests/test_autogen_indexes.py
+++ b/tests/test_autogen_indexes.py
@@ -1446,7 +1446,6 @@ def _lots_of_indexes(flatten: bool = False):
     req = config.requirements.reflects_indexes_column_sorting
 
     if flatten:
-
         flat = list(itertools.chain.from_iterable(diff_pairs))
         for f1, f2 in with_sort:
             flat.extend([(f1, req), (f2, req)])
diff --git a/tests/test_autogen_render.py b/tests/test_autogen_render.py
index c4f474e84820ddab29ff571a859d822b16fab0ad..7a03cc15d9d6a7cf0d80067577c7eec3e13060b9 100644
--- a/tests/test_autogen_render.py
+++ b/tests/test_autogen_render.py
@@ -1621,7 +1621,6 @@ class AutogenRenderTest(TestBase):
         )
 
     def test_generic_array_type(self):
-
         eq_ignore_whitespace(
             autogenerate.render._repr_type(
                 types.ARRAY(Integer), self.autogen_context
@@ -1757,7 +1756,6 @@ class AutogenRenderTest(TestBase):
         )
 
     def test_render_variant(self):
-
         self.autogen_context.opts["user_module_prefix"] = None
 
         type_ = (
@@ -2213,7 +2211,6 @@ class AutogenRenderTest(TestBase):
 
 class RenderNamingConventionTest(TestBase):
     def setUp(self):
-
         convention = {
             "ix": "ix_%(custom)s_%(column_0_label)s",
             "uq": "uq_%(custom)s_%(table_name)s_%(column_0_name)s",
diff --git a/tests/test_batch.py b/tests/test_batch.py
index 5920cdf8fcb90b7e0f2195720d7ad30436e588d2..dd1c0089e935f0d881fd73baedfda074f048c04b 100644
--- a/tests/test_batch.py
+++ b/tests/test_batch.py
@@ -901,7 +901,6 @@ class BatchApplyTest(TestBase):
 class BatchAPITest(TestBase):
     @contextmanager
     def _fixture(self, schema=None):
-
         migration_context = mock.Mock(
             opts={},
             impl=mock.MagicMock(__dialect__="sqlite", connection=object()),
diff --git a/tests/test_command.py b/tests/test_command.py
index 0937930ea916a897fb9aff9b205cb45cd9fc7abd..9047f3f7dfa4c9b2c57faebd32dbfa9fc769a5eb 100644
--- a/tests/test_command.py
+++ b/tests/test_command.py
@@ -1210,7 +1210,6 @@ class CommandLineTest(TestBase):
             )
 
     def test_init_w_package(self):
-
         path = os.path.join(_get_staging_directory(), "foobar")
 
         with mock.patch("alembic.command.open") as open_:
diff --git a/tests/test_external_dialect.py b/tests/test_external_dialect.py
index de66517e5902aebdeebc1710dd717af22573d33d..8be1a2566e63db0a7391446054b724d1e68b46cf 100644
--- a/tests/test_external_dialect.py
+++ b/tests/test_external_dialect.py
@@ -101,7 +101,6 @@ class ExternalDialectRenderTest(TestBase):
         )
 
     def test_external_nested_render_sqla_type(self):
-
         eq_ignore_whitespace(
             autogenerate.render._repr_type(
                 EXT_ARRAY(sqla_types.Integer), self.autogen_context
@@ -126,7 +125,6 @@ class ExternalDialectRenderTest(TestBase):
         )
 
     def test_external_nested_render_external_type(self):
-
         eq_ignore_whitespace(
             autogenerate.render._repr_type(
                 EXT_ARRAY(FOOBARTYPE), self.autogen_context
diff --git a/tests/test_offline_environment.py b/tests/test_offline_environment.py
index 714d2480bf243491d716debb3c5d832cef74c550..8188d2cfd8de8732a96632fd6b197253941dff7e 100644
--- a/tests/test_offline_environment.py
+++ b/tests/test_offline_environment.py
@@ -264,7 +264,6 @@ assert not context.requires_connection()
         command.downgrade(self.cfg, "%s:%s" % (b, a), sql=True)
 
     def test_running_comments_not_in_sql(self):
-
         message = "this is a very long \nand multiline\nmessage"
 
         d = command.revision(self.cfg, message=message)
diff --git a/tests/test_op.py b/tests/test_op.py
index 6aca753d1ce4393f362396b634fd0c5bb2efc7da..54637fd375fa1b6c280f8cd7b95c7a9d7f81adba 100644
--- a/tests/test_op.py
+++ b/tests/test_op.py
@@ -56,7 +56,6 @@ class OpTest(TestBase):
         context.assert_("CREATE INDEX name ON tname (foo(x))")
 
     def test_add_column_schema_hard_quoting(self):
-
         context = op_fixture("postgresql")
         op.add_column(
             "somename",
@@ -69,7 +68,6 @@ class OpTest(TestBase):
         )
 
     def test_rename_table_schema_hard_quoting(self):
-
         context = op_fixture("postgresql")
         op.rename_table(
             "t1", "t2", schema=quoted_name("some.schema", quote=True)
@@ -78,7 +76,6 @@ class OpTest(TestBase):
         context.assert_('ALTER TABLE "some.schema".t1 RENAME TO t2')
 
     def test_add_constraint_schema_hard_quoting(self):
-
         context = op_fixture("postgresql")
         op.create_check_constraint(
             "ck_user_name_len",
diff --git a/tests/test_post_write.py b/tests/test_post_write.py
index a8bcd2fd7f5773bb8a8b2de7df786772158bd3dc..85e95fb9bb4a226ca661bca56d9293d04ad7ca60 100644
--- a/tests/test_post_write.py
+++ b/tests/test_post_write.py
@@ -154,7 +154,6 @@ class RunHookTest(TestBase):
         ), mock.patch(
             "alembic.script.write_hooks.subprocess"
         ) as mock_subprocess:
-
             rev = command.revision(self.cfg, message="x")
 
         eq_(importlib_metadata_get.mock_calls, [mock.call("console_scripts")])
@@ -191,7 +190,6 @@ black.options = -l 79
 
     @combinations(True, False)
     def test_filename_interpolation(self, posix):
-
         input_config = """
 [post_write_hooks]
 hooks = black
@@ -223,7 +221,6 @@ black.options = arg1 REVISION_SCRIPT_FILENAME 'multi-word arg' \
             )
 
     def test_path_in_config(self):
-
         input_config = """
 [post_write_hooks]
 hooks = black
diff --git a/tests/test_postgresql.py b/tests/test_postgresql.py
index 82a3f3b55c266a9f7298849684604c6fb1ae706f..7b7afdc06e28aaebc028a5713036ddb786172841 100644
--- a/tests/test_postgresql.py
+++ b/tests/test_postgresql.py
@@ -957,7 +957,6 @@ class PostgresqlAutogenRenderTest(TestBase):
         )
 
     def test_postgresql_array_type(self):
-
         eq_ignore_whitespace(
             autogenerate.render._repr_type(
                 ARRAY(Integer), self.autogen_context
@@ -1011,7 +1010,6 @@ class PostgresqlAutogenRenderTest(TestBase):
         )
 
     def test_generic_array_type(self):
-
         eq_ignore_whitespace(
             autogenerate.render._repr_type(
                 types.ARRAY(Integer), self.autogen_context
@@ -1341,7 +1339,6 @@ class PGUniqueIndexAutogenerateTest(AutogenFixtureTest, TestBase):
 
     @config.requirements.btree_gist
     def test_exclude_const_unchanged(self):
-
         m1 = MetaData()
         m2 = MetaData()
 
diff --git a/tests/test_revision.py b/tests/test_revision.py
index 0d5bfd54d77d558ba64dfa79a8bb12f8d4da7350..1534a681aff4634e7816b98192e2f09b8890ee6f 100644
--- a/tests/test_revision.py
+++ b/tests/test_revision.py
@@ -649,7 +649,6 @@ class BranchTravellingTest(DownIterateTest):
         )
 
     def test_three_branches_end_in_single_branch(self):
-
         self._assert_iteration(
             ["merge", "fe1b1"],
             "a3",
@@ -670,7 +669,6 @@ class BranchTravellingTest(DownIterateTest):
         )
 
     def test_two_branches_to_root(self):
-
         # here we want 'a3' as a "stop" branch point, but *not*
         # 'db1', as we don't have multiple traversals on db1
         self._assert_iteration(
@@ -721,7 +719,6 @@ class BranchTravellingTest(DownIterateTest):
         )
 
     def test_three_branches_to_root(self):
-
         # in this case, both "a3" and "db1" are stop points
         self._assert_iteration(
             ["merge", "fe1b1"],
@@ -746,7 +743,6 @@ class BranchTravellingTest(DownIterateTest):
         )
 
     def test_three_branches_end_multiple_bases(self):
-
         # in this case, both "a3" and "db1" are stop points
         self._assert_iteration(
             ["merge", "fe1b1"],
@@ -765,7 +761,6 @@ class BranchTravellingTest(DownIterateTest):
         )
 
     def test_three_branches_end_multiple_bases_exclusive(self):
-
         self._assert_iteration(
             ["merge", "fe1b1"],
             ["cb1", "cb2"],
diff --git a/tests/test_script_consumption.py b/tests/test_script_consumption.py
index a107b8055a1a4415e60c9e5fde4174b38bcc5c78..94c1768ef9851163903e92eef30b3dc4281088cd 100644
--- a/tests/test_script_consumption.py
+++ b/tests/test_script_consumption.py
@@ -813,7 +813,6 @@ class IgnoreFilesTest(TestBase):
         clear_staging_env()
 
     def _test_ignore_file_py(self, fname):
-
         command.revision(self.cfg, message="some rev")
         script = ScriptDirectory.from_config(self.cfg)
         path = os.path.join(script.versions, fname)
@@ -1076,7 +1075,6 @@ class RecursiveScriptDirectoryTest(TestBase):
 
     @testing.fixture
     def multi_base_fixture(self):
-
         self.env = staging_env()
         self.cfg = _multi_dir_testing_config()
         self.cfg.set_main_option("recursive_version_locations", "true")
diff --git a/tests/test_sqlite.py b/tests/test_sqlite.py
index a580eb208283e1c5c9fc6b96be872f3150d58698..d3c5367b879cf5f9e45d8aeaaadd0fda2e96cc73 100644
--- a/tests/test_sqlite.py
+++ b/tests/test_sqlite.py
@@ -64,7 +64,6 @@ class SQLiteTest(TestBase):
 
     @config.requirements.comments
     def test_create_table_with_comment_ignored(self):
-
         context = op_fixture("sqlite")
         op.create_table(
             "t2",
@@ -79,7 +78,6 @@ class SQLiteTest(TestBase):
 
     @config.requirements.comments
     def test_add_column_with_comment_ignored(self):
-
         context = op_fixture("sqlite")
         op.add_column("t1", Column("c1", Integer, comment="c1 comment"))
         context.assert_("ALTER TABLE t1 ADD COLUMN c1 INTEGER")
diff --git a/tests/test_version_traversal.py b/tests/test_version_traversal.py
index f7ad4f08a72d4bc6828433803e74ff8197297561..e1504b9cec752d17a739da9a6242373e9c868a01 100644
--- a/tests/test_version_traversal.py
+++ b/tests/test_version_traversal.py
@@ -119,7 +119,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_invalid_relative_upgrade_path(self):
-
         assert_raises_message(
             util.CommandError,
             "Relative revision -2 didn't produce 2 migrations",
@@ -137,7 +136,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_downgrade_path(self):
-
         self._assert_downgrade(
             self.c.revision,
             self.e.revision,
@@ -153,7 +151,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_relative_downgrade_path(self):
-
         self._assert_downgrade(
             "-1", self.c.revision, [self.down_(self.c)], {self.b.revision}
         )
@@ -180,7 +177,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_invalid_relative_downgrade_path(self):
-
         assert_raises_message(
             util.CommandError,
             "Relative revision -5 didn't produce 5 migrations",
@@ -198,7 +194,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_invalid_move_rev_to_none(self):
-
         assert_raises_message(
             util.CommandError,
             r"Destination %s is not a valid downgrade "
@@ -209,7 +204,6 @@ class RevisionPathTest(MigrationTest):
         )
 
     def test_invalid_move_higher_to_lower(self):
-
         assert_raises_message(
             util.CommandError,
             r"Destination %s is not a valid downgrade "
@@ -282,7 +276,6 @@ class BranchedPathTest(MigrationTest):
         )
 
     def test_upgrade_single_branch(self):
-
         self._assert_upgrade(
             self.d1.revision,
             self.b.revision,
@@ -321,7 +314,6 @@ class BranchedPathTest(MigrationTest):
         )
 
     def test_relative_upgrade(self):
-
         self._assert_upgrade(
             "c2branch@head-1",
             self.b.revision,
@@ -616,7 +608,6 @@ class BranchFromMergepointTest(MigrationTest):
         )
 
     def test_mergepoint_to_only_one_side_downgrade(self):
-
         self._assert_downgrade(
             self.b1.revision,
             (self.d2.revision, self.d1.revision),
@@ -691,7 +682,6 @@ class BranchFrom3WayMergepointTest(MigrationTest):
         clear_staging_env()
 
     def test_mergepoint_to_only_one_side_upgrade(self):
-
         self._assert_upgrade(
             self.d1.revision,
             (self.d3.revision, self.d2.revision, self.b1.revision),
@@ -708,7 +698,6 @@ class BranchFrom3WayMergepointTest(MigrationTest):
         )
 
     def test_mergepoint_to_two_sides_upgrade(self):
-
         self._assert_upgrade(
             self.d1.revision,
             (self.d3.revision, self.b2.revision, self.b1.revision),
@@ -1176,7 +1165,6 @@ class DependsOnBranchTestFour(MigrationTest):
         clear_staging_env()
 
     def test_dependencies_are_normalized(self):
-
         heads = [self.b4.revision]
 
         self._assert_downgrade(
@@ -1378,7 +1366,6 @@ class MergedPathTest(MigrationTest):
         )
 
     def test_upgrade_across_merge_point(self):
-
         eq_(
             self.env._upgrade_revs(self.f.revision, self.b.revision),
             [
@@ -1393,7 +1380,6 @@ class MergedPathTest(MigrationTest):
         )
 
     def test_downgrade_across_merge_point(self):
-
         eq_(
             self.env._downgrade_revs(self.b.revision, self.f.revision),
             [
diff --git a/tools/write_pyi.py b/tools/write_pyi.py
index da5b4845b969e86198bfb5832ae39aa5a5be14d3..499d830fe79966b54d05f2cfda188f8083dd7aeb 100644
--- a/tools/write_pyi.py
+++ b/tools/write_pyi.py
@@ -41,7 +41,7 @@ TRIM_MODULE = [
     "sqlalchemy.sql.type_api.",
     "sqlalchemy.sql.functions.",
     "sqlalchemy.sql.dml.",
-    "typing."
+    "typing.",
 ]
 ADDITIONAL_ENV = {
     "MigrationContext": MigrationContext,
diff --git a/tox.ini b/tox.ini
index 8a317acf1f7920c070f8563ba952c83e605319e7..62ef921ab138ecb5c5fbba1c2db16d0a22eef1f5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -96,7 +96,7 @@ deps=
       pydocstyle<4.0.0
       # used by flake8-rst-docstrings
       pygments
-      black==22.3.0
+      black==23.3.0
 commands =
      flake8 ./alembic/ ./tests/ setup.py docs/build/conf.py {posargs}
      black --check setup.py tests alembic