Add Operations and BatchOperations stub methods
author CaselIT <cfederico87@gmail.com>
Thu, 16 Mar 2023 23:50:53 +0000 (00:50 +0100)
committer Federico Caselli <cfederico87@gmail.com>
Thu, 4 May 2023 20:53:09 +0000 (22:53 +0200)
Updated the stub generator script to also add stub method definitions
for the :class:`.Operations` class and the :class:`.BatchOperations`
class obtained from :meth:`.Operations.batch_alter_table`.

Repaired the return signatures for :class:`.Operations` methods that
mostly return ``None``, and were erroneously annotated as
``Optional[Table]`` in many cases.

Fixes: #1093
Change-Id: I98d38dd5a1e719b4dbbc1003746ec28f26c27808
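
A minimal sketch of what the repaired annotations mean to a type
checker (table and column names below are hypothetical): ``create_table``
keeps its ``Table`` return value, while directives such as ``add_column``
are now annotated as returning ``None``::

    # hypothetical migration body; names are illustrative only
    from alembic import op
    import sqlalchemy as sa

    def upgrade() -> None:
        # create_table() still returns the new Table, usable with bulk_insert()
        accounts = op.create_table(
            "accounts",
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column("name", sa.String(50), nullable=False),
        )
        op.bulk_insert(accounts, [{"id": 1, "name": "A1"}])

        # add_column() and most other directives are typed as "-> None",
        # so a type checker now flags any use of their "return value"
        op.add_column("accounts", sa.Column("email", sa.String(100)))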

alembic/context.pyi
alembic/ddl/postgresql.py
alembic/op.pyi
alembic/operations/__init__.py
alembic/operations/base.py
alembic/operations/ops.py
alembic/util/compat.py
docs/build/ops.rst
docs/build/unreleased/1093.rst [new file with mode: 0644]
tests/test_stubs.py
tools/write_pyi.py

index 1007a5ef75aadd8bb4394572d8f07a5a2c8628a0..c81a14fd06bda5a5359f751f7d1cfc03ac1c5cb9 100644 (file)
@@ -3,10 +3,14 @@
 from __future__ import annotations
 
 from typing import Any
+from typing import Callable
+from typing import Collection
 from typing import ContextManager
 from typing import Dict
 from typing import List
 from typing import Literal
+from typing import Mapping
+from typing import MutableMapping
 from typing import Optional
 from typing import overload
 from typing import TextIO
@@ -19,16 +23,16 @@ if TYPE_CHECKING:
     from sqlalchemy.engine.url import URL
     from sqlalchemy.sql.elements import ClauseElement
     from sqlalchemy.sql.schema import MetaData
+    from sqlalchemy.sql.schema import SchemaItem
 
+    from .autogenerate.api import AutogenContext
     from .config import Config
-    from .runtime.environment import IncludeNameFn
-    from .runtime.environment import IncludeObjectFn
-    from .runtime.environment import OnVersionApplyFn
-    from .runtime.environment import ProcessRevisionDirectiveFn
-    from .runtime.environment import RenderItemFn
+    from .operations.ops import MigrateOperation
     from .runtime.migration import _ProxyTransaction
     from .runtime.migration import MigrationContext
+    from .runtime.migration import MigrationInfo
     from .script import ScriptDirectory
+
 ### end imports ###
 
 def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]:
@@ -79,7 +83,7 @@ config: Config
 
 def configure(
     connection: Optional[Connection] = None,
-    url: Optional[Union[str, URL]] = None,
+    url: Union[str, URL, None] = None,
     dialect_name: Optional[str] = None,
     dialect_opts: Optional[Dict[str, Any]] = None,
     transactional_ddl: Optional[bool] = None,
@@ -90,20 +94,77 @@ def configure(
     template_args: Optional[Dict[str, Any]] = None,
     render_as_batch: bool = False,
     target_metadata: Optional[MetaData] = None,
-    include_name: Optional[IncludeNameFn] = None,
-    include_object: Optional[IncludeObjectFn] = None,
+    include_name: Optional[
+        Callable[
+            [
+                Optional[str],
+                Literal[
+                    "schema",
+                    "table",
+                    "column",
+                    "index",
+                    "unique_constraint",
+                    "foreign_key_constraint",
+                ],
+                MutableMapping[
+                    Literal[
+                        "schema_name",
+                        "table_name",
+                        "schema_qualified_table_name",
+                    ],
+                    Optional[str],
+                ],
+            ],
+            bool,
+        ]
+    ] = None,
+    include_object: Optional[
+        Callable[
+            [
+                SchemaItem,
+                Optional[str],
+                Literal[
+                    "schema",
+                    "table",
+                    "column",
+                    "index",
+                    "unique_constraint",
+                    "foreign_key_constraint",
+                ],
+                bool,
+                Optional[SchemaItem],
+            ],
+            bool,
+        ]
+    ] = None,
     include_schemas: bool = False,
-    process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None,
+    process_revision_directives: Optional[
+        Callable[
+            [MigrationContext, Tuple[str, str], List[MigrateOperation]], None
+        ]
+    ] = None,
     compare_type: bool = False,
     compare_server_default: bool = False,
-    render_item: Optional[RenderItemFn] = None,
+    render_item: Optional[
+        Callable[[str, Any, AutogenContext], Union[str, Literal[False]]]
+    ] = None,
     literal_binds: bool = False,
     upgrade_token: str = "upgrades",
     downgrade_token: str = "downgrades",
     alembic_module_prefix: str = "op.",
     sqlalchemy_module_prefix: str = "sa.",
     user_module_prefix: Optional[str] = None,
-    on_version_apply: Optional[OnVersionApplyFn] = None,
+    on_version_apply: Optional[
+        Callable[
+            [
+                MigrationContext,
+                MigrationInfo,
+                Collection[Any],
+                Mapping[str, Any],
+            ],
+            None,
+        ]
+    ] = None,
     **kw: Any,
 ) -> None:
     """Configure a :class:`.MigrationContext` within this
index cc0488b8d8a7e91f404c08b7cffd7d1c77977b8c..6c858e7bdf80edc6db209a0bf369e958cc8efd24 100644 (file)
@@ -593,7 +593,11 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp):
 
     @classmethod
     def batch_create_exclude_constraint(
-        cls, operations, constraint_name, *elements, **kw
+        cls,
+        operations: BatchOperations,
+        constraint_name: str,
+        *elements: Any,
+        **kw: Any,
     ):
         """Issue a "create exclude constraint" instruction using the
         current batch migration context.
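
A minimal sketch of the batch form annotated above, assuming a
PostgreSQL backend and an existing ``user`` table::

    # illustrative migration fragment; constraint and table names are assumptions
    from alembic import op

    def upgrade() -> None:
        with op.batch_alter_table("user") as batch_op:
            # the table name is implied by the batch context
            batch_op.create_exclude_constraint(
                "user_excl",
                ("period", "&&"),
                where="group != 'some group'",
            )
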
index 535b2d5a4a804d194ac4f04514f15e5c16f75c75..aa3ad2d9d10b1cbe6883d5848d139ed08ec2c075 100644 (file)
@@ -42,7 +42,7 @@ if TYPE_CHECKING:
 
 def add_column(
     table_name: str, column: Column, schema: Optional[str] = None
-) -> Optional[Table]:
+) -> None:
     """Issue an "add column" instruction using the current
     migration context.
 
@@ -60,19 +60,19 @@ def add_column(
 
     .. note::
 
-        With the exception of NOT NULL constraints or single-column FOREIGN KEY
-        constraints, other kinds of constraints such as PRIMARY KEY, UNIQUE or
-        CHECK constraints **cannot** be generated using this method; for these
-        constraints, refer to operations such as
+        With the exception of NOT NULL constraints or single-column FOREIGN
+        KEY constraints, other kinds of constraints such as PRIMARY KEY,
+        UNIQUE or CHECK constraints **cannot** be generated using this
+        method; for these constraints, refer to operations such as
         :meth:`.Operations.create_primary_key` and
         :meth:`.Operations.create_check_constraint`. In particular, the
         following :class:`~sqlalchemy.schema.Column` parameters are
         **ignored**:
 
         * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
-          typically do not support an ALTER operation that can add individual
-          columns one at a time to an existing primary key constraint,
-          therefore it's less ambiguous to use the
+          typically do not support an ALTER operation that can add
+          individual columns one at a time to an existing primary key
+          constraint, therefore it's less ambiguous to use the
           :meth:`.Operations.create_primary_key` method, which assumes no
           existing primary key constraint is present.
         * :paramref:`~sqlalchemy.schema.Column.unique` - use the
@@ -137,7 +137,7 @@ def alter_column(
     existing_comment: Optional[str] = None,
     schema: Optional[str] = None,
     **kw: Any,
-) -> Optional[Table]:
+) -> None:
     r"""Issue an "alter column" instruction using the
     current migration context.
 
@@ -483,10 +483,10 @@ def bulk_insert(
 def create_check_constraint(
     constraint_name: Optional[str],
     table_name: str,
-    condition: Union[str, BinaryExpression],
+    condition: Union[str, BinaryExpression, TextClause],
     schema: Optional[str] = None,
     **kw: Any,
-) -> Optional[Table]:
+) -> None:
     """Issue a "create check constraint" instruction using the
     current migration context.
 
@@ -580,7 +580,7 @@ def create_foreign_key(
     source_schema: Optional[str] = None,
     referent_schema: Optional[str] = None,
     **dialect_kw: Any,
-) -> Optional[Table]:
+) -> None:
     """Issue a "create foreign key" instruction using the
     current migration context.
 
@@ -638,7 +638,7 @@ def create_index(
     schema: Optional[str] = None,
     unique: bool = False,
     **kw: Any,
-) -> Optional[Table]:
+) -> None:
     r"""Issue a "create index" instruction using the current
     migration context.
 
@@ -688,7 +688,7 @@ def create_primary_key(
     table_name: str,
     columns: List[str],
     schema: Optional[str] = None,
-) -> Optional[Table]:
+) -> None:
     """Issue a "create primary key" instruction using the current
     migration context.
 
@@ -724,9 +724,7 @@ def create_primary_key(
 
     """
 
-def create_table(
-    table_name: str, *columns: SchemaItem, **kw: Any
-) -> Optional[Table]:
+def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table:
     r"""Issue a "create table" instruction using the current migration
     context.
 
@@ -810,7 +808,7 @@ def create_table_comment(
     comment: Optional[str],
     existing_comment: Optional[str] = None,
     schema: Optional[str] = None,
-) -> Optional[Table]:
+) -> None:
     """Emit a COMMENT ON operation to set the comment for a table.
 
     .. versionadded:: 1.0.6
@@ -878,7 +876,7 @@ def create_unique_constraint(
 
 def drop_column(
     table_name: str, column_name: str, schema: Optional[str] = None, **kw: Any
-) -> Optional[Table]:
+) -> None:
     """Issue a "drop column" instruction using the current
     migration context.
 
@@ -921,7 +919,7 @@ def drop_constraint(
     table_name: str,
     type_: Optional[str] = None,
     schema: Optional[str] = None,
-) -> Optional[Table]:
+) -> None:
     r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
 
     :param constraint_name: name of the constraint.
@@ -940,7 +938,7 @@ def drop_index(
     table_name: Optional[str] = None,
     schema: Optional[str] = None,
     **kw: Any,
-) -> Optional[Table]:
+) -> None:
     r"""Issue a "drop index" instruction using the current
     migration context.
 
@@ -988,7 +986,7 @@ def drop_table_comment(
     table_name: str,
     existing_comment: Optional[str] = None,
     schema: Optional[str] = None,
-) -> Optional[Table]:
+) -> None:
     """Issue a "drop table comment" operation to
     remove an existing comment set on a table.
 
@@ -1009,7 +1007,7 @@ def drop_table_comment(
 def execute(
     sqltext: Union[str, TextClause, Update],
     execution_options: Optional[dict[str, Any]] = None,
-) -> Optional[Table]:
+) -> None:
     r"""Execute the given SQL using the current migration context.
 
     The given SQL can be a plain string, e.g.::
@@ -1177,8 +1175,8 @@ def inline_literal(
     advanced types like dates may not be supported directly
     by SQLAlchemy.
 
-    See :meth:`.execute` for an example usage of
-    :meth:`.inline_literal`.
+    See :meth:`.Operations.execute` for an example usage of
+    :meth:`.Operations.inline_literal`.
 
     The environment can also be configured to attempt to render
     "literal" values inline automatically, for those simple types
@@ -1229,7 +1227,7 @@ def register_operation(
 
 def rename_table(
     old_table_name: str, new_table_name: str, schema: Optional[str] = None
-) -> Optional[Table]:
+) -> None:
     """Emit an ALTER TABLE to rename a table.
 
     :param old_table_name: old name.
index 9527620ded60ddc3f2aba2ab416ee9cce5e4a4e3..9de1918c267550e145d1eedd7c6032504f3b47b5 100644 (file)
@@ -1,7 +1,13 @@
 from . import toimpl
+from .base import AbstractOperations
 from .base import BatchOperations
 from .base import Operations
 from .ops import MigrateOperation
 
 
-__all__ = ["Operations", "BatchOperations", "MigrateOperation"]
+__all__ = [
+    "AbstractOperations",
+    "Operations",
+    "BatchOperations",
+    "MigrateOperation",
+]
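
A minimal sketch, assuming a helper that should accept either proxy
type, of how the newly exported ``AbstractOperations`` can serve as a
common annotation::

    # illustrative helper; the function name is an assumption
    from sqlalchemy import text

    from alembic.operations import AbstractOperations

    def ping(ops: AbstractOperations) -> None:
        # accepts either an Operations or a BatchOperations instance
        ops.get_bind().execute(text("SELECT 1"))
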
index 68f8c595c79854ee36cb5a4cc8f2d45faf2bf80c..6e45a11675fc8d924762529e98bbeaa7c8c97e67 100644 (file)
@@ -35,44 +35,28 @@ if TYPE_CHECKING:
 
     from sqlalchemy import Table
     from sqlalchemy.engine import Connection
+    from sqlalchemy.sql.expression import BinaryExpression
+    from sqlalchemy.sql.expression import TableClause
+    from sqlalchemy.sql.expression import TextClause
+    from sqlalchemy.sql.expression import Update
+    from sqlalchemy.sql.functions import Function
+    from sqlalchemy.sql.schema import Column
+    from sqlalchemy.sql.schema import Computed
+    from sqlalchemy.sql.schema import Identity
+    from sqlalchemy.sql.schema import SchemaItem
     from sqlalchemy.types import TypeEngine
 
     from .batch import BatchOperationsImpl
     from .ops import MigrateOperation
     from ..ddl import DefaultImpl
     from ..runtime.migration import MigrationContext
-
 __all__ = ("Operations", "BatchOperations")
 
 
-class Operations(util.ModuleClsProxy):
-
-    """Define high level migration operations.
-
-    Each operation corresponds to some schema migration operation,
-    executed against a particular :class:`.MigrationContext`
-    which in turn represents connectivity to a database,
-    or a file output stream.
-
-    While :class:`.Operations` is normally configured as
-    part of the :meth:`.EnvironmentContext.run_migrations`
-    method called from an ``env.py`` script, a standalone
-    :class:`.Operations` instance can be
-    made for use cases external to regular Alembic
-    migrations by passing in a :class:`.MigrationContext`::
+class AbstractOperations(util.ModuleClsProxy):
+    """Base class for Operations and BatchOperations.
 
-        from alembic.migration import MigrationContext
-        from alembic.operations import Operations
-
-        conn = myengine.connect()
-        ctx = MigrationContext.configure(conn)
-        op = Operations(ctx)
-
-        op.alter_column("t", "c", nullable=True)
-
-    Note that as of 0.8, most of the methods on this class are produced
-    dynamically using the :meth:`.Operations.register_operation`
-    method.
+    .. versionadded:: 1.11.0
 
     """
 
@@ -461,8 +445,8 @@ class Operations(util.ModuleClsProxy):
         advanced types like dates may not be supported directly
         by SQLAlchemy.
 
-        See :meth:`.execute` for an example usage of
-        :meth:`.inline_literal`.
+        See :meth:`.Operations.execute` for an example usage of
+        :meth:`.Operations.inline_literal`.
 
         The environment can also be configured to attempt to render
         "literal" values inline automatically, for those simple types
@@ -500,7 +484,1000 @@ class Operations(util.ModuleClsProxy):
         return self.migration_context.impl.bind  # type: ignore[return-value]
 
 
-class BatchOperations(Operations):
+class Operations(AbstractOperations):
+    """Define high level migration operations.
+
+    Each operation corresponds to some schema migration operation,
+    executed against a particular :class:`.MigrationContext`
+    which in turn represents connectivity to a database,
+    or a file output stream.
+
+    While :class:`.Operations` is normally configured as
+    part of the :meth:`.EnvironmentContext.run_migrations`
+    method called from an ``env.py`` script, a standalone
+    :class:`.Operations` instance can be
+    made for use cases external to regular Alembic
+    migrations by passing in a :class:`.MigrationContext`::
+
+        from alembic.migration import MigrationContext
+        from alembic.operations import Operations
+
+        conn = myengine.connect()
+        ctx = MigrationContext.configure(conn)
+        op = Operations(ctx)
+
+        op.alter_column("t", "c", nullable=True)
+
+    Note that as of 0.8, most of the methods on this class are produced
+    dynamically using the :meth:`.Operations.register_operation`
+    method.
+
+    """
+
+    if TYPE_CHECKING:
+        # START STUB FUNCTIONS: op_cls
+        # ### the following stubs are generated by tools/write_pyi.py ###
+        # ### do not edit ###
+
+        def add_column(
+            self, table_name: str, column: Column, schema: Optional[str] = None
+        ) -> None:
+            """Issue an "add column" instruction using the current
+            migration context.
+
+            e.g.::
+
+                from alembic import op
+                from sqlalchemy import Column, String
+
+                op.add_column("organization", Column("name", String()))
+
+            The :meth:`.Operations.add_column` method typically corresponds
+            to the SQL command "ALTER TABLE... ADD COLUMN".    Within the scope
+            of this command, the column's name, datatype, nullability,
+            and optional server-generated defaults may be indicated.
+
+            .. note::
+
+                With the exception of NOT NULL constraints or single-column FOREIGN
+                KEY constraints, other kinds of constraints such as PRIMARY KEY,
+                UNIQUE or CHECK constraints **cannot** be generated using this
+                method; for these constraints, refer to operations such as
+                :meth:`.Operations.create_primary_key` and
+                :meth:`.Operations.create_check_constraint`. In particular, the
+                following :class:`~sqlalchemy.schema.Column` parameters are
+                **ignored**:
+
+                * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+                  typically do not support an ALTER operation that can add
+                  individual columns one at a time to an existing primary key
+                  constraint, therefore it's less ambiguous to use the
+                  :meth:`.Operations.create_primary_key` method, which assumes no
+                  existing primary key constraint is present.
+                * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+                  :meth:`.Operations.create_unique_constraint` method
+                * :paramref:`~sqlalchemy.schema.Column.index` - use the
+                  :meth:`.Operations.create_index` method
+
+
+            The provided :class:`~sqlalchemy.schema.Column` object may include a
+            :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+            referencing a remote table name. For this specific type of constraint,
+            Alembic will automatically emit a second ALTER statement in order to
+            add the single-column FOREIGN KEY constraint separately::
+
+                from alembic import op
+                from sqlalchemy import Column, INTEGER, ForeignKey
+
+                op.add_column(
+                    "organization",
+                    Column("account_id", INTEGER, ForeignKey("accounts.id")),
+                )
+
+            The column argument passed to :meth:`.Operations.add_column` is a
+            :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+            used in SQLAlchemy. In particular, values or functions to be indicated
+            as producing the column's default value on the database side are
+            specified using the ``server_default`` parameter, and not ``default``
+            which only specifies Python-side defaults::
+
+                from alembic import op
+                from sqlalchemy import Column, TIMESTAMP, func
+
+                # specify "DEFAULT NOW" along with the column add
+                op.add_column(
+                    "account",
+                    Column("timestamp", TIMESTAMP, server_default=func.now()),
+                )
+
+            :param table_name: String name of the parent table.
+            :param column: a :class:`sqlalchemy.schema.Column` object
+             representing the new column.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        def alter_column(
+            self,
+            table_name: str,
+            column_name: str,
+            nullable: Optional[bool] = None,
+            comment: Union[str, Literal[False], None] = False,
+            server_default: Any = False,
+            new_column_name: Optional[str] = None,
+            type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+            existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+            existing_server_default: Union[
+                str, bool, Identity, Computed, None
+            ] = False,
+            existing_nullable: Optional[bool] = None,
+            existing_comment: Optional[str] = None,
+            schema: Optional[str] = None,
+            **kw: Any,
+        ) -> None:
+            r"""Issue an "alter column" instruction using the
+            current migration context.
+
+            Generally, only that aspect of the column which
+            is being changed, i.e. name, type, nullability,
+            default, needs to be specified.  Multiple changes
+            can also be specified at once and the backend should
+            "do the right thing", emitting each change either
+            separately or together as the backend allows.
+
+            MySQL has special requirements here, since MySQL
+            cannot ALTER a column without a full specification.
+            When producing MySQL-compatible migration files,
+            it is recommended that the ``existing_type``,
+            ``existing_server_default``, and ``existing_nullable``
+            parameters be present, if not being altered.
+
+            Type changes which are against the SQLAlchemy
+            "schema" types :class:`~sqlalchemy.types.Boolean`
+            and  :class:`~sqlalchemy.types.Enum` may also
+            add or drop constraints which accompany those
+            types on backends that don't support them natively.
+            The ``existing_type`` argument is
+            used in this case to identify and remove a previous
+            constraint that was bound to the type object.
+
+            :param table_name: string name of the target table.
+            :param column_name: string name of the target column,
+             as it exists before the operation begins.
+            :param nullable: Optional; specify ``True`` or ``False``
+             to alter the column's nullability.
+            :param server_default: Optional; specify a string
+             SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+             or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+             an alteration to the column's default value.
+             Set to ``None`` to have the default removed.
+            :param comment: optional string text of a new comment to add to the
+             column.
+
+             .. versionadded:: 1.0.6
+
+            :param new_column_name: Optional; specify a string name here to
+             indicate the new name within a column rename operation.
+            :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+             type object to specify a change to the column's type.
+             For SQLAlchemy types that also indicate a constraint (i.e.
+             :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+             the constraint is also generated.
+            :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+             currently understood by the MySQL dialect.
+            :param existing_type: Optional; a
+             :class:`~sqlalchemy.types.TypeEngine`
+             type object to specify the previous type.   This
+             is required for all MySQL column alter operations that
+             don't otherwise specify a new type, as well as for
+             when nullability is being changed on a SQL Server
+             column.  It is also used if the type is a so-called
+             SQLAlchemy "schema" type which may define a constraint (i.e.
+             :class:`~sqlalchemy.types.Boolean`,
+             :class:`~sqlalchemy.types.Enum`),
+             so that the constraint can be dropped.
+            :param existing_server_default: Optional; The existing
+             default value of the column.   Required on MySQL if
+             an existing default is not being changed; else MySQL
+             removes the default.
+            :param existing_nullable: Optional; the existing nullability
+             of the column.  Required on MySQL if the existing nullability
+             is not being changed; else MySQL sets this to NULL.
+            :param existing_autoincrement: Optional; the existing autoincrement
+             of the column.  Used for MySQL's system of altering a column
+             that specifies ``AUTO_INCREMENT``.
+            :param existing_comment: string text of the existing comment on the
+             column to be maintained.  Required on MySQL if the existing comment
+             on the column is not being changed.
+
+             .. versionadded:: 1.0.6
+
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param postgresql_using: String argument which will indicate a
+             SQL expression to render within the Postgresql-specific USING clause
+             within ALTER COLUMN.    This string is taken directly as raw SQL which
+             must explicitly include any necessary quoting or escaping of tokens
+             within the expression.
+
+            """  # noqa: E501
+            ...
+
+        def bulk_insert(
+            self,
+            table: Union[Table, TableClause],
+            rows: List[dict],
+            multiinsert: bool = True,
+        ) -> None:
+            """Issue a "bulk insert" operation using the current
+            migration context.
+
+            This provides a means of representing an INSERT of multiple rows
+            which works equally well in the context of executing on a live
+            connection as well as that of generating a SQL script.   In the
+            case of a SQL script, the values are rendered inline into the
+            statement.
+
+            e.g.::
+
+                from alembic import op
+                from datetime import date
+                from sqlalchemy.sql import table, column
+                from sqlalchemy import String, Integer, Date
+
+                # Create an ad-hoc table to use for the insert statement.
+                accounts_table = table(
+                    "account",
+                    column("id", Integer),
+                    column("name", String),
+                    column("create_date", Date),
+                )
+
+                op.bulk_insert(
+                    accounts_table,
+                    [
+                        {
+                            "id": 1,
+                            "name": "John Smith",
+                            "create_date": date(2010, 10, 5),
+                        },
+                        {
+                            "id": 2,
+                            "name": "Ed Williams",
+                            "create_date": date(2007, 5, 27),
+                        },
+                        {
+                            "id": 3,
+                            "name": "Wendy Jones",
+                            "create_date": date(2008, 8, 15),
+                        },
+                    ],
+                )
+
+            When using --sql mode, some datatypes may not render inline
+            automatically, such as dates and other special types.   When this
+            issue is present, :meth:`.Operations.inline_literal` may be used::
+
+                op.bulk_insert(
+                    accounts_table,
+                    [
+                        {
+                            "id": 1,
+                            "name": "John Smith",
+                            "create_date": op.inline_literal("2010-10-05"),
+                        },
+                        {
+                            "id": 2,
+                            "name": "Ed Williams",
+                            "create_date": op.inline_literal("2007-05-27"),
+                        },
+                        {
+                            "id": 3,
+                            "name": "Wendy Jones",
+                            "create_date": op.inline_literal("2008-08-15"),
+                        },
+                    ],
+                    multiinsert=False,
+                )
+
+            When using :meth:`.Operations.inline_literal` in conjunction with
+            :meth:`.Operations.bulk_insert`, in order for the statement to work
+            in "online" (e.g. non --sql) mode, the
+            :paramref:`~.Operations.bulk_insert.multiinsert`
+            flag should be set to ``False``, which will have the effect of
+            individual INSERT statements being emitted to the database, each
+            with a distinct VALUES clause, so that the "inline" values can
+            still be rendered, rather than attempting to pass the values
+            as bound parameters.
+
+            :param table: a table object which represents the target of the INSERT.
+
+            :param rows: a list of dictionaries indicating rows.
+
+            :param multiinsert: when at its default of True and --sql mode is not
+               enabled, the INSERT statement will be executed using
+               "executemany()" style, where all elements in the list of
+               dictionaries are passed as bound parameters in a single
+               list.   Setting this to False results in individual INSERT
+               statements being emitted per parameter set, and is needed
+               in those cases where non-literal values are present in the
+               parameter sets.
+
+            """  # noqa: E501
+            ...
+
+        def create_check_constraint(
+            self,
+            constraint_name: Optional[str],
+            table_name: str,
+            condition: Union[str, BinaryExpression, TextClause],
+            schema: Optional[str] = None,
+            **kw: Any,
+        ) -> None:
+            """Issue a "create check constraint" instruction using the
+            current migration context.
+
+            e.g.::
+
+                from alembic import op
+                from sqlalchemy.sql import column, func
+
+                op.create_check_constraint(
+                    "ck_user_name_len",
+                    "user",
+                    func.len(column("name")) > 5,
+                )
+
+            CHECK constraints are usually against a SQL expression, so ad-hoc
+            table metadata is usually needed.   The function will convert the given
+            arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+            to an anonymous table in order to emit the CREATE statement.
+
+            :param name: Name of the check constraint.  The name is necessary
+             so that an ALTER statement can be emitted.  For setups that
+             use an automated naming scheme such as that described at
+             :ref:`sqla:constraint_naming_conventions`,
+             ``name`` here can be ``None``, as the event listener will
+             apply the name to the constraint object when it is associated
+             with the table.
+            :param table_name: String name of the source table.
+            :param condition: SQL expression that's the condition of the
+             constraint. Can be a string or SQLAlchemy expression language
+             structure.
+            :param deferrable: optional bool. If set, emit DEFERRABLE or
+             NOT DEFERRABLE when issuing DDL for this constraint.
+            :param initially: optional string. If set, emit INITIALLY <value>
+             when issuing DDL for this constraint.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        def create_exclude_constraint(
+            self,
+            constraint_name: str,
+            table_name: str,
+            *elements: Any,
+            **kw: Any,
+        ) -> Optional[Table]:
+            """Issue an alter to create an EXCLUDE constraint using the
+            current migration context.
+
+            .. note::  This method is Postgresql specific, and additionally
+               requires at least SQLAlchemy 1.0.
+
+            e.g.::
+
+                from alembic import op
+
+                op.create_exclude_constraint(
+                    "user_excl",
+                    "user",
+                    ("period", "&&"),
+                    ("group", "="),
+                    where=("group != 'some group'"),
+                )
+
+            Note that the expressions work the same way as that of
+            the ``ExcludeConstraint`` object itself; if plain strings are
+            passed, quoting rules must be applied manually.
+
+            :param name: Name of the constraint.
+            :param table_name: String name of the source table.
+            :param elements: exclude conditions.
+            :param where: SQL expression or SQL string with optional WHERE
+             clause.
+            :param deferrable: optional bool. If set, emit DEFERRABLE or
+             NOT DEFERRABLE when issuing DDL for this constraint.
+            :param initially: optional string. If set, emit INITIALLY <value>
+             when issuing DDL for this constraint.
+            :param schema: Optional schema name to operate within.
+
+            """  # noqa: E501
+            ...
+
+        def create_foreign_key(
+            self,
+            constraint_name: Optional[str],
+            source_table: str,
+            referent_table: str,
+            local_cols: List[str],
+            remote_cols: List[str],
+            onupdate: Optional[str] = None,
+            ondelete: Optional[str] = None,
+            deferrable: Optional[bool] = None,
+            initially: Optional[str] = None,
+            match: Optional[str] = None,
+            source_schema: Optional[str] = None,
+            referent_schema: Optional[str] = None,
+            **dialect_kw: Any,
+        ) -> None:
+            """Issue a "create foreign key" instruction using the
+            current migration context.
+
+            e.g.::
+
+                from alembic import op
+
+                op.create_foreign_key(
+                    "fk_user_address",
+                    "address",
+                    "user",
+                    ["user_id"],
+                    ["id"],
+                )
+
+            This internally generates a :class:`~sqlalchemy.schema.Table` object
+            containing the necessary columns, then generates a new
+            :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+            object which it then associates with the
+            :class:`~sqlalchemy.schema.Table`.
+            Any event listeners associated with this action will be fired
+            off normally.   The :class:`~sqlalchemy.schema.AddConstraint`
+            construct is ultimately used to generate the ALTER statement.
+
+            :param constraint_name: Name of the foreign key constraint.  The name
+             is necessary so that an ALTER statement can be emitted.  For setups
+             that use an automated naming scheme such as that described at
+             :ref:`sqla:constraint_naming_conventions`,
+             ``name`` here can be ``None``, as the event listener will
+             apply the name to the constraint object when it is associated
+             with the table.
+            :param source_table: String name of the source table.
+            :param referent_table: String name of the destination table.
+            :param local_cols: a list of string column names in the
+             source table.
+            :param remote_cols: a list of string column names in the
+             remote table.
+            :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+             issuing DDL for this constraint. Typical values include CASCADE,
+             DELETE and RESTRICT.
+            :param ondelete: Optional string. If set, emit ON DELETE <value> when
+             issuing DDL for this constraint. Typical values include CASCADE,
+             DELETE and RESTRICT.
+            :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+             DEFERRABLE when issuing DDL for this constraint.
+            :param source_schema: Optional schema name of the source table.
+            :param referent_schema: Optional schema name of the destination table.
+
+            """  # noqa: E501
+            ...
+
+        def create_index(
+            self,
+            index_name: Optional[str],
+            table_name: str,
+            columns: Sequence[Union[str, TextClause, Function[Any]]],
+            schema: Optional[str] = None,
+            unique: bool = False,
+            **kw: Any,
+        ) -> None:
+            r"""Issue a "create index" instruction using the current
+            migration context.
+
+            e.g.::
+
+                from alembic import op
+
+                op.create_index("ik_test", "t1", ["foo", "bar"])
+
+            Functional indexes can be produced by using the
+            :func:`sqlalchemy.sql.expression.text` construct::
+
+                from alembic import op
+                from sqlalchemy import text
+
+                op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+            :param index_name: name of the index.
+            :param table_name: name of the owning table.
+            :param columns: a list consisting of string column names and/or
+             :func:`~sqlalchemy.sql.expression.text` constructs.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param unique: If True, create a unique index.
+
+            :param quote:
+                Force quoting of this column's name on or off, corresponding
+                to ``True`` or ``False``. When left at its default
+                of ``None``, the column identifier will be quoted according to
+                whether the name is case sensitive (identifiers with at least one
+                upper case character are treated as case sensitive), or if it's a
+                reserved word. This flag is only needed to force quoting of a
+                reserved word which is not known by the SQLAlchemy dialect.
+
+            :param \**kw: Additional keyword arguments not mentioned above are
+                dialect specific, and passed in the form
+                ``<dialectname>_<argname>``.
+                See the documentation regarding an individual dialect at
+                :ref:`dialect_toplevel` for detail on documented arguments.
+
+            """  # noqa: E501
+            ...
+
+        def create_primary_key(
+            self,
+            constraint_name: Optional[str],
+            table_name: str,
+            columns: List[str],
+            schema: Optional[str] = None,
+        ) -> None:
+            """Issue a "create primary key" instruction using the current
+            migration context.
+
+            e.g.::
+
+                from alembic import op
+
+                op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+            This internally generates a :class:`~sqlalchemy.schema.Table` object
+            containing the necessary columns, then generates a new
+            :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+            object which it then associates with the
+            :class:`~sqlalchemy.schema.Table`.
+            Any event listeners associated with this action will be fired
+            off normally.   The :class:`~sqlalchemy.schema.AddConstraint`
+            construct is ultimately used to generate the ALTER statement.
+
+            :param constraint_name: Name of the primary key constraint.  The name
+             is necessary so that an ALTER statement can be emitted.  For setups
+             that use an automated naming scheme such as that described at
+             :ref:`sqla:constraint_naming_conventions`
+             ``name`` here can be ``None``, as the event listener will
+             apply the name to the constraint object when it is associated
+             with the table.
+            :param table_name: String name of the target table.
+            :param columns: a list of string column names to be applied to the
+             primary key constraint.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        def create_table(
+            self, table_name: str, *columns: SchemaItem, **kw: Any
+        ) -> Table:
+            r"""Issue a "create table" instruction using the current migration
+            context.
+
+            This directive receives an argument list similar to that of the
+            traditional :class:`sqlalchemy.schema.Table` construct, but without the
+            metadata::
+
+                from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+                from alembic import op
+
+                op.create_table(
+                    "account",
+                    Column("id", INTEGER, primary_key=True),
+                    Column("name", VARCHAR(50), nullable=False),
+                    Column("description", NVARCHAR(200)),
+                    Column("timestamp", TIMESTAMP, server_default=func.now()),
+                )
+
+            Note that :meth:`.create_table` accepts
+            :class:`~sqlalchemy.schema.Column`
+            constructs directly from the SQLAlchemy library.  In particular,
+            default values to be created on the database side are
+            specified using the ``server_default`` parameter, and not
+            ``default`` which only specifies Python-side defaults::
+
+                from alembic import op
+                from sqlalchemy import Column, TIMESTAMP, func
+
+                # specify "DEFAULT NOW" along with the "timestamp" column
+                op.create_table(
+                    "account",
+                    Column("id", INTEGER, primary_key=True),
+                    Column("timestamp", TIMESTAMP, server_default=func.now()),
+                )
+
+            The function also returns a newly created
+            :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+            specification given, which is suitable for
+            immediate SQL operations, in particular
+            :meth:`.Operations.bulk_insert`::
+
+                from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+                from alembic import op
+
+                account_table = op.create_table(
+                    "account",
+                    Column("id", INTEGER, primary_key=True),
+                    Column("name", VARCHAR(50), nullable=False),
+                    Column("description", NVARCHAR(200)),
+                    Column("timestamp", TIMESTAMP, server_default=func.now()),
+                )
+
+                op.bulk_insert(
+                    account_table,
+                    [
+                        {"name": "A1", "description": "account 1"},
+                        {"name": "A2", "description": "account 2"},
+                    ],
+                )
+
+            :param table_name: Name of the table
+            :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+             objects within
+             the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+             objects
+             and :class:`~sqlalchemy.schema.Index` objects.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param \**kw: Other keyword arguments are passed to the underlying
+             :class:`sqlalchemy.schema.Table` object created for the command.
+
+            :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+             to the parameters given.
+
+            """  # noqa: E501
+            ...
+
+        def create_table_comment(
+            self,
+            table_name: str,
+            comment: Optional[str],
+            existing_comment: Optional[str] = None,
+            schema: Optional[str] = None,
+        ) -> None:
+            """Emit a COMMENT ON operation to set the comment for a table.
+
+            .. versionadded:: 1.0.6
+
+            :param table_name: string name of the target table.
+            :param comment: string value of the comment being registered against
+             the specified table.
+            :param existing_comment: String value of a comment
+             already registered on the specified table, used within autogenerate
+             so that the operation is reversible, but not required for direct
+             use.
+
+            .. seealso::
+
+                :meth:`.Operations.drop_table_comment`
+
+                :paramref:`.Operations.alter_column.comment`
+
+            """  # noqa: E501
+            ...
+
+        def create_unique_constraint(
+            self,
+            constraint_name: Optional[str],
+            table_name: str,
+            columns: Sequence[str],
+            schema: Optional[str] = None,
+            **kw: Any,
+        ) -> Any:
+            """Issue a "create unique constraint" instruction using the
+            current migration context.
+
+            e.g.::
+
+                from alembic import op
+                op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+            This internally generates a :class:`~sqlalchemy.schema.Table` object
+            containing the necessary columns, then generates a new
+            :class:`~sqlalchemy.schema.UniqueConstraint`
+            object which it then associates with the
+            :class:`~sqlalchemy.schema.Table`.
+            Any event listeners associated with this action will be fired
+            off normally.   The :class:`~sqlalchemy.schema.AddConstraint`
+            construct is ultimately used to generate the ALTER statement.
+
+            :param name: Name of the unique constraint.  The name is necessary
+             so that an ALTER statement can be emitted.  For setups that
+             use an automated naming scheme such as that described at
+             :ref:`sqla:constraint_naming_conventions`,
+             ``name`` here can be ``None``, as the event listener will
+             apply the name to the constraint object when it is associated
+             with the table.
+            :param table_name: String name of the source table.
+            :param columns: a list of string column names in the
+             source table.
+            :param deferrable: optional bool. If set, emit DEFERRABLE or
+             NOT DEFERRABLE when issuing DDL for this constraint.
+            :param initially: optional string. If set, emit INITIALLY <value>
+             when issuing DDL for this constraint.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        def drop_column(
+            self,
+            table_name: str,
+            column_name: str,
+            schema: Optional[str] = None,
+            **kw: Any,
+        ) -> None:
+            """Issue a "drop column" instruction using the current
+            migration context.
+
+            e.g.::
+
+                drop_column("organization", "account_id")
+
+            :param table_name: name of table
+            :param column_name: name of column
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param mssql_drop_check: Optional boolean.  When ``True``, on
+             Microsoft SQL Server only, first
+             drop the CHECK constraint on the column using a
+             SQL-script-compatible
+             block that selects into a @variable from sys.check_constraints,
+             then exec's a separate DROP CONSTRAINT for that constraint.
+            :param mssql_drop_default: Optional boolean.  When ``True``, on
+             Microsoft SQL Server only, first
+             drop the DEFAULT constraint on the column using a
+             SQL-script-compatible
+             block that selects into a @variable from sys.default_constraints,
+             then exec's a separate DROP CONSTRAINT for that default.
+            :param mssql_drop_foreign_key: Optional boolean.  When ``True``, on
+             Microsoft SQL Server only, first
+             drop a single FOREIGN KEY constraint on the column using a
+             SQL-script-compatible
+             block that selects into a @variable from
+             sys.foreign_keys/sys.foreign_key_columns,
+             then exec's a separate DROP CONSTRAINT for that foreign key.  Only
+             works if the column has exactly one FK constraint which refers to
+             it, at the moment.
+
+            """  # noqa: E501
+            ...
+
+        def drop_constraint(
+            self,
+            constraint_name: str,
+            table_name: str,
+            type_: Optional[str] = None,
+            schema: Optional[str] = None,
+        ) -> None:
+            r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
+            :param constraint_name: name of the constraint.
+            :param table_name: table name.
+            :param type\_: optional, required on MySQL.  can be
+             'foreignkey', 'primary', 'unique', or 'check'.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        def drop_index(
+            self,
+            index_name: str,
+            table_name: Optional[str] = None,
+            schema: Optional[str] = None,
+            **kw: Any,
+        ) -> None:
+            r"""Issue a "drop index" instruction using the current
+            migration context.
+
+            e.g.::
+
+                drop_index("accounts")
+
+            :param index_name: name of the index.
+            :param table_name: name of the owning table.  Some
+             backends such as Microsoft SQL Server require this.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param \**kw: Additional keyword arguments not mentioned above are
+                dialect specific, and passed in the form
+                ``<dialectname>_<argname>``.
+                See the documentation regarding an individual dialect at
+                :ref:`dialect_toplevel` for detail on documented arguments.
+
+            """  # noqa: E501
+            ...
+
+        def drop_table(
+            self, table_name: str, schema: Optional[str] = None, **kw: Any
+        ) -> None:
+            r"""Issue a "drop table" instruction using the current
+            migration context.
+
+
+            e.g.::
+
+                drop_table("accounts")
+
+            :param table_name: Name of the table
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+            :param \**kw: Other keyword arguments are passed to the underlying
+             :class:`sqlalchemy.schema.Table` object created for the command.
+
+            """  # noqa: E501
+            ...
+
+        def drop_table_comment(
+            self,
+            table_name: str,
+            existing_comment: Optional[str] = None,
+            schema: Optional[str] = None,
+        ) -> None:
+            """Issue a "drop table comment" operation to
+            remove an existing comment set on a table.
+
+            .. versionadded:: 1.0.6
+
+            :param table_name: string name of the target table.
+            :param existing_comment: An optional string value of a comment already
+             registered on the specified table.
+
+            .. seealso::
+
+                :meth:`.Operations.create_table_comment`
+
+                :paramref:`.Operations.alter_column.comment`
+
+            """  # noqa: E501
+            ...
+
+        def execute(
+            self,
+            sqltext: Union[str, TextClause, Update],
+            execution_options: Optional[dict[str, Any]] = None,
+        ) -> None:
+            r"""Execute the given SQL using the current migration context.
+
+            The given SQL can be a plain string, e.g.::
+
+                op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+            Or it can be any kind of Core SQL Expression construct, such as
+            below where we use an update construct::
+
+                from sqlalchemy.sql import table, column
+                from sqlalchemy import String
+                from alembic import op
+
+                account = table("account", column("name", String))
+                op.execute(
+                    account.update()
+                    .where(account.c.name == op.inline_literal("account 1"))
+                    .values({"name": op.inline_literal("account 2")})
+                )
+
+            Above, we made use of the SQLAlchemy
+            :func:`sqlalchemy.sql.expression.table` and
+            :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+            ad-hoc table construct just for our UPDATE statement.  A full
+            :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+            fine as well, though note it's a recommended practice to at least
+            ensure the definition of a table is self-contained within the migration
+            script, rather than imported from a module that may break compatibility
+            with older migrations.
+
+            In a SQL script context, the statement is emitted directly to the
+            output stream.   There is *no* return result, however, as this
+            function is oriented towards generating a change script
+            that can run in "offline" mode.     Additionally, parameterized
+            statements are discouraged here, as they *will not work* in offline
+            mode.  Above, we use :meth:`.inline_literal` where parameters are
+            to be used.
+
+            For full interaction with a connected database where parameters can
+            also be used normally, use the "bind" available from the context::
+
+                from alembic import op
+
+                connection = op.get_bind()
+
+                connection.execute(
+                    account.update()
+                    .where(account.c.name == "account 1")
+                    .values({"name": "account 2"})
+                )
+
+            Additionally, when passing the statement as a plain string, it is first
+            coerced into a :func:`sqlalchemy.sql.expression.text` construct
+            before being passed along.  In the less likely case that the
+            literal SQL string contains a colon, it must be escaped with a
+            backslash, as::
+
+               op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+            :param sqltext: Any legal SQLAlchemy expression, including:
+
+            * a string
+            * a :func:`sqlalchemy.sql.expression.text` construct.
+            * a :func:`sqlalchemy.sql.expression.insert` construct.
+            * a :func:`sqlalchemy.sql.expression.update` or
+              :func:`sqlalchemy.sql.expression.delete` construct.
+            * Pretty much anything that's "executable" as described
+              in :ref:`sqlexpression_toplevel`.
+
+            .. note::  When passing a plain string, the statement is coerced into
+               a :func:`sqlalchemy.sql.expression.text` construct. This construct
+               considers symbols with colons, e.g. ``:foo``, to be bound parameters.
+               To avoid this, ensure that colon symbols are escaped, e.g.
+               ``\:foo``.
+
+            :param execution_options: Optional dictionary of
+             execution options, will be passed to
+             :meth:`sqlalchemy.engine.Connection.execution_options`.
+            """  # noqa: E501
+            ...
+
+        def rename_table(
+            self,
+            old_table_name: str,
+            new_table_name: str,
+            schema: Optional[str] = None,
+        ) -> None:
+            """Emit an ALTER TABLE to rename a table.
+
+            :param old_table_name: old name.
+            :param new_table_name: new name.
+            :param schema: Optional schema name to operate within.  To control
+             quoting of the schema outside of the default behavior, use
+             the SQLAlchemy construct
+             :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+            """  # noqa: E501
+            ...
+
+        # END STUB FUNCTIONS: op_cls
+
+
+class BatchOperations(AbstractOperations):
     """Modifies the interface :class:`.Operations` for batch mode.
 
     This basically omits the ``table_name`` and ``schema`` parameters
@@ -524,3 +1501,262 @@ class BatchOperations(Operations):
             "The %s method does not apply to a batch table alter operation."
             % operation
         )
+
+    if TYPE_CHECKING:
+        # START STUB FUNCTIONS: batch_op
+        # ### the following stubs are generated by tools/write_pyi.py ###
+        # ### do not edit ###
+
+        def add_column(
+            self,
+            column: Column,
+            insert_before: Optional[str] = None,
+            insert_after: Optional[str] = None,
+        ) -> None:
+            """Issue an "add column" instruction using the current
+            batch migration context.
+
+            .. seealso::
+
+                :meth:`.Operations.add_column`
+
+            """  # noqa: E501
+            ...
+
+        def alter_column(
+            self,
+            column_name: str,
+            nullable: Optional[bool] = None,
+            comment: Union[str, Literal[False], None] = False,
+            server_default: Any = False,
+            new_column_name: Optional[str] = None,
+            type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+            existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+            existing_server_default: Union[
+                str, bool, Identity, Computed, None
+            ] = False,
+            existing_nullable: Optional[bool] = None,
+            existing_comment: Optional[str] = None,
+            insert_before: Optional[str] = None,
+            insert_after: Optional[str] = None,
+            **kw: Any,
+        ) -> None:
+            """Issue an "alter column" instruction using the current
+            batch migration context.
+
+            Parameters are the same as those of :meth:`.Operations.alter_column`,
+            as well as the following option(s):
+
+            :param insert_before: String name of an existing column which this
+             column should be placed before, when creating the new table.
+
+             .. versionadded:: 1.4.0
+
+            :param insert_after: String name of an existing column which this
+             column should be placed after, when creating the new table.  If
+             both :paramref:`.BatchOperations.alter_column.insert_before`
+             and :paramref:`.BatchOperations.alter_column.insert_after` are
+             omitted, the column is inserted after the last existing column
+             in the table.
+
+             .. versionadded:: 1.4.0
+
+            .. seealso::
+
+                :meth:`.Operations.alter_column`
+
+
+            """  # noqa: E501
+            ...
+
+        def create_check_constraint(
+            self,
+            constraint_name: str,
+            condition: Union[str, BinaryExpression, TextClause],
+            **kw: Any,
+        ) -> None:
+            """Issue a "create check constraint" instruction using the
+            current batch migration context.
+
+            The batch form of this call omits the ``source`` and ``schema``
+            arguments from the call.
+
+            .. seealso::
+
+                :meth:`.Operations.create_check_constraint`
+
+            """  # noqa: E501
+            ...
+
+        def create_exclude_constraint(
+            self, constraint_name: str, *elements: Any, **kw: Any
+        ):
+            """Issue a "create exclude constraint" instruction using the
+            current batch migration context.
+
+            .. note::  This method is PostgreSQL specific, and additionally
+               requires at least SQLAlchemy 1.0.
+
+            .. seealso::
+
+                :meth:`.Operations.create_exclude_constraint`
+
+            """  # noqa: E501
+            ...
+
+        def create_foreign_key(
+            self,
+            constraint_name: str,
+            referent_table: str,
+            local_cols: List[str],
+            remote_cols: List[str],
+            referent_schema: Optional[str] = None,
+            onupdate: Optional[str] = None,
+            ondelete: Optional[str] = None,
+            deferrable: Optional[bool] = None,
+            initially: Optional[str] = None,
+            match: Optional[str] = None,
+            **dialect_kw: Any,
+        ) -> None:
+            """Issue a "create foreign key" instruction using the
+            current batch migration context.
+
+            The batch form of this call omits the ``source`` and ``source_schema``
+            arguments from the call.
+
+            e.g.::
+
+                with batch_alter_table("address") as batch_op:
+                    batch_op.create_foreign_key(
+                        "fk_user_address",
+                        "user",
+                        ["user_id"],
+                        ["id"],
+                    )
+
+            .. seealso::
+
+                :meth:`.Operations.create_foreign_key`
+
+            """  # noqa: E501
+            ...
+
+        def create_index(
+            self, index_name: str, columns: List[str], **kw: Any
+        ) -> None:
+            """Issue a "create index" instruction using the
+            current batch migration context.
+
+            .. seealso::
+
+                :meth:`.Operations.create_index`
+
+            """  # noqa: E501
+            ...
+
+        def create_primary_key(
+            self, constraint_name: str, columns: List[str]
+        ) -> None:
+            """Issue a "create primary key" instruction using the
+            current batch migration context.
+
+            The batch form of this call omits the ``table_name`` and ``schema``
+            arguments from the call.
+
+            .. seealso::
+
+                :meth:`.Operations.create_primary_key`
+
+            """  # noqa: E501
+            ...
+
+        def create_table_comment(
+            self,
+            comment: Optional[str],
+            existing_comment: Optional[str] = None,
+        ) -> None:
+            """Emit a COMMENT ON operation to set the comment for a table
+            using the current batch migration context.
+
+            .. versionadded:: 1.6.0
+
+            :param comment: string value of the comment being registered against
+             the specified table.
+            :param existing_comment: String value of a comment
+             already registered on the specified table, used within autogenerate
+             so that the operation is reversible, but not required for direct
+             use.
+
+            """  # noqa: E501
+            ...
+
+        def create_unique_constraint(
+            self, constraint_name: str, columns: Sequence[str], **kw: Any
+        ) -> Any:
+            """Issue a "create unique constraint" instruction using the
+            current batch migration context.
+
+            The batch form of this call omits the ``source`` and ``schema``
+            arguments from the call.
+
+            .. seealso::
+
+                :meth:`.Operations.create_unique_constraint`
+
+            """  # noqa: E501
+            ...
+
+        def drop_column(self, column_name: str, **kw: Any) -> None:
+            """Issue a "drop column" instruction using the current
+            batch migration context.
+
+            .. seealso::
+
+                :meth:`.Operations.drop_column`
+
+            """  # noqa: E501
+            ...
+
+        def drop_constraint(
+            self, constraint_name: str, type_: Optional[str] = None
+        ) -> None:
+            """Issue a "drop constraint" instruction using the
+            current batch migration context.
+
+            The batch form of this call omits the ``table_name`` and ``schema``
+            arguments from the call.
+
+            .. seealso::
+
+                :meth:`.Operations.drop_constraint`
+
+            """  # noqa: E501
+            ...
+
+        def drop_index(self, index_name: str, **kw: Any) -> None:
+            """Issue a "drop index" instruction using the
+            current batch migration context.
+
+            .. seealso::
+
+                :meth:`.Operations.drop_index`
+
+            """  # noqa: E501
+            ...
+
+        def drop_table_comment(
+            self, existing_comment: Optional[str] = None
+        ) -> None:
+            """Issue a "drop table comment" operation to
+            remove an existing comment set on a table using the current
+            batch operations context.
+
+            .. versionadded:: 1.6.0
+
+            :param existing_comment: An optional string value of a comment already
+             registered on the specified table.
+
+            """  # noqa: E501
+            ...
+
+        # END STUB FUNCTIONS: batch_op
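
The batch stubs above intentionally omit the ``table_name`` and ``schema`` parameters, since both are fixed by the enclosing batch context. As a rough sketch of how these typed methods are reached from a migration script (the table and column names below are purely illustrative, not part of this change):

    import sqlalchemy as sa
    from alembic import op

    def upgrade() -> None:
        # batch_op is a BatchOperations instance, so the calls below are
        # now visible to type checkers via the generated stubs
        with op.batch_alter_table("account") as batch_op:
            batch_op.add_column(sa.Column("status", sa.String(50)))
            batch_op.alter_column(
                "name", existing_type=sa.String(100), nullable=False
            )
            batch_op.create_index("ix_account_status", ["status"])
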
index 0295ab33ff69eaf93a67e501704c7ecb6f5f2160..99d21d9eb997f68b48e133c189cc65f7f914f19c 100644 (file)
@@ -199,7 +199,7 @@ class DropConstraintOp(MigrateOperation):
         table_name: str,
         type_: Optional[str] = None,
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
 
         :param constraint_name: name of the constraint.
@@ -300,7 +300,7 @@ class CreatePrimaryKeyOp(AddConstraintOp):
         table_name: str,
         columns: List[str],
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "create primary key" instruction using the current
         migration context.
 
@@ -599,7 +599,7 @@ class CreateForeignKeyOp(AddConstraintOp):
         source_schema: Optional[str] = None,
         referent_schema: Optional[str] = None,
         **dialect_kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "create foreign key" instruction using the
         current migration context.
 
@@ -781,10 +781,10 @@ class CreateCheckConstraintOp(AddConstraintOp):
         operations: Operations,
         constraint_name: Optional[str],
         table_name: str,
-        condition: Union[str, BinaryExpression],
+        condition: Union[str, BinaryExpression, TextClause],
         schema: Optional[str] = None,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "create check constraint" instruction using the
         current migration context.
 
@@ -833,9 +833,9 @@ class CreateCheckConstraintOp(AddConstraintOp):
         cls,
         operations: BatchOperations,
         constraint_name: str,
-        condition: TextClause,
+        condition: Union[str, BinaryExpression, TextClause],
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "create check constraint" instruction using the
         current batch migration context.
 
@@ -921,7 +921,7 @@ class CreateIndexOp(MigrateOperation):
         schema: Optional[str] = None,
         unique: bool = False,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         r"""Issue a "create index" instruction using the current
         migration context.
 
@@ -977,7 +977,7 @@ class CreateIndexOp(MigrateOperation):
         index_name: str,
         columns: List[str],
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "create index" instruction using the
         current batch migration context.
 
@@ -1056,7 +1056,7 @@ class DropIndexOp(MigrateOperation):
         table_name: Optional[str] = None,
         schema: Optional[str] = None,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         r"""Issue a "drop index" instruction using the current
         migration context.
 
@@ -1084,7 +1084,7 @@ class DropIndexOp(MigrateOperation):
     @classmethod
     def batch_drop_index(
         cls, operations: BatchOperations, index_name: str, **kw: Any
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "drop index" instruction using the
         current batch migration context.
 
@@ -1182,7 +1182,7 @@ class CreateTableOp(MigrateOperation):
         table_name: str,
         *columns: SchemaItem,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> Table:
         r"""Issue a "create table" instruction using the current migration
         context.
 
@@ -1391,7 +1391,7 @@ class RenameTableOp(AlterTableOp):
         old_table_name: str,
         new_table_name: str,
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Emit an ALTER TABLE to rename a table.
 
         :param old_table_name: old name.
@@ -1433,7 +1433,7 @@ class CreateTableCommentOp(AlterTableOp):
         comment: Optional[str],
         existing_comment: Optional[str] = None,
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Emit a COMMENT ON operation to set the comment for a table.
 
         .. versionadded:: 1.0.6
@@ -1465,10 +1465,10 @@ class CreateTableCommentOp(AlterTableOp):
     @classmethod
     def batch_create_table_comment(
         cls,
-        operations,
-        comment,
-        existing_comment=None,
-    ):
+        operations: BatchOperations,
+        comment: Optional[str],
+        existing_comment: Optional[str] = None,
+    ) -> None:
         """Emit a COMMENT ON operation to set the comment for a table
         using the current batch migration context.
 
@@ -1542,7 +1542,7 @@ class DropTableCommentOp(AlterTableOp):
         table_name: str,
         existing_comment: Optional[str] = None,
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "drop table comment" operation to
         remove an existing comment set on a table.
 
@@ -1564,7 +1564,11 @@ class DropTableCommentOp(AlterTableOp):
         return operations.invoke(op)
 
     @classmethod
-    def batch_drop_table_comment(cls, operations, existing_comment=None):
+    def batch_drop_table_comment(
+        cls,
+        operations: BatchOperations,
+        existing_comment: Optional[str] = None,
+    ) -> None:
         """Issue a "drop table comment" operation to
         remove an existing comment set on a table using the current
         batch operations context.
@@ -1779,7 +1783,7 @@ class AlterColumnOp(AlterTableOp):
         existing_comment: Optional[str] = None,
         schema: Optional[str] = None,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         r"""Issue an "alter column" instruction using the
         current migration context.
 
@@ -1893,18 +1897,20 @@ class AlterColumnOp(AlterTableOp):
         operations: BatchOperations,
         column_name: str,
         nullable: Optional[bool] = None,
-        comment: Union[str, Literal[False]] = False,
-        server_default: Union[Function[Any], bool] = False,
+        comment: Optional[Union[str, Literal[False]]] = False,
+        server_default: Any = False,
         new_column_name: Optional[str] = None,
         type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
         existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
-        existing_server_default: bool = False,
+        existing_server_default: Optional[
+            Union[str, bool, Identity, Computed]
+        ] = False,
         existing_nullable: Optional[bool] = None,
         existing_comment: Optional[str] = None,
         insert_before: Optional[str] = None,
         insert_after: Optional[str] = None,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue an "alter column" instruction using the current
         batch migration context.
 
@@ -2001,7 +2007,7 @@ class AddColumnOp(AlterTableOp):
         table_name: str,
         column: Column,
         schema: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue an "add column" instruction using the current
         migration context.
 
@@ -2090,7 +2096,7 @@ class AddColumnOp(AlterTableOp):
         column: Column,
         insert_before: Optional[str] = None,
         insert_after: Optional[str] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue an "add column" instruction using the current
         batch migration context.
 
@@ -2184,7 +2190,7 @@ class DropColumnOp(AlterTableOp):
         column_name: str,
         schema: Optional[str] = None,
         **kw: Any,
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "drop column" instruction using the current
         migration context.
 
@@ -2228,7 +2234,7 @@ class DropColumnOp(AlterTableOp):
     @classmethod
     def batch_drop_column(
         cls, operations: BatchOperations, column_name: str, **kw: Any
-    ) -> Optional[Table]:
+    ) -> None:
         """Issue a "drop column" instruction using the current
         batch migration context.
 
@@ -2386,7 +2392,7 @@ class ExecuteSQLOp(MigrateOperation):
         operations: Operations,
         sqltext: Union[str, TextClause, Update],
         execution_options: Optional[dict[str, Any]] = None,
-    ) -> Optional[Table]:
+    ) -> None:
         r"""Execute the given SQL using the current migration context.
 
         The given SQL can be a plain string, e.g.::
index 2fe49573d61c22ae9c811ccf64c308a4707d205d..a5e0b5354ce9c173a90feae4be4eca34ab4a9386 100644 (file)
@@ -3,6 +3,7 @@ from __future__ import annotations
 import io
 import os
 import sys
+import typing
 from typing import Sequence
 
 from sqlalchemy.util import inspect_getfullargspec  # noqa
@@ -42,13 +43,18 @@ def importlib_metadata_get(group: str) -> Sequence[EntryPoint]:
 
 
 def formatannotation_fwdref(annotation, base_module=None):
-    """the python 3.7 _formatannotation with an extra repr() for 3rd party
-    modules"""
+    """vendored from Python 3.7"""
+    # copied over _formatannotation from SQLAlchemy 2.0
+
+    if isinstance(annotation, str):
+        return annotation
 
     if getattr(annotation, "__module__", None) == "typing":
-        return repr(annotation).replace("typing.", "")
+        return repr(annotation).replace("typing.", "").replace("~", "")
     if isinstance(annotation, type):
         if annotation.__module__ in ("builtins", base_module):
-            return annotation.__qualname__
-        return repr(annotation.__module__ + "." + annotation.__qualname__)
-    return repr(annotation)
+            return repr(annotation.__qualname__)
+        return annotation.__module__ + "." + annotation.__qualname__
+    elif isinstance(annotation, typing.TypeVar):
+        return repr(annotation).replace("~", "")
+    return repr(annotation).replace("~", "")
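
The reworked ``formatannotation_fwdref`` helper now passes string annotations through unchanged, strips the ``typing.`` prefix from typing constructs, and drops the leading ``~`` from ``TypeVar`` reprs. A small sketch of the expected behavior, with illustrative inputs only:

    from typing import Optional, TypeVar

    from alembic.util.compat import formatannotation_fwdref

    print(formatannotation_fwdref(Optional[str], None))  # "Optional[str]"
    print(formatannotation_fwdref("Connection", None))   # "Connection" (unchanged)
    print(formatannotation_fwdref(TypeVar("T"), None))   # "T" (no leading "~")
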
index efcff37178fcae892e7a144b72f78ae22466c8c6..c8d6d771c84e519f80f614e1a145a45334bbb5fc 100644 (file)
@@ -41,6 +41,9 @@ method.
 
 .. module:: alembic.operations
 
+.. autoclass:: AbstractOperations
+    :members:
+
 .. autoclass:: Operations
     :members:
 
diff --git a/docs/build/unreleased/1093.rst b/docs/build/unreleased/1093.rst
new file mode 100644 (file)
index 0000000..e4312f1
--- /dev/null
@@ -0,0 +1,19 @@
+.. change::
+    :tags: bug, typing
+    :tickets: 1093
+
+    Updated the stub generator script to also add stub method definitions for the
+    :class:`.Operations` class and the :class:`.BatchOperations` class obtained
+    from :meth:`.Operations.batch_alter_table`. As part of this change, the
+    class hierarchy of :class:`.Operations` and :class:`.BatchOperations` has
+    been rearranged on top of a common base class :class:`.AbstractOperations`
+    so that the two can be typed correctly, as :class:`.BatchOperations` uses
+    different method signatures for its operations than :class:`.Operations`.
+
+
+.. change::
+    :tags: bug, typing
+
+    Repaired the return signatures of the :class:`.Operations` methods that mostly
+    return ``None`` and were erroneously annotated as ``Optional[Table]``
+    in many cases.
\ No newline at end of file
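
With the corrected return annotations, a type checker can now flag attempts to use the nonexistent return value of these operations, while ``create_table`` keeps its real ``Table`` return. A brief sketch of the distinction (the table definition is illustrative only):

    import sqlalchemy as sa
    from alembic import op

    def upgrade() -> None:
        # create_table really does return a Table, useful e.g. for bulk_insert
        accounts = op.create_table(
            "account",
            sa.Column("id", sa.Integer, primary_key=True),
        )
        op.bulk_insert(accounts, [{"id": 1}])

        # create_index now correctly reports None, so code such as
        # op.create_index(...).name is rejected by the checker
        op.create_index("ix_account_id", "account", ["id"])
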
index d1e286ee8b3fb73bd43833fae08af82cacd97cd2..583101395dad19df5de1b3c24798c349ec1b7334 100644 (file)
@@ -4,6 +4,7 @@ import subprocess
 import sys
 
 import alembic
+from alembic.testing import combinations
 from alembic.testing import eq_
 from alembic.testing import TestBase
 
@@ -16,7 +17,7 @@ def run_command(file):
             sys.executable,
             str((_home / "tools" / "write_pyi.py").relative_to(_home)),
             "--stdout",
-            "--file",
+            "--name",
             file,
         ],
         stdout=subprocess.PIPE,
@@ -43,6 +44,14 @@ class TestStubFiles(TestBase):
         expected = file_path.read_text()
         eq_(generated, expected, compare(generated, expected))
 
+    @combinations("batch_op", "op_cls")
+    def test_operation_base_file(self, name):
+        res = run_command(name)
+        generated = res.stdout
+        file_path = Path(alembic.__file__).parent / "operations/base.py"
+        expected = file_path.read_text()
+        eq_(generated, expected, compare(generated, expected))
+
 
 def compare(actual: str, expected: str):
     diff = difflib.unified_diff(
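
The new test round-trips the generator over ``operations/base.py`` for both stub sections. When regenerating by hand, the same targets can be selected through the renamed ``--name`` option (assuming a source checkout, Python 3.11+, and the repository root as the working directory):

    # rewrite every target in place (default is --name all)
    python tools/write_pyi.py

    # print a single target, e.g. the BatchOperations stubs, to stdout
    python tools/write_pyi.py --stdout --name batch_op
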
index fa79c495beec6d00e7b5d5ce059021aeb0a04fa7..7d2487071f4122e8c0a73d30721864f6dd45b289 100644 (file)
@@ -1,4 +1,9 @@
+from __future__ import annotations
+
+from abc import abstractmethod
 from argparse import ArgumentParser
+from dataclasses import dataclass
+from dataclasses import field
 from pathlib import Path
 import re
 import sys
@@ -6,11 +11,14 @@ from tempfile import NamedTemporaryFile
 import textwrap
 import typing
 
-from mako.pygen import PythonPrinter
+from alembic.autogenerate.api import AutogenContext
+from alembic.ddl.impl import DefaultImpl
+from alembic.runtime.migration import MigrationInfo
 
 sys.path.append(str(Path(__file__).parent.parent))
 
 if True:  # avoid flake/zimports messing with the order
+    from alembic.operations.base import BatchOperations
     from alembic.operations.base import Operations
     from alembic.runtime.environment import EnvironmentContext
     from alembic.runtime.migration import MigrationContext
@@ -20,18 +28,12 @@ if True:  # avoid flake/zimports messing with the order
     from alembic.operations import ops
     import sqlalchemy as sa
 
-IGNORE_ITEMS = {
-    "op": {"context", "create_module_class_proxy"},
-    "context": {
-        "create_module_class_proxy",
-        "get_impl",
-        "requires_connection",
-    },
-}
+
 TRIM_MODULE = [
     "alembic.runtime.migration.",
     "alembic.operations.base.",
     "alembic.operations.ops.",
+    "alembic.autogenerate.api.",
     "sqlalchemy.engine.base.",
     "sqlalchemy.engine.url.",
     "sqlalchemy.sql.schema.",
@@ -41,58 +43,38 @@ TRIM_MODULE = [
     "sqlalchemy.sql.functions.",
     "sqlalchemy.sql.dml.",
 ]
-CONTEXT_MANAGERS = {"op": ["batch_alter_table"]}
-ADDITIONAL_ENV = {"MigrationContext": MigrationContext}
+ADDITIONAL_ENV = {
+    "MigrationContext": MigrationContext,
+    "AutogenContext": AutogenContext,
+    "DefaultImpl": DefaultImpl,
+    "MigrationInfo": MigrationInfo,
+}
 
 
 def generate_pyi_for_proxy(
-    cls: type,
-    progname: str,
-    source_path: Path,
-    destination_path: Path,
-    ignore_output: bool,
-    file_key: str,
+    file_info: FileInfo, destination_path: Path, ignore_output: bool
 ):
-    ignore_items = IGNORE_ITEMS.get(file_key, set())
-    context_managers = CONTEXT_MANAGERS.get(file_key, [])
     if sys.version_info < (3, 11):
         raise RuntimeError(
             "This script must be run with Python 3.11 or higher"
         )
 
+    progname = Path(sys.argv[0]).as_posix()
     # When using an absolute path on windows, this will generate the correct
     # relative path that shall be written to the top comment of the pyi file.
     if Path(progname).is_absolute():
         progname = Path(progname).relative_to(Path().cwd()).as_posix()
 
-    imports = []
-    read_imports = False
-    with open(source_path) as read_file:
-        for line in read_file:
-            if line.startswith("# ### this file stubs are generated by"):
-                read_imports = True
-            elif line.startswith("### end imports ###"):
-                read_imports = False
-                break
-            elif read_imports:
-                imports.append(line.rstrip())
+    file_info.read_file()
 
+    cls = file_info.target
     with open(destination_path, "w") as buf:
-        printer = PythonPrinter(buf)
-
-        printer.writeline(
-            f"# ### this file stubs are generated by {progname} "
-            "- do not edit ###"
-        )
-        for line in imports:
-            buf.write(line + "\n")
-        printer.writeline("### end imports ###")
-        buf.write("\n\n")
+        file_info.write_before(buf, progname)
 
         module = sys.modules[cls.__module__]
         env = {
             **typing.__dict__,
-            **sa.sql.schema.__dict__,
+            **sa.schema.__dict__,
             **sa.__dict__,
             **sa.types.__dict__,
             **ADDITIONAL_ENV,
@@ -101,39 +83,43 @@ def generate_pyi_for_proxy(
         }
 
         for name in dir(cls):
-            if name.startswith("_") or name in ignore_items:
+            if name.startswith("_") or name in file_info.ignore_items:
                 continue
             meth = getattr(cls, name, None)
             if callable(meth):
                 # If there are overloads, generate only those
                 # Do not generate the base implementation to avoid mypy errors
                 overloads = typing.get_overloads(meth)
+                is_context_manager = name in file_info.context_managers
                 if overloads:
                     # use enumerate so we can generate docs on the
                     # last overload
                     for i, ovl in enumerate(overloads, 1):
-                        _generate_stub_for_meth(
+                        text = _generate_stub_for_meth(
                             ovl,
                             cls,
-                            printer,
+                            file_info,
                             env,
-                            is_context_manager=name in context_managers,
+                            is_context_manager=is_context_manager,
                             is_overload=True,
                             base_method=meth,
                             gen_docs=(i == len(overloads)),
                         )
+                        file_info.write(buf, text)
                 else:
-                    _generate_stub_for_meth(
+                    text = _generate_stub_for_meth(
                         meth,
                         cls,
-                        printer,
+                        file_info,
                         env,
-                        is_context_manager=name in context_managers,
+                        is_context_manager=is_context_manager,
                     )
+                    file_info.write(buf, text)
             else:
-                _generate_stub_for_attr(cls, name, printer, env)
+                text = _generate_stub_for_attr(cls, name, env)
+                file_info.write(buf, text)
 
-        printer.close()
+        file_info.write_after(buf)
 
     console_scripts(
         str(destination_path),
@@ -150,7 +136,7 @@ def generate_pyi_for_proxy(
     )
 
 
-def _generate_stub_for_attr(cls, name, printer, env):
+def _generate_stub_for_attr(cls, name, env):
     try:
         annotations = typing.get_type_hints(cls, env)
     except NameError:
@@ -158,13 +144,13 @@ def _generate_stub_for_attr(cls, name, printer, env):
     type_ = annotations.get(name, "Any")
     if isinstance(type_, str) and type_[0] in "'\"":
         type_ = type_[1:-1]
-    printer.writeline(f"{name}: {type_}")
+    return f"{name}: {type_}"
 
 
 def _generate_stub_for_meth(
     fn,
     cls,
-    printer,
+    file_info,
     env,
     is_context_manager,
     is_overload=False,
@@ -185,7 +171,8 @@ def _generate_stub_for_meth(
     name_args = spec[0]
     assert name_args[0:1] == ["self"] or name_args[0:1] == ["cls"]
 
-    name_args[0:1] = []
+    if file_info.RemoveFirstArg:
+        name_args[0:1] = []
 
     def _formatannotation(annotation, base_module=None):
         if getattr(annotation, "__module__", None) == "typing":
@@ -219,8 +206,14 @@ def _generate_stub_for_meth(
 
     fn_doc = base_method.__doc__ if base_method else fn.__doc__
     has_docs = gen_docs and fn_doc is not None
-    string_prefix = "r" if chr(92) in fn_doc else ""
-    docs = f'{string_prefix}"""' + f"{fn_doc}" + '"""' if has_docs else ""
+    string_prefix = "r" if has_docs and chr(92) in fn_doc else ""
+    if has_docs:
+        noqua = " # noqa: E501" if file_info.docs_noqa_E501 else ""
+        docs = f'{string_prefix}"""{fn_doc}"""{noqua}'
+    else:
+        docs = ""
+
+    suffix = "..." if file_info.AddEllipsis and docs else ""
 
     func_text = textwrap.dedent(
         f"""
@@ -228,61 +221,199 @@ def _generate_stub_for_meth(
     {contextmanager}
     def {name}{argspec}: {"..." if not docs else ""}
         {docs}
+        {suffix}
     """
     )
 
-    printer.write_indented_block(func_text)
+    return func_text
 
 
-def run_file(
-    source_path: Path, cls_to_generate: type, stdout: bool, file_key: str
-):
-    progname = Path(sys.argv[0]).as_posix()
+def run_file(finfo: FileInfo, stdout: bool):
     if not stdout:
         generate_pyi_for_proxy(
-            cls_to_generate,
-            progname,
-            source_path=source_path,
-            destination_path=source_path,
-            ignore_output=False,
-            file_key=file_key,
+            finfo, destination_path=finfo.path, ignore_output=False
         )
     else:
-        with NamedTemporaryFile(delete=False, suffix=".pyi") as f:
+        with NamedTemporaryFile(delete=False, suffix=finfo.path.suffix) as f:
             f.close()
             f_path = Path(f.name)
             generate_pyi_for_proxy(
-                cls_to_generate,
-                progname,
-                source_path=source_path,
-                destination_path=f_path,
-                ignore_output=True,
-                file_key=file_key,
+                finfo, destination_path=f_path, ignore_output=True
             )
             sys.stdout.write(f_path.read_text())
         f_path.unlink()
 
 
 def main(args):
-    location = Path(__file__).parent.parent / "alembic"
-    if args.file in {"all", "op"}:
-        run_file(location / "op.pyi", Operations, args.stdout, "op")
-    if args.file in {"all", "context"}:
-        run_file(
-            location / "context.pyi",
-            EnvironmentContext,
-            args.stdout,
-            "context",
+    for case in cases:
+        if args.name == "all" or args.name == case.name:
+            run_file(case, args.stdout)
+
+
+@dataclass
+class FileInfo:
+    RemoveFirstArg: typing.ClassVar[bool]
+    AddEllipsis: typing.ClassVar[bool]
+
+    name: str
+    path: Path
+    target: type
+    ignore_items: set[str] = field(default_factory=set)
+    context_managers: set[str] = field(default_factory=set)
+    docs_noqa_E501: bool = field(default=False)
+
+    @abstractmethod
+    def read_file(self):
+        pass
+
+    @abstractmethod
+    def write_before(self, out: typing.IO[str], progname: str):
+        pass
+
+    @abstractmethod
+    def write(self, out: typing.IO[str], text: str):
+        pass
+
+    def write_after(self, out: typing.IO[str]):
+        pass
+
+
+@dataclass
+class StubFileInfo(FileInfo):
+    RemoveFirstArg = True
+    AddEllipsis = False
+    imports: list[str] = field(init=False)
+
+    def read_file(self):
+        imports = []
+        read_imports = False
+        with open(self.path) as read_file:
+            for line in read_file:
+                if line.startswith("# ### this file stubs are generated by"):
+                    read_imports = True
+                elif line.startswith("### end imports ###"):
+                    read_imports = False
+                    break
+                elif read_imports:
+                    imports.append(line.rstrip())
+        self.imports = imports
+
+    def write_before(self, out: typing.IO[str], progname: str):
+        self.write(
+            out,
+            f"# ### this file stubs are generated by {progname} "
+            "- do not edit ###",
+        )
+        for line in self.imports:
+            self.write(out, line)
+        self.write(out, "### end imports ###\n")
+
+    def write(self, out: typing.IO[str], text: str):
+        out.write(text)
+        out.write("\n")
+
+
+@dataclass
+class PyFileInfo(FileInfo):
+    RemoveFirstArg = False
+    AddEllipsis = True
+    indent: str = field(init=False)
+    before: list[str] = field(init=False)
+    after: list[str] = field(init=False)
+
+    def read_file(self):
+        self.before = []
+        self.after = []
+        state = "before"
+        start_text = rf"^(\s*)# START STUB FUNCTIONS: {self.name}"
+        end_text = rf"^\s*# END STUB FUNCTIONS: {self.name}"
+        with open(self.path) as read_file:
+            for line in read_file:
+                if m := re.match(start_text, line):
+                    assert state == "before"
+                    self.indent = m.group(1)
+                    self.before.append(line)
+                    state = "stubs"
+                elif m := re.match(end_text, line):
+                    assert state == "stubs"
+                    state = "after"
+                if state == "before":
+                    self.before.append(line)
+                if state == "after":
+                    self.after.append(line)
+        assert state == "after", state
+
+    def write_before(self, out: typing.IO[str], progname: str):
+        out.writelines(self.before)
+        self.write(
+            out, f"# ### the following stubs are generated by {progname} ###"
         )
+        self.write(out, "# ### do not edit ###")
+
+    def write(self, out: typing.IO[str], text: str):
+        out.write(textwrap.indent(text, self.indent))
+        out.write("\n")
+
+    def write_after(self, out: typing.IO[str]):
+        out.writelines(self.after)
 
 
+location = Path(__file__).parent.parent / "alembic"
+
+cls_ignore = {
+    "batch_alter_table",
+    "context",
+    "create_module_class_proxy",
+    "f",
+    "get_bind",
+    "get_context",
+    "implementation_for",
+    "inline_literal",
+    "invoke",
+    "register_operation",
+}
+
+cases = [
+    StubFileInfo(
+        "op",
+        location / "op.pyi",
+        Operations,
+        ignore_items={"context", "create_module_class_proxy"},
+        context_managers={"batch_alter_table"},
+    ),
+    StubFileInfo(
+        "context",
+        location / "context.pyi",
+        EnvironmentContext,
+        ignore_items={
+            "create_module_class_proxy",
+            "get_impl",
+            "requires_connection",
+        },
+    ),
+    PyFileInfo(
+        "batch_op",
+        location / "operations/base.py",
+        BatchOperations,
+        ignore_items=cls_ignore,
+        docs_noqa_E501=True,
+    ),
+    PyFileInfo(
+        "op_cls",
+        location / "operations/base.py",
+        Operations,
+        ignore_items=cls_ignore,
+        docs_noqa_E501=True,
+    ),
+]
+
 if __name__ == "__main__":
     parser = ArgumentParser()
     parser.add_argument(
-        "--file",
-        choices={"op", "context", "all"},
+        "--name",
+        choices=[fi.name for fi in cases] + ["all"],
         default="all",
-        help="Which file to generate. Default is to regenerate all files",
+        help="Which name to generate. Default is to regenerate all names",
     )
     parser.add_argument(
         "--stdout",