from sqlalchemy import Table
from sqlalchemy.engine import Connection
+ from sqlalchemy.sql.expression import BinaryExpression
+ from sqlalchemy.sql.expression import TableClause
+ from sqlalchemy.sql.expression import TextClause
+ from sqlalchemy.sql.expression import Update
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Computed
+ from sqlalchemy.sql.schema import Identity
+ from sqlalchemy.sql.schema import SchemaItem
from sqlalchemy.types import TypeEngine
from .batch import BatchOperationsImpl
from .ops import MigrateOperation
from ..ddl import DefaultImpl
from ..runtime.migration import MigrationContext
-
__all__ = ("Operations", "BatchOperations")
-class Operations(util.ModuleClsProxy):
-
- """Define high level migration operations.
-
- Each operation corresponds to some schema migration operation,
- executed against a particular :class:`.MigrationContext`
- which in turn represents connectivity to a database,
- or a file output stream.
-
- While :class:`.Operations` is normally configured as
- part of the :meth:`.EnvironmentContext.run_migrations`
- method called from an ``env.py`` script, a standalone
- :class:`.Operations` instance can be
- made for use cases external to regular Alembic
- migrations by passing in a :class:`.MigrationContext`::
+class AbstractOperations(util.ModuleClsProxy):
+ """Base class for Operations and BatchOperations.
- from alembic.migration import MigrationContext
- from alembic.operations import Operations
-
- conn = myengine.connect()
- ctx = MigrationContext.configure(conn)
- op = Operations(ctx)
-
- op.alter_column("t", "c", nullable=True)
-
- Note that as of 0.8, most of the methods on this class are produced
- dynamically using the :meth:`.Operations.register_operation`
- method.
+ .. versionadded:: 1.11.0
"""
advanced types like dates may not be supported directly
by SQLAlchemy.
- See :meth:`.execute` for an example usage of
- :meth:`.inline_literal`.
+ See :meth:`.Operations.execute` for an example usage of
+ :meth:`.Operations.inline_literal`.
The environment can also be configured to attempt to render
"literal" values inline automatically, for those simple types
return self.migration_context.impl.bind # type: ignore[return-value]
-class BatchOperations(Operations):
+class Operations(AbstractOperations):
+ """Define high level migration operations.
+
+ Each operation corresponds to some schema migration operation,
+ executed against a particular :class:`.MigrationContext`
+ which in turn represents connectivity to a database,
+ or a file output stream.
+
+ While :class:`.Operations` is normally configured as
+ part of the :meth:`.EnvironmentContext.run_migrations`
+ method called from an ``env.py`` script, a standalone
+ :class:`.Operations` instance can be
+ made for use cases external to regular Alembic
+ migrations by passing in a :class:`.MigrationContext`::
+
+ from alembic.migration import MigrationContext
+ from alembic.operations import Operations
+
+ conn = myengine.connect()
+ ctx = MigrationContext.configure(conn)
+ op = Operations(ctx)
+
+ op.alter_column("t", "c", nullable=True)
+
+ Note that as of 0.8, most of the methods on this class are produced
+ dynamically using the :meth:`.Operations.register_operation`
+ method.
+
+ """
+
+ if TYPE_CHECKING:
+ # START STUB FUNCTIONS: op_cls
+ # ### the following stubs are generated by tools/write_pyi.py ###
+ # ### do not edit ###
+
+ def add_column(
+ self, table_name: str, column: Column, schema: Optional[str] = None
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy import Column, String
+
+ op.add_column("organization", Column("name", String()))
+
+ The :meth:`.Operations.add_column` method typically corresponds
+ to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
+ of this command, the column's name, datatype, nullability,
+ and optional server-generated defaults may be indicated.
+
+ .. note::
+
+ With the exception of NOT NULL constraints or single-column FOREIGN
+ KEY constraints, other kinds of constraints such as PRIMARY KEY,
+ UNIQUE or CHECK constraints **cannot** be generated using this
+ method; for these constraints, refer to operations such as
+ :meth:`.Operations.create_primary_key` and
+ :meth:`.Operations.create_check_constraint`. In particular, the
+ following :class:`~sqlalchemy.schema.Column` parameters are
+ **ignored**:
+
+ * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+ typically do not support an ALTER operation that can add
+ individual columns one at a time to an existing primary key
+ constraint, therefore it's less ambiguous to use the
+ :meth:`.Operations.create_primary_key` method, which assumes no
+ existing primary key constraint is present.
+ * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+ :meth:`.Operations.create_unique_constraint` method
+ * :paramref:`~sqlalchemy.schema.Column.index` - use the
+ :meth:`.Operations.create_index` method
+
+
+ The provided :class:`~sqlalchemy.schema.Column` object may include a
+ :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+ referencing a remote table name. For this specific type of constraint,
+ Alembic will automatically emit a second ALTER statement in order to
+ add the single-column FOREIGN KEY constraint separately::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ op.add_column(
+ "organization",
+ Column("account_id", INTEGER, ForeignKey("accounts.id")),
+ )
+
+ The column argument passed to :meth:`.Operations.add_column` is a
+ :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+ used in SQLAlchemy. In particular, values or functions to be indicated
+ as producing the column's default value on the database side are
+ specified using the ``server_default`` parameter, and not ``default``
+ which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the column add
+ op.add_column(
+ "account",
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def alter_column(
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ comment: Union[str, Literal[False], None] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_server_default: Union[
+ str, bool, Identity, Computed, None
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue an "alter column" instruction using the
+ current migration context.
+
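+ e.g., a brief sketch using hypothetical table and column names::
+
+ from alembic import op
+ from sqlalchemy import String
+
+ op.alter_column(
+ "user",
+ "name",
+ existing_type=String(30),
+ type_=String(100),
+ nullable=False,
+ )
+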
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_type`` argument is
+ used in this case to identify and remove a previous
+ constraint that was bound to the type object.
+
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param comment: optional string text of a new comment to add to the
+ column.
+
+ .. versionadded:: 1.0.6
+
+ :param new_column_name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+ currently understood by the MySQL dialect.
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ :param existing_autoincrement: Optional; the existing autoincrement
+ of the column. Used for MySQL's system of altering a column
+ that specifies ``AUTO_INCREMENT``.
+ :param existing_comment: string text of the existing comment on the
+ column to be maintained. Required on MySQL if the existing comment
+ on the column is not being changed.
+
+ .. versionadded:: 1.0.6
+
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param postgresql_using: String argument which will indicate a
+ SQL expression to render within the Postgresql-specific USING clause
+ within ALTER COLUMN. This string is taken directly as raw SQL which
+ must explicitly include any necessary quoting or escaping of tokens
+ within the expression.
+
+ """ # noqa: E501
+ ...
+
+ def bulk_insert(
+ self,
+ table: Union[Table, TableClause],
+ rows: List[dict],
+ multiinsert: bool = True,
+ ) -> None:
+ """Issue a "bulk insert" operation using the current
+ migration context.
+
+ This provides a means of representing an INSERT of multiple rows
+ which works equally well in the context of executing on a live
+ connection as well as that of generating a SQL script. In the
+ case of a SQL script, the values are rendered inline into the
+ statement.
+
+ e.g.::
+
+ from alembic import op
+ from datetime import date
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String, Integer, Date
+
+ # Create an ad-hoc table to use for the insert statement.
+ accounts_table = table(
+ "account",
+ column("id", Integer),
+ column("name", String),
+ column("create_date", Date),
+ )
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": date(2010, 10, 5),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": date(2007, 5, 27),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": date(2008, 8, 15),
+ },
+ ],
+ )
+
+ When using --sql mode, some datatypes may not render inline
+ automatically, such as dates and other special types. When this
+ issue is present, :meth:`.Operations.inline_literal` may be used::
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": op.inline_literal("2010-10-05"),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": op.inline_literal("2007-05-27"),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": op.inline_literal("2008-08-15"),
+ },
+ ],
+ multiinsert=False,
+ )
+
+ When using :meth:`.Operations.inline_literal` in conjunction with
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
+ in "online" (e.g. non --sql) mode, the
+ :paramref:`~.Operations.bulk_insert.multiinsert`
+ flag should be set to ``False``, which will have the effect of
+ individual INSERT statements being emitted to the database, each
+ with a distinct VALUES clause, so that the "inline" values can
+ still be rendered, rather than attempting to pass the values
+ as bound parameters.
+
+ :param table: a table object which represents the target of the INSERT.
+
+ :param rows: a list of dictionaries indicating rows.
+
+ :param multiinsert: when at its default of True and --sql mode is not
+ enabled, the INSERT statement will be executed using
+ "executemany()" style, where all elements in the list of
+ dictionaries are passed as bound parameters in a single
+ list. Setting this to False results in individual INSERT
+ statements being emitted per parameter set, and is needed
+ in those cases where non-literal values are present in the
+ parameter sets.
+
+ """ # noqa: E501
+ ...
+
+ def create_check_constraint(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ condition: Union[str, BinaryExpression, TextClause],
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy.sql import column, func
+
+ op.create_check_constraint(
+ "ck_user_name_len",
+ "user",
+ func.len(column("name")) > 5,
+ )
+
+ CHECK constraints are usually against a SQL expression, so ad-hoc
+ table metadata is usually needed. The function will convert the given
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+ to an anonymous table in order to emit the CREATE statement.
+
+ :param constraint_name: Name of the check constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param condition: SQL expression that's the condition of the
+ constraint. Can be a string or SQLAlchemy expression language
+ structure.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value>
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def create_exclude_constraint(
+ self,
+ constraint_name: str,
+ table_name: str,
+ *elements: Any,
+ **kw: Any,
+ ) -> Optional[Table]:
+ """Issue an alter to create an EXCLUDE constraint using the
+ current migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_exclude_constraint(
+ "user_excl",
+ "user",
+ ("period", "&&"),
+ ("group", "="),
+ where=("group != 'some group'"),
+ )
+
+ Note that the expressions work the same way as that of
+ the ``ExcludeConstraint`` object itself; if plain strings are
+ passed, quoting rules must be applied manually.
+
+ :param constraint_name: Name of the constraint.
+ :param table_name: String name of the source table.
+ :param elements: exclude conditions.
+ :param where: SQL expression or SQL string with optional WHERE
+ clause.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value>
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within.
+
+ """ # noqa: E501
+ ...
+
+ def create_foreign_key(
+ self,
+ constraint_name: Optional[str],
+ source_table: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ source_schema: Optional[str] = None,
+ referent_schema: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_foreign_key(
+ "fk_user_address",
+ "address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the foreign key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source_table: String name of the source table.
+ :param referent_table: String name of the destination table.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param remote_cols: a list of string column names in the
+ remote table.
+ :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+ :param ondelete: Optional string. If set, emit ON DELETE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+ :param source_schema: Optional schema name of the source table.
+ :param referent_schema: Optional schema name of the destination table.
+
+ """ # noqa: E501
+ ...
+
+ def create_index(
+ self,
+ index_name: Optional[str],
+ table_name: str,
+ columns: Sequence[Union[str, TextClause, Function[Any]]],
+ schema: Optional[str] = None,
+ unique: bool = False,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "create index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_index("ik_test", "t1", ["foo", "bar"])
+
+ Functional indexes can be produced by using the
+ :func:`sqlalchemy.sql.expression.text` construct::
+
+ from alembic import op
+ from sqlalchemy import text
+
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table.
+ :param columns: a list consisting of string column names and/or
+ :func:`~sqlalchemy.sql.expression.text` constructs.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param unique: If True, create a unique index.
+
+ :param quote:
+ Force quoting of this column's name on or off, corresponding
+ to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+ ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """ # noqa: E501
+ ...
+
+ def create_primary_key(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: List[str],
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "create primary key" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the primary key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the target table.
+ :param columns: a list of string column names to be applied to the
+ primary key constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def create_table(
+ self, table_name: str, *columns: SchemaItem, **kw: Any
+ ) -> Table:
+ r"""Issue a "create table" instruction using the current migration
+ context.
+
+ This directive receives an argument list similar to that of the
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
+ metadata::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, TIMESTAMP, Column, func
+ from alembic import op
+
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ Note that :meth:`.create_table` accepts
+ :class:`~sqlalchemy.schema.Column`
+ constructs directly from the SQLAlchemy library. In particular,
+ default values to be created on the database side are
+ specified using the ``server_default`` parameter, and not
+ ``default`` which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the "timestamp" column
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ The function also returns a newly created
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+ specification given, which is suitable for
+ immediate SQL operations, in particular
+ :meth:`.Operations.bulk_insert`::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, TIMESTAMP, Column, func
+ from alembic import op
+
+ account_table = op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ op.bulk_insert(
+ account_table,
+ [
+ {"name": "A1", "description": "account 1"},
+ {"name": "A2", "description": "account 2"},
+ ],
+ )
+
+ :param table_name: Name of the table
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+ objects within
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+ objects
+ and :class:`~sqlalchemy.schema.Index` objects.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+ to the parameters given.
+
+ """ # noqa: E501
+ ...
+
+ def create_table_comment(
+ self,
+ table_name: str,
+ comment: Optional[str],
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table.
+
+ .. versionadded:: 1.0.6
+
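+ e.g., with a hypothetical table name and comment text for illustration::
+
+ from alembic import op
+
+ op.create_table_comment(
+ "account",
+ "user billing accounts",
+ )
+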
+ :param table_name: string name of the target table.
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """ # noqa: E501
+ ...
+
+ def create_unique_constraint(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: Sequence[str],
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.UniqueConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the unique constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param columns: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value>
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def drop_column(
+ self,
+ table_name: str,
+ column_name: str,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "drop column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_column("organization", "account_id")
+
+ :param table_name: name of table
+ :param column_name: name of column
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop a single FOREIGN KEY constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from
+ sys.foreign_keys/sys.foreign_key_columns,
+ then exec's a separate DROP CONSTRAINT for that foreign key. Only
+ works if the column has exactly one FK constraint which refers to
+ it, at the moment.
+
+ """ # noqa: E501
+ ...
+
+ def drop_constraint(
+ self,
+ constraint_name: str,
+ table_name: str,
+ type_: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
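+ e.g., assuming a hypothetical foreign key constraint for illustration::
+
+ from alembic import op
+
+ op.drop_constraint("fk_address_user", "address", type_="foreignkey")
+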
+ :param constraint_name: name of the constraint.
+ :param table_name: table name.
+ :param type\_: optional, required on MySQL. can be
+ 'foreignkey', 'primary', 'unique', or 'check'.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def drop_index(
+ self,
+ index_name: str,
+ table_name: Optional[str] = None,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "drop index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_index("accounts")
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table. Some
+ backends such as Microsoft SQL Server require this.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+ ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """ # noqa: E501
+ ...
+
+ def drop_table(
+ self, table_name: str, schema: Optional[str] = None, **kw: Any
+ ) -> None:
+ r"""Issue a "drop table" instruction using the current
+ migration context.
+
+
+ e.g.::
+
+ drop_table("accounts")
+
+ :param table_name: Name of the table
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ """ # noqa: E501
+ ...
+
+ def drop_table_comment(
+ self,
+ table_name: str,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table.
+
+ .. versionadded:: 1.0.6
+
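+ e.g., with a hypothetical table name and comment text for illustration::
+
+ from alembic import op
+
+ op.drop_table_comment("account", existing_comment="user billing accounts")
+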
+ :param table_name: string name of the target table.
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ .. seealso::
+
+ :meth:`.Operations.create_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """ # noqa: E501
+ ...
+
+ def execute(
+ self,
+ sqltext: Union[str, TextClause, Update],
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ r"""Execute the given SQL using the current migration context.
+
+ The given SQL can be a plain string, e.g.::
+
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+ Or it can be any kind of Core SQL Expression construct, such as
+ below where we use an update construct::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+ from alembic import op
+
+ account = table("account", column("name", String))
+ op.execute(
+ account.update()
+ .where(account.c.name == op.inline_literal("account 1"))
+ .values({"name": op.inline_literal("account 2")})
+ )
+
+ Above, we made use of the SQLAlchemy
+ :func:`sqlalchemy.sql.expression.table` and
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+ ad-hoc table construct just for our UPDATE statement. A full
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+ fine as well, though note it's a recommended practice to at least
+ ensure the definition of a table is self-contained within the migration
+ script, rather than imported from a module that may break compatibility
+ with older migrations.
+
+ In a SQL script context, the statement is emitted directly to the
+ output stream. There is *no* return result, however, as this
+ function is oriented towards generating a change script
+ that can run in "offline" mode. Additionally, parameterized
+ statements are discouraged here, as they *will not work* in offline
+ mode. Above, we use :meth:`.inline_literal` where parameters are
+ to be used.
+
+ For full interaction with a connected database where parameters can
+ also be used normally, use the "bind" available from the context::
+
+ from alembic import op
+
+ connection = op.get_bind()
+
+ connection.execute(
+ account.update()
+ .where(account.c.name == "account 1")
+ .values({"name": "account 2"})
+ )
+
+ Additionally, when passing the statement as a plain string, it is first
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
+ before being passed along. In the less likely case that the
+ literal SQL string contains a colon, it must be escaped with a
+ backslash, as::
+
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+ :param sqltext: Any legal SQLAlchemy expression, including:
+
+ * a string
+ * a :func:`sqlalchemy.sql.expression.text` construct.
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
+ * a :func:`sqlalchemy.sql.expression.update`,
+ :func:`sqlalchemy.sql.expression.insert`,
+ or :func:`sqlalchemy.sql.expression.delete` construct.
+ * Pretty much anything that's "executable" as described
+ in :ref:`sqlexpression_toplevel`.
+
+ .. note:: when passing a plain string, the statement is coerced into
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+ To avoid this, ensure that colon symbols are escaped, e.g.
+ ``\:foo``.
+
+ :param execution_options: Optional dictionary of
+ execution options, will be passed to
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
+ """ # noqa: E501
+ ...
+
+ def rename_table(
+ self,
+ old_table_name: str,
+ new_table_name: str,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit an ALTER TABLE to rename a table.
+
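+ e.g., using hypothetical table names for illustration::
+
+ from alembic import op
+
+ op.rename_table("user_account", "account")
+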
+ :param old_table_name: old name.
+ :param new_table_name: new name.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ # END STUB FUNCTIONS: op_cls
+
+
+class BatchOperations(AbstractOperations):
"""Modifies the interface :class:`.Operations` for batch mode.
This basically omits the ``table_name`` and ``schema`` parameters
"The %s method does not apply to a batch table alter operation."
% operation
)
+
+ if TYPE_CHECKING:
+ # START STUB FUNCTIONS: batch_op
+ # ### the following stubs are generated by tools/write_pyi.py ###
+ # ### do not edit ###
+
+ def add_column(
+ self,
+ column: Column,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.add_column`
+
+ """ # noqa: E501
+ ...
+
+ def alter_column(
+ self,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ comment: Union[str, Literal[False], None] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_server_default: Union[
+ str, bool, Identity, Computed, None
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue an "alter column" instruction using the current
+ batch migration context.
+
+ Parameters are the same as that of :meth:`.Operations.alter_column`,
+ as well as the following option(s):
+
+ :param insert_before: String name of an existing column which this
+ column should be placed before, when creating the new table.
+
+ .. versionadded:: 1.4.0
+
+ :param insert_after: String name of an existing column which this
+ column should be placed after, when creating the new table. If
+ both :paramref:`.BatchOperations.alter_column.insert_before`
+ and :paramref:`.BatchOperations.alter_column.insert_after` are
+ omitted, the column is inserted after the last existing column
+ in the table.
+
+ .. versionadded:: 1.4.0
+
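+ For example, a sketch using hypothetical table and column names, placing
+ the rebuilt column after ``id``::
+
+ from alembic import op
+ from sqlalchemy import String
+
+ with op.batch_alter_table("user") as batch_op:
+ batch_op.alter_column(
+ "name",
+ existing_type=String(30),
+ nullable=False,
+ insert_after="id",
+ )
+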
+ .. seealso::
+
+ :meth:`.Operations.alter_column`
+
+
+ """ # noqa: E501
+ ...
+
+ def create_check_constraint(
+ self,
+ constraint_name: str,
+ condition: Union[str, BinaryExpression, TextClause],
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_check_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def create_exclude_constraint(
+ self, constraint_name: str, *elements: Any, **kw: Any
+ ):
+ """Issue a "create exclude constraint" instruction using the
+ current batch migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ .. seealso::
+
+ :meth:`.Operations.create_exclude_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def create_foreign_key(
+ self,
+ constraint_name: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ referent_schema: Optional[str] = None,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``source_schema``
+ arguments from the call.
+
+ e.g.::
+
+ with batch_alter_table("address") as batch_op:
+ batch_op.create_foreign_key(
+ "fk_user_address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ .. seealso::
+
+ :meth:`.Operations.create_foreign_key`
+
+ """ # noqa: E501
+ ...
+
+ def create_index(
+ self, index_name: str, columns: List[str], **kw: Any
+ ) -> None:
+ """Issue a "create index" instruction using the
+ current batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.create_index`
+
+ """ # noqa: E501
+ ...
+
+ def create_primary_key(
+ self, constraint_name: str, columns: List[str]
+ ) -> None:
+ """Issue a "create primary key" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_primary_key`
+
+ """ # noqa: E501
+ ...
+
+ def create_table_comment(
+ self,
+ comment: Optional[str],
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table
+ using the current batch migration context.
+
+ .. versionadded:: 1.6.0
+
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ """ # noqa: E501
+ ...
+
+ def create_unique_constraint(
+ self, constraint_name: str, columns: Sequence[str], **kw: Any
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_unique_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def drop_column(self, column_name: str, **kw: Any) -> None:
+ """Issue a "drop column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_column`
+
+ """ # noqa: E501
+ ...
+
+ def drop_constraint(
+ self, constraint_name: str, type_: Optional[str] = None
+ ) -> None:
+ """Issue a "drop constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def drop_index(self, index_name: str, **kw: Any) -> None:
+ """Issue a "drop index" instruction using the
+ current batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_index`
+
+ """ # noqa: E501
+ ...
+
+ def drop_table_comment(
+ self, existing_comment: Optional[str] = None
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table using the current
+ batch operations context.
+
+ .. versionadded:: 1.6.0
+
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ """ # noqa: E501
+ ...
+
+ # END STUB FUNCTIONS: batch_op
+from __future__ import annotations
+
+from abc import abstractmethod
from argparse import ArgumentParser
+from dataclasses import dataclass
+from dataclasses import field
from pathlib import Path
import re
import sys
import textwrap
import typing
-from mako.pygen import PythonPrinter
+from alembic.autogenerate.api import AutogenContext
+from alembic.ddl.impl import DefaultImpl
+from alembic.runtime.migration import MigrationInfo
sys.path.append(str(Path(__file__).parent.parent))
if True: # avoid flake/zimports messing with the order
+ from alembic.operations.base import BatchOperations
from alembic.operations.base import Operations
from alembic.runtime.environment import EnvironmentContext
from alembic.runtime.migration import MigrationContext
from alembic.operations import ops
import sqlalchemy as sa
-IGNORE_ITEMS = {
- "op": {"context", "create_module_class_proxy"},
- "context": {
- "create_module_class_proxy",
- "get_impl",
- "requires_connection",
- },
-}
+
TRIM_MODULE = [
"alembic.runtime.migration.",
"alembic.operations.base.",
"alembic.operations.ops.",
+ "alembic.autogenerate.api.",
"sqlalchemy.engine.base.",
"sqlalchemy.engine.url.",
"sqlalchemy.sql.schema.",
"sqlalchemy.sql.functions.",
"sqlalchemy.sql.dml.",
]
-CONTEXT_MANAGERS = {"op": ["batch_alter_table"]}
-ADDITIONAL_ENV = {"MigrationContext": MigrationContext}
+ADDITIONAL_ENV = {
+ "MigrationContext": MigrationContext,
+ "AutogenContext": AutogenContext,
+ "DefaultImpl": DefaultImpl,
+ "MigrationInfo": MigrationInfo,
+}
def generate_pyi_for_proxy(
- cls: type,
- progname: str,
- source_path: Path,
- destination_path: Path,
- ignore_output: bool,
- file_key: str,
+ file_info: FileInfo, destination_path: Path, ignore_output: bool
):
- ignore_items = IGNORE_ITEMS.get(file_key, set())
- context_managers = CONTEXT_MANAGERS.get(file_key, [])
if sys.version_info < (3, 11):
raise RuntimeError(
"This script must be run with Python 3.11 or higher"
)
+ progname = Path(sys.argv[0]).as_posix()
# When using an absolute path on windows, this will generate the correct
# relative path that shall be written to the top comment of the pyi file.
if Path(progname).is_absolute():
progname = Path(progname).relative_to(Path().cwd()).as_posix()
- imports = []
- read_imports = False
- with open(source_path) as read_file:
- for line in read_file:
- if line.startswith("# ### this file stubs are generated by"):
- read_imports = True
- elif line.startswith("### end imports ###"):
- read_imports = False
- break
- elif read_imports:
- imports.append(line.rstrip())
+ file_info.read_file()
+ cls = file_info.target
with open(destination_path, "w") as buf:
- printer = PythonPrinter(buf)
-
- printer.writeline(
- f"# ### this file stubs are generated by {progname} "
- "- do not edit ###"
- )
- for line in imports:
- buf.write(line + "\n")
- printer.writeline("### end imports ###")
- buf.write("\n\n")
+ file_info.write_before(buf, progname)
module = sys.modules[cls.__module__]
env = {
**typing.__dict__,
- **sa.sql.schema.__dict__,
+ **sa.schema.__dict__,
**sa.__dict__,
**sa.types.__dict__,
**ADDITIONAL_ENV,
}
for name in dir(cls):
- if name.startswith("_") or name in ignore_items:
+ if name.startswith("_") or name in file_info.ignore_items:
continue
meth = getattr(cls, name, None)
if callable(meth):
# If there are overloads, generate only those
# Do not generate the base implementation to avoid mypy errors
overloads = typing.get_overloads(meth)
+ is_context_manager = name in file_info.context_managers
if overloads:
# use enumerate so we can generate docs on the
# last overload
for i, ovl in enumerate(overloads, 1):
- _generate_stub_for_meth(
+ text = _generate_stub_for_meth(
ovl,
cls,
- printer,
+ file_info,
env,
- is_context_manager=name in context_managers,
+ is_context_manager=is_context_manager,
is_overload=True,
base_method=meth,
gen_docs=(i == len(overloads)),
)
+ file_info.write(buf, text)
else:
- _generate_stub_for_meth(
+ text = _generate_stub_for_meth(
meth,
cls,
- printer,
+ file_info,
env,
- is_context_manager=name in context_managers,
+ is_context_manager=is_context_manager,
)
+ file_info.write(buf, text)
else:
- _generate_stub_for_attr(cls, name, printer, env)
+ text = _generate_stub_for_attr(cls, name, env)
+ file_info.write(buf, text)
- printer.close()
+ file_info.write_after(buf)
console_scripts(
str(destination_path),
)
-def _generate_stub_for_attr(cls, name, printer, env):
+def _generate_stub_for_attr(cls, name, env):
try:
annotations = typing.get_type_hints(cls, env)
except NameError:
type_ = annotations.get(name, "Any")
if isinstance(type_, str) and type_[0] in "'\"":
type_ = type_[1:-1]
- printer.writeline(f"{name}: {type_}")
+ return f"{name}: {type_}"
def _generate_stub_for_meth(
fn,
cls,
- printer,
+ file_info,
env,
is_context_manager,
is_overload=False,
name_args = spec[0]
assert name_args[0:1] == ["self"] or name_args[0:1] == ["cls"]
- name_args[0:1] = []
+ if file_info.RemoveFirstArg:
+ name_args[0:1] = []
def _formatannotation(annotation, base_module=None):
if getattr(annotation, "__module__", None) == "typing":
fn_doc = base_method.__doc__ if base_method else fn.__doc__
has_docs = gen_docs and fn_doc is not None
- string_prefix = "r" if chr(92) in fn_doc else ""
- docs = f'{string_prefix}"""' + f"{fn_doc}" + '"""' if has_docs else ""
+ string_prefix = "r" if has_docs and chr(92) in fn_doc else ""
+ if has_docs:
+ noqa = " # noqa: E501" if file_info.docs_noqa_E501 else ""
+ docs = f'{string_prefix}"""{fn_doc}"""{noqa}'
+ else:
+ docs = ""
+
+ suffix = "..." if file_info.AddEllipsis and docs else ""
func_text = textwrap.dedent(
f"""
{contextmanager}
def {name}{argspec}: {"..." if not docs else ""}
{docs}
+ {suffix}
"""
)
- printer.write_indented_block(func_text)
+ return func_text
-def run_file(
- source_path: Path, cls_to_generate: type, stdout: bool, file_key: str
-):
- progname = Path(sys.argv[0]).as_posix()
+def run_file(finfo: FileInfo, stdout: bool):
if not stdout:
generate_pyi_for_proxy(
- cls_to_generate,
- progname,
- source_path=source_path,
- destination_path=source_path,
- ignore_output=False,
- file_key=file_key,
+ finfo, destination_path=finfo.path, ignore_output=False
)
else:
- with NamedTemporaryFile(delete=False, suffix=".pyi") as f:
+ with NamedTemporaryFile(delete=False, suffix=finfo.path.suffix) as f:
f.close()
f_path = Path(f.name)
generate_pyi_for_proxy(
- cls_to_generate,
- progname,
- source_path=source_path,
- destination_path=f_path,
- ignore_output=True,
- file_key=file_key,
+ finfo, destination_path=f_path, ignore_output=True
)
sys.stdout.write(f_path.read_text())
f_path.unlink()
def main(args):
- location = Path(__file__).parent.parent / "alembic"
- if args.file in {"all", "op"}:
- run_file(location / "op.pyi", Operations, args.stdout, "op")
- if args.file in {"all", "context"}:
- run_file(
- location / "context.pyi",
- EnvironmentContext,
- args.stdout,
- "context",
+ for case in cases:
+ if args.name == "all" or args.name == case.name:
+ run_file(case, args.stdout)
+
+
+@dataclass
+class FileInfo:
+ RemoveFirstArg: typing.ClassVar[bool]
+ AddEllipsis: typing.ClassVar[bool]
+
+ name: str
+ path: Path
+ target: type
+ ignore_items: set[str] = field(default_factory=set)
+ context_managers: set[str] = field(default_factory=set)
+ docs_noqa_E501: bool = field(default=False)
+
+ @abstractmethod
+ def read_file(self):
+ pass
+
+ @abstractmethod
+ def write_before(self, out: typing.IO[str], progname: str):
+ pass
+
+ @abstractmethod
+ def write(self, out: typing.IO[str], text: str):
+ pass
+
+ def write_after(self, out: typing.IO[str]):
+ pass
+
+
+@dataclass
+class StubFileInfo(FileInfo):
+ RemoveFirstArg = True
+ AddEllipsis = False
+ imports: list[str] = field(init=False)
+
+ def read_file(self):
+ imports = []
+ read_imports = False
+ with open(self.path) as read_file:
+ for line in read_file:
+ if line.startswith("# ### this file stubs are generated by"):
+ read_imports = True
+ elif line.startswith("### end imports ###"):
+ read_imports = False
+ break
+ elif read_imports:
+ imports.append(line.rstrip())
+ self.imports = imports
+
+ def write_before(self, out: typing.IO[str], progname: str):
+ self.write(
+ out,
+ f"# ### this file stubs are generated by {progname} "
+ "- do not edit ###",
+ )
+ for line in self.imports:
+ self.write(out, line)
+ self.write(out, "### end imports ###\n")
+
+ def write(self, out: typing.IO[str], text: str):
+ out.write(text)
+ out.write("\n")
+
+
+@dataclass
+class PyFileInfo(FileInfo):
+ RemoveFirstArg = False
+ AddEllipsis = True
+ indent: str = field(init=False)
+ before: list[str] = field(init=False)
+ after: list[str] = field(init=False)
+
+ def read_file(self):
+ self.before = []
+ self.after = []
+ state = "before"
+ start_text = rf"^(\s*)# START STUB FUNCTIONS: {self.name}"
+ end_text = rf"^\s*# END STUB FUNCTIONS: {self.name}"
+ with open(self.path) as read_file:
+ for line in read_file:
+ if m := re.match(start_text, line):
+ assert state == "before"
+ self.indent = m.group(1)
+ self.before.append(line)
+ state = "stubs"
+ elif m := re.match(end_text, line):
+ assert state == "stubs"
+ state = "after"
+ if state == "before":
+ self.before.append(line)
+ if state == "after":
+ self.after.append(line)
+ assert state == "after", state
+
+ def write_before(self, out: typing.IO[str], progname: str):
+ out.writelines(self.before)
+ self.write(
+ out, f"# ### the following stubs are generated by {progname} ###"
)
+ self.write(out, "# ### do not edit ###")
+
+ def write(self, out: typing.IO[str], text: str):
+ out.write(textwrap.indent(text, self.indent))
+ out.write("\n")
+
+ def write_after(self, out: typing.IO[str]):
+ out.writelines(self.after)
+location = Path(__file__).parent.parent / "alembic"
+
+cls_ignore = {
+ "batch_alter_table",
+ "context",
+ "create_module_class_proxy",
+ "f",
+ "get_bind",
+ "get_context",
+ "implementation_for",
+ "inline_literal",
+ "invoke",
+ "register_operation",
+}
+
+cases = [
+ StubFileInfo(
+ "op",
+ location / "op.pyi",
+ Operations,
+ ignore_items={"context", "create_module_class_proxy"},
+ context_managers={"batch_alter_table"},
+ ),
+ StubFileInfo(
+ "context",
+ location / "context.pyi",
+ EnvironmentContext,
+ ignore_items={
+ "create_module_class_proxy",
+ "get_impl",
+ "requires_connection",
+ },
+ ),
+ PyFileInfo(
+ "batch_op",
+ location / "operations/base.py",
+ BatchOperations,
+ ignore_items=cls_ignore,
+ docs_noqa_E501=True,
+ ),
+ PyFileInfo(
+ "op_cls",
+ location / "operations/base.py",
+ Operations,
+ ignore_items=cls_ignore,
+ docs_noqa_E501=True,
+ ),
+]
+
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument(
- "--file",
- choices={"op", "context", "all"},
+ "--name",
+ choices=[fi.name for fi in cases] + ["all"],
default="all",
- help="Which file to generate. Default is to regenerate all files",
+ help="Which name to generate. Default is to regenerate all names",
)
parser.add_argument(
"--stdout",