can be overridden by the ``transactional_ddl`` argument
to :meth:`.configure`
- This function requires that a :class:`.MigrationContext` has first been
- made available via :meth:`.configure`.
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
"""
return self.get_context().impl.transactional_ddl
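As a point of reference, here is a minimal sketch of how an ``env.py`` script could branch on this flag after :meth:`.configure` has been called; it assumes the accessor above is exposed to ``env.py`` as ``context.is_transactional_ddl()`` and that a ``connection`` has already been established::

    from alembic import context

    context.configure(connection=connection)

    if context.is_transactional_ddl():
        # the dialect can roll back DDL, so run every migration
        # inside a single enclosing transaction
        with connection.begin():
            context.run_migrations()
    else:
        # DDL autocommits on this backend (e.g. MySQL); run as-is
        context.run_migrations()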
"""
if self._migration_context is not None:
- return self.script._as_rev_number(self.get_context()._start_from_rev)
+ return self.script._as_rev_number(
+ self.get_context()._start_from_rev)
elif 'starting_rev' in self.context_opts:
- return self.script._as_rev_number(self.context_opts['starting_rev'])
+ return self.script._as_rev_number(
+ self.context_opts['starting_rev'])
else:
- raise util.CommandError("No starting revision argument is available.")
+ raise util.CommandError(
+ "No starting revision argument is available.")
def get_revision_argument(self):
"""Get the 'destination' revision argument.
has been configured.
"""
- return self.script._as_rev_number(self.context_opts['destination_rev'])
+ return self.script._as_rev_number(
+ self.context_opts['destination_rev'])
def get_tag_argument(self):
"""Return the value passed for the ``--tag`` argument, if any.
**kw
):
"""Configure a :class:`.MigrationContext` within this
- :class:`.EnvironmentContext` which will provide database connectivity
- and other configuration to a series of migration scripts.
+ :class:`.EnvironmentContext` which will provide database
+ connectivity and other configuration to a series of
+ migration scripts.
Many methods on :class:`.EnvironmentContext` require that
this method has been called in order to function, as they
This function is typically called from the ``env.py``
script within a migration environment. It can be called
- multiple times for an invocation. The most recent :class:`~sqlalchemy.engine.base.Connection`
+ multiple times for an invocation. The most recent
+ :class:`~sqlalchemy.engine.base.Connection`
for which it was called is the one that will be operated upon
by the next call to :meth:`.run_migrations`.
General parameters:
- :param connection: a :class:`~sqlalchemy.engine.base.Connection` to use
- for SQL execution in "online" mode. When present, is also used to
- determine the type of dialect in use.
- :param url: a string database url, or a :class:`sqlalchemy.engine.url.URL` object.
- The type of dialect to be used will be derived from this if ``connection`` is
- not passed.
- :param dialect_name: string name of a dialect, such as "postgresql", "mssql", etc.
- The type of dialect to be used will be derived from this if ``connection``
- and ``url`` are not passed.
- :param transactional_ddl: Force the usage of "transactional" DDL on or off;
- this otherwise defaults to whether or not the dialect in use supports it.
- :param output_buffer: a file-like object that will be used for textual output
- when the ``--sql`` option is used to generate SQL scripts. Defaults to
- ``sys.stdout`` if not passed here and also not present on the :class:`.Config`
- object. The value here overrides that of the :class:`.Config` object.
- :param starting_rev: Override the "starting revision" argument when using
- ``--sql`` mode.
- :param tag: a string tag for usage by custom ``env.py`` scripts. Set via
- the ``--tag`` option, can be overridden here.
+ :param connection: a :class:`~sqlalchemy.engine.base.Connection`
+ to use for SQL execution in "online" mode. When present, is
+ also used to determine the type of dialect in use.
+ :param url: a string database url, or a
+ :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if
+ ``connection`` is not passed.
+ :param dialect_name: string name of a dialect, such as
+ "postgresql", "mssql", etc.
+ The type of dialect to be used will be derived from this if
+ ``connection`` and ``url`` are not passed.
+ :param transactional_ddl: Force the usage of "transactional"
+ DDL on or off; this otherwise defaults to whether or not the
+ dialect in use supports it.
+ :param output_buffer: a file-like object that will be used
+ for textual output when the ``--sql`` option is used to
+ generate SQL scripts. Defaults to ``sys.stdout`` if not passed
+ here and also not present on the :class:`.Config` object. The
+ value here overrides that of the :class:`.Config` object.
+ :param starting_rev: Override the "starting revision" argument
+ when using ``--sql`` mode.
+ :param tag: a string tag for usage by custom ``env.py`` scripts.
+ Set via the ``--tag`` option, can be overridden here.
- Parameters specific to the autogenerate feature, when ``alembic revision``
- is run with the ``--autogenerate`` feature:
+ Parameters specific to the autogenerate feature, when
+ ``alembic revision`` is run with the ``--autogenerate`` feature:
- :param target_metadata: a :class:`sqlalchemy.schema.MetaData` object that
- will be consulted during autogeneration. The tables present will be compared against
- what is locally available on the target :class:`~sqlalchemy.engine.base.Connection`
+ :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
+ object that will be consulted during autogeneration. The
+ tables present will be compared against what is locally
+ available on the target
+ :class:`~sqlalchemy.engine.base.Connection`
to produce candidate upgrade/downgrade operations.
- :param compare_type: Indicates type comparison behavior during an autogenerate
- operation. Defaults to ``False`` which disables type comparison. Set to
- ``True`` to turn on default type comparison, which has varied accuracy depending
- on backend.
+ :param compare_type: Indicates type comparison behavior during
+ an autogenerate operation. Defaults to ``False`` which disables
+ type comparison. Set to ``True`` to turn on default type
+ comparison, which has varied accuracy depending on backend.
- To customize type comparison behavior, a callable may be specified which
- can filter type comparisons during an autogenerate operation. The format of
- this callable is::
+ To customize type comparison behavior, a callable may be
+ specified which can filter type comparisons during an
+ autogenerate operation. The format of this callable is::
def my_compare_type(context, inspected_column,
metadata_column, inspected_type, metadata_type):
``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
the local model environment.
- A return value of ``None`` indicates to allow default type comparison to
- proceed.
-
- :param compare_server_default: Indicates server default comparison behavior during
- an autogenerate operation. Defaults to ``False`` which disables server default
- comparison. Set to ``True`` to turn on server default comparison, which has
+ A return value of ``None`` indicates to allow default type
+ comparison to proceed.
+
+ :param compare_server_default: Indicates server default
+ comparison behavior during an autogenerate operation. Defaults
+ to ``False`` which disables server default comparison. Set to
+ ``True`` to turn on server default comparison, which has
varied accuracy depending on backend.
- To customize server default comparison behavior, a callable may be specified
- which can filter server default comparisons during an autogenerate operation.
- defaults during an autogenerate operation. The format of this callable is::
+ To customize server default comparison behavior, a callable may
+ be specified which can filter server default comparisons during
+ an autogenerate operation. The format of this callable is::
def my_compare_server_default(context, inspected_column,
metadata_column, inspected_default, metadata_default,
``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
the local model environment.
- A return value of ``None`` indicates to allow default server default comparison
- to proceed. Note that some backends such as Postgresql actually execute
+ A return value of ``None`` indicates to allow default server
+ default comparison to proceed. Note that some backends such as
+ Postgresql actually execute
the two defaults on the database side to compare for equivalence.
:param upgrade_token: When autogenerate completes, the text of the
candidate upgrade operations will be present in this template
- variable when ``script.py.mako`` is rendered. Defaults to ``upgrades``.
+ variable when ``script.py.mako`` is rendered. Defaults to
+ ``upgrades``.
:param downgrade_token: When autogenerate completes, the text of the
candidate downgrade operations will be present in this
template variable when ``script.py.mako`` is rendered. Defaults to
(i.e. ``op.create_table``) Defaults to "``op.``".
Can be ``None`` to indicate no prefix.
- :param sqlalchemy_module_prefix: When autogenerate refers to SQLAlchemy
- :class:`~sqlalchemy.schema.Column` or type classes, this prefix will be used
+ :param sqlalchemy_module_prefix: When autogenerate refers to
+ SQLAlchemy :class:`~sqlalchemy.schema.Column` or type classes,
+ this prefix will be used
(i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``".
Can be ``None`` to indicate no prefix.
Note that when dialect-specific types are rendered, autogenerate
Parameters specific to individual backends:
- :param mssql_batch_separator: The "batch separator" which will be placed
+ :param mssql_batch_separator: The "batch separator" which will
+ be placed
between each statement when generating offline SQL Server
- migrations. Defaults to ``GO``. Note this is in addition to the customary
+ migrations. Defaults to ``GO``. Note this is in addition to the
+ customary
semicolon ``;`` at the end of each statement; SQL Server considers
the "batch separator" to denote the end of an individual statement
execution, and cannot group certain dependent operations in
)
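To tie these parameters together, below is a hedged sketch of an ``env.py`` running in "online" mode; the engine URL, the ``myapp.models`` import, and the ``my_compare_type`` callable are illustrative placeholders, with the callable following the signature documented above (returning ``None`` defers to the default comparison)::

    from sqlalchemy import create_engine
    from alembic import context

    from myapp.models import Base      # assumed project metadata
    target_metadata = Base.metadata

    def my_compare_type(context, inspected_column, metadata_column,
                        inspected_type, metadata_type):
        # return None to fall back to the default type comparison
        return None

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    connection = engine.connect()
    try:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=my_compare_type,
        )
        context.run_migrations()
    finally:
        connection.close()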
def run_migrations(self, **kw):
- """Run migrations as determined by the current command line configuration
+ """Run migrations as determined by the current command line
+ configuration
as well as versioning information present (or not) in the current
database connection (if one is present).
The function accepts optional ``**kw`` arguments. If these are
- passed, they are sent directly to the ``upgrade()`` and ``downgrade()``
+ passed, they are sent directly to the ``upgrade()`` and
+ ``downgrade()``
functions within each target revision file. By modifying the
``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
functions accept arguments, parameters can be passed here so that
database in use, can be passed from a custom ``env.py`` script
to the migration functions.
- This function requires that a :class:`.MigrationContext` has first been
- made available via :meth:`.configure`.
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
"""
with Operations.context(self._migration_context):
function's documentation for full detail including
caveats and limitations.
- This function requires that a :class:`.MigrationContext` has first been
- made available via :meth:`.configure`.
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
"""
self.get_context().execute(sql)
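For illustration, a brief sketch of calling this from ``env.py`` once :meth:`.configure` has run; the table and column here are hypothetical::

    from alembic import context

    # emitted on the configured connection in online mode, or
    # rendered into the SQL script in --sql mode
    context.execute("UPDATE schema_info SET migrated='y'")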
def get_context(self):
"""Return the current :class:`.MigrationContext` object.
- If :meth:`.EnvironmentContext.configure` has not been called yet, raises
- an exception.
+ If :meth:`.EnvironmentContext.configure` has not been
+ called yet, raises an exception.
"""
:class:`sqlalchemy.engine.base.Connection` currently being used
to emit SQL to the database.
- This function requires that a :class:`.MigrationContext` has first been
- made available via :meth:`.configure`.
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
"""
return self.get_context().bind
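As a sketch only, an ``env.py`` could use the bind to issue ad-hoc queries against the same connection the migrations will run on; this assumes the attribute above is exposed as ``context.get_bind()`` and that the ``alembic_version`` table already exists::

    from alembic import context

    bind = context.get_bind()
    # inspect the current version row before running migrations
    row = bind.execute("SELECT version_num FROM alembic_version")
    print(row.scalar())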
self.output_buffer = opts.get("output_buffer", sys.stdout)
self._user_compare_type = opts.get('compare_type', False)
- self._user_compare_server_default = opts.get('compare_server_default', False)
+ self._user_compare_server_default = opts.get(
+ 'compare_server_default',
+ False)
self._start_from_rev = opts.get("starting_rev")
self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
This is a factory method usually called
by :meth:`.EnvironmentContext.configure`.
- :param connection: a :class:`~sqlalchemy.engine.base.Connection` to use
- for SQL execution in "online" mode. When present, is also used to
- determine the type of dialect in use.
- :param url: a string database url, or a :class:`sqlalchemy.engine.url.URL` object.
- The type of dialect to be used will be derived from this if ``connection`` is
- not passed.
- :param dialect_name: string name of a dialect, such as "postgresql", "mssql", etc.
- The type of dialect to be used will be derived from this if ``connection``
- and ``url`` are not passed.
+ :param connection: a :class:`~sqlalchemy.engine.base.Connection`
+ to use for SQL execution in "online" mode. When present,
+ is also used to determine the type of dialect in use.
+ :param url: a string database url, or a
+ :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if
+ ``connection`` is not passed.
+ :param dialect_name: string name of a dialect, such as
+ "postgresql", "mssql", etc. The type of dialect to be used will be
+ derived from this if ``connection`` and ``url`` are not passed.
:param opts: dictionary of options. Most other options
accepted by :meth:`.EnvironmentContext.configure` are passed via
this dictionary.
yield op
alembic.op._remove_proxy()
- def _foreign_key_constraint(self, name, source, referent, local_cols, remote_cols):
+ def _foreign_key_constraint(self, name, source, referent,
+ local_cols, remote_cols):
m = schema.MetaData()
t1 = schema.Table(source, m,
*[schema.Column(n, NULLTYPE) for n in local_cols])
:param type_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
type object to specify a change to the column's type.
For SQLAlchemy types that also indicate a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
the constraint is also generated.
- :param existing_type: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
type object to specify the previous type. This
is required for all MySQL column alter operations that
don't otherwise specify a new type, as well as for
column. It is also used if the type is a so-called
SQLAlchemy "schema" type which
may define a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
so that the constraint can be dropped.
:param existing_server_default: Optional; The existing
default value of the column. Required on MySQL if
"""
if existing_type:
- t = self._table(table_name, schema.Column(column_name, existing_type))
+ t = self._table(table_name,
+ schema.Column(column_name, existing_type)
+ )
for constraint in t.constraints:
if not isinstance(constraint, schema.PrimaryKeyConstraint):
self.impl.drop_constraint(constraint)
self.impl.add_constraint(constraint)
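As a concrete illustration of ``existing_type`` with a schema-level type, a hedged example (table and column names are hypothetical): the ``Boolean`` generates a CHECK constraint on some backends, which is dropped before the column is changed over to ``Integer``::

    from alembic import op
    import sqlalchemy as sa

    op.alter_column(
        "account", "is_active",
        existing_type=sa.Boolean(name="ck_account_is_active"),
        type_=sa.Integer,
    )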
def add_column(self, table_name, column):
- """Issue an "add column" instruction using the current migration context.
+ """Issue an "add column" instruction using the current
+ migration context.
e.g.::
self.impl.add_constraint(constraint)
def drop_column(self, table_name, column_name, **kw):
- """Issue a "drop column" instruction using the current migration context.
+ """Issue a "drop column" instruction using the current
+ migration context.
e.g.::
:param column_name: name of column
:param mssql_drop_check: Optional boolean. When ``True``, on
Microsoft SQL Server only, first
- drop the CHECK constraint on the column using a SQL-script-compatible
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
block that selects into a @variable from sys.check_constraints,
then exec's a separate DROP CONSTRAINT for that constraint.
:param mssql_drop_default: Optional boolean. When ``True``, on
Microsoft SQL Server only, first
- drop the DEFAULT constraint on the column using a SQL-script-compatible
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
block that selects into a @variable from sys.default_constraints,
then exec's a separate DROP CONSTRAINT for that default.
)
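A hedged example of the SQL Server-specific options described above, with hypothetical table and column names::

    from alembic import op

    # on MSSQL, drop the column's CHECK and DEFAULT constraints
    # first so that the DROP COLUMN itself can succeed
    op.drop_column(
        "account", "is_active",
        mssql_drop_check=True,
        mssql_drop_default=True,
    )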
- def create_foreign_key(self, name, source, referent, local_cols, remote_cols):
+ def create_foreign_key(self, name, source, referent, local_cols,
+ remote_cols):
"""Issue a "create foreign key" instruction using the
current migration context.
e.g.::
from alembic import op
- op.create_foreign_key("fk_user_address", "address", "user", ["user_id"], ["id"])
+ op.create_foreign_key(
+ "fk_user_address", "address",
+ "user", ["user_id"], ["id"])
This internally generates a :class:`~sqlalchemy.schema.Table` object
containing the necessary columns, then generates a new
)
def create_unique_constraint(self, name, source, local_cols, **kw):
- """Issue a "create unique constraint" instruction using the current migration context.
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
e.g.::
)
def create_check_constraint(self, name, source, condition, **kw):
- """Issue a "create check constraint" instruction using the current migration context.
+ """Issue a "create check constraint" instruction using the
+ current migration context.
e.g.::
)
:param name: Name of the table
- :param \*columns: collection of :class:`~sqlalchemy.schema.Column` objects within
- the table, as well as optional :class:`~sqlalchemy.schema.Constraint` objects
+ :param \*columns: collection of
+ :class:`~sqlalchemy.schema.Column` objects within the table,
+ as well as optional :class:`~sqlalchemy.schema.Constraint`
+ objects
and :class:`~sqlalchemy.schema.Index` objects.
- :param emit_events: if ``True``, emit ``before_create`` and ``after_create``
- events when the table is being created. In particular, the Postgresql ENUM
- type will emit a CREATE TYPE within these events.
+ :param emit_events: if ``True``, emit ``before_create`` and
+ ``after_create`` events when the table is being created. In
+ particular, the Postgresql ENUM type will emit a CREATE TYPE within
+ these events.
:param \**kw: Other keyword arguments are passed to the underlying
:class:`.Table` object created for the command.
)
def drop_table(self, name):
- """Issue a "drop table" instruction using the current migration context.
+ """Issue a "drop table" instruction using the current
+ migration context.
e.g.::
)
def create_index(self, name, tablename, *columns, **kw):
- """Issue a "create index" instruction using the current migration context.
+ """Issue a "create index" instruction using the current
+ migration context.
e.g.::
)
def drop_index(self, name):
- """Issue a "drop index" instruction using the current migration context.
+ """Issue a "drop index" instruction using the current
+ migration context.
e.g.::
self.impl.drop_constraint(const)
def bulk_insert(self, table, rows):
- """Issue a "bulk insert" operation using the current migration context.
+ """Issue a "bulk insert" operation using the current
+ migration context.
This provides a means of representing an INSERT of multiple rows
which works equally well in the context of executing on a live
bulk_insert(accounts_table,
[
- {'id':1, 'name':'John Smith', 'create_date':date(2010, 10, 5)},
- {'id':2, 'name':'Ed Williams', 'create_date':date(2007, 5, 27)},
- {'id':3, 'name':'Wendy Jones', 'create_date':date(2008, 8, 15)},
+ {'id':1, 'name':'John Smith',
+ 'create_date':date(2010, 10, 5)},
+ {'id':2, 'name':'Ed Williams',
+ 'create_date':date(2007, 5, 27)},
+ {'id':3, 'name':'Wendy Jones',
+ 'create_date':date(2008, 8, 15)},
]
)
"""
Also note that any parameterized statement here *will not work*
in offline mode - INSERT, UPDATE and DELETE statements which refer
to literal values would need to render
- inline expressions. For simple use cases, the :meth:`.inline_literal`
- function can be used for **rudimentary** quoting of string values.
- For "bulk" inserts, consider using :meth:`.bulk_insert`.
+ inline expressions. For simple use cases, the
+ :meth:`.inline_literal` function can be used for **rudimentary**
+ quoting of string values. For "bulk" inserts, consider using
+ :meth:`.bulk_insert`.
For example, to emit an UPDATE statement which is equally
compatible with both online and offline mode::
values({'name':op.inline_literal('account 2')})
)
- Note above we also used the SQLAlchemy :func:`sqlalchemy.sql.expression.table`
+ Note above we also used the SQLAlchemy
+ :func:`sqlalchemy.sql.expression.table`
and :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
ad-hoc table construct just for our UPDATE statement. A full
:class:`~sqlalchemy.schema.Table` construct of course works perfectly
* a string
* a :func:`sqlalchemy.sql.expression.text` construct.
* a :func:`sqlalchemy.sql.expression.insert` construct.
- * a :func:`sqlalchemy.sql.expression.update`, :func:`sqlalchemy.sql.expression.insert`,
+ * a :func:`sqlalchemy.sql.expression.update`,
+ :func:`sqlalchemy.sql.expression.insert`,
or :func:`sqlalchemy.sql.expression.delete` construct.
* Pretty much anything that's "executable" as described
in :ref:`sqlexpression_toplevel`.
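Putting the offline-mode caveat above into practice, a sketch of a DELETE that renders as literal SQL in both online and ``--sql`` mode; the ``account`` table construct is illustrative only::

    from sqlalchemy.sql import table, column
    from sqlalchemy import String
    from alembic import op

    account = table("account", column("name", String))

    # inline_literal() embeds the value directly into the rendered
    # statement, so no bound parameters are required
    op.execute(
        account.delete().where(
            account.c.name == op.inline_literal("account 1")
        )
    )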
downrev = script.down_revision
script = self._revision_map[downrev]
if script is None and lower is not None:
- raise util.CommandError("Couldn't find revision %s" % downrev)
+ raise util.CommandError(
+ "Couldn't find revision %s" % downrev)
def upgrade_from(self, destination, current_rev, context):
revs = self._revs(destination, current_rev)
return [
- (script.module.upgrade, script.down_revision, script.revision) for script in
- reversed(list(revs))
+ (script.module.upgrade, script.down_revision, script.revision)
+ for script in reversed(list(revs))
]
def downgrade_to(self, destination, current_rev, context):
revs = self._revs(current_rev, destination)
return [
- (script.module.downgrade, script.revision, script.down_revision) for script in
- revs
+ (script.module.downgrade, script.revision, script.down_revision)
+ for script in revs
]
def run_env(self):
"Could not determine revision id from filename %s. "
"Be sure the 'revision' variable is "
"declared inside the script (please see 'Upgrading "
- "from Alembic 0.1 to 0.2' in the documentation)." % filename)
+ "from Alembic 0.1 to 0.2' in the documentation)."
+ % filename)
else:
revision = m.group(1)
else: