--- /dev/null
+Generic single-database configuration with an async dbapi.
\ No newline at end of file
--- /dev/null
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ${script_location}
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat ${script_location}/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks=black
+# black.type=console_scripts
+# black.entrypoint=black
+# black.options=-l 79
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
--- /dev/null
+import asyncio
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = None
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def do_run_migrations(connection):
+ context.configure(connection=connection, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+async def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = AsyncEngine(
+ engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ future=True,
+ )
+ )
+
+ async with connectable.connect() as connection:
+ await connection.run_sync(do_run_migrations)
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ asyncio.run(run_migrations_online())
--- /dev/null
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
+
+Using Asyncio with Alembic
+==========================
+
+SQLAlchemy version 1.4 introduced experimental support for asyncio, allowing
+use of most of its interface from async applications. Alembic does not
+currently provide an async API directly, but it can use a SQLAlchemy async
+engine to run migrations and autogenerate revisions.
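+
+The async support requires SQLAlchemy 1.4 or later along with an async driver;
+for example, asyncpg together with SQLAlchemy's asyncio extras can typically
+be installed with (package names below are the usual PyPI ones)::
+
+    pip install sqlalchemy[asyncio] asyncpg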
+
+New configurations can use the "async" template to bootstrap an environment
+that works with an async DBAPI such as asyncpg, by running the command::
+
+ alembic init -t async <script_directory_here>
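+
+For example, when targeting PostgreSQL through the asyncpg driver, the
+``sqlalchemy.url`` setting in the generated ``alembic.ini`` would use the
+async dialect name (user, host and database name below are placeholders)::
+
+    sqlalchemy.url = postgresql+asyncpg://user:pass@localhost/dbname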
+
+Existing configurations can be updated to use an async DBAPI by updating the
+``env.py`` file that Alembic uses to start its operations. In particular, only
+``run_migrations_online`` needs to be updated to look like the example below::
+
+ import asyncio
+
+ # ... no change required to the rest of the code
+
+
+ def do_run_migrations(connection):
+ context.configure(connection=connection, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+ async def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = AsyncEngine(
+ engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ future=True,
+ )
+ )
+
+ async with connectable.connect() as connection:
+ await connection.run_sync(do_run_migrations)
+
+
+ if context.is_offline_mode():
+ run_migrations_offline()
+ else:
+ asyncio.run(run_migrations_online())
+
+An async application can also interact with the Alembic API directly by using
+the SQLAlchemy ``run_sync`` method to adapt Alembic's non-async API to an
+async consumer.
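+
+A minimal sketch of that approach is shown below; it assumes the project's
+``env.py`` has been adapted to look for an existing connection in
+``config.attributes``, and the database URL, the ``alembic.ini`` path and the
+``"head"`` target are placeholders::
+
+    import asyncio
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    from alembic import command
+    from alembic.config import Config
+
+
+    def run_upgrade(connection, cfg):
+        # hand the already-established sync connection to env.py through
+        # config.attributes, then invoke the blocking command API
+        cfg.attributes["connection"] = connection
+        command.upgrade(cfg, "head")
+
+
+    async def run_async_upgrade():
+        # placeholder URL; any async driver supported by SQLAlchemy works
+        engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/dbname")
+        async with engine.begin() as conn:
+            # run_sync() calls run_upgrade() with a plain sync Connection,
+            # adapting Alembic's non-async API to the async engine
+            await conn.run_sync(run_upgrade, Config("alembic.ini"))
+
+
+    asyncio.run(run_async_upgrade())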
--- /dev/null
+.. change::
+ :tags: template
+ :ticket: 805
+
+    Add an async template to Alembic to bootstrap environments that use
+    an async DBAPI. Updated the cookbook to include a migration guide on
+    how to adapt an existing environment for use with an async DBAPI.