template_args=None,
target_metadata=None,
include_symbol=None,
+ include_schemas=False,
compare_type=False,
compare_server_default=False,
upgrade_token="upgrades",
the two defaults on the database side to compare for equivalence.
:param include_symbol: A callable function which, given a table name
- and optional schema name, returns ``True`` or ``False``, indicating
+ and schema name (may be ``None``), returns ``True`` or ``False``, indicating
if the given table should be considered in the autogenerate sweep.
E.g.::
- def include_symbol(tablename, schema=None):
+ def include_symbol(tablename, schema):
return tablename not in ("skip_table_one", "skip_table_two")
context.configure(
include_symbol = include_symbol
)
+ To limit autogenerate to a certain set of schemas when using the
+ ``include_schemas`` option::
+
+ def include_symbol(tablename, schema):
+ return schema in (None, "schema1", "schema2")
+
+ context.configure(
+ # ...
+ include_schemas = True,
+ include_symbol = include_symbol
+ )
+
.. versionadded:: 0.3.6
+ .. versionchanged:: 0.4.0 the ``include_symbol`` callable must now
+ also accept a "schema" argument, which may be None.
+
:param upgrade_token: When autogenerate completes, the text of the
candidate upgrade operations will be present in this template
variable when ``script.py.mako`` is rendered. Defaults to
will render them using the dialect module name, i.e. ``mssql.BIT()``,
``postgresql.UUID()``.
+ :param include_schemas: If True, autogenerate will scan across
+ all schemas located by the SQLAlchemy
+ :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+ method, and include all differences in tables found across all
+ those schemas. When using this option, you may want to also
+ use the ``include_symbol`` option to specify a callable which
+ can filter the tables/schemas that get included.
+
+ .. versionadded:: 0.4.0
+
Parameters specific to individual backends:
:param mssql_batch_separator: The "batch separator" which will
"""
opts = self.context_opts
if transactional_ddl is not None:
- opts["transactional_ddl"] = transactional_ddl
+ opts["transactional_ddl"] = transactional_ddl
if output_buffer is not None:
opts["output_buffer"] = output_buffer
elif self.config.output_buffer is not None:
opts['template_args'].update(template_args)
opts['target_metadata'] = target_metadata
opts['include_symbol'] = include_symbol
+ opts['include_schemas'] = include_schemas
opts['upgrade_token'] = upgrade_token
opts['downgrade_token'] = downgrade_token
opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix
:param old_table_name: old name.
:param new_table_name: new name.
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
"""
self.impl.rename_table(
:param existing_autoincrement: Optional; the existing autoincrement
of the column. Used for MySQL's system of altering a column
that specifies ``AUTO_INCREMENT``.
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
+
"""
compiler = self.impl.dialect.statement_compiler(
:param table_name: String name of the parent table.
:param column: a :class:`sqlalchemy.schema.Column` object
representing the new column.
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
:param table_name: name of table
:param column_name: name of column
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
+
:param mssql_drop_check: Optional boolean. When ``True``, on
Microsoft SQL Server only, first
drop the CHECK constraint on the column using a
issuing DDL for this constraint.
:param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
- :param schema: Optional schema name of the source table.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
issuing DDL for this constraint.
:param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
- :param schema: Optional schema name of the source table.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
self.impl.add_constraint(
``after_create`` events when the table is being created. In
particular, the Postgresql ENUM type will emit a CREATE TYPE within
these events.
+ :param schema: Optional schema name to operate within.
:param \**kw: Other keyword arguments are passed to the underlying
:class:`.Table` object created for the command.
drop_table("accounts")
:param name: Name of the table
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
+
:param \**kw: Other keyword arguments are passed to the underlying
:class:`.Table` object created for the command.
:param tablename: name of the owning table.
:param columns: a list of string column names in the
table.
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
:param name: name of the index.
:param tablename: name of the owning table. Some
backends such as Microsoft SQL Server require this.
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
# need a dummy column name here since SQLAlchemy
:param type: optional, required on MySQL. can be
'foreignkey', 'primary', 'unique', or 'check'.
- .. versionadded:: 0.3.6 'primary' qualfier to enable
- dropping of MySQL primary key constraints.
+ .. versionadded:: 0.3.6 'primary' qualifier to enable
+ dropping of MySQL primary key constraints.
+
- :param schema: Optional, name of schema to operate within.
+ :param schema: Optional schema name to operate within.
+
+ .. versionadded:: 0.4.0
"""
t = self._table(tablename, schema=schema)
return m
-
+_default_include_symbol = lambda name, schema=None: name in ("parent", "child",
+ "user", "order", "item",
+ "address", "extra")
class AutogenTest(object):
@classmethod
template_args = {}
autogenerate._produce_migration_diffs(self.context,
template_args, set(),
- include_symbol=lambda name: name in ('sometable', 'someothertable')
+ include_symbol=lambda name, schema=None: name in ('sometable', 'someothertable')
)
eq_(
re.sub(r"u'", "'", template_args['downgrades']),
)
+class AutogenCrossSchemaTest(AutogenTest, TestCase):
+ @classmethod
+ def _get_bind(cls):
+ cls.test_schema_name = "test_schema"
+ return db_for_dialect('postgresql')
+
+ @classmethod
+ def _get_db_schema(cls):
+ m = MetaData()
+ Table('t1', m,
+ Column('x', Integer)
+ )
+ Table('t2', m,
+ Column('y', Integer),
+ schema=cls.test_schema_name
+ )
+ return m
+
+ @classmethod
+ def _get_model_schema(cls):
+ m = MetaData()
+ Table('t3', m,
+ Column('q', Integer)
+ )
+ Table('t4', m,
+ Column('z', Integer),
+ schema=cls.test_schema_name
+ )
+ return m
+
+ def test_default_schema_omitted_upgrade(self):
+ metadata = self.m2
+ connection = self.context.bind
+ diffs = []
+ autogenerate._produce_net_changes(connection, metadata, diffs,
+ self.autogen_context,
+ include_symbol=lambda n, s: n == 't3',
+ include_schemas=True
+ )
+ eq_(diffs[0][0], "add_table")
+ eq_(diffs[0][1].schema, None)
+
+ def test_alt_schema_included_upgrade(self):
+ metadata = self.m2
+ connection = self.context.bind
+ diffs = []
+ autogenerate._produce_net_changes(connection, metadata, diffs,
+ self.autogen_context,
+ include_symbol=lambda n, s: n == 't4',
+ include_schemas=True
+ )
+ eq_(diffs[0][0], "add_table")
+ eq_(diffs[0][1].schema, self.test_schema_name)
+
+ def test_default_schema_omitted_downgrade(self):
+ metadata = self.m2
+ connection = self.context.bind
+ diffs = []
+ autogenerate._produce_net_changes(connection, metadata, diffs,
+ self.autogen_context,
+ include_symbol=lambda n, s: n == 't1',
+ include_schemas=True
+ )
+ eq_(diffs[0][0], "remove_table")
+ eq_(diffs[0][1].schema, None)
+
+ def test_alt_schema_included_downgrade(self):
+ metadata = self.m2
+ connection = self.context.bind
+ diffs = []
+ autogenerate._produce_net_changes(connection, metadata, diffs,
+ self.autogen_context,
+ include_symbol=lambda n, s: n == 't2',
+ include_schemas=True
+ )
+ eq_(diffs[0][0], "remove_table")
+ eq_(diffs[0][1].schema, self.test_schema_name)
+
+
+
class AutogenerateDiffTestWSchema(AutogenTest, TestCase):
@classmethod
def _get_bind(cls):
+ cls.test_schema_name = "test_schema"
return db_for_dialect('postgresql')
@classmethod
def _get_db_schema(cls):
- return _model_one(schema='foo')
+ return _model_one(schema=cls.test_schema_name)
@classmethod
def _get_model_schema(cls):
- return _model_two(schema='foo')
+ return _model_two(schema=cls.test_schema_name)
+
def test_diffs(self):
"""test generation of diff rules"""
connection = self.context.bind
diffs = []
autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context)
+ self.autogen_context,
+ include_symbol=_default_include_symbol,
+ include_schemas=True
+ )
eq_(
diffs[0],
- ('add_table', metadata.tables['foo.item'])
+ ('add_table', metadata.tables['%s.item' % self.test_schema_name])
)
eq_(diffs[1][0], 'remove_table')
eq_(diffs[1][1].name, "extra")
eq_(diffs[2][0], "add_column")
- eq_(diffs[2][1], "foo")
+ eq_(diffs[2][1], self.test_schema_name)
eq_(diffs[2][2], "address")
- eq_(diffs[2][3], metadata.tables['foo.address'].c.street)
+ eq_(diffs[2][3], metadata.tables['%s.address' % self.test_schema_name].c.street)
eq_(diffs[3][0], "add_column")
- eq_(diffs[3][1], "foo")
+ eq_(diffs[3][1], self.test_schema_name)
eq_(diffs[3][2], "order")
- eq_(diffs[3][3], metadata.tables['foo.order'].c.user_id)
+ eq_(diffs[3][3], metadata.tables['%s.order' % self.test_schema_name].c.user_id)
eq_(diffs[4][0][0], "modify_type")
- eq_(diffs[4][0][1], "foo")
+ eq_(diffs[4][0][1], self.test_schema_name)
eq_(diffs[4][0][2], "order")
eq_(diffs[4][0][3], "amount")
eq_(repr(diffs[4][0][5]), "NUMERIC(precision=8, scale=2)")
eq_(diffs[5][3].name, 'pw')
eq_(diffs[6][0][0], "modify_default")
- eq_(diffs[6][0][1], "foo")
+ eq_(diffs[6][0][1], self.test_schema_name)
eq_(diffs[6][0][2], "user")
eq_(diffs[6][0][3], "a1")
eq_(diffs[6][0][6].arg, "x")
def test_render_nothing(self):
context = MigrationContext.configure(
- connection = self.bind.connect(),
- opts = {
- 'compare_type' : True,
- 'compare_server_default' : True,
- 'target_metadata' : self.m1,
- 'upgrade_token':"upgrades",
- 'downgrade_token':"downgrades",
+ connection=self.bind.connect(),
+ opts={
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m1,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
'alembic_module_prefix': 'op.',
'sqlalchemy_module_prefix': 'sa.',
}
)
template_args = {}
- autogenerate._produce_migration_diffs(context, template_args, set())
+ autogenerate._produce_migration_diffs(context, template_args, set(),
+ include_symbol=lambda name, schema: False
+ )
eq_(re.sub(r"u'", "'", template_args['upgrades']),
"""### commands auto generated by Alembic - please adjust! ###
pass
"""test a full render including indentation"""
template_args = {}
- autogenerate._produce_migration_diffs(self.context, template_args, set())
+ autogenerate._produce_migration_diffs(
+ self.context, template_args, set(),
+ include_symbol=_default_include_symbol,
+ include_schemas=True
+ )
eq_(re.sub(r"u'", "'", template_args['upgrades']),
"""### commands auto generated by Alembic - please adjust! ###
op.create_table('item',
sa.Column('description', sa.String(length=100), nullable=True),
sa.Column('order_id', sa.Integer(), nullable=True),
sa.CheckConstraint('len(description) > 5'),
- sa.ForeignKeyConstraint(['order_id'], ['foo.order.order_id'], ),
+ sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
sa.PrimaryKeyConstraint('id'),
- schema='foo'
+ schema='%(schema)s'
)
- op.drop_table('extra', schema='foo')
- op.add_column('address', sa.Column('street', sa.String(length=50), nullable=True), schema='foo')
- op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), schema='foo')
+ op.drop_table('extra', schema='%(schema)s')
+ op.add_column('address', sa.Column('street', sa.String(length=50), nullable=True), schema='%(schema)s')
+ op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), schema='%(schema)s')
op.alter_column('order', 'amount',
existing_type=sa.NUMERIC(precision=8, scale=2),
type_=sa.Numeric(precision=10, scale=2),
nullable=True,
existing_server_default='0::numeric',
- schema='foo')
- op.drop_column('user', 'pw', schema='foo')
+ schema='%(schema)s')
+ op.drop_column('user', 'pw', schema='%(schema)s')
op.alter_column('user', 'a1',
existing_type=sa.TEXT(),
server_default='x',
existing_nullable=True,
- schema='foo')
+ schema='%(schema)s')
op.alter_column('user', 'name',
existing_type=sa.VARCHAR(length=50),
nullable=False,
- schema='foo')
- ### end Alembic commands ###""")
+ schema='%(schema)s')
+ ### end Alembic commands ###""" % {"schema": self.test_schema_name})
eq_(re.sub(r"u'", "'", template_args['downgrades']),
"""### commands auto generated by Alembic - please adjust! ###
op.alter_column('user', 'name',
existing_type=sa.VARCHAR(length=50),
nullable=True,
- schema='foo')
+ schema='%(schema)s')
op.alter_column('user', 'a1',
existing_type=sa.TEXT(),
server_default=None,
existing_nullable=True,
- schema='foo')
- op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), nullable=True), schema='foo')
+ schema='%(schema)s')
+ op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), nullable=True), schema='%(schema)s')
op.alter_column('order', 'amount',
existing_type=sa.Numeric(precision=10, scale=2),
type_=sa.NUMERIC(precision=8, scale=2),
nullable=False,
existing_server_default='0::numeric',
- schema='foo')
- op.drop_column('order', 'user_id', schema='foo')
- op.drop_column('address', 'street', schema='foo')
+ schema='%(schema)s')
+ op.drop_column('order', 'user_id', schema='%(schema)s')
+ op.drop_column('address', 'street', schema='%(schema)s')
op.create_table('extra',
sa.Column('x', sa.CHAR(length=1), nullable=True),
sa.Column('uid', sa.INTEGER(), nullable=True),
- sa.ForeignKeyConstraint(['uid'], ['foo.user.id'], ),
+ sa.ForeignKeyConstraint(['uid'], ['%(schema)s.user.id'], name='extra_uid_fkey'),
sa.PrimaryKeyConstraint(),
- schema='foo'
+ schema='%(schema)s'
)
- op.drop_table('item', schema='foo')
- ### end Alembic commands ###""")
+ op.drop_table('item', schema='%(schema)s')
+ ### end Alembic commands ###""" % {"schema": self.test_schema_name})
class AutogenerateDiffTest(AutogenTest, TestCase):
connection = self.context.bind
diffs = []
autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context)
+ self.autogen_context,
include_symbol=_default_include_symbol
+ )
eq_(
diffs[0],
def test_render_nothing(self):
context = MigrationContext.configure(
- connection = self.bind.connect(),
- opts = {
- 'compare_type' : True,
- 'compare_server_default' : True,
- 'target_metadata' : self.m1,
- 'upgrade_token':"upgrades",
- 'downgrade_token':"downgrades",
+ connection=self.bind.connect(),
+ opts={
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m1,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
}
)
template_args = {}
connection = empty_context.bind
cls.autogen_empty_context = {
- 'imports':set(),
- 'connection':connection,
- 'dialect':connection.dialect,
- 'context':empty_context
+ 'imports': set(),
+ 'connection': connection,
+ 'dialect': connection.dialect,
+ 'context': empty_context
}
@classmethod
@requires_07
def setup_class(cls):
cls.autogen_context = {
- 'opts':{
- 'sqlalchemy_module_prefix' : 'sa.',
- 'alembic_module_prefix' : 'op.',
+ 'opts': {
+ 'sqlalchemy_module_prefix': 'sa.',
+ 'alembic_module_prefix': 'op.',
},
- 'dialect':mysql.dialect()
+ 'dialect': mysql.dialect()
}
def test_render_table_upgrade(self):