- deprecated metadata.connect() and threadlocalmetadata.connect() have been
removed - send the "bind" attribute to bind a metadata.
- deprecated metadata.table_iterator() method removed (use sorted_tables)
- - the "metadata" argument is removed from DefaultGenerator and subclasses.
+ - the "metadata" argument is removed from DefaultGenerator and subclasses,
+ but remains locally present on Sequence, which is a standalone construct
+ in DDL.
- Removed public mutability from Index and Constraint objects:
- ForeignKeyConstraint.append_element()
- Index.append_column()
- Table.primary_key is not assignable - use table.append_constraint(PrimaryKeyConstraint(...))
- Column.bind (get via column.table.bind)
- Column.metadata (get via column.table.metadata)
-
+ - the use_alter flag on ForeignKey is now a shortcut option for operations that
+ can be hand-constructed using the DDL() event system. A side effect of this refactor
+ is that ForeignKeyConstraint objects with use_alter=True will *not* be emitted on
+ SQLite, which does not support ALTER for foreign keys. This has no effect on SQLite's
+ behavior since SQLite does not actually honor FOREIGN KEY constraints.
+
- DDL
- the DDL() system has been greatly expanded:
- CreateTable()
- DropSequence()
- these support "on" and "execute-at()" just like
plain DDL() does.
+ - the "on" callable passed to DDL() needs to accept **kw arguments.
+ In the case of MetaData before/after create/drop, the list of
+ Table objects for which CREATE/DROP DDL is to be issued is passed
+ as the kw argument "tables". This is necessary for metadata-level
+ DDL that is dependent on the presence of specific tables.
- dialect refactor
- the "owner" keyword argument is removed from Table. Use "schema" to
def __init__(self, connection):
self.connection = connection
- def find_alterables(self, tables):
- alterables = []
- class FindAlterables(schema.SchemaVisitor):
- def visit_foreign_key_constraint(self, constraint):
- if constraint.use_alter and constraint.table in tables:
- alterables.append(constraint)
- findalterables = FindAlterables()
- for table in tables:
- for c in table.constraints:
- findalterables.traverse(c)
- return alterables
-
-
class SchemaGenerator(DDLBase):
def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
super(SchemaGenerator, self).__init__(connection, **kwargs)
else:
tables = metadata.tables.values()
collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
+
+ for listener in metadata.ddl_listeners['before-create']:
+ listener('before-create', metadata, self.connection, tables=collection)
+
for table in collection:
self.traverse_single(table)
- if self.dialect.supports_alter:
- for alterable in self.find_alterables(collection):
- self.connection.execute(schema.AddConstraint(alterable))
+
+ for listener in metadata.ddl_listeners['after-create']:
+ listener('after-create', metadata, self.connection, tables=collection)
def visit_table(self, table):
for listener in table.ddl_listeners['before-create']:
else:
tables = metadata.tables.values()
collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
- if self.dialect.supports_alter:
- for alterable in self.find_alterables(collection):
- self.connection.execute(schema.DropConstraint(alterable))
+
+ for listener in metadata.ddl_listeners['before-drop']:
+ listener('before-drop', metadata, self.connection, tables=collection)
+
for table in collection:
self.traverse_single(table)
+ for listener in metadata.ddl_listeners['after-drop']:
+ listener('after-drop', metadata, self.connection, tables=collection)
+
def _can_drop(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
def __init__(self, column, constraint=None, use_alter=False, name=None, onupdate=None,
ondelete=None, deferrable=None, initially=None, link_to_name=False):
"""
- Construct a column-level FOREIGN KEY.
+ Construct a column-level FOREIGN KEY.
+
+ The :class:`ForeignKey` object when constructed generates a :class:`ForeignKeyConstraint`
+ which is associated with the parent :class:`Table` object's collection of constraints.
:param column: A single target column for the key relationship. A :class:`Column`
object or a column name as a string: ``tablename.columnkey`` or
:param link_to_name: if True, the string name given in ``column`` is the rendered
name of the referenced column, not its locally assigned ``key``.
- :param use_alter: If True, do not emit this key as part of the CREATE TABLE
- definition. Instead, use ALTER TABLE after table creation to add
- the key. Useful for circular dependencies.
-
+ :param use_alter: passed to the underlying :class:`ForeignKeyConstraint` to indicate the
+ constraint should be generated/dropped externally from the CREATE TABLE/
+ DROP TABLE statement. See that class's constructor for details.
+
"""
self._colspec = column
__visit_name__ = 'default_generator'
- def __init__(self, for_update=False, metadata=None):
+ def __init__(self, for_update=False):
self.for_update = for_update
def _set_parent(self, column):
@property
def bind(self):
"""Return the connectable associated with this default."""
-
- return self.column.table.bind
+ if getattr(self, 'column', None):
+ return self.column.table.bind
+ else:
+ return None
def __repr__(self):
return "DefaultGenerator()"
__visit_name__ = 'sequence'
def __init__(self, name, start=None, increment=None, schema=None,
- optional=False, quote=None, **kwargs):
- super(Sequence, self).__init__(**kwargs)
+ optional=False, quote=None, metadata=None, for_update=False):
+ super(Sequence, self).__init__(for_update=for_update)
self.name = name
self.start = start
self.increment = increment
self.optional = optional
self.quote = quote
self.schema = schema
- self.kwargs = kwargs
+ self.metadata = metadata
def __repr__(self):
return "Sequence(%s)" % ', '.join(
def _set_parent(self, column):
super(Sequence, self)._set_parent(column)
column.sequence = self
-
+ self.metadata = column.table.metadata
+
+ @property
+ def bind(self):
+ if self.metadata:
+ return self.metadata.bind
+ else:
+ return None
+
def create(self, bind=None, checkfirst=True):
"""Creates this sequence in the database."""
__visit_name__ = 'constraint'
- def __init__(self, name=None, deferrable=None, initially=None):
+ def __init__(self, name=None, deferrable=None, initially=None, inline_ddl=True):
"""Create a SQL constraint.
name
initially
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
+
+ inline_ddl
+ if True, DDL for this Constraint will be generated within the span of a
+ CREATE TABLE or DROP TABLE statement, when the associated table's
+ DDL is generated. if False, no DDL is issued within that process.
+ Instead, it is expected that an AddConstraint or DropConstraint
+ construct will be used to issue DDL for this Constraint.
+ The AddConstraint/DropConstraint constructs set this flag automatically
+ as well.
"""
self.name = name
self.deferrable = deferrable
self.initially = initially
+ self.inline_ddl = inline_ddl
@property
def table(self):
"""
__visit_name__ = 'foreign_key_constraint'
- def __init__(self, columns, refcolumns, name=None, onupdate=None, ondelete=None, deferrable=None, initially=None, use_alter=False, link_to_name=False, table=None):
+ def __init__(self, columns, refcolumns, name=None, onupdate=None, ondelete=None,
+ deferrable=None, initially=None, use_alter=False, link_to_name=False, table=None):
"""Construct a composite-capable FOREIGN KEY.
:param columns: A sequence of local column names. The named columns must be defined
:param link_to_name: if True, the string name given in ``column`` is the rendered
name of the referenced column, not its locally assigned ``key``.
- :param use_alter: If True, do not emit this constraint as part of the CREATE TABLE
- definition. Instead, use ALTER TABLE after table creation to add
- the key. Useful for circular dependencies and conditional constraint generation.
+ :param use_alter: If True, do not emit the DDL for this constraint
+ as part of the CREATE TABLE definition. Instead, generate it via an
+ ALTER TABLE statement issued after the full collection of tables has been
+ created, and drop it via an ALTER TABLE statement before the full collection
+ of tables is dropped. This is shorthand for the usage of
+ :class:`AddConstraint` and :class:`DropConstraint` applied as "after-create"
+ and "before-drop" events on the MetaData object. This is normally used to
+ generate/drop constraints on objects that are mutually dependent.
"""
super(ForeignKeyConstraint, self).__init__(name, deferrable, initially)
if isinstance(col, basestring):
col = table.c[col]
fk._set_parent(col)
-
+
+ if self.use_alter:
+ def supports_alter(event, schema_item, bind, **kw):
+ return table in set(kw['tables']) and bind.dialect.supports_alter
+ AddConstraint(self, on=supports_alter).execute_at('after-create', table.metadata)
+ DropConstraint(self, on=supports_alter).execute_at('before-drop', table.metadata)
+
def copy(self, **kw):
return ForeignKeyConstraint(
[x.parent.name for x in self._elements.values()],
"""
if bind is None:
bind = _bind_or_error(self)
- # TODO!!! the listener stuff here needs to move to engine/ddl.py
- for listener in self.ddl_listeners['before-create']:
- listener('before-create', self, bind)
bind.create(self, checkfirst=checkfirst, tables=tables)
- for listener in self.ddl_listeners['after-create']:
- listener('after-create', self, bind)
def drop_all(self, bind=None, tables=None, checkfirst=True):
"""Drop all tables stored in this metadata.
"""
if bind is None:
bind = _bind_or_error(self)
- # TODO!!! the listener stuff here needs to move to engine/ddl.py
- for listener in self.ddl_listeners['before-drop']:
- listener('before-drop', self, bind)
bind.drop(self, checkfirst=checkfirst, tables=tables)
- for listener in self.ddl_listeners['after-drop']:
- listener('after-drop', self, bind)
class ThreadLocalMetaData(MetaData):
"""A MetaData variant that presents a different ``bind`` in every thread.
will be executed using the same Connection and transactional context
as the Table create/drop itself. The ``.bind`` property of this
statement is ignored.
-
+
event
One of the events defined in the schema item's ``.ddl_events``;
e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
self.schema_item = schema_item
- def __call__(self, event, schema_item, bind):
+ def __call__(self, event, schema_item, bind, **kw):
"""Execute the DDL as a ddl_listener."""
- if self._should_execute(event, schema_item, bind):
+ if self._should_execute(event, schema_item, bind, **kw):
return bind.execute(self.against(schema_item))
def _check_ddl_on(self, on):
"Expected the name of a database dialect or a callable for "
"'on' criteria, got type '%s'." % type(on).__name__)
- def _should_execute(self, event, schema_item, bind):
+ def _should_execute(self, event, schema_item, bind, **kw):
if self.on is None:
return True
elif isinstance(self.on, basestring):
return self.on == bind.engine.name
else:
- return self.on(event, schema_item, bind)
+ return self.on(event, schema_item, bind, **kw)
def bind(self):
if self._bind:
DDL('something', on='postgres')
- If a callable, it will be invoked with three positional arguments:
+ If a callable, it will be invoked with three positional arguments
+ as well as optional keyword arguments:
event
The name of the event that has triggered this DDL, such as
connection
The ``Connection`` being used for DDL execution
+ **kw
+ Keyword arguments which may be sent include:
+ tables - a list of Table objects which are to be created/
+ dropped within a MetaData.create_all() or drop_all() method
+ call.
+
If the callable returns a true value, the DDL statement will be
executed.
self._check_ddl_on(on)
self.on = on
self.bind = bind
-
+ element.inline_ddl = False
class CreateTable(_CreateDropBase):
"""Represent a CREATE TABLE statement."""
const = ", \n\t".join(
self.process(constraint) for constraint in table.constraints if constraint is not table.primary_key
+ and constraint.inline_ddl
and (not self.dialect.supports_alter or not getattr(constraint, 'use_alter', False))
)
if const:
self.schema_item = schema_item
self.bind = bind
- def before_create(self, action, schema_item, bind):
+ def before_create(self, action, schema_item, bind, **kw):
assert self.state is None
assert schema_item is self.schema_item
assert bind is self.bind
self.state = action
- def after_create(self, action, schema_item, bind):
+ def after_create(self, action, schema_item, bind, **kw):
assert self.state in ('before-create', 'skipped')
assert schema_item is self.schema_item
assert bind is self.bind
self.state = action
- def before_drop(self, action, schema_item, bind):
+ def before_drop(self, action, schema_item, bind, **kw):
assert self.state is None
assert schema_item is self.schema_item
assert bind is self.bind
self.state = action
- def after_drop(self, action, schema_item, bind):
+ def after_drop(self, action, schema_item, bind, **kw):
assert self.state in ('before-drop', 'skipped')
assert schema_item is self.schema_item
assert bind is self.bind
pg_mock = engines.mock_engine(dialect_name='postgres')
constraint = CheckConstraint('a < b',name="my_test_constraint", table=users)
-
+
+ # by placing the constraint in an Add/Drop construct,
+ # the 'inline_ddl' flag is set to False
AddConstraint(constraint, on='postgres').execute_at("after-create", users)
DropConstraint(constraint, on='postgres').execute_at("before-drop", users)
- # TODO: need to figure out how to achieve
- # finer grained control of the DDL process in a
- # consistent way.
- # Constraint should get a new flag that is not part of the constructor:
- # "manual_ddl" or similar. The flag is public but is normally
- # set automatically by DDLElement.execute_at(), so that the
- # remove() step here is not needed.
- users.constraints.remove(constraint)
-
metadata.create_all(bind=nonpg_mock)
strings = " ".join(str(x) for x in nonpg_mock.mock)
assert "my_test_constraint" not in strings
import testenv; testenv.configure_for_tests()
import pickle
-from sqlalchemy import MetaData
from testlib.sa import Table, Column, Integer, String, UniqueConstraint, \
- CheckConstraint, ForeignKey
+ CheckConstraint, ForeignKey, MetaData
+from sqlalchemy import schema
import testlib.sa as tsa
-from testlib import TestBase, ComparesTables, testing, engines
+from testlib import TestBase, ComparesTables, AssertsCompiledSQL, testing, engines
from testlib.testing import eq_
class MetaDataTest(TestBase, ComparesTables):
MetaData(testing.db), autoload=True)
-class TableOptionsTest(TestBase):
- def setUp(self):
- self.engine = engines.mock_engine()
- self.metadata = MetaData(self.engine)
-
+class TableOptionsTest(TestBase, AssertsCompiledSQL):
def test_prefixes(self):
- table1 = Table("temporary_table_1", self.metadata,
+ table1 = Table("temporary_table_1", MetaData(),
Column("col1", Integer),
prefixes = ["TEMPORARY"])
- table1.create()
- assert [str(x) for x in self.engine.mock if 'CREATE TEMPORARY TABLE' in str(x)]
- del self.engine.mock[:]
- table2 = Table("temporary_table_2", self.metadata,
+
+ self.assert_compile(
+ schema.CreateTable(table1),
+ "CREATE TEMPORARY TABLE temporary_table_1 (col1 INTEGER)"
+ )
+
+ table2 = Table("temporary_table_2", MetaData(),
Column("col1", Integer),
prefixes = ["VIRTUAL"])
- table2.create()
- assert [str(x) for x in self.engine.mock if 'CREATE VIRTUAL TABLE' in str(x)]
+ self.assert_compile(
+ schema.CreateTable(table2),
+ "CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)"
+ )
if __name__ == '__main__':
testenv.main()
"CREATE TABLE tbl (a INTEGER, b INTEGER CHECK (a < b) DEFERRABLE INITIALLY DEFERRED)"
)
+ def test_use_alter(self):
+ m = MetaData()
+ t = Table('t', m,
+ Column('a', Integer),
+ )
+
+ t2 = Table('t2', m,
+ Column('a', Integer, ForeignKey('t.a', use_alter=True, name='fk_ta')),
+ )
+
+ e = engines.mock_engine(dialect_name='postgres')
+ m.create_all(e)
+ m.drop_all(e)
+
+ e.assert_sql([
+ "CREATE TABLE t (a INTEGER)",
+ "CREATE TABLE t2 (a INTEGER)",
+ "ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)",
+ "ALTER TABLE t2 DROP CONSTRAINT fk_ta",
+ "DROP TABLE t2",
+ "DROP TABLE t",
+ ])
+
+
def test_add_drop_constraint(self):
m = MetaData()
from collections import deque
from testlib import config
from testlib.compat import _function_named, callable
+import re
class ConnectionKiller(object):
def __init__(self):
return testing_engine(url, options)
-def mock_engine(db=None, dialect_name=None):
- """Provides a mocking engine based on the current testing.db."""
+def mock_engine(dialect_name=None):
+ """Provides a mocking engine based on the current testing.db.
+
+ This is normally used to test DDL generation flow as emitted
+ by an Engine.
+
+ It should not be used in other cases, as assert_compile() and
+ assert_sql_execution() are much better choices with fewer
+ moving parts.
+
+ """
from sqlalchemy import create_engine
if not dialect_name:
- dbi = db or config.db
- dialect_name = dbi.name
+ dialect_name = config.db.name
+
buffer = []
def executor(sql, *a, **kw):
buffer.append(sql)
+ def assert_sql(stmts):
+ recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer]
+ assert recv == stmts, recv
+
engine = create_engine(dialect_name + '://',
strategy='mock', executor=executor)
assert not hasattr(engine, 'mock')
engine.mock = buffer
+ engine.assert_sql = assert_sql
return engine
class ReplayableSession(object):