from generating do-nothing events for all the methods
not overridden. [ticket:2052]
+- sql
+ - The Sequence() construct, when passed a MetaData()
+ object via its 'metadata' argument, will be
+ included in CREATE/DROP statements within
+ metadata.create_all() and metadata.drop_all(),
+ including "checkfirst" logic. [ticket:2055]
+
0.7.0b1
=======
- Detailed descriptions of each change below are
seq = Sequence('some_sequence')
nextid = connection.execute(seq)
-Default Geneation API Constructs
---------------------------------
+Default Generation API Constructs
+---------------------------------
.. autoclass:: ColumnDefault
:show-inheritance:
self.preparer = dialect.identifier_preparer
self.dialect = dialect
- def _can_create(self, table):
+ def _can_create_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or not self.dialect.has_table(self.connection, table.name, schema=table.schema)
+ return not self.checkfirst or \
+ not self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_create_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ (
+ (not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (
+ not self.checkfirst or
+ not self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema)
+ )
+ )
def visit_metadata(self, metadata):
if self.tables:
tables = self.tables
else:
tables = metadata.tables.values()
- collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
+ collection = [t for t in sql_util.sort_tables(tables)
+ if self._can_create_table(t)]
+ seq_coll = [s for s in metadata._sequences
+ if s.column is None and self._can_create_sequence(s)]
metadata.dispatch.before_create(metadata, self.connection,
- tables=collection)
+ tables=collection,
+ checkfirst=self.checkfirst)
+
+ for seq in seq_coll:
+ self.traverse_single(seq, create_ok=True)
for table in collection:
self.traverse_single(table, create_ok=True)
metadata.dispatch.after_create(metadata, self.connection,
- tables=collection)
+ tables=collection,
+ checkfirst=self.checkfirst)
def visit_table(self, table, create_ok=False):
- if not create_ok and not self._can_create(table):
+ if not create_ok and not self._can_create_table(table):
return
- table.dispatch.before_create(table, self.connection)
+ table.dispatch.before_create(table, self.connection,
+ checkfirst=self.checkfirst)
for column in table.columns:
if column.default is not None:
for index in table.indexes:
self.traverse_single(index)
- table.dispatch.after_create(table, self.connection)
+ table.dispatch.after_create(table, self.connection,
+ checkfirst=self.checkfirst)
- def visit_sequence(self, sequence):
- if self.dialect.supports_sequences:
- if ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- not self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
- self.connection.execute(schema.CreateSequence(sequence))
+ def visit_sequence(self, sequence, create_ok=False):
+ if not create_ok and not self._can_create_sequence(sequence):
+ return
+ self.connection.execute(schema.CreateSequence(sequence))
def visit_index(self, index):
self.connection.execute(schema.CreateIndex(index))
tables = self.tables
else:
tables = metadata.tables.values()
- collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
+ collection = [t for t in reversed(sql_util.sort_tables(tables))
+ if self._can_drop_table(t)]
+ seq_coll = [s for s in metadata._sequences
+ if s.column is None and self._can_drop_sequence(s)]
metadata.dispatch.before_drop(metadata, self.connection,
- tables=collection)
+ tables=collection,
+ checkfirst=self.checkfirst)
for table in collection:
self.traverse_single(table, drop_ok=True)
+ for seq in seq_coll:
+ self.traverse_single(seq, drop_ok=True)
+
metadata.dispatch.after_drop(metadata, self.connection,
- tables=collection)
+ tables=collection,
+ checkfirst=self.checkfirst)
- def _can_drop(self, table):
+ def _can_drop_table(self, table):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection, table.name, schema=table.schema)
+ return not self.checkfirst or self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_drop_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ ((not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (not self.checkfirst or
+ self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema))
+ )
def visit_index(self, index):
self.connection.execute(schema.DropIndex(index))
def visit_table(self, table, drop_ok=False):
- if not drop_ok and not self._can_drop(table):
+ if not drop_ok and not self._can_drop_table(table):
return
- table.dispatch.before_drop(table, self.connection)
+ table.dispatch.before_drop(table, self.connection,
+ checkfirst=self.checkfirst)
for column in table.columns:
if column.default is not None:
self.connection.execute(schema.DropTable(table))
- table.dispatch.after_drop(table, self.connection)
+ table.dispatch.after_drop(table, self.connection,
+ checkfirst=self.checkfirst)
- def visit_sequence(self, sequence):
- if self.dialect.supports_sequences:
- if ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
- self.connection.execute(schema.DropSequence(sequence))
+ def visit_sequence(self, sequence, drop_ok=False):
+ if not drop_ok and not self._can_drop_sequence(sequence):
+ return
+ self.connection.execute(schema.DropSequence(sequence))
is_sequence = False
is_server_default = False
+ column = None
def __init__(self, for_update=False):
self.for_update = for_update
return "ColumnDefault(%r)" % self.arg
class Sequence(DefaultGenerator):
- """Represents a named database sequence."""
+ """Represents a named database sequence.
+
+ The :class:`.Sequence` object represents the name and configurational
+ parameters of a database sequence. It also represents
+ a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
+ or :class:`.Connection`, rendering the appropriate "next value" function
+ for the target database and returning a result.
+
+ The :class:`.Sequence` is typically associated with a primary key column::
+
+ some_table = Table('some_table', metadata,
+ Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
+ )
+
+ When CREATE TABLE is emitted for the above :class:`.Table`, if the
+ target platform supports sequences, a CREATE SEQUENCE statement will
+ be emitted as well. For platforms that don't support sequences,
+ the :class:`.Sequence` construct is ignored.
+
+ See also: :class:`.CreateSequence` :class:`.DropSequence`
+
+ """
__visit_name__ = 'sequence'
is_sequence = True
def __init__(self, name, start=None, increment=None, schema=None,
- optional=False, quote=None, metadata=None, for_update=False):
+ optional=False, quote=None, metadata=None,
+ for_update=False):
+ """Construct a :class:`.Sequence` object.
+
+ :param name: The name of the sequence.
+ :param start: the starting index of the sequence. This value is
+ used when the CREATE SEQUENCE command is emitted to the database
+ as the value of the "START WITH" clause. If ``None``, the
+ clause is omitted, which on most platforms indicates a starting
+ value of 1.
+ :param increment: the increment value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "INCREMENT BY" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates an
+ increment of 1.
+ :param schema: Optional schema name for the sequence, if located
+ in a schema other than the default.
+ :param optional: boolean value, when ``True``, indicates that this
+ :class:`.Sequence` object only needs to be explicitly generated
+ on backends that don't provide another way to generate primary
+ key identifiers. Currently, it essentially means, "don't create
+ this sequence on the Postgresql backend, where the SERIAL keyword
+ creates a sequence for us automatically".
+ :param quote: boolean value, when ``True`` or ``False``, explicitly
+     forces quoting of the sequence name on or off.  When left at its
+ default of ``None``, normal quoting rules based on casing and reserved
+ words take place.
+ :param metadata: optional :class:`.MetaData` object which will be
+ associated with this :class:`.Sequence`. A :class:`.Sequence`
+ that is associated with a :class:`.MetaData` gains access to the
+ ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create`
+ and :meth:`.Sequence.drop` methods will make usage of that engine
+ automatically. Additionally, the appropriate CREATE SEQUENCE/
+ DROP SEQUENCE DDL commands will be emitted corresponding to this
+ :class:`.Sequence` when :meth:`.MetaData.create_all` and
+ :meth:`.MetaData.drop_all` are invoked (new in 0.7).
+
+ Note that when a :class:`.Sequence` is applied to a :class:`.Column`,
+ the :class:`.Sequence` is automatically associated with the
+ :class:`.MetaData` object of that column's parent :class:`.Table`,
+ when that association is made. The :class:`.Sequence` will then
+ be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding
+ to when the :class:`.Table` object itself is created or dropped,
+ rather than that of the :class:`.MetaData` object overall.
+ :param for_update: Indicates this :class:`.Sequence`, when associated
+ with a :class:`.Column`, should be invoked for UPDATE statements
+ on that column's table, rather than for INSERT statements, when
+ no value is otherwise present for that column in the statement.
+
+ """
super(Sequence, self).__init__(for_update=for_update)
self.name = name
self.start = start
self.quote = quote
self.schema = schema
self.metadata = metadata
+ if metadata:
+ self._set_metadata(metadata)
@util.memoized_property
def is_callable(self):
column._on_table_attach(self._set_table)
def _set_table(self, column, table):
- self.metadata = table.metadata
+ self._set_metadata(table.metadata)
+
+ def _set_metadata(self, metadata):
+ self.metadata = metadata
+ self.metadata._sequences.add(self)
@property
def bind(self):
"""
self.tables = util.immutabledict()
self._schemas = set()
+ self._sequences = set()
self.bind = bind
self.metadata = self
if reflect:
self.target = target
@expression._generative
- def execute_if(self, dialect=None, callable_=None):
+ def execute_if(self, dialect=None, callable_=None, state=None):
"""Return a callable that will execute this
DDLElement conditionally.
Optional keyword argument - a list of Table objects which are to
be created/ dropped within a MetaData.create_all() or drop_all()
method call.
-
+
+ :state:
+        Optional keyword argument - will be the ``state`` value
+        originally passed to :meth:`.execute_if`.
+
+ :checkfirst:
+ Keyword argument, will be True if the 'checkfirst' flag was
+ set during the call to ``create()``, ``create_all()``,
+ ``drop()``, ``drop_all()``.
+
If the callable returns a true value, the DDL statement will be
executed.
+        :param state: any value which will be passed to the ``callable_``
+          as the ``state`` keyword argument.
+
See also:
:class:`.DDLEvents`
"""
self.dialect = dialect
self.callable_ = callable_
+ self.state = state
def _should_execute(self, target, bind, **kw):
if self.on is not None and \
if bind.engine.name not in self.dialect:
return False
if self.callable_ is not None and \
- not self.callable_(self, target, bind, **kw):
+ not self.callable_(self, target, bind, state=self.state, **kw):
return False
return True
def test_checksfor_sequence(self):
meta1 = MetaData(testing.db)
+ seq = Sequence('fooseq')
t = Table('mytable', meta1, Column('col1', Integer,
- Sequence('fooseq')))
+ seq))
+ seq.drop()
try:
testing.db.execute('CREATE SEQUENCE fooseq')
t.create(checkfirst=True)
nonai.insert().execute(id=1, data='row 1')
+class SequenceDDLTest(testing.TestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
-class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
-
- @classmethod
- @testing.requires.sequences
- def setup_class(cls):
- global cartitems, sometable, metadata
- metadata = MetaData(testing.db)
- cartitems = Table("cartitems", metadata,
- Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
- Column("description", String(40)),
- Column("createdate", sa.DateTime())
- )
- sometable = Table( 'Manager', metadata,
- Column('obj_id', Integer, Sequence('obj_id_seq'), ),
- Column('name', String(128)),
- Column('id', Integer, Sequence('Manager_id_seq', optional=True),
- primary_key=True),
- )
-
- metadata.create_all()
-
-
- def test_compile(self):
+ def test_create_drop_ddl(self):
self.assert_compile(
CreateSequence(Sequence('foo_seq')),
"CREATE SEQUENCE foo_seq",
- use_default_dialect=True,
)
self.assert_compile(
CreateSequence(Sequence('foo_seq', start=5)),
"CREATE SEQUENCE foo_seq START WITH 5",
- use_default_dialect=True,
)
self.assert_compile(
CreateSequence(Sequence('foo_seq', increment=2)),
"CREATE SEQUENCE foo_seq INCREMENT BY 2",
- use_default_dialect=True,
)
self.assert_compile(
CreateSequence(Sequence('foo_seq', increment=2, start=5)),
"CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 5",
- use_default_dialect=True,
)
self.assert_compile(
DropSequence(Sequence('foo_seq')),
"DROP SEQUENCE foo_seq",
- use_default_dialect=True,
)
+class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
+ __requires__ = ('sequences',)
@testing.fails_on('firebird', 'no FB support for start/increment')
- @testing.requires.sequences
def test_start_increment(self):
for seq in (
Sequence('foo_seq'),
finally:
seq.drop(testing.db)
- @testing.requires.sequences
- def test_seq_nonpk(self):
- """test sequences fire off as defaults on non-pk columns"""
-
- engine = engines.testing_engine(options={'implicit_returning':False})
- result = engine.execute(sometable.insert(), name="somename")
-
- assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
-
- result = engine.execute(sometable.insert(), name="someother")
- assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
-
- sometable.insert().execute(
- {'name':'name3'},
- {'name':'name4'})
- eq_(sometable.select().order_by(sometable.c.id).execute().fetchall(),
- [(1, "somename", 1),
- (2, "someother", 2),
- (3, "name3", 3),
- (4, "name4", 4)])
-
- @testing.requires.sequences
- def test_sequence(self):
- cartitems.insert().execute(description='hi')
- cartitems.insert().execute(description='there')
- r = cartitems.insert().execute(description='lala')
-
- assert r.inserted_primary_key and r.inserted_primary_key[0] is not None
- id_ = r.inserted_primary_key[0]
-
- eq_(1,
- sa.select([func.count(cartitems.c.cart_id)],
- sa.and_(cartitems.c.description == 'lala',
- cartitems.c.cart_id == id_)).scalar())
-
- cartitems.select().execute().fetchall()
-
@testing.fails_on('maxdb', 'FIXME: unknown')
- # maxdb db-api seems to double-execute NEXTVAL internally somewhere,
+ # maxdb db-api seems to double-execute NEXTVAL
+ # internally somewhere,
# throwing off the numbers for these tests...
- @testing.requires.sequences
+ @testing.provide_metadata
def test_implicit_sequence_exec(self):
- s = Sequence("my_sequence", metadata=MetaData(testing.db))
- s.create()
- try:
- x = s.execute()
- eq_(x, 1)
- finally:
- s.drop()
+ s = Sequence("my_sequence", metadata=metadata)
+ metadata.create_all()
+ x = s.execute()
+ eq_(x, 1)
+
+ def _has_sequence(self, name):
+ return testing.db.dialect.has_sequence(testing.db, name)
@testing.fails_on('maxdb', 'FIXME: unknown')
- @testing.requires.sequences
def teststandalone_explicit(self):
s = Sequence("my_sequence")
s.create(bind=testing.db)
finally:
s.drop(testing.db)
- @testing.requires.sequences
- def test_checkfirst(self):
+ def test_checkfirst_sequence(self):
s = Sequence("my_sequence")
s.create(testing.db, checkfirst=False)
+ assert self._has_sequence('my_sequence')
s.create(testing.db, checkfirst=True)
s.drop(testing.db, checkfirst=False)
+ assert not self._has_sequence('my_sequence')
s.drop(testing.db, checkfirst=True)
- @testing.fails_on('maxdb', 'FIXME: unknown')
- @testing.requires.sequences
- def teststandalone2(self):
- x = cartitems.c.cart_id.default.execute()
- self.assert_(1 <= x <= 4)
+ def test_checkfirst_metadata(self):
+ m = MetaData()
+ s = Sequence("my_sequence", metadata=m)
+ m.create_all(testing.db, checkfirst=False)
+ assert self._has_sequence('my_sequence')
+ m.create_all(testing.db, checkfirst=True)
+ m.drop_all(testing.db, checkfirst=False)
+ assert not self._has_sequence('my_sequence')
+ m.drop_all(testing.db, checkfirst=True)
+
+ def test_checkfirst_table(self):
+ m = MetaData()
+ s = Sequence("my_sequence")
+ t = Table('t', m, Column('c', Integer, s, primary_key=True))
+ t.create(testing.db, checkfirst=False)
+ assert self._has_sequence('my_sequence')
+ t.create(testing.db, checkfirst=True)
+ t.drop(testing.db, checkfirst=False)
+ assert not self._has_sequence('my_sequence')
+ t.drop(testing.db, checkfirst=True)
+
+ @testing.provide_metadata
+ def test_table_overrides_metadata_create(self):
+ s1 = Sequence("s1", metadata=metadata)
+ s2 = Sequence("s2", metadata=metadata)
+ s3 = Sequence("s3")
+ t = Table('t', metadata,
+ Column('c', Integer, s3, primary_key=True))
+ assert s3.metadata is metadata
+
+
+ t.create(testing.db)
+ s3.drop(testing.db)
+
+ # 't' is created, and 's3' won't be
+ # re-created since it's linked to 't'.
+ # 's1' and 's2' are, however.
+ metadata.create_all(testing.db)
+ assert self._has_sequence('s1')
+ assert self._has_sequence('s2')
+ assert not self._has_sequence('s3')
+
+ s2.drop(testing.db)
+ assert self._has_sequence('s1')
+ assert not self._has_sequence('s2')
+
+ metadata.drop_all(testing.db)
+ assert not self._has_sequence('s1')
+ assert not self._has_sequence('s2')
+
+
+class TableBoundSequenceTest(testing.TestBase):
+ __requires__ = ('sequences',)
+
+ @classmethod
+ def setup_class(cls):
+ global cartitems, sometable, metadata
+ metadata = MetaData(testing.db)
+ cartitems = Table("cartitems", metadata,
+ Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
+ Column("description", String(40)),
+ Column("createdate", sa.DateTime())
+ )
+ sometable = Table( 'Manager', metadata,
+ Column('obj_id', Integer, Sequence('obj_id_seq'), ),
+ Column('name', String(128)),
+ Column('id', Integer, Sequence('Manager_id_seq', optional=True),
+ primary_key=True),
+ )
+
+ metadata.create_all()
@classmethod
- @testing.requires.sequences
def teardown_class(cls):
metadata.drop_all()
+ def test_insert_via_seq(self):
+ cartitems.insert().execute(description='hi')
+ cartitems.insert().execute(description='there')
+ r = cartitems.insert().execute(description='lala')
+
+ assert r.inserted_primary_key and r.inserted_primary_key[0] is not None
+ id_ = r.inserted_primary_key[0]
+
+ eq_(1,
+ sa.select([func.count(cartitems.c.cart_id)],
+ sa.and_(cartitems.c.description == 'lala',
+ cartitems.c.cart_id == id_)).scalar())
+
+ cartitems.select().execute().fetchall()
+
+ def test_seq_nonpk(self):
+ """test sequences fire off as defaults on non-pk columns"""
+
+ engine = engines.testing_engine(options={'implicit_returning':False})
+ result = engine.execute(sometable.insert(), name="somename")
+
+ assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
+
+ result = engine.execute(sometable.insert(), name="someother")
+ assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
+
+ sometable.insert().execute(
+ {'name':'name3'},
+ {'name':'name4'})
+ eq_(sometable.select().order_by(sometable.c.id).execute().fetchall(),
+ [(1, "somename", 1),
+ (2, "someother", 2),
+ (3, "name3", 3),
+ (4, "name4", 4)])
+
class SpecialTypePKTest(testing.TestBase):
"""test process_result_value in conjunction with primary key columns.