author    Mike Bayer <mike_mp@zzzcomputing.com>
          Mon, 14 Feb 2011 00:32:00 +0000 (19:32 -0500)
committer Mike Bayer <mike_mp@zzzcomputing.com>
          Mon, 14 Feb 2011 00:32:00 +0000 (19:32 -0500)

- The Sequence() construct, when passed a MetaData()
  object via its 'metadata' argument, will be
  included in CREATE/DROP statements within
  metadata.create_all() and metadata.drop_all(),
  including "checkfirst" logic.  [ticket:2055]
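
A minimal usage sketch of the behavior described above (the engine URL and
sequence name are placeholders for illustration, not part of this commit)::

    from sqlalchemy import create_engine, MetaData, Sequence

    engine = create_engine('postgresql://scott:tiger@localhost/test')  # placeholder URL
    metadata = MetaData()

    # a Sequence associated directly with the MetaData, not attached to any Column
    order_id_seq = Sequence('order_id_seq', metadata=metadata)

    # CREATE SEQUENCE is now emitted here; checkfirst=True skips it if it already exists
    metadata.create_all(engine, checkfirst=True)

    # DROP SEQUENCE is likewise emitted here
    metadata.drop_all(engine, checkfirst=True)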

CHANGES
doc/build/core/schema.rst
lib/sqlalchemy/engine/ddl.py
lib/sqlalchemy/schema.py
test/dialect/test_postgresql.py
test/sql/test_defaults.py

diff --git a/CHANGES b/CHANGES
index b9d055e2eeb92bfee9a2485665143de96b983f19..7a2e961b2f586e37d57929dd37a8736c3d93f412 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -15,6 +15,13 @@ CHANGES
     from generating do-nothing events for all the methods 
     not overridden. [ticket:2052]
 
+- sql
+  - The Sequence() construct, when passed a MetaData() 
+    object via its 'metadata' argument, will be 
+    included in CREATE/DROP statements within
+    metadata.create_all() and metadata.drop_all(), 
+    including "checkfirst" logic.  [ticket:2055]
+
 0.7.0b1
 =======
 - Detailed descriptions of each change below are 
diff --git a/doc/build/core/schema.rst b/doc/build/core/schema.rst
index e9844504ae3176d2ecd0dc55e116964695fda0ac..3683d2cb6ff25d314493789d8d7c8af0aa87c7cd 100644 (file)
--- a/doc/build/core/schema.rst
+++ b/doc/build/core/schema.rst
@@ -801,8 +801,8 @@ executed standalone like a SQL expression, which has the effect of calling its
     seq = Sequence('some_sequence')
     nextid = connection.execute(seq)
 
-Default Geneation API Constructs
---------------------------------
+Default Generation API Constructs
+---------------------------------
 
 .. autoclass:: ColumnDefault
     :show-inheritance:
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index 6b5684f641189c0a325a75736cc34aaaa95a4266..dca183f9a4957292d2745ea8474931e5e79bb0d4 100644 (file)
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -22,33 +22,58 @@ class SchemaGenerator(DDLBase):
         self.preparer = dialect.identifier_preparer
         self.dialect = dialect
 
-    def _can_create(self, table):
+    def _can_create_table(self, table):
         self.dialect.validate_identifier(table.name)
         if table.schema:
             self.dialect.validate_identifier(table.schema)
-        return not self.checkfirst or not self.dialect.has_table(self.connection, table.name, schema=table.schema)
+        return not self.checkfirst or \
+                not self.dialect.has_table(self.connection, 
+                                    table.name, schema=table.schema)
+
+    def _can_create_sequence(self, sequence):
+        return self.dialect.supports_sequences and \
+            (
+                (not self.dialect.sequences_optional or
+                 not sequence.optional) and
+                 (
+                 not self.checkfirst or
+                 not self.dialect.has_sequence(
+                            self.connection, 
+                            sequence.name, 
+                            schema=sequence.schema)
+                 )
+            )
 
     def visit_metadata(self, metadata):
         if self.tables:
             tables = self.tables
         else:
             tables = metadata.tables.values()
-        collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
+        collection = [t for t in sql_util.sort_tables(tables) 
+                        if self._can_create_table(t)]
+        seq_coll = [s for s in metadata._sequences 
+                        if s.column is None and self._can_create_sequence(s)]
 
         metadata.dispatch.before_create(metadata, self.connection,
-                                    tables=collection)
+                                    tables=collection,
+                                    checkfirst=self.checkfirst)
+
+        for seq in seq_coll:
+            self.traverse_single(seq, create_ok=True)
 
         for table in collection:
             self.traverse_single(table, create_ok=True)
 
         metadata.dispatch.after_create(metadata, self.connection,
-                                    tables=collection)
+                                    tables=collection,
+                                    checkfirst=self.checkfirst)
 
     def visit_table(self, table, create_ok=False):
-        if not create_ok and not self._can_create(table):
+        if not create_ok and not self._can_create_table(table):
             return
 
-        table.dispatch.before_create(table, self.connection)
+        table.dispatch.before_create(table, self.connection,
+                                        checkfirst=self.checkfirst)
 
         for column in table.columns:
             if column.default is not None:
@@ -60,15 +85,13 @@ class SchemaGenerator(DDLBase):
             for index in table.indexes:
                 self.traverse_single(index)
 
-        table.dispatch.after_create(table, self.connection)
+        table.dispatch.after_create(table, self.connection,
+                                        checkfirst=self.checkfirst)
 
-    def visit_sequence(self, sequence):
-        if self.dialect.supports_sequences:
-            if ((not self.dialect.sequences_optional or
-                 not sequence.optional) and
-                (not self.checkfirst or
-                 not self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
-                self.connection.execute(schema.CreateSequence(sequence))
+    def visit_sequence(self, sequence, create_ok=False):
+        if not create_ok and not self._can_create_sequence(sequence):
+            return 
+        self.connection.execute(schema.CreateSequence(sequence))
 
     def visit_index(self, index):
         self.connection.execute(schema.CreateIndex(index))
@@ -87,31 +110,52 @@ class SchemaDropper(DDLBase):
             tables = self.tables
         else:
             tables = metadata.tables.values()
-        collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
+        collection = [t for t in reversed(sql_util.sort_tables(tables)) 
+                                if self._can_drop_table(t)]
+        seq_coll = [s for s in metadata._sequences 
+                                if s.column is None and self._can_drop_sequence(s)]
 
         metadata.dispatch.before_drop(metadata, self.connection,
-                                            tables=collection)
+                                            tables=collection,
+                                            checkfirst=self.checkfirst)
 
         for table in collection:
             self.traverse_single(table, drop_ok=True)
 
+        for seq in seq_coll:
+            self.traverse_single(seq, drop_ok=True)
+
         metadata.dispatch.after_drop(metadata, self.connection,
-                                            tables=collection)
+                                            tables=collection,
+                                            checkfirst=self.checkfirst)
 
-    def _can_drop(self, table):
+    def _can_drop_table(self, table):
         self.dialect.validate_identifier(table.name)
         if table.schema:
             self.dialect.validate_identifier(table.schema)
-        return not self.checkfirst or self.dialect.has_table(self.connection, table.name, schema=table.schema)
+        return not self.checkfirst or self.dialect.has_table(self.connection, 
+                                            table.name, schema=table.schema)
+
+    def _can_drop_sequence(self, sequence):
+        return self.dialect.supports_sequences and \
+            ((not self.dialect.sequences_optional or
+                 not sequence.optional) and
+                (not self.checkfirst or
+                 self.dialect.has_sequence(
+                                self.connection, 
+                                sequence.name, 
+                                schema=sequence.schema))
+            )
 
     def visit_index(self, index):
         self.connection.execute(schema.DropIndex(index))
 
     def visit_table(self, table, drop_ok=False):
-        if not drop_ok and not self._can_drop(table):
+        if not drop_ok and not self._can_drop_table(table):
             return
 
-        table.dispatch.before_drop(table, self.connection)
+        table.dispatch.before_drop(table, self.connection,
+                                    checkfirst=self.checkfirst)
 
         for column in table.columns:
             if column.default is not None:
@@ -119,12 +163,10 @@ class SchemaDropper(DDLBase):
 
         self.connection.execute(schema.DropTable(table))
 
-        table.dispatch.after_drop(table, self.connection)
+        table.dispatch.after_drop(table, self.connection,
+                                        checkfirst=self.checkfirst)
 
-    def visit_sequence(self, sequence):
-        if self.dialect.supports_sequences:
-            if ((not self.dialect.sequences_optional or
-                 not sequence.optional) and
-                (not self.checkfirst or
-                 self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
-                self.connection.execute(schema.DropSequence(sequence))
+    def visit_sequence(self, sequence, drop_ok=False):
+        if not drop_ok and not self._can_drop_sequence(sequence):
+            return
+        self.connection.execute(schema.DropSequence(sequence))
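
The ddl.py hunks above also forward a ``checkfirst`` keyword (along with
``tables``) to the before_create/after_create and before_drop/after_drop
dispatch calls.  A minimal sketch of a listener consuming it, assuming the 0.7
``event`` API and a placeholder SQLite engine::

    from sqlalchemy import event, create_engine, MetaData, Table, Column, Integer

    engine = create_engine('sqlite://')      # placeholder engine for illustration
    metadata = MetaData()
    Table('widgets', metadata, Column('id', Integer, primary_key=True))

    seen = []

    def before_create(target, connection, **kw):
        # 'tables' and 'checkfirst' are forwarded by SchemaGenerator.visit_metadata
        seen.append((list(kw['tables']), kw['checkfirst']))

    event.listen(metadata, 'before_create', before_create)

    metadata.create_all(engine, checkfirst=True)
    assert seen and seen[0][1]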
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 3a4bd90ce9c2d65aa3ad6581c40d38fafba036c3..40d7de94500ef01f195f728604fd6537f2254161 100644 (file)
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1219,6 +1219,7 @@ class DefaultGenerator(SchemaItem):
 
     is_sequence = False
     is_server_default = False
+    column = None
 
     def __init__(self, for_update=False):
         self.for_update = for_update
@@ -1336,14 +1337,84 @@ class ColumnDefault(DefaultGenerator):
         return "ColumnDefault(%r)" % self.arg
 
 class Sequence(DefaultGenerator):
-    """Represents a named database sequence."""
+    """Represents a named database sequence.
+    
+    The :class:`.Sequence` object represents the name and configurational
+    parameters of a database sequence.   It also represents
+    a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
+    or :class:`.Connection`, rendering the appropriate "next value" function
+    for the target database and returning a result.
+    
+    The :class:`.Sequence` is typically associated with a primary key column::
+    
+        some_table = Table('some_table', metadata,
+            Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
+        )
+        
+    When CREATE TABLE is emitted for the above :class:`.Table`, if the
+    target platform supports sequences, a CREATE SEQUENCE statement will
+    be emitted as well.   For platforms that don't support sequences,
+    the :class:`.Sequence` construct is ignored.
+    
+    See also: :class:`.CreateSequence` :class:`.DropSequence`
+    
+    """
 
     __visit_name__ = 'sequence'
 
     is_sequence = True
 
     def __init__(self, name, start=None, increment=None, schema=None,
-                 optional=False, quote=None, metadata=None, for_update=False):
+                 optional=False, quote=None, metadata=None, 
+                 for_update=False):
+        """Construct a :class:`.Sequence` object.
+        
+        :param name: The name of the sequence.
+        :param start: the starting index of the sequence.  This value is
+         used when the CREATE SEQUENCE command is emitted to the database
+         as the value of the "START WITH" clause.   If ``None``, the 
+         clause is omitted, which on most platforms indicates a starting
+         value of 1.
+        :param increment: the increment value of the sequence.  This 
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "INCREMENT BY" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates an
+         increment of 1.
+        :param schema: Optional schema name for the sequence, if located
+         in a schema other than the default.
+        :param optional: boolean value, when ``True``, indicates that this
+         :class:`.Sequence` object only needs to be explicitly generated
+         on backends that don't provide another way to generate primary
+         key identifiers.  Currently, it essentially means, "don't create
+         this sequence on the Postgresql backend, where the SERIAL keyword
+         creates a sequence for us automatically".
+        :param quote: boolean value, when ``True`` or ``False``, explicitly
+         forces quoting of the sequence name on or off.  When left at its
+         default of ``None``, normal quoting rules based on casing and reserved
+         words take place.
+        :param metadata: optional :class:`.MetaData` object which will be 
+         associated with this :class:`.Sequence`.  A :class:`.Sequence`
+         that is associated with a :class:`.MetaData` gains access to the 
+         ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create`
+         and :meth:`.Sequence.drop` methods will make use of that engine
+         automatically.   Additionally, the appropriate CREATE SEQUENCE/
+         DROP SEQUENCE DDL commands will be emitted corresponding to this
+         :class:`.Sequence` when :meth:`.MetaData.create_all` and 
+         :meth:`.MetaData.drop_all` are invoked (new in 0.7).
+         
+         Note that when a :class:`.Sequence` is applied to a :class:`.Column`, 
+         the :class:`.Sequence` is automatically associated with the 
+         :class:`.MetaData` object of that column's parent :class:`.Table`, 
+         when that association is made.   The :class:`.Sequence` will then
+         be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding 
+         to when the :class:`.Table` object itself is created or dropped,
+         rather than that of the :class:`.MetaData` object overall.
+        :param for_update: Indicates this :class:`.Sequence`, when associated
+         with a :class:`.Column`, should be invoked for UPDATE statements
+         on that column's table, rather than for INSERT statements, when
+         no value is otherwise present for that column in the statement.
+         
+        """
         super(Sequence, self).__init__(for_update=for_update)
         self.name = name
         self.start = start
@@ -1352,6 +1423,8 @@ class Sequence(DefaultGenerator):
         self.quote = quote
         self.schema = schema
         self.metadata = metadata
+        if metadata:
+            self._set_metadata(metadata)
 
     @util.memoized_property
     def is_callable(self):
@@ -1372,7 +1445,11 @@ class Sequence(DefaultGenerator):
         column._on_table_attach(self._set_table)
 
     def _set_table(self, column, table):
-        self.metadata = table.metadata
+        self._set_metadata(table.metadata)
+
+    def _set_metadata(self, metadata):
+        self.metadata = metadata
+        self.metadata._sequences.add(self)
 
     @property
     def bind(self):
@@ -1939,6 +2016,7 @@ class MetaData(SchemaItem):
         """
         self.tables = util.immutabledict()
         self._schemas = set()
+        self._sequences = set()
         self.bind = bind
         self.metadata = self
         if reflect:
@@ -2335,7 +2413,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
         self.target = target
 
     @expression._generative
-    def execute_if(self, dialect=None, callable_=None):
+    def execute_if(self, dialect=None, callable_=None, state=None):
         """Return a callable that will execute this 
         DDLElement conditionally.
 
@@ -2375,10 +2453,22 @@ class DDLElement(expression.Executable, expression.ClauseElement):
               Optional keyword argument - a list of Table objects which are to
               be created/ dropped within a MetaData.create_all() or drop_all()
               method call.
-
+              
+            :state:
+              Optional keyword argument - will be the ``state`` argument
+              passed to this function.
+
+            :checkfirst:
+             Keyword argument, will be True if the 'checkfirst' flag was
+             set during the call to ``create()``, ``create_all()``, 
+             ``drop()``, ``drop_all()``.
+             
           If the callable returns a true value, the DDL statement will be
           executed.
 
+        :param state: any value which will be passed to the callable_ 
+          as the ``state`` keyword argument.
+          
         See also:
 
             :class:`.DDLEvents`
@@ -2388,6 +2478,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
         """
         self.dialect = dialect
         self.callable_ = callable_
+        self.state = state
 
     def _should_execute(self, target, bind, **kw):
         if self.on is not None and \
@@ -2401,7 +2492,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
             if bind.engine.name not in self.dialect:
                 return False
         if self.callable_ is not None and \
-            not self.callable_(self, target, bind, **kw):
+            not self.callable_(self, target, bind, state=self.state, **kw):
             return False
 
         return True
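
The ``execute_if()`` extension above adds a ``state`` argument that is handed
back to the callable, alongside the new ``checkfirst`` keyword.  A minimal
sketch under those assumptions (placeholder SQLite engine, illustrative index
name and state value)::

    from sqlalchemy import event, create_engine, MetaData, Table, Column, Integer
    from sqlalchemy.schema import DDL

    engine = create_engine('sqlite://')      # placeholder engine for illustration
    metadata = MetaData()
    users = Table('users', metadata, Column('id', Integer, primary_key=True))

    def should_run(ddl, target, bind, state=None, **kw):
        # 'state' is whatever was given to execute_if(); 'checkfirst' arrives
        # in kw when the DDL fires within create_all()/drop_all()
        return state == 'want_index'

    event.listen(
        users, 'after_create',
        DDL("CREATE INDEX ix_users_id ON users (id)").execute_if(
            callable_=should_run, state='want_index'))

    metadata.create_all(engine)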
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 5d67e19216a8e2eb9cd37d1f5deeb36e2d1f45aa..1c7d0b16a43e8b6e66f41c2d240b055fd2115ea8 100644 (file)
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -1285,8 +1285,10 @@ class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
 
     def test_checksfor_sequence(self):
         meta1 = MetaData(testing.db)
+        seq = Sequence('fooseq')
         t = Table('mytable', meta1, Column('col1', Integer,
-                  Sequence('fooseq')))
+                  seq))
+        seq.drop()
         try:
             testing.db.execute('CREATE SEQUENCE fooseq')
             t.create(checkfirst=True)
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index cbecdbe1813d057b6cfc6de03835208411f654b5..617a29edad7210f02bb5e3e4ed456a15f398c853 100644 (file)
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -536,63 +536,39 @@ class AutoIncrementTest(_base.TablesTest):
 
         nonai.insert().execute(id=1, data='row 1')
 
+class SequenceDDLTest(testing.TestBase, testing.AssertsCompiledSQL):
+    __dialect__ = 'default'
 
-class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
-
-    @classmethod
-    @testing.requires.sequences
-    def setup_class(cls):
-        global cartitems, sometable, metadata
-        metadata = MetaData(testing.db)
-        cartitems = Table("cartitems", metadata,
-            Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
-            Column("description", String(40)),
-            Column("createdate", sa.DateTime())
-        )
-        sometable = Table( 'Manager', metadata,
-               Column('obj_id', Integer, Sequence('obj_id_seq'), ),
-               Column('name', String(128)),
-               Column('id', Integer, Sequence('Manager_id_seq', optional=True),
-                      primary_key=True),
-           )
-
-        metadata.create_all()
-
-
-    def test_compile(self):
+    def test_create_drop_ddl(self):
         self.assert_compile(
             CreateSequence(Sequence('foo_seq')),
             "CREATE SEQUENCE foo_seq",
-            use_default_dialect=True,
         )
 
         self.assert_compile(
             CreateSequence(Sequence('foo_seq', start=5)),
             "CREATE SEQUENCE foo_seq START WITH 5",
-            use_default_dialect=True,
         )
 
         self.assert_compile(
             CreateSequence(Sequence('foo_seq', increment=2)),
             "CREATE SEQUENCE foo_seq INCREMENT BY 2",
-            use_default_dialect=True,
         )
 
         self.assert_compile(
             CreateSequence(Sequence('foo_seq', increment=2, start=5)),
             "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 5",
-            use_default_dialect=True,
         )
 
         self.assert_compile(
             DropSequence(Sequence('foo_seq')),
             "DROP SEQUENCE foo_seq",
-            use_default_dialect=True,
         )
 
+class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
+    __requires__ = ('sequences',)
 
     @testing.fails_on('firebird', 'no FB support for start/increment')
-    @testing.requires.sequences
     def test_start_increment(self):
         for seq in (
                 Sequence('foo_seq'), 
@@ -610,58 +586,21 @@ class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
             finally:
                 seq.drop(testing.db)
 
-    @testing.requires.sequences
-    def test_seq_nonpk(self):
-        """test sequences fire off as defaults on non-pk columns"""
-
-        engine = engines.testing_engine(options={'implicit_returning':False})
-        result = engine.execute(sometable.insert(), name="somename")
-
-        assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
-
-        result = engine.execute(sometable.insert(), name="someother")
-        assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
-
-        sometable.insert().execute(
-            {'name':'name3'},
-            {'name':'name4'})
-        eq_(sometable.select().order_by(sometable.c.id).execute().fetchall(),
-            [(1, "somename", 1),
-             (2, "someother", 2),
-             (3, "name3", 3),
-             (4, "name4", 4)])
-
-    @testing.requires.sequences
-    def test_sequence(self):
-        cartitems.insert().execute(description='hi')
-        cartitems.insert().execute(description='there')
-        r = cartitems.insert().execute(description='lala')
-
-        assert r.inserted_primary_key and r.inserted_primary_key[0] is not None
-        id_ = r.inserted_primary_key[0]
-
-        eq_(1,
-            sa.select([func.count(cartitems.c.cart_id)],
-                      sa.and_(cartitems.c.description == 'lala',
-                              cartitems.c.cart_id == id_)).scalar())
-
-        cartitems.select().execute().fetchall()
-
     @testing.fails_on('maxdb', 'FIXME: unknown')
-    # maxdb db-api seems to double-execute NEXTVAL internally somewhere,
+    # maxdb db-api seems to double-execute NEXTVAL 
+    # internally somewhere,
     # throwing off the numbers for these tests...
-    @testing.requires.sequences
+    @testing.provide_metadata
     def test_implicit_sequence_exec(self):
-        s = Sequence("my_sequence", metadata=MetaData(testing.db))
-        s.create()
-        try:
-            x = s.execute()
-            eq_(x, 1)
-        finally:
-            s.drop()
+        s = Sequence("my_sequence", metadata=metadata)
+        metadata.create_all()
+        x = s.execute()
+        eq_(x, 1)
+
+    def _has_sequence(self, name):
+        return testing.db.dialect.has_sequence(testing.db, name)
 
     @testing.fails_on('maxdb', 'FIXME: unknown')
-    @testing.requires.sequences
     def teststandalone_explicit(self):
         s = Sequence("my_sequence")
         s.create(bind=testing.db)
@@ -671,25 +610,126 @@ class SequenceTest(testing.TestBase, testing.AssertsCompiledSQL):
         finally:
             s.drop(testing.db)
 
-    @testing.requires.sequences
-    def test_checkfirst(self):
+    def test_checkfirst_sequence(self):
         s = Sequence("my_sequence")
         s.create(testing.db, checkfirst=False)
+        assert self._has_sequence('my_sequence')
         s.create(testing.db, checkfirst=True)
         s.drop(testing.db, checkfirst=False)
+        assert not self._has_sequence('my_sequence')
         s.drop(testing.db, checkfirst=True)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
-    @testing.requires.sequences
-    def teststandalone2(self):
-        x = cartitems.c.cart_id.default.execute()
-        self.assert_(1 <= x <= 4)
+    def test_checkfirst_metadata(self):
+        m = MetaData()
+        s = Sequence("my_sequence", metadata=m)
+        m.create_all(testing.db, checkfirst=False)
+        assert self._has_sequence('my_sequence')
+        m.create_all(testing.db, checkfirst=True)
+        m.drop_all(testing.db, checkfirst=False)
+        assert not self._has_sequence('my_sequence')
+        m.drop_all(testing.db, checkfirst=True)
+
+    def test_checkfirst_table(self):
+        m = MetaData()
+        s = Sequence("my_sequence")
+        t = Table('t', m, Column('c', Integer, s, primary_key=True))
+        t.create(testing.db, checkfirst=False)
+        assert self._has_sequence('my_sequence')
+        t.create(testing.db, checkfirst=True)
+        t.drop(testing.db, checkfirst=False)
+        assert not self._has_sequence('my_sequence')
+        t.drop(testing.db, checkfirst=True)
+
+    @testing.provide_metadata
+    def test_table_overrides_metadata_create(self):
+        s1 = Sequence("s1", metadata=metadata)
+        s2 = Sequence("s2", metadata=metadata)
+        s3 = Sequence("s3")
+        t = Table('t', metadata, 
+                    Column('c', Integer, s3, primary_key=True))
+        assert s3.metadata is metadata
+
+
+        t.create(testing.db)
+        s3.drop(testing.db)
+
+        # 't' is created, and 's3' won't be
+        # re-created since it's linked to 't'.
+        # 's1' and 's2' are, however.
+        metadata.create_all(testing.db)
+        assert self._has_sequence('s1')
+        assert self._has_sequence('s2')
+        assert not self._has_sequence('s3')
+
+        s2.drop(testing.db)
+        assert self._has_sequence('s1')
+        assert not self._has_sequence('s2')
+
+        metadata.drop_all(testing.db)
+        assert not self._has_sequence('s1')
+        assert not self._has_sequence('s2')
+
+
+class TableBoundSequenceTest(testing.TestBase):
+    __requires__ = ('sequences',)
+
+    @classmethod
+    def setup_class(cls):
+        global cartitems, sometable, metadata
+        metadata = MetaData(testing.db)
+        cartitems = Table("cartitems", metadata,
+            Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
+            Column("description", String(40)),
+            Column("createdate", sa.DateTime())
+        )
+        sometable = Table( 'Manager', metadata,
+               Column('obj_id', Integer, Sequence('obj_id_seq'), ),
+               Column('name', String(128)),
+               Column('id', Integer, Sequence('Manager_id_seq', optional=True),
+                      primary_key=True),
+           )
+
+        metadata.create_all()
 
     @classmethod
-    @testing.requires.sequences
     def teardown_class(cls):
         metadata.drop_all()
 
+    def test_insert_via_seq(self):
+        cartitems.insert().execute(description='hi')
+        cartitems.insert().execute(description='there')
+        r = cartitems.insert().execute(description='lala')
+
+        assert r.inserted_primary_key and r.inserted_primary_key[0] is not None
+        id_ = r.inserted_primary_key[0]
+
+        eq_(1,
+            sa.select([func.count(cartitems.c.cart_id)],
+                      sa.and_(cartitems.c.description == 'lala',
+                              cartitems.c.cart_id == id_)).scalar())
+
+        cartitems.select().execute().fetchall()
+
+    def test_seq_nonpk(self):
+        """test sequences fire off as defaults on non-pk columns"""
+
+        engine = engines.testing_engine(options={'implicit_returning':False})
+        result = engine.execute(sometable.insert(), name="somename")
+
+        assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
+
+        result = engine.execute(sometable.insert(), name="someother")
+        assert set(result.postfetch_cols()) == set([sometable.c.obj_id])
+
+        sometable.insert().execute(
+            {'name':'name3'},
+            {'name':'name4'})
+        eq_(sometable.select().order_by(sometable.c.id).execute().fetchall(),
+            [(1, "somename", 1),
+             (2, "someother", 2),
+             (3, "name3", 3),
+             (4, "name4", 4)])
+
 
 class SpecialTypePKTest(testing.TestBase):
     """test process_result_value in conjunction with primary key columns.