refactor test suites for postgresql, mssql, mysql into packages.
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 29 Jun 2013 02:30:11 +0000 (22:30 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 29 Jun 2013 02:40:10 +0000 (22:40 -0400)
23 files changed:
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/compat.py
test/dialect/mssql/__init__.py [new file with mode: 0644]
test/dialect/mssql/test_compiler.py [new file with mode: 0644]
test/dialect/mssql/test_engine.py [new file with mode: 0644]
test/dialect/mssql/test_query.py [new file with mode: 0644]
test/dialect/mssql/test_reflection.py [new file with mode: 0644]
test/dialect/mssql/test_types.py [new file with mode: 0644]
test/dialect/mysql/__init__.py [new file with mode: 0644]
test/dialect/mysql/test_compiler.py [new file with mode: 0644]
test/dialect/mysql/test_dialect.py [new file with mode: 0644]
test/dialect/mysql/test_query.py [new file with mode: 0644]
test/dialect/mysql/test_reflection.py [new file with mode: 0644]
test/dialect/mysql/test_types.py [new file with mode: 0644]
test/dialect/postgresql/__init__.py [new file with mode: 0644]
test/dialect/postgresql/test_compiler.py [new file with mode: 0644]
test/dialect/postgresql/test_dialect.py [new file with mode: 0644]
test/dialect/postgresql/test_query.py [new file with mode: 0644]
test/dialect/postgresql/test_reflection.py [new file with mode: 0644]
test/dialect/postgresql/test_types.py [new file with mode: 0644]
test/dialect/test_mssql.py [deleted file]
test/dialect/test_mysql.py [deleted file]
test/dialect/test_postgresql.py [deleted file]

index efeca6f1bc7c4e7183c2de4c79a19b74107265c7..4ea778a7704838b04aa59ee66ea22efc5a64750c 100644 (file)
@@ -7,7 +7,7 @@
 from .compat import callable, cmp, reduce,  \
     threading, py3k, py2k, py3k_warning, jython, pypy, cpython, win32, set_types, \
     pickle, dottedgetter, parse_qsl, namedtuple, next, WeakSet, reraise, \
-    raise_from_cause
+    raise_from_cause, u, b, ue, string_types, text_type, int_types
 
 from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
     Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
index 6905d6347c2f71f7c709064e024e3f076edbf347..1ea4be917a62b82f011348809d17e5bf36492e78 100644 (file)
@@ -133,11 +133,38 @@ else:
             return obj
         return g
 
-# Adapted from six.py
 if py3k:
+
+    string_types = str,
+    binary_type = bytes
+    text_type = str
+    int_types = int,
+
+    def u(s):
+        return s
+
+    def ue(s):
+        return s
+
     def b(s):
         return s.encode("latin-1")
+
 else:
+    string_types = basestring,
+    binary_type = str
+    text_type = unicode
+    int_types = int, long
+
+    def u(s):
+        # this differs from what six does, which doesn't support non-ASCII
+        # strings - we only use u() with
+        # literal source strings, and all our source files with non-ascii
+        # in them (all are tests) are utf-8 encoded.
+        return unicode(s, "utf-8")
+
+    def ue(s):
+        return unicode(s, "unicode_escape")
+
     def b(s):
         return s
 
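
The helpers added above give the test suite one spelling for Python 2/3 string handling. A minimal usage sketch, following the definitions in this hunk (the sample strings are illustrative, not from the commit):

    from sqlalchemy.util import u, ue, b, string_types, text_type

    # u() assumes utf-8 source files: identity on py3k, utf-8 decode
    # of the literal's bytes on py2k
    s = u("café")
    assert isinstance(s, text_type)

    # ue() decodes escape sequences on py2k, identity on py3k
    e = ue("caf\xe9")

    # b() returns bytes: latin-1 encode on py3k, plain str on py2k
    raw = b("abc")

    assert isinstance("plain", string_types)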
diff --git a/test/dialect/mssql/__init__.py b/test/dialect/mssql/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
new file mode 100644 (file)
index 0000000..87037c6
--- /dev/null
@@ -0,0 +1,579 @@
+# -*- encoding: utf-8
+from sqlalchemy.testing import eq_
+from sqlalchemy import *
+from sqlalchemy import schema
+from sqlalchemy.sql import table, column
+from sqlalchemy.databases import mssql
+from sqlalchemy.dialects.mssql import mxodbc
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy import sql
+
+
+class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
+    __dialect__ = mssql.dialect()
+
+    def test_true_false(self):
+        self.assert_compile(
+            sql.false(), "0"
+        )
+        self.assert_compile(
+            sql.true(),
+            "1"
+        )
+
+    def test_select(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.select(),
+                            'SELECT sometable.somecolumn FROM sometable')
+
+    def test_select_with_nolock(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.select().with_hint(t, 'WITH (NOLOCK)'),
+                            'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')
+
+    def test_join_with_hint(self):
+        t1 = table('t1',
+            column('a', Integer),
+            column('b', String),
+            column('c', String),
+        )
+        t2 = table('t2',
+            column("a", Integer),
+            column("b", Integer),
+            column("c", Integer),
+        )
+        join = t1.join(t2, t1.c.a==t2.c.a).\
+                        select().with_hint(t1, 'WITH (NOLOCK)')
+        self.assert_compile(
+            join,
+            'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c '
+            'FROM t1 WITH (NOLOCK) JOIN t2 ON t1.a = t2.a'
+        )
+
+    def test_insert(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.insert(),
+                            'INSERT INTO sometable (somecolumn) VALUES '
+                            '(:somecolumn)')
+
+    def test_update(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.update(t.c.somecolumn == 7),
+                            'UPDATE sometable SET somecolumn=:somecolum'
+                            'n WHERE sometable.somecolumn = '
+                            ':somecolumn_1', dict(somecolumn=10))
+
+    def test_insert_hint(self):
+        t = table('sometable', column('somecolumn'))
+        for targ in (None, t):
+            for darg in ("*", "mssql"):
+                self.assert_compile(
+                    t.insert().
+                        values(somecolumn="x").
+                        with_hint("WITH (PAGLOCK)",
+                            selectable=targ,
+                            dialect_name=darg),
+                    "INSERT INTO sometable WITH (PAGLOCK) "
+                    "(somecolumn) VALUES (:somecolumn)"
+                )
+
+    def test_update_hint(self):
+        t = table('sometable', column('somecolumn'))
+        for targ in (None, t):
+            for darg in ("*", "mssql"):
+                self.assert_compile(
+                    t.update().where(t.c.somecolumn=="q").
+                            values(somecolumn="x").
+                            with_hint("WITH (PAGLOCK)",
+                                    selectable=targ,
+                                    dialect_name=darg),
+                    "UPDATE sometable WITH (PAGLOCK) "
+                    "SET somecolumn=:somecolumn "
+                    "WHERE sometable.somecolumn = :somecolumn_1"
+                )
+
+    def test_update_exclude_hint(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(
+            t.update().where(t.c.somecolumn=="q").
+                values(somecolumn="x").
+                with_hint("XYZ", "mysql"),
+            "UPDATE sometable SET somecolumn=:somecolumn "
+            "WHERE sometable.somecolumn = :somecolumn_1"
+        )
+
+    def test_delete_hint(self):
+        t = table('sometable', column('somecolumn'))
+        for targ in (None, t):
+            for darg in ("*", "mssql"):
+                self.assert_compile(
+                    t.delete().where(t.c.somecolumn=="q").
+                            with_hint("WITH (PAGLOCK)",
+                                    selectable=targ,
+                                    dialect_name=darg),
+                    "DELETE FROM sometable WITH (PAGLOCK) "
+                    "WHERE sometable.somecolumn = :somecolumn_1"
+                )
+
+    def test_delete_exclude_hint(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(
+            t.delete().\
+                where(t.c.somecolumn=="q").\
+                with_hint("XYZ", dialect_name="mysql"),
+            "DELETE FROM sometable WHERE "
+            "sometable.somecolumn = :somecolumn_1"
+        )
+
+    def test_update_from_hint(self):
+        t = table('sometable', column('somecolumn'))
+        t2 = table('othertable', column('somecolumn'))
+        for darg in ("*", "mssql"):
+            self.assert_compile(
+                t.update().where(t.c.somecolumn==t2.c.somecolumn).
+                        values(somecolumn="x").
+                        with_hint("WITH (PAGLOCK)",
+                                selectable=t2,
+                                dialect_name=darg),
+                "UPDATE sometable SET somecolumn=:somecolumn "
+                "FROM sometable, othertable WITH (PAGLOCK) "
+                "WHERE sometable.somecolumn = othertable.somecolumn"
+            )
+
+    # TODO: not supported yet.
+    #def test_delete_from_hint(self):
+    #    t = table('sometable', column('somecolumn'))
+    #    t2 = table('othertable', column('somecolumn'))
+    #    for darg in ("*", "mssql"):
+    #        self.assert_compile(
+    #            t.delete().where(t.c.somecolumn==t2.c.somecolumn).
+    #                    with_hint("WITH (PAGLOCK)",
+    #                            selectable=t2,
+    #                            dialect_name=darg),
+    #            ""
+    #        )
+
+    def test_strict_binds(self):
+        """test the 'strict' compiler binds."""
+
+        from sqlalchemy.dialects.mssql.base import MSSQLStrictCompiler
+        mxodbc_dialect = mxodbc.dialect()
+        mxodbc_dialect.statement_compiler = MSSQLStrictCompiler
+
+        t = table('sometable', column('foo'))
+
+        for expr, compile in [
+            (
+                select([literal("x"), literal("y")]),
+                "SELECT 'x' AS anon_1, 'y' AS anon_2",
+            ),
+            (
+                select([t]).where(t.c.foo.in_(['x', 'y', 'z'])),
+                "SELECT sometable.foo FROM sometable WHERE sometable.foo "
+                "IN ('x', 'y', 'z')",
+            ),
+            (
+                    t.c.foo.in_([None]),
+                    "sometable.foo IN (NULL)"
+            )
+        ]:
+            self.assert_compile(expr, compile, dialect=mxodbc_dialect)
+
+    def test_in_with_subqueries(self):
+        """Test removal of legacy behavior that converted "x==subquery"
+        to use IN.
+
+        """
+
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.select().where(t.c.somecolumn
+                            == t.select()),
+                            'SELECT sometable.somecolumn FROM '
+                            'sometable WHERE sometable.somecolumn = '
+                            '(SELECT sometable.somecolumn FROM '
+                            'sometable)')
+        self.assert_compile(t.select().where(t.c.somecolumn
+                            != t.select()),
+                            'SELECT sometable.somecolumn FROM '
+                            'sometable WHERE sometable.somecolumn != '
+                            '(SELECT sometable.somecolumn FROM '
+                            'sometable)')
+
+    def test_count(self):
+        t = table('sometable', column('somecolumn'))
+        self.assert_compile(t.count(),
+                            'SELECT count(sometable.somecolumn) AS '
+                            'tbl_row_count FROM sometable')
+
+    def test_noorderby_insubquery(self):
+        """test that the ms-sql dialect removes ORDER BY clauses from
+        subqueries"""
+
+        table1 = table('mytable',
+            column('myid', Integer),
+            column('name', String),
+            column('description', String),
+        )
+
+        q = select([table1.c.myid],
+                   order_by=[table1.c.myid]).alias('foo')
+        crit = q.c.myid == table1.c.myid
+        self.assert_compile(select(['*'], crit),
+                            "SELECT * FROM (SELECT mytable.myid AS "
+                            "myid FROM mytable) AS foo, mytable WHERE "
+                            "foo.myid = mytable.myid")
+
+
+
+    def test_delete_schema(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer,
+                    primary_key=True), schema='paj')
+        self.assert_compile(tbl.delete(tbl.c.id == 1),
+                            'DELETE FROM paj.test WHERE paj.test.id = '
+                            ':id_1')
+        s = select([tbl.c.id]).where(tbl.c.id == 1)
+        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
+                            'DELETE FROM paj.test WHERE paj.test.id IN '
+                            '(SELECT test_1.id FROM paj.test AS test_1 '
+                            'WHERE test_1.id = :id_1)')
+
+    def test_delete_schema_multipart(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer,
+                    primary_key=True), schema='banana.paj')
+        self.assert_compile(tbl.delete(tbl.c.id == 1),
+                            'DELETE FROM banana.paj.test WHERE '
+                            'banana.paj.test.id = :id_1')
+        s = select([tbl.c.id]).where(tbl.c.id == 1)
+        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
+                            'DELETE FROM banana.paj.test WHERE '
+                            'banana.paj.test.id IN (SELECT test_1.id '
+                            'FROM banana.paj.test AS test_1 WHERE '
+                            'test_1.id = :id_1)')
+
+    def test_delete_schema_multipart_needs_quoting(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer,
+                    primary_key=True), schema='banana split.paj')
+        self.assert_compile(tbl.delete(tbl.c.id == 1),
+                            'DELETE FROM [banana split].paj.test WHERE '
+                            '[banana split].paj.test.id = :id_1')
+        s = select([tbl.c.id]).where(tbl.c.id == 1)
+        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
+                            'DELETE FROM [banana split].paj.test WHERE '
+                            '[banana split].paj.test.id IN (SELECT '
+                            'test_1.id FROM [banana split].paj.test AS '
+                            'test_1 WHERE test_1.id = :id_1)')
+
+    def test_delete_schema_multipart_both_need_quoting(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer,
+                    primary_key=True),
+                    schema='banana split.paj with a space')
+        self.assert_compile(tbl.delete(tbl.c.id == 1),
+                            'DELETE FROM [banana split].[paj with a '
+                            'space].test WHERE [banana split].[paj '
+                            'with a space].test.id = :id_1')
+        s = select([tbl.c.id]).where(tbl.c.id == 1)
+        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
+                            'DELETE FROM [banana split].[paj with a '
+                            'space].test WHERE [banana split].[paj '
+                            'with a space].test.id IN (SELECT '
+                            'test_1.id FROM [banana split].[paj with a '
+                            'space].test AS test_1 WHERE test_1.id = '
+                            ':id_1)')
+
+    def test_union(self):
+        t1 = table('t1', column('col1'), column('col2'), column('col3'
+                   ), column('col4'))
+        t2 = table('t2', column('col1'), column('col2'), column('col3'
+                   ), column('col4'))
+        s1, s2 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4'
+                        )], t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
+            select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
+                   t2.c.col2.in_(['t2col2r2', 't2col2r3']))
+        u = union(s1, s2, order_by=['col3', 'col4'])
+        self.assert_compile(u,
+                            'SELECT t1.col3 AS col3, t1.col4 AS col4 '
+                            'FROM t1 WHERE t1.col2 IN (:col2_1, '
+                            ':col2_2) UNION SELECT t2.col3 AS col3, '
+                            't2.col4 AS col4 FROM t2 WHERE t2.col2 IN '
+                            '(:col2_3, :col2_4) ORDER BY col3, col4')
+        self.assert_compile(u.alias('bar').select(),
+                            'SELECT bar.col3, bar.col4 FROM (SELECT '
+                            't1.col3 AS col3, t1.col4 AS col4 FROM t1 '
+                            'WHERE t1.col2 IN (:col2_1, :col2_2) UNION '
+                            'SELECT t2.col3 AS col3, t2.col4 AS col4 '
+                            'FROM t2 WHERE t2.col2 IN (:col2_3, '
+                            ':col2_4)) AS bar')
+
+    def test_function(self):
+        self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
+        self.assert_compile(func.current_time(), 'CURRENT_TIME')
+        self.assert_compile(func.foo(), 'foo()')
+        m = MetaData()
+        t = Table('sometable', m, Column('col1', Integer), Column('col2'
+                  , Integer))
+        self.assert_compile(select([func.max(t.c.col1)]),
+                            'SELECT max(sometable.col1) AS max_1 FROM '
+                            'sometable')
+
+    def test_function_overrides(self):
+        self.assert_compile(func.current_date(), "GETDATE()")
+        self.assert_compile(func.length(3), "LEN(:length_1)")
+
+    def test_extract(self):
+        t = table('t', column('col1'))
+
+        for field in 'day', 'month', 'year':
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
+
+    def test_update_returning(self):
+        table1 = table('mytable', column('myid', Integer), column('name'
+                       , String(128)), column('description',
+                       String(128)))
+        u = update(table1, values=dict(name='foo'
+                   )).returning(table1.c.myid, table1.c.name)
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=:name OUTPUT '
+                            'inserted.myid, inserted.name')
+        u = update(table1, values=dict(name='foo')).returning(table1)
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=:name OUTPUT '
+                            'inserted.myid, inserted.name, '
+                            'inserted.description')
+        u = update(table1, values=dict(name='foo'
+                   )).returning(table1).where(table1.c.name == 'bar')
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=:name OUTPUT '
+                            'inserted.myid, inserted.name, '
+                            'inserted.description WHERE mytable.name = '
+                            ':name_1')
+        u = update(table1, values=dict(name='foo'
+                   )).returning(func.length(table1.c.name))
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=:name OUTPUT '
+                            'LEN(inserted.name) AS length_1')
+
+    def test_delete_returning(self):
+        table1 = table('mytable', column('myid', Integer), column('name'
+                       , String(128)), column('description',
+                       String(128)))
+        d = delete(table1).returning(table1.c.myid, table1.c.name)
+        self.assert_compile(d,
+                            'DELETE FROM mytable OUTPUT deleted.myid, '
+                            'deleted.name')
+        d = delete(table1).where(table1.c.name == 'bar'
+                                 ).returning(table1.c.myid,
+                table1.c.name)
+        self.assert_compile(d,
+                            'DELETE FROM mytable OUTPUT deleted.myid, '
+                            'deleted.name WHERE mytable.name = :name_1')
+
+    def test_insert_returning(self):
+        table1 = table('mytable', column('myid', Integer), column('name'
+                       , String(128)), column('description',
+                       String(128)))
+        i = insert(table1, values=dict(name='foo'
+                   )).returning(table1.c.myid, table1.c.name)
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) OUTPUT '
+                            'inserted.myid, inserted.name VALUES '
+                            '(:name)')
+        i = insert(table1, values=dict(name='foo')).returning(table1)
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) OUTPUT '
+                            'inserted.myid, inserted.name, '
+                            'inserted.description VALUES (:name)')
+        i = insert(table1, values=dict(name='foo'
+                   )).returning(func.length(table1.c.name))
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) OUTPUT '
+                            'LEN(inserted.name) AS length_1 VALUES '
+                            '(:name)')
+
+    def test_limit_using_top(self):
+        t = table('t', column('x', Integer), column('y', Integer))
+
+        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10)
+
+        self.assert_compile(
+            s,
+            "SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
+            checkparams={'x_1': 5}
+        )
+
+    def test_limit_zero_using_top(self):
+        t = table('t', column('x', Integer), column('y', Integer))
+
+        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0)
+
+        self.assert_compile(
+            s,
+            "SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
+            checkparams={'x_1': 5}
+        )
+
+    def test_offset_using_window(self):
+        t = table('t', column('x', Integer), column('y', Integer))
+
+        s = select([t]).where(t.c.x==5).order_by(t.c.y).offset(20)
+
+        # test that the select is not altered with subsequent compile
+        # calls
+        for i in range(2):
+            self.assert_compile(
+                s,
+                "SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
+                "AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
+                "mssql_rn FROM t WHERE t.x = :x_1) AS "
+                "anon_1 WHERE mssql_rn > :mssql_rn_1",
+                checkparams={'mssql_rn_1': 20, 'x_1': 5}
+            )
+
+    def test_limit_offset_using_window(self):
+        t = table('t', column('x', Integer), column('y', Integer))
+
+        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10).offset(20)
+
+        self.assert_compile(
+            s,
+            "SELECT anon_1.x, anon_1.y "
+            "FROM (SELECT t.x AS x, t.y AS y, "
+            "ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
+            "FROM t "
+            "WHERE t.x = :x_1) AS anon_1 "
+            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
+            checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
+        )
+
+    def test_limit_offset_with_correlated_order_by(self):
+        t1 = table('t1', column('x', Integer), column('y', Integer))
+        t2 = table('t2', column('x', Integer), column('y', Integer))
+
+        order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
+        s = select([t1]).where(t1.c.x == 5).order_by(order_by) \
+            .limit(10).offset(20)
+
+        self.assert_compile(
+            s,
+            "SELECT anon_1.x, anon_1.y "
+            "FROM (SELECT t1.x AS x, t1.y AS y, "
+            "ROW_NUMBER() OVER (ORDER BY "
+            "(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
+            ") AS mssql_rn "
+            "FROM t1 "
+            "WHERE t1.x = :x_1) AS anon_1 "
+            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
+            checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
+        )
+
+    def test_limit_zero_offset_using_window(self):
+        t = table('t', column('x', Integer), column('y', Integer))
+
+        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0).offset(0)
+
+        # render the LIMIT of zero, but not the OFFSET
+        # of zero, so produces TOP 0
+        self.assert_compile(
+            s,
+            "SELECT TOP 0 t.x, t.y FROM t "
+            "WHERE t.x = :x_1 ORDER BY t.y",
+            checkparams={'x_1': 5}
+        )
+
+    def test_sequence_start_0(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('id', Integer, Sequence('', 0), primary_key=True))
+        self.assert_compile(schema.CreateTable(tbl),
+                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
+                            "PRIMARY KEY (id))"
+                            )
+
+    def test_sequence_non_primary_key(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('id', Integer, Sequence(''), primary_key=False))
+        self.assert_compile(schema.CreateTable(tbl),
+                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
+                            )
+
+    def test_sequence_ignore_nullability(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('id', Integer, Sequence(''), nullable=True))
+        self.assert_compile(schema.CreateTable(tbl),
+                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
+                            )
+
+    def test_index_clustering(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('id', Integer))
+        idx = Index("foo", tbl.c.id, mssql_clustered=True)
+        self.assert_compile(schema.CreateIndex(idx),
+                            "CREATE CLUSTERED INDEX foo ON test (id)"
+                            )
+
+    def test_index_ordering(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
+        idx = Index("foo", tbl.c.x.desc(), "y")
+        self.assert_compile(schema.CreateIndex(idx),
+                            "CREATE INDEX foo ON test (x DESC, y)"
+                            )
+
+    def test_index_extra_include_1(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
+        idx = Index("foo", tbl.c.x, mssql_include=['y'])
+        self.assert_compile(schema.CreateIndex(idx),
+                            "CREATE INDEX foo ON test (x) INCLUDE (y)"
+                            )
+
+    def test_index_extra_include_2(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata,
+                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
+        idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
+        self.assert_compile(schema.CreateIndex(idx),
+                            "CREATE INDEX foo ON test (x) INCLUDE (y)"
+                            )
+
+
+class SchemaTest(fixtures.TestBase):
+
+    def setup(self):
+        t = Table('sometable', MetaData(),
+            Column('pk_column', Integer),
+            Column('test_column', String)
+        )
+        self.column = t.c.test_column
+
+        dialect = mssql.dialect()
+        self.ddl_compiler = dialect.ddl_compiler(dialect,
+                schema.CreateTable(t))
+
+    def _column_spec(self):
+        return self.ddl_compiler.get_column_specification(self.column)
+
+    def test_that_mssql_default_nullability_emits_null(self):
+        eq_("test_column VARCHAR(max) NULL", self._column_spec())
+
+    def test_that_mssql_none_nullability_does_not_emit_nullability(self):
+        self.column.nullable = None
+        eq_("test_column VARCHAR(max)", self._column_spec())
+
+    def test_that_mssql_specified_nullable_emits_null(self):
+        self.column.nullable = True
+        eq_("test_column VARCHAR(max) NULL", self._column_spec())
+
+    def test_that_mssql_specified_not_nullable_emits_not_null(self):
+        self.column.nullable = False
+        eq_("test_column VARCHAR(max) NOT NULL", self._column_spec())
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
new file mode 100644 (file)
index 0000000..2834f35
--- /dev/null
@@ -0,0 +1,154 @@
+# -*- encoding: utf-8
+from sqlalchemy.testing import eq_, engines
+from sqlalchemy import *
+from sqlalchemy import exc
+from sqlalchemy.dialects.mssql import pyodbc, pymssql
+from sqlalchemy.engine import url
+from sqlalchemy.testing import fixtures
+from sqlalchemy import testing
+from sqlalchemy.testing import assert_raises_message
+
+class ParseConnectTest(fixtures.TestBase):
+
+    def test_pyodbc_connect_dsn_trusted(self):
+        dialect = pyodbc.dialect()
+        u = url.make_url('mssql://mydsn')
+        connection = dialect.create_connect_args(u)
+        eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
+
+    def test_pyodbc_connect_old_style_dsn_trusted(self):
+        dialect = pyodbc.dialect()
+        u = url.make_url('mssql:///?dsn=mydsn')
+        connection = dialect.create_connect_args(u)
+        eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
+
+    def test_pyodbc_connect_dsn_non_trusted(self):
+        dialect = pyodbc.dialect()
+        u = url.make_url('mssql://username:password@mydsn')
+        connection = dialect.create_connect_args(u)
+        eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
+
+    def test_pyodbc_connect_dsn_extra(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
+                         'english&foo=bar')
+        connection = dialect.create_connect_args(u)
+        dsn_string = connection[0][0]
+        assert ";LANGUAGE=us_english" in dsn_string
+        assert ";foo=bar" in dsn_string
+
+    def test_pyodbc_connect(self):
+        dialect = pyodbc.dialect()
+        u = url.make_url('mssql://username:password@hostspec/database')
+        connection = dialect.create_connect_args(u)
+        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+            'D=username;PWD=password'], {}], connection)
+
+    def test_pyodbc_connect_comma_port(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql://username:password@hostspec:12345/data'
+                         'base')
+        connection = dialect.create_connect_args(u)
+        eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=datab'
+            'ase;UID=username;PWD=password'], {}], connection)
+
+    def test_pyodbc_connect_config_port(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql://username:password@hostspec/database?p'
+                         'ort=12345')
+        connection = dialect.create_connect_args(u)
+        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+            'D=username;PWD=password;port=12345'], {}], connection)
+
+    def test_pyodbc_extra_connect(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql://username:password@hostspec/database?L'
+                         'ANGUAGE=us_english&foo=bar')
+        connection = dialect.create_connect_args(u)
+        eq_(connection[1], {})
+        eq_(connection[0][0]
+            in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
+            'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
+            'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
+            'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
+
+    def test_pyodbc_odbc_connect(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
+                         '%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
+                         '%3BUID%3Dusername%3BPWD%3Dpassword')
+        connection = dialect.create_connect_args(u)
+        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+            'D=username;PWD=password'], {}], connection)
+
+    def test_pyodbc_odbc_connect_with_dsn(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
+                         '%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
+                         )
+        connection = dialect.create_connect_args(u)
+        eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
+            {}], connection)
+
+    def test_pyodbc_odbc_connect_ignores_other_values(self):
+        dialect = pyodbc.dialect()
+        u = \
+            url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
+                         'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
+                         '%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
+                         'rname%3BPWD%3Dpassword')
+        connection = dialect.create_connect_args(u)
+        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+            'D=username;PWD=password'], {}], connection)
+
+    def test_pymssql_port_setting(self):
+        dialect = pymssql.dialect()
+
+        u = \
+            url.make_url('mssql+pymssql://scott:tiger@somehost/test')
+        connection = dialect.create_connect_args(u)
+        eq_(
+            [[], {'host': 'somehost', 'password': 'tiger',
+                    'user': 'scott', 'database': 'test'}], connection
+        )
+
+        u = \
+            url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
+        connection = dialect.create_connect_args(u)
+        eq_(
+            [[], {'host': 'somehost:5000', 'password': 'tiger',
+                    'user': 'scott', 'database': 'test'}], connection
+        )
+
+    def test_pymssql_disconnect(self):
+        dialect = pymssql.dialect()
+
+        for error in [
+                'Adaptive Server connection timed out',
+                'message 20003',
+                "Error 10054",
+                "Not connected to any MS SQL server",
+                "Connection is closed"
+                ]:
+            eq_(dialect.is_disconnect(error, None, None), True)
+
+        eq_(dialect.is_disconnect("not an error", None, None), False)
+
+    @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
+                            "FreeTDS specific test")
+    def test_bad_freetds_warning(self):
+        engine = engines.testing_engine()
+
+        def _bad_version(connection):
+            return 95, 10, 255
+
+        engine.dialect._get_server_version_info = _bad_version
+        assert_raises_message(exc.SAWarning,
+                              'Unrecognized server version info',
+                              engine.connect)
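
The ParseConnectTest cases above pin down how URLs are translated into DBAPI connect arguments; a minimal sketch of inspecting that translation directly (credentials and DSN name are placeholders):

    from sqlalchemy.engine import url
    from sqlalchemy.dialects.mssql import pyodbc

    dialect = pyodbc.dialect()
    u = url.make_url('mssql+pyodbc://username:password@mydsn')
    # a host with no database is treated as a DSN, matching
    # test_pyodbc_connect_dsn_non_trusted:
    # [['dsn=mydsn;UID=username;PWD=password'], {}]
    print(dialect.create_connect_args(u))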
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
new file mode 100644 (file)
index 0000000..bff7371
--- /dev/null
@@ -0,0 +1,449 @@
+# -*- encoding: utf-8
+from sqlalchemy.testing import eq_, engines
+from sqlalchemy import *
+from sqlalchemy.sql import table, column
+from sqlalchemy.databases import mssql
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy import testing
+from sqlalchemy.util import ue
+from sqlalchemy import util
+
+
+
+class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
+    """SQL server cannot reference schema-qualified tables in a SELECT statement, they
+    must be aliased.
+    """
+    __dialect__ = mssql.dialect()
+
+    def setup(self):
+        metadata = MetaData()
+        self.t1 = table('t1',
+            column('a', Integer),
+            column('b', String),
+            column('c', String),
+        )
+        self.t2 = Table(
+            't2', metadata,
+            Column("a", Integer),
+            Column("b", Integer),
+            Column("c", Integer),
+            schema = 'schema'
+        )
+
+    def test_result_map(self):
+        s = self.t2.select()
+        c = s.compile(dialect=self.__dialect__)
+        assert self.t2.c.a in set(c.result_map['a'][1])
+
+    def test_result_map_use_labels(self):
+        s = self.t2.select(use_labels=True)
+        c = s.compile(dialect=self.__dialect__)
+        assert self.t2.c.a in set(c.result_map['schema_t2_a'][1])
+
+    def test_straight_select(self):
+        self.assert_compile(self.t2.select(),
+            "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1"
+        )
+
+    def test_straight_select_use_labels(self):
+        self.assert_compile(
+            self.t2.select(use_labels=True),
+            "SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, "
+            "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1"
+        )
+
+    def test_join_to_schema(self):
+        t1, t2 = self.t1, self.t2
+        self.assert_compile(
+            t1.join(t2, t1.c.a==t2.c.a).select(),
+            "SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 "
+            "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a"
+        )
+
+    def test_union_schema_to_non(self):
+        t1, t2 = self.t1, self.t2
+        s = select([t2.c.a, t2.c.b]).apply_labels().\
+                union(
+                    select([t1.c.a, t1.c.b]).apply_labels()
+                ).alias().select()
+        self.assert_compile(
+            s,
+            "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
+            "(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b "
+            "FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, "
+            "t1.b AS t1_b FROM t1) AS anon_1"
+        )
+
+    def test_column_subquery_to_alias(self):
+        a1 = self.t2.alias('a1')
+        s = select([self.t2, select([a1.c.a]).as_scalar()])
+        self.assert_compile(
+            s,
+            "SELECT t2_1.a, t2_1.b, t2_1.c, "
+            "(SELECT a1.a FROM [schema].t2 AS a1) "
+            "AS anon_1 FROM [schema].t2 AS t2_1"
+
+        )
+
+class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
+    __only_on__ = 'mssql'
+    __dialect__ = mssql.MSDialect()
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, cattable
+        metadata = MetaData(testing.db)
+
+        cattable = Table('cattable', metadata,
+            Column('id', Integer),
+            Column('description', String(50)),
+            PrimaryKeyConstraint('id', name='PK_cattable'),
+        )
+
+    def setup(self):
+        metadata.create_all()
+
+    def teardown(self):
+        metadata.drop_all()
+
+    def test_compiled(self):
+        self.assert_compile(cattable.insert().values(id=9,
+                            description='Python'),
+                            'INSERT INTO cattable (id, description) '
+                            'VALUES (:id, :description)')
+
+    def test_execute(self):
+        cattable.insert().values(id=9, description='Python').execute()
+
+        cats = cattable.select().order_by(cattable.c.id).execute()
+        eq_([(9, 'Python')], list(cats))
+
+        result = cattable.insert().values(description='PHP').execute()
+        eq_([10], result.inserted_primary_key)
+        lastcat = cattable.select().order_by(desc(cattable.c.id)).execute()
+        eq_((10, 'PHP'), lastcat.first())
+
+    def test_executemany(self):
+        cattable.insert().execute([{'id': 89, 'description': 'Python'},
+                                  {'id': 8, 'description': 'Ruby'},
+                                  {'id': 3, 'description': 'Perl'},
+                                  {'id': 1, 'description': 'Java'}])
+        cats = cattable.select().order_by(cattable.c.id).execute()
+        eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')],
+            list(cats))
+        cattable.insert().execute([{'description': 'PHP'},
+                                  {'description': 'Smalltalk'}])
+        lastcats = \
+            cattable.select().order_by(desc(cattable.c.id)).limit(2).execute()
+        eq_([(91, 'Smalltalk'), (90, 'PHP')], list(lastcats))
+
+class QueryUnicodeTest(fixtures.TestBase):
+
+    __only_on__ = 'mssql'
+
+    def test_convert_unicode(self):
+        meta = MetaData(testing.db)
+        t1 = Table('unitest_table', meta, Column('id', Integer,
+                   primary_key=True), Column('descr',
+                   mssql.MSText(convert_unicode=True)))
+        meta.create_all()
+        con = testing.db.connect()
+
+        # encode in UTF-8 (string object) because this is the default
+        # dialect encoding
+
+        con.execute(ue("insert into unitest_table values ('bien u\
+                    umang\xc3\xa9')").encode('UTF-8'))
+        try:
+            r = t1.select().execute().first()
+            assert isinstance(r[1], util.text_type), \
+                '%s is %s instead of unicode, working on %s' % (r[1],
+                    type(r[1]), meta.bind)
+        finally:
+            meta.drop_all()
+
+from sqlalchemy.testing.assertsql import ExactSQL
+class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
+    __only_on__ = 'mssql'
+
+    def test_fetchid_trigger(self):
+        """
+        Verify identity return value on inserting to a trigger table.
+
+        MSSQL's OUTPUT INSERTED clause does not work for the
+        case of a table having an identity (autoincrement)
+        primary key column, and which also has a trigger configured
+        to fire upon each insert and subsequently perform an
+        insert into a different table.
+
+        SQLAlchemy's MSSQL dialect by default will attempt to
+        use an OUTPUT INSERTED clause, which in this case will
+        raise the following error:
+
+        ProgrammingError: (ProgrammingError) ('42000', 334,
+        "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
+        target table 't1' of the DML statement cannot have any enabled
+        triggers if the statement contains an OUTPUT clause without
+        INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id
+        VALUES (?)' ('hello',)
+
+        This test verifies a workaround, which is to rely on the
+        older SCOPE_IDENTITY() call, which still works for this scenario.
+        To enable the workaround, the Table must be instantiated
+        with the init parameter 'implicit_returning = False'.
+        """
+
+        #todo: this same test needs to be tried in a multithreaded context
+        #      with multiple threads inserting to the same table.
+        #todo: check whether this error also occurs with clients other
+        #      than the SQL Server Native Client. Maybe an assert_raises
+        #      test should be written.
+        meta = MetaData(testing.db)
+        t1 = Table('t1', meta,
+                Column('id', Integer, Sequence('fred', 100, 1),
+                                primary_key=True),
+                Column('descr', String(200)),
+                # the following flag will prevent the
+                # MSSQLCompiler.returning_clause from getting called,
+                # though the ExecutionContext will still have a
+                # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
+                # hopefully be called instead.
+                implicit_returning = False
+                )
+        t2 = Table('t2', meta,
+                Column('id', Integer, Sequence('fred', 200, 1),
+                                primary_key=True),
+                Column('descr', String(200)))
+        meta.create_all()
+        con = testing.db.connect()
+        con.execute("""create trigger paj on t1 for insert as
+            insert into t2 (descr) select descr from inserted""")
+
+        try:
+            tr = con.begin()
+            r = con.execute(t2.insert(), descr='hello')
+            self.assert_(r.inserted_primary_key == [200])
+            r = con.execute(t1.insert(), descr='hello')
+            self.assert_(r.inserted_primary_key == [100])
+
+        finally:
+            tr.commit()
+            con.execute("""drop trigger paj""")
+            meta.drop_all()
+
+    @testing.provide_metadata
+    def test_disable_scope_identity(self):
+        engine = engines.testing_engine(options={"use_scope_identity":False})
+        metadata = self.metadata
+        metadata.bind = engine
+        t1 = Table('t1', metadata,
+                Column('id', Integer, primary_key=True),
+                implicit_returning=False
+        )
+        metadata.create_all()
+
+        self.assert_sql_execution(
+                testing.db,
+                lambda: engine.execute(t1.insert()),
+                ExactSQL("INSERT INTO t1 DEFAULT VALUES"),
+                # we don't have an event for
+                # "SELECT @@IDENTITY" part here.
+                # this will be in 0.8 with #2459
+        )
+        assert not engine.dialect.use_scope_identity
+
+    def test_insertid_schema(self):
+        meta = MetaData(testing.db)
+        con = testing.db.connect()
+        con.execute('create schema paj')
+        tbl = Table('test', meta,
+                    Column('id', Integer, primary_key=True), schema='paj')
+        tbl.create()
+        try:
+            tbl.insert().execute({'id':1})
+        finally:
+            tbl.drop()
+            con.execute('drop schema paj')
+
+    def test_returning_no_autoinc(self):
+        meta = MetaData(testing.db)
+        table = Table('t1', meta, Column('id', Integer,
+                      primary_key=True), Column('data', String(50)))
+        table.create()
+        try:
+            result = table.insert().values(id=1,
+                    data=func.lower('SomeString'
+                    )).returning(table.c.id, table.c.data).execute()
+            eq_(result.fetchall(), [(1, 'somestring')])
+        finally:
+
+            # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs
+            # before the result is fetched
+
+            table.drop()
+
+    def test_delete_schema(self):
+        meta = MetaData(testing.db)
+        con = testing.db.connect()
+        con.execute('create schema paj')
+        tbl = Table('test', meta, Column('id', Integer,
+                    primary_key=True), schema='paj')
+        tbl.create()
+        try:
+            tbl.insert().execute({'id': 1})
+            tbl.delete(tbl.c.id == 1).execute()
+        finally:
+            tbl.drop()
+            con.execute('drop schema paj')
+
+    def test_insertid_reserved(self):
+        meta = MetaData(testing.db)
+        table = Table(
+            'select', meta,
+            Column('col', Integer, primary_key=True)
+        )
+        table.create()
+
+        meta2 = MetaData(testing.db)
+        try:
+            table.insert().execute(col=7)
+        finally:
+            table.drop()
+
+
+class Foo(object):
+    def __init__(self, **kw):
+        for k in kw:
+            setattr(self, k, kw[k])
+
+
+def full_text_search_missing():
+    """Return True if full-text search is unavailable on the target
+    database, False otherwise."""
+
+    try:
+        connection = testing.db.connect()
+        try:
+            connection.execute('CREATE FULLTEXT CATALOG Catalog AS '
+                               'DEFAULT')
+            return False
+        except:
+            return True
+    finally:
+        connection.close()
+
+class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    __only_on__ = 'mssql'
+    __skip_if__ = full_text_search_missing,
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, cattable, matchtable
+        metadata = MetaData(testing.db)
+        cattable = Table('cattable', metadata, Column('id', Integer),
+                         Column('description', String(50)),
+                         PrimaryKeyConstraint('id', name='PK_cattable'))
+        matchtable = Table(
+            'matchtable',
+            metadata,
+            Column('id', Integer),
+            Column('title', String(200)),
+            Column('category_id', Integer, ForeignKey('cattable.id')),
+            PrimaryKeyConstraint('id', name='PK_matchtable'),
+            )
+        DDL("""CREATE FULLTEXT INDEX
+                       ON cattable (description)
+                       KEY INDEX PK_cattable""").execute_at('after-create'
+                , matchtable)
+        DDL("""CREATE FULLTEXT INDEX
+                       ON matchtable (title)
+                       KEY INDEX PK_matchtable""").execute_at('after-create'
+                , matchtable)
+        metadata.create_all()
+        cattable.insert().execute([{'id': 1, 'description': 'Python'},
+                                  {'id': 2, 'description': 'Ruby'}])
+        matchtable.insert().execute([{'id': 1, 'title'
+                                    : 'Agile Web Development with Rails'
+                                    , 'category_id': 2}, {'id': 2,
+                                    'title': 'Dive Into Python',
+                                    'category_id': 1}, {'id': 3, 'title'
+                                    : "Programming Matz's Ruby",
+                                    'category_id': 2}, {'id': 4, 'title'
+                                    : 'The Definitive Guide to Django',
+                                    'category_id': 1}, {'id': 5, 'title'
+                                    : 'Python in a Nutshell',
+                                    'category_id': 1}])
+        DDL("WAITFOR DELAY '00:00:05'"
+            ).execute(bind=engines.testing_engine())
+
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+        connection = testing.db.connect()
+        connection.execute("DROP FULLTEXT CATALOG Catalog")
+        connection.close()
+
+    def test_expression(self):
+        self.assert_compile(matchtable.c.title.match('somstr'),
+                            'CONTAINS (matchtable.title, ?)')
+
+    def test_simple_match(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match('python'
+                )).order_by(matchtable.c.id).execute().fetchall()
+        eq_([2, 5], [r.id for r in results])
+
+    def test_simple_match_with_apostrophe(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match("Matz's"
+                )).execute().fetchall()
+        eq_([3], [r.id for r in results])
+
+    def test_simple_prefix_match(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match('"nut*"'
+                )).execute().fetchall()
+        eq_([5], [r.id for r in results])
+
+    def test_simple_inflectional_match(self):
+        results = \
+            matchtable.select().where(
+                matchtable.c.title.match('FORMSOF(INFLECTIONAL, "dives")'
+                )).execute().fetchall()
+        eq_([2], [r.id for r in results])
+
+    def test_or_match(self):
+        results1 = \
+            matchtable.select().where(or_(matchtable.c.title.match('nutshell'
+                ), matchtable.c.title.match('ruby'
+                ))).order_by(matchtable.c.id).execute().fetchall()
+        eq_([3, 5], [r.id for r in results1])
+        results2 = \
+            matchtable.select().where(
+                matchtable.c.title.match('nutshell OR ruby'
+                )).order_by(matchtable.c.id).execute().fetchall()
+        eq_([3, 5], [r.id for r in results2])
+
+    def test_and_match(self):
+        results1 = \
+            matchtable.select().where(and_(matchtable.c.title.match('python'
+                ), matchtable.c.title.match('nutshell'
+                ))).execute().fetchall()
+        eq_([5], [r.id for r in results1])
+        results2 = \
+            matchtable.select().where(
+                matchtable.c.title.match('python AND nutshell'
+                )).execute().fetchall()
+        eq_([5], [r.id for r in results2])
+
+    def test_match_across_joins(self):
+        results = matchtable.select().where(and_(cattable.c.id
+                == matchtable.c.category_id,
+                or_(cattable.c.description.match('Ruby'),
+                matchtable.c.title.match('nutshell'
+                )))).order_by(matchtable.c.id).execute().fetchall()
+        eq_([1, 3, 5], [r.id for r in results])
+
+
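
The trigger workaround documented in test_fetchid_trigger comes down to a single table flag; a condensed sketch of the relevant construction, reusing the names from that test:

    from sqlalchemy import Table, Column, Integer, String, MetaData

    meta = MetaData()
    # implicit_returning=False prevents the OUTPUT clause, so the
    # dialect falls back to SELECT SCOPE_IDENTITY(), which works on
    # tables that have INSERT triggers
    t1 = Table('t1', meta,
               Column('id', Integer, primary_key=True),
               Column('descr', String(200)),
               implicit_returning=False)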
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
new file mode 100644 (file)
index 0000000..e93162a
--- /dev/null
@@ -0,0 +1,236 @@
+# -*- encoding: utf-8
+from sqlalchemy.testing import eq_
+from sqlalchemy import *
+from sqlalchemy import types, schema, event
+from sqlalchemy.databases import mssql
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
+        ComparesTables
+from sqlalchemy import testing
+from sqlalchemy.engine.reflection import Inspector
+from sqlalchemy import util
+
+
+class ReflectionTest(fixtures.TestBase, ComparesTables):
+    __only_on__ = 'mssql'
+
+    @testing.provide_metadata
+    def test_basic_reflection(self):
+        meta = self.metadata
+
+        users = Table(
+            'engine_users',
+            meta,
+            Column('user_id', types.INT, primary_key=True),
+            Column('user_name', types.VARCHAR(20), nullable=False),
+            Column('test1', types.CHAR(5), nullable=False),
+            Column('test2', types.Float(5), nullable=False),
+            Column('test3', types.Text),
+            Column('test4', types.Numeric, nullable=False),
+            Column('test5', types.DateTime),
+            Column('parent_user_id', types.Integer,
+                   ForeignKey('engine_users.user_id')),
+            Column('test6', types.DateTime, nullable=False),
+            Column('test7', types.Text),
+            Column('test8', types.LargeBinary),
+            Column('test_passivedefault2', types.Integer,
+                   server_default='5'),
+            Column('test9', types.BINARY(100)),
+            Column('test_numeric', types.Numeric()),
+            )
+
+        addresses = Table(
+            'engine_email_addresses',
+            meta,
+            Column('address_id', types.Integer, primary_key=True),
+            Column('remote_user_id', types.Integer,
+                   ForeignKey(users.c.user_id)),
+            Column('email_address', types.String(20)),
+            )
+        meta.create_all()
+
+        meta2 = MetaData()
+        reflected_users = Table('engine_users', meta2,
+                                autoload=True,
+                                autoload_with=testing.db)
+        reflected_addresses = Table('engine_email_addresses',
+                meta2, autoload=True, autoload_with=testing.db)
+        self.assert_tables_equal(users, reflected_users)
+        self.assert_tables_equal(addresses, reflected_addresses)
+
+    @testing.provide_metadata
+    def test_identity(self):
+        metadata = self.metadata
+        table = Table(
+            'identity_test', metadata,
+            Column('col1', Integer, Sequence('fred', 2, 3), primary_key=True)
+        )
+        table.create()
+
+        meta2 = MetaData(testing.db)
+        table2 = Table('identity_test', meta2, autoload=True)
+        sequence = isinstance(table2.c['col1'].default, schema.Sequence) \
+                                and table2.c['col1'].default
+        assert sequence.start == 2
+        assert sequence.increment == 3
+
+    @testing.emits_warning("Did not recognize")
+    @testing.provide_metadata
+    def test_skip_types(self):
+        metadata = self.metadata
+        testing.db.execute("""
+            create table foo (id integer primary key, data xml)
+        """)
+        t1 = Table('foo', metadata, autoload=True)
+        assert isinstance(t1.c.id.type, Integer)
+        assert isinstance(t1.c.data.type, types.NullType)
+
+
+    @testing.provide_metadata
+    def test_db_qualified_items(self):
+        metadata = self.metadata
+        Table('foo', metadata, Column('id', Integer, primary_key=True))
+        Table('bar', metadata,
+                Column('id', Integer, primary_key=True),
+                Column('foo_id', Integer, ForeignKey('foo.id', name="fkfoo"))
+            )
+        metadata.create_all()
+
+        dbname = testing.db.scalar("select db_name()")
+        owner = testing.db.scalar("SELECT user_name()")
+
+        inspector = inspect(testing.db)
+        bar_via_db = inspector.get_foreign_keys(
+                            "bar", schema="%s.%s" % (dbname, owner))
+        eq_(
+            bar_via_db,
+            [{
+                'referred_table': 'foo',
+                'referred_columns': ['id'],
+                'referred_schema': 'test.dbo',
+                'name': 'fkfoo',
+                'constrained_columns': ['foo_id']}]
+        )
+
+        assert testing.db.has_table("bar", schema="test.dbo")
+
+        m2 = MetaData()
+        Table('bar', m2, schema="test.dbo", autoload=True,
+                                autoload_with=testing.db)
+        eq_(m2.tables["test.dbo.foo"].schema, "test.dbo")
+
+
+    @testing.provide_metadata
+    def test_indexes_cols(self):
+        metadata = self.metadata
+
+        t1 = Table('t', metadata, Column('x', Integer), Column('y', Integer))
+        Index('foo', t1.c.x, t1.c.y)
+        metadata.create_all()
+
+        m2 = MetaData()
+        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+        eq_(
+            set(list(t2.indexes)[0].columns),
+            set([t2.c['x'], t2.c.y])
+        )
+
+    @testing.provide_metadata
+    def test_indexes_cols_with_commas(self):
+        metadata = self.metadata
+
+        t1 = Table('t', metadata,
+                        Column('x, col', Integer, key='x'),
+                        Column('y', Integer)
+                    )
+        Index('foo', t1.c.x, t1.c.y)
+        metadata.create_all()
+
+        m2 = MetaData()
+        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+        eq_(
+            set(list(t2.indexes)[0].columns),
+            set([t2.c['x, col'], t2.c.y])
+        )
+
+    @testing.provide_metadata
+    def test_indexes_cols_with_spaces(self):
+        metadata = self.metadata
+
+        t1 = Table('t', metadata, Column('x col', Integer, key='x'),
+                                    Column('y', Integer))
+        Index('foo', t1.c.x, t1.c.y)
+        metadata.create_all()
+
+        m2 = MetaData()
+        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+        eq_(
+            set(list(t2.indexes)[0].columns),
+            set([t2.c['x col'], t2.c.y])
+        )
+
+from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
+from sqlalchemy.dialects.mssql import base
+
+class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
+    def test_info_unicode_coercion(self):
+
+        dialect = mssql.dialect()
+        value = CoerceUnicode().bind_processor(dialect)('a string')
+        assert isinstance(value, util.text_type)
+
+    def test_info_unicode_cast_no_2000(self):
+        dialect = mssql.dialect()
+        dialect.server_version_info = base.MS_2000_VERSION
+        stmt = tables.c.table_name == 'somename'
+        self.assert_compile(
+            stmt,
+            "[TABLES_1].[TABLE_NAME] = :TABLE_NAME_1",
+            dialect=dialect
+        )
+
+    def test_info_unicode_cast(self):
+        dialect = mssql.dialect()
+        dialect.server_version_info = base.MS_2005_VERSION
+        stmt = tables.c.table_name == 'somename'
+        self.assert_compile(
+            stmt,
+            "[TABLES_1].[TABLE_NAME] = CAST(:TABLE_NAME_1 AS NVARCHAR(max))",
+            dialect=dialect
+        )
+
+class ReflectHugeViewTest(fixtures.TestBase):
+    __only_on__ = 'mssql'
+
+    def setup(self):
+        self.col_num = 150
+
+        self.metadata = MetaData(testing.db)
+        t = Table('base_table', self.metadata,
+                *[
+                    Column("long_named_column_number_%d" % i, Integer)
+                    for i in range(self.col_num)
+                ]
+        )
+        self.view_str = view_str = \
+            "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
+            ",".join("long_named_column_number_%d" % i
+                        for i in range(self.col_num))
+            )
+        assert len(view_str) > 4000
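+        # a definition longer than 4000 characters crosses the
+        # NVARCHAR(4000) boundary, past which a naive reflection query
+        # could come back truncated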
+
+        event.listen(t, 'after_create', DDL(view_str))
+        event.listen(t, 'before_drop', DDL("DROP VIEW huge_named_view"))
+
+        self.metadata.create_all()
+
+    def teardown(self):
+        self.metadata.drop_all()
+
+    def test_inspect_view_definition(self):
+        inspector = Inspector.from_engine(testing.db)
+        view_def = inspector.get_view_definition("huge_named_view")
+        eq_(view_def, self.view_str)
+
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
new file mode 100644 (file)
index 0000000..9dc1983
--- /dev/null
@@ -0,0 +1,727 @@
+# -*- encoding: utf-8
+from sqlalchemy.testing import eq_, engines, pickleable
+import datetime
+import os
+from sqlalchemy import *
+from sqlalchemy import types, schema
+from sqlalchemy.databases import mssql
+from sqlalchemy.dialects.mssql.base import TIME
+from sqlalchemy.testing import fixtures, \
+        AssertsExecutionResults, ComparesTables
+from sqlalchemy import testing
+from sqlalchemy.testing import emits_warning_on
+import decimal
+from sqlalchemy.util import b
+
+
+class TimeTypeTest(fixtures.TestBase):
+
+    def test_result_processor_no_microseconds(self):
+        expected = datetime.time(12, 34, 56)
+        self._assert_result_processor(expected, '12:34:56')
+
+    def test_result_processor_too_many_microseconds(self):
+        # datetime.time caps microseconds at 6 digits; the 7-digit
+        # fractional second should be truncated to 123456
+        expected = datetime.time(12, 34, 56, 123456)
+        self._assert_result_processor(expected, '12:34:56.1234567')
+
+    def _assert_result_processor(self, expected, value):
+        mssql_time_type = TIME()
+        result_processor = mssql_time_type.result_processor(None, None)
+        eq_(expected, result_processor(value))
+
+
+class TypeDDLTest(fixtures.TestBase):
+    def test_boolean(self):
+        "Exercise type specification for boolean type."
+
+        columns = [
+            # column type, args, kwargs, expected ddl
+            (Boolean, [], {},
+             'BIT'),
+           ]
+
+        metadata = MetaData()
+        table_args = ['test_mssql_boolean', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(
+                Column('c%s' % index, type_(*args, **kw), nullable=None))
+
+        boolean_table = Table(*table_args)
+        dialect = mssql.dialect()
+        gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))
+
+        for col in boolean_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col),
+                           "%s %s" % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+
+
+    def test_numeric(self):
+        "Exercise type specification and options for numeric types."
+
+        columns = [
+            # column type, args, kwargs, expected ddl
+            (types.NUMERIC, [], {},
+             'NUMERIC'),
+            (types.NUMERIC, [None], {},
+             'NUMERIC'),
+            (types.NUMERIC, [12, 4], {},
+             'NUMERIC(12, 4)'),
+
+            (types.Float, [], {},
+             'FLOAT'),
+            (types.Float, [None], {},
+             'FLOAT'),
+            (types.Float, [12], {},
+             'FLOAT(12)'),
+            (mssql.MSReal, [], {},
+             'REAL'),
+
+            (types.Integer, [], {},
+             'INTEGER'),
+            (types.BigInteger, [], {},
+             'BIGINT'),
+            (mssql.MSTinyInteger, [], {},
+             'TINYINT'),
+            (types.SmallInteger, [], {},
+             'SMALLINT'),
+           ]
+
+        metadata = MetaData()
+        table_args = ['test_mssql_numeric', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(
+                Column('c%s' % index, type_(*args, **kw), nullable=None))
+
+        numeric_table = Table(*table_args)
+        dialect = mssql.dialect()
+        gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))
+
+        for col in numeric_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col),
+                           "%s %s" % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+
+
+    def test_char(self):
+        """Exercise COLLATE-ish options on string types."""
+
+        columns = [
+            (mssql.MSChar, [], {},
+             'CHAR'),
+            (mssql.MSChar, [1], {},
+             'CHAR(1)'),
+            (mssql.MSChar, [1], {'collation': 'Latin1_General_CI_AS'},
+             'CHAR(1) COLLATE Latin1_General_CI_AS'),
+
+            (mssql.MSNChar, [], {},
+             'NCHAR'),
+            (mssql.MSNChar, [1], {},
+             'NCHAR(1)'),
+            (mssql.MSNChar, [1], {'collation': 'Latin1_General_CI_AS'},
+             'NCHAR(1) COLLATE Latin1_General_CI_AS'),
+
+            (mssql.MSString, [], {},
+             'VARCHAR(max)'),
+            (mssql.MSString, [1], {},
+             'VARCHAR(1)'),
+            (mssql.MSString, [1], {'collation': 'Latin1_General_CI_AS'},
+             'VARCHAR(1) COLLATE Latin1_General_CI_AS'),
+
+            (mssql.MSNVarchar, [], {},
+             'NVARCHAR(max)'),
+            (mssql.MSNVarchar, [1], {},
+             'NVARCHAR(1)'),
+            (mssql.MSNVarchar, [1], {'collation': 'Latin1_General_CI_AS'},
+             'NVARCHAR(1) COLLATE Latin1_General_CI_AS'),
+
+            (mssql.MSText, [], {},
+             'TEXT'),
+            (mssql.MSText, [], {'collation': 'Latin1_General_CI_AS'},
+             'TEXT COLLATE Latin1_General_CI_AS'),
+
+            (mssql.MSNText, [], {},
+             'NTEXT'),
+            (mssql.MSNText, [], {'collation': 'Latin1_General_CI_AS'},
+             'NTEXT COLLATE Latin1_General_CI_AS'),
+           ]
+
+        metadata = MetaData()
+        table_args = ['test_mssql_charset', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(
+                Column('c%s' % index, type_(*args, **kw), nullable=None))
+
+        charset_table = Table(*table_args)
+        dialect = mssql.dialect()
+        gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))
+
+        for col in charset_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col),
+                           "%s %s" % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+
+
+    def test_timestamp(self):
+        """Exercise TIMESTAMP column."""
+
+        dialect = mssql.dialect()
+
+        metadata = MetaData()
+        spec, expected = (TIMESTAMP, 'TIMESTAMP')
+        t = Table('mssql_ts', metadata,
+                   Column('id', Integer, primary_key=True),
+                   Column('t', spec, nullable=None))
+        gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
+        testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
+        self.assert_(repr(t.c.t))
+
+    def test_money(self):
+        """Exercise type specification for money types."""
+
+        columns = [(mssql.MSMoney, [], {}, 'MONEY'),
+                   (mssql.MSSmallMoney, [], {}, 'SMALLMONEY')]
+        metadata = MetaData()
+        table_args = ['test_mssql_money', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(Column('c%s' % index, type_(*args, **kw),
+                              nullable=None))
+        money_table = Table(*table_args)
+        dialect = mssql.dialect()
+        gen = dialect.ddl_compiler(dialect,
+                                   schema.CreateTable(money_table))
+        for col in money_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col), '%s %s'
+                        % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+
+    def test_binary(self):
+        "Exercise type specification for binary types."
+
+        columns = [
+            # column type, args, kwargs, expected ddl
+            (mssql.MSBinary, [], {},
+             'BINARY'),
+            (mssql.MSBinary, [10], {},
+             'BINARY(10)'),
+
+            (types.BINARY, [], {},
+             'BINARY'),
+            (types.BINARY, [10], {},
+             'BINARY(10)'),
+
+            (mssql.MSVarBinary, [], {},
+             'VARBINARY(max)'),
+            (mssql.MSVarBinary, [10], {},
+             'VARBINARY(10)'),
+
+            (types.VARBINARY, [10], {},
+             'VARBINARY(10)'),
+            (types.VARBINARY, [], {},
+             'VARBINARY(max)'),
+
+            (mssql.MSImage, [], {},
+             'IMAGE'),
+
+            (mssql.IMAGE, [], {},
+             'IMAGE'),
+
+            (types.LargeBinary, [], {},
+             'IMAGE'),
+        ]
+
+        metadata = MetaData()
+        table_args = ['test_mssql_binary', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(Column('c%s' % index, type_(*args, **kw),
+                              nullable=None))
+        binary_table = Table(*table_args)
+        dialect = mssql.dialect()
+        gen = dialect.ddl_compiler(dialect,
+                                   schema.CreateTable(binary_table))
+        for col in binary_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col), '%s %s'
+                        % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+
+class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTables):
+    __only_on__ = 'mssql'
+
+    @classmethod
+    def setup_class(cls):
+        global metadata
+        metadata = MetaData(testing.db)
+
+    def teardown(self):
+        metadata.drop_all()
+
+    @testing.fails_on_everything_except('mssql+pyodbc',
+            'this is some pyodbc-specific feature')
+    def test_decimal_notation(self):
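+        # pyodbc cannot bind full-precision Decimal values directly; the
+        # mssql+pyodbc dialect converts them to string literals, which is
+        # the round-trip path exercised here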
+        numeric_table = Table('numeric_table', metadata, Column('id',
+                              Integer, Sequence('numeric_id_seq',
+                              optional=True), primary_key=True),
+                              Column('numericcol',
+                              Numeric(precision=38, scale=20,
+                              asdecimal=True)))
+        metadata.create_all()
+        test_items = [decimal.Decimal(d) for d in (
+            '1500000.00000000000000000000',
+            '-1500000.00000000000000000000',
+            '1500000',
+            '0.0000000000000000002',
+            '0.2',
+            '-0.0000000000000000002',
+            '-2E-2',
+            '156666.458923543',
+            '-156666.458923543',
+            '1',
+            '-1',
+            '-1234',
+            '1234',
+            '2E-12',
+            '4E8',
+            '3E-6',
+            '3E-7',
+            '4.1',
+            '1E-1',
+            '1E-2',
+            '1E-3',
+            '1E-4',
+            '1E-5',
+            '1E-6',
+            '1E-7',
+            '1E-1',
+            '1E-8',
+            '0.2732E2',
+            '-0.2432E2',
+            '4.35656E2',
+            '-02452E-2',
+            '45125E-2',
+            '1234.58965E-2',
+            '1.521E+15',
+            '-1E-25',
+            '1E-25',
+            '1254E-25',
+            '-1203E-25',
+            '0',
+            '-0.00',
+            '-0',
+            '4585E12',
+            '000000000000000000012',
+            '000000000000.32E12',
+            '00000000000000.1E+12',
+            '000000000000.2E-32',
+            )]
+
+        for value in test_items:
+            numeric_table.insert().execute(numericcol=value)
+
+        for value in select([numeric_table.c.numericcol]).execute():
+            assert value[0] in test_items, "%r not in test_items" % value[0]
+
+    def test_float(self):
+        float_table = Table('float_table', metadata, Column('id',
+                            Integer, Sequence('numeric_id_seq',
+                            optional=True), primary_key=True),
+                            Column('floatcol', Float()))
+        metadata.create_all()
+        test_items = [float(d) for d in (
+            '1500000.00000000000000000000',
+            '-1500000.00000000000000000000',
+            '1500000',
+            '0.0000000000000000002',
+            '0.2',
+            '-0.0000000000000000002',
+            '156666.458923543',
+            '-156666.458923543',
+            '1',
+            '-1',
+            '1234',
+            '2E-12',
+            '4E8',
+            '3E-6',
+            '3E-7',
+            '4.1',
+            '1E-1',
+            '1E-2',
+            '1E-3',
+            '1E-4',
+            '1E-5',
+            '1E-6',
+            '1E-7',
+            '1E-8',
+            )]
+        for value in test_items:
+            float_table.insert().execute(floatcol=value)
+
+
+    # TODO: this should suppress warnings, but it does not
+    @emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
+    def test_dates(self):
+        "Exercise type specification for date types."
+
+        columns = [
+            # column type, args, kwargs, expected ddl,
+            # version requirement [, expected reflected type]
+            (mssql.MSDateTime, [], {},
+             'DATETIME', []),
+
+            (types.DATE, [], {},
+             'DATE', ['>=', (10,)]),
+            (types.Date, [], {},
+             'DATE', ['>=', (10,)]),
+            (types.Date, [], {},
+             'DATETIME', ['<', (10,)], mssql.MSDateTime),
+            (mssql.MSDate, [], {},
+             'DATE', ['>=', (10,)]),
+            (mssql.MSDate, [], {},
+             'DATETIME', ['<', (10,)], mssql.MSDateTime),
+
+            (types.TIME, [], {},
+             'TIME', ['>=', (10,)]),
+            (types.Time, [], {},
+             'TIME', ['>=', (10,)]),
+            (mssql.MSTime, [], {},
+             'TIME', ['>=', (10,)]),
+            (mssql.MSTime, [1], {},
+             'TIME(1)', ['>=', (10,)]),
+            (types.Time, [], {},
+             'DATETIME', ['<', (10,)], mssql.MSDateTime),
+            (mssql.MSTime, [], {},
+             'TIME', ['>=', (10,)]),
+
+            (mssql.MSSmallDateTime, [], {},
+             'SMALLDATETIME', []),
+
+            (mssql.MSDateTimeOffset, [], {},
+             'DATETIMEOFFSET', ['>=', (10,)]),
+            (mssql.MSDateTimeOffset, [1], {},
+             'DATETIMEOFFSET(1)', ['>=', (10,)]),
+
+            (mssql.MSDateTime2, [], {},
+             'DATETIME2', ['>=', (10,)]),
+            (mssql.MSDateTime2, [1], {},
+             'DATETIME2(1)', ['>=', (10,)]),
+
+            ]
+
+        table_args = ['test_mssql_dates', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res, requires = spec[0:5]
+            if (requires and testing._is_excluded('mssql', *requires)) \
+                    or not requires:
+                c = Column('c%s' % index, type_(*args,
+                                  **kw), nullable=None)
+                testing.db.dialect.type_descriptor(c.type)
+                table_args.append(c)
+        dates_table = Table(*table_args)
+        gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
+                schema.CreateTable(dates_table))
+        for col in dates_table.c:
+            index = int(col.name[1:])
+            testing.eq_(gen.get_column_specification(col), '%s %s'
+                        % (col.name, columns[index][3]))
+            self.assert_(repr(col))
+        dates_table.create(checkfirst=True)
+        reflected_dates = Table('test_mssql_dates',
+                                MetaData(testing.db), autoload=True)
+        for col in reflected_dates.c:
+            self.assert_types_base(col, dates_table.c[col.key])
+
+    def test_date_roundtrip(self):
+        t = Table('test_dates', metadata,
+                    Column('id', Integer,
+                           Sequence('datetest_id_seq', optional=True),
+                           primary_key=True),
+                    Column('adate', Date),
+                    Column('atime', Time),
+                    Column('adatetime', DateTime))
+        metadata.create_all()
+        d1 = datetime.date(2007, 10, 30)
+        t1 = datetime.time(11, 2, 32)
+        d2 = datetime.datetime(2007, 10, 30, 11, 2, 32)
+        t.insert().execute(adate=d1, adatetime=d2, atime=t1)
+        t.insert().execute(adate=d2, adatetime=d2, atime=d2)
+
+        x = t.select().execute().fetchall()[0]
+        self.assert_(x.adate.__class__ == datetime.date)
+        self.assert_(x.atime.__class__ == datetime.time)
+        self.assert_(x.adatetime.__class__ == datetime.datetime)
+
+        t.delete().execute()
+
+        t.insert().execute(adate=d1, adatetime=d2, atime=t1)
+
+        eq_(select([t.c.adate, t.c.atime, t.c.adatetime], t.c.adate
+            == d1).execute().fetchall(), [(d1, t1, d2)])
+
+    @emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
+    @testing.provide_metadata
+    def test_binary_reflection(self):
+        "Exercise type specification for binary types."
+
+        columns = [
+            # column type, args, kwargs, expected ddl
+            (mssql.MSBinary, [], {},
+             'BINARY'),
+            (mssql.MSBinary, [10], {},
+             'BINARY(10)'),
+
+            (types.BINARY, [], {},
+             'BINARY'),
+            (types.BINARY, [10], {},
+             'BINARY(10)'),
+
+            (mssql.MSVarBinary, [], {},
+             'VARBINARY(max)'),
+            (mssql.MSVarBinary, [10], {},
+             'VARBINARY(10)'),
+
+            (types.VARBINARY, [10], {},
+             'VARBINARY(10)'),
+            (types.VARBINARY, [], {},
+             'VARBINARY(max)'),
+
+            (mssql.MSImage, [], {},
+             'IMAGE'),
+
+            (mssql.IMAGE, [], {},
+             'IMAGE'),
+
+            (types.LargeBinary, [], {},
+             'IMAGE'),
+        ]
+
+        metadata = self.metadata
+        table_args = ['test_mssql_binary', metadata]
+        for index, spec in enumerate(columns):
+            type_, args, kw, res = spec
+            table_args.append(Column('c%s' % index, type_(*args, **kw),
+                              nullable=None))
+        binary_table = Table(*table_args)
+        metadata.create_all()
+        reflected_binary = Table('test_mssql_binary',
+                                 MetaData(testing.db), autoload=True)
+        for col in reflected_binary.c:
+            c1 = testing.db.dialect.type_descriptor(col.type).__class__
+            c2 = \
+                testing.db.dialect.type_descriptor(
+                    binary_table.c[col.name].type).__class__
+            assert issubclass(c1, c2), '%r is not a subclass of %r' \
+                % (c1, c2)
+            if binary_table.c[col.name].type.length:
+                testing.eq_(col.type.length,
+                            binary_table.c[col.name].type.length)
+
+
+    def test_autoincrement(self):
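+        # naming convention for the assertions below: int_y columns should
+        # reflect autoincrement=True; int_n columns are declared with a
+        # server default and autoincrement=False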
+        Table('ai_1', metadata,
+               Column('int_y', Integer, primary_key=True),
+               Column('int_n', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False))
+        Table('ai_2', metadata,
+               Column('int_y', Integer, primary_key=True),
+               Column('int_n', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False))
+        Table('ai_3', metadata,
+               Column('int_n', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False),
+               Column('int_y', Integer, primary_key=True))
+        Table('ai_4', metadata,
+               Column('int_n', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False),
+               Column('int_n2', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False))
+        Table('ai_5', metadata,
+               Column('int_y', Integer, primary_key=True),
+               Column('int_n', Integer, DefaultClause('0'),
+                      primary_key=True, autoincrement=False))
+        Table('ai_6', metadata,
+               Column('o1', String(1), DefaultClause('x'),
+                      primary_key=True),
+               Column('int_y', Integer, primary_key=True))
+        Table('ai_7', metadata,
+               Column('o1', String(1), DefaultClause('x'),
+                      primary_key=True),
+               Column('o2', String(1), DefaultClause('x'),
+                      primary_key=True),
+               Column('int_y', Integer, primary_key=True))
+        Table('ai_8', metadata,
+               Column('o1', String(1), DefaultClause('x'),
+                      primary_key=True),
+               Column('o2', String(1), DefaultClause('x'),
+                      primary_key=True))
+        metadata.create_all()
+
+        table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
+                        'ai_5', 'ai_6', 'ai_7', 'ai_8']
+        mr = MetaData(testing.db)
+
+        for name in table_names:
+            Table(name, mr, autoload=True)  # exercise reflection only
+            tbl = metadata.tables[name]
+            for c in tbl.c:
+                if c.name.startswith('int_y'):
+                    assert c.autoincrement, name
+                    assert tbl._autoincrement_column is c, name
+                elif c.name.startswith('int_n'):
+                    assert not c.autoincrement, name
+                    assert tbl._autoincrement_column is not c, name
+
+            # mxodbc can't handle scope_identity() with DEFAULT VALUES
+
+            if testing.db.driver == 'mxodbc':
+                eng = [
+                    engines.testing_engine(
+                        options={'implicit_returning': True})]
+            else:
+                eng = [
+                    engines.testing_engine(
+                        options={'implicit_returning': False}),
+                    engines.testing_engine(
+                        options={'implicit_returning': True})]
+
+            for counter, engine in enumerate(eng):
+                engine.execute(tbl.insert())
+                if 'int_y' in tbl.c:
+                    assert engine.scalar(select([tbl.c.int_y])) \
+                        == counter + 1
+                    assert list(engine.execute(tbl.select()).first()).\
+                            count(counter + 1) == 1
+                else:
+                    assert 1 \
+                        not in list(engine.execute(tbl.select()).first())
+                engine.execute(tbl.delete())
+
+class MonkeyPatchedBinaryTest(fixtures.TestBase):
+    __only_on__ = 'mssql+pymssql'
+
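+    # the pymssql dialect monkeypatches pymssql's Binary() into a
+    # pass-through, so both str and bytes input should come back unchanged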
+    def test_unicode(self):
+        module = __import__('pymssql')
+        result = module.Binary('foo')
+        eq_(result, 'foo')
+
+    def test_bytes(self):
+        module = __import__('pymssql')
+        data = b('\x80\x03]q\x00X\x03\x00\x00\x00oneq\x01a.')
+        expected_result = data
+        result = module.Binary(data)
+        eq_(result, expected_result)
+
+class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
+    """Test the Binary and VarBinary types"""
+
+    __only_on__ = 'mssql'
+
+    @classmethod
+    def setup_class(cls):
+        global binary_table, MyPickleType
+
+        class MyPickleType(types.TypeDecorator):
+            impl = PickleType
+
+            def process_bind_param(self, value, dialect):
+                if value:
+                    value.stuff = 'this is modified stuff'
+                return value
+
+            def process_result_value(self, value, dialect):
+                if value:
+                    value.stuff = 'this is the right stuff'
+                return value
+
+        binary_table = Table(
+            'binary_table',
+            MetaData(testing.db),
+            Column('primary_id', Integer, Sequence('binary_id_seq',
+                   optional=True), primary_key=True),
+            Column('data', mssql.MSVarBinary(8000)),
+            Column('data_image', mssql.MSImage),
+            Column('data_slice', types.BINARY(100)),
+            Column('misc', String(30)),
+            Column('pickled', PickleType),
+            Column('mypickle', MyPickleType),
+            )
+        binary_table.create()
+
+    def teardown(self):
+        binary_table.delete().execute()
+
+    @classmethod
+    def teardown_class(cls):
+        binary_table.drop()
+
+    def test_binary(self):
+        testobj1 = pickleable.Foo('im foo 1')
+        testobj2 = pickleable.Foo('im foo 2')
+        testobj3 = pickleable.Foo('im foo 3')
+        stream1 = self.load_stream('binary_data_one.dat')
+        stream2 = self.load_stream('binary_data_two.dat')
+        binary_table.insert().execute(
+            primary_id=1,
+            misc='binary_data_one.dat',
+            data=stream1,
+            data_image=stream1,
+            data_slice=stream1[0:100],
+            pickled=testobj1,
+            mypickle=testobj3,
+            )
+        binary_table.insert().execute(
+            primary_id=2,
+            misc='binary_data_two.dat',
+            data=stream2,
+            data_image=stream2,
+            data_slice=stream2[0:99],
+            pickled=testobj2,
+            )
+
+        # TODO: pyodbc does not seem to accept "None" for a VARBINARY
+        # column (data=None). error:  [Microsoft][ODBC SQL Server
+        # Driver][SQL Server]Implicit conversion from data type varchar
+        # to varbinary is not allowed. Use the CONVERT function to run
+        # this query. (257) binary_table.insert().execute(primary_id=3,
+        # misc='binary_data_two.dat', data=None, data_image=None,
+        # data_slice=stream2[0:99], pickled=None)
+
+        binary_table.insert().execute(primary_id=3,
+                misc='binary_data_two.dat', data_image=None,
+                data_slice=stream2[0:99], pickled=None)
+        for stmt in \
+            binary_table.select(order_by=binary_table.c.primary_id), \
+            text('select * from binary_table order by '
+                 'binary_table.primary_id',
+                 typemap=dict(data=mssql.MSVarBinary(8000),
+                 data_image=mssql.MSImage,
+                 data_slice=types.BINARY(100), pickled=PickleType,
+                 mypickle=MyPickleType), bind=testing.db):
+            l = stmt.execute().fetchall()
+            eq_(list(stream1), list(l[0]['data']))
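+            # BINARY(100) is fixed-width; values shorter than 100 bytes
+            # come back right-padded with NUL bytes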
+            paddedstream = list(stream1[0:100])
+            paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
+            eq_(paddedstream, list(l[0]['data_slice']))
+            eq_(list(stream2), list(l[1]['data']))
+            eq_(list(stream2), list(l[1]['data_image']))
+            eq_(testobj1, l[0]['pickled'])
+            eq_(testobj2, l[1]['pickled'])
+            eq_(testobj3.moredata, l[0]['mypickle'].moredata)
+            eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
+
+    def load_stream(self, name, length=3000):
+        path = os.path.join(os.path.dirname(__file__), "..", "..", name)
+        with open(path, 'rb') as fp:
+            return fp.read(length)
diff --git a/test/dialect/mysql/__init__.py b/test/dialect/mysql/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
new file mode 100644 (file)
index 0000000..a77a25c
--- /dev/null
@@ -0,0 +1,437 @@
+# coding: utf-8
+
+from sqlalchemy.testing import eq_, assert_raises_message
+from sqlalchemy import *
+from sqlalchemy import sql, exc, schema, types as sqltypes
+from sqlalchemy.dialects.mysql import base as mysql
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy import testing
+
+class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    __dialect__ = mysql.dialect()
+
+    def test_reserved_words(self):
+        table = Table("mysql_table", MetaData(),
+            Column("col1", Integer),
+            Column("master_ssl_verify_server_cert", Integer))
+        x = select([table.c.col1, table.c.master_ssl_verify_server_cert])
+
+        self.assert_compile(x,
+            "SELECT mysql_table.col1, "
+            "mysql_table.`master_ssl_verify_server_cert` FROM mysql_table")
+
+    def test_create_index_simple(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String(255)))
+        idx = Index('test_idx1', tbl.c.data)
+
+        self.assert_compile(schema.CreateIndex(idx),
+            'CREATE INDEX test_idx1 ON testtbl (data)')
+
+    def test_create_index_with_length(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String(255)))
+        idx1 = Index('test_idx1', tbl.c.data, mysql_length=10)
+        idx2 = Index('test_idx2', tbl.c.data, mysql_length=5)
+
+        self.assert_compile(schema.CreateIndex(idx1),
+            'CREATE INDEX test_idx1 ON testtbl (data(10))')
+        self.assert_compile(schema.CreateIndex(idx2),
+            'CREATE INDEX test_idx2 ON testtbl (data(5))')
+
+    def test_create_composite_index_with_length(self):
+        m = MetaData()
+        tbl = Table('testtbl', m,
+                    Column('a', String(255)),
+                    Column('b', String(255)))
+
+        idx1 = Index('test_idx1', tbl.c.a, tbl.c.b,
+                     mysql_length={'a': 10, 'b': 20})
+        idx2 = Index('test_idx2', tbl.c.a, tbl.c.b,
+                     mysql_length={'a': 15})
+        idx3 = Index('test_idx3', tbl.c.a, tbl.c.b,
+                     mysql_length=30)
+
+        self.assert_compile(
+            schema.CreateIndex(idx1),
+            'CREATE INDEX test_idx1 ON testtbl (a(10), b(20))'
+        )
+        self.assert_compile(
+            schema.CreateIndex(idx2),
+            'CREATE INDEX test_idx2 ON testtbl (a(15), b)'
+        )
+        self.assert_compile(
+            schema.CreateIndex(idx3),
+            'CREATE INDEX test_idx3 ON testtbl (a(30), b(30))'
+        )
+
+    def test_create_index_with_using(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String(255)))
+        idx1 = Index('test_idx1', tbl.c.data, mysql_using='btree')
+        idx2 = Index('test_idx2', tbl.c.data, mysql_using='hash')
+
+        self.assert_compile(schema.CreateIndex(idx1),
+            'CREATE INDEX test_idx1 ON testtbl (data) USING btree')
+        self.assert_compile(schema.CreateIndex(idx2),
+            'CREATE INDEX test_idx2 ON testtbl (data) USING hash')
+
+    def test_create_pk_plain(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String(255)),
+            PrimaryKeyConstraint('data'))
+
+        self.assert_compile(schema.CreateTable(tbl),
+            "CREATE TABLE testtbl (data VARCHAR(255), PRIMARY KEY (data))")
+
+    def test_create_pk_with_using(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String(255)),
+            PrimaryKeyConstraint('data', mysql_using='btree'))
+
+        self.assert_compile(schema.CreateTable(tbl),
+            "CREATE TABLE testtbl (data VARCHAR(255), "
+            "PRIMARY KEY (data) USING btree)")
+
+    def test_skip_deferrable_kw(self):
+        m = MetaData()
+        t1 = Table('t1', m, Column('id', Integer, primary_key=True))
+        t2 = Table('t2', m, Column('id', Integer,
+                        ForeignKey('t1.id', deferrable=True),
+                            primary_key=True))
+
+        self.assert_compile(
+            schema.CreateTable(t2),
+            "CREATE TABLE t2 (id INTEGER NOT NULL, "
+            "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id))"
+        )
+
+class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
+    """Tests MySQL-dialect specific compilation."""
+
+    __dialect__ = mysql.dialect()
+
+    def test_precolumns(self):
+        dialect = self.__dialect__
+
+        def gen(distinct=None, prefixes=None):
+            kw = {}
+            if distinct is not None:
+                kw['distinct'] = distinct
+            if prefixes is not None:
+                kw['prefixes'] = prefixes
+            return str(select(['q'], **kw).compile(dialect=dialect))
+
+        eq_(gen(None), 'SELECT q')
+        eq_(gen(True), 'SELECT DISTINCT q')
+
+        eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
+        eq_(gen(prefixes=['DISTINCTROW']),
+                'SELECT DISTINCTROW q')
+
+        # Interaction with MySQL prefix extensions
+        eq_(
+            gen(None, ['straight_join']),
+            'SELECT straight_join q')
+        eq_(
+            gen(False, ['HIGH_PRIORITY', 'SQL_SMALL_RESULT', 'ALL']),
+            'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q')
+        eq_(
+            gen(True, ['high_priority', sql.text('sql_cache')]),
+            'SELECT high_priority sql_cache DISTINCT q')
+
+    @testing.uses_deprecated
+    def test_deprecated_distinct(self):
+        dialect = self.__dialect__
+
+        self.assert_compile(
+            select(['q'], distinct='ALL'),
+            'SELECT ALL q',
+        )
+
+        self.assert_compile(
+            select(['q'], distinct='distinctROW'),
+            'SELECT DISTINCTROW q',
+        )
+
+        self.assert_compile(
+            select(['q'], distinct='ALL',
+                    prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']),
+            'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q'
+        )
+
+    def test_backslash_escaping(self):
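+        # MySQL treats backslash as an escape character by default, so a
+        # literal ESCAPE '\' is doubled; with NO_BACKSLASH_ESCAPES detected
+        # it must be emitted verbatim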
+        self.assert_compile(
+            sql.column('foo').like('bar', escape='\\'),
+            "foo LIKE %s ESCAPE '\\\\'"
+        )
+
+        dialect = mysql.dialect()
+        dialect._backslash_escapes = False
+        self.assert_compile(
+            sql.column('foo').like('bar', escape='\\'),
+            "foo LIKE %s ESCAPE '\\'",
+            dialect=dialect
+        )
+
+    def test_limit(self):
+        t = sql.table('t', sql.column('col1'), sql.column('col2'))
+
+        self.assert_compile(
+            select([t]).limit(10).offset(20),
+            "SELECT t.col1, t.col2 FROM t  LIMIT %s, %s",
+            {'param_1':20, 'param_2':10}
+            )
+        self.assert_compile(
+            select([t]).limit(10),
+            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
+            {'param_1':10})
+
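+        # MySQL has no OFFSET-without-LIMIT syntax; the dialect emits the
+        # documented workaround LIMIT <offset>, 18446744073709551615
+        # (2**64 - 1)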
+        self.assert_compile(
+            select([t]).offset(10),
+            "SELECT t.col1, t.col2 FROM t  LIMIT %s, 18446744073709551615",
+            {'param_1':10}
+            )
+
+    def test_varchar_raise(self):
+        for type_ in (
+            String,
+            VARCHAR,
+            String(),
+            VARCHAR(),
+            NVARCHAR(),
+            Unicode,
+            Unicode(),
+        ):
+            type_ = sqltypes.to_instance(type_)
+            assert_raises_message(
+                exc.CompileError,
+                "VARCHAR requires a length on dialect mysql",
+                type_.compile,
+                dialect=mysql.dialect()
+            )
+
+            t1 = Table('sometable', MetaData(),
+                Column('somecolumn', type_)
+            )
+            assert_raises_message(
+                exc.CompileError,
+                r"\(in table 'sometable', column 'somecolumn'\)\: "
+                r"(?:N)?VARCHAR requires a length on dialect mysql",
+                schema.CreateTable(t1).compile,
+                dialect=mysql.dialect()
+            )
+
+    def test_update_limit(self):
+        t = sql.table('t', sql.column('col1'), sql.column('col2'))
+
+        self.assert_compile(
+            t.update(values={'col1':123}),
+            "UPDATE t SET col1=%s"
+            )
+        self.assert_compile(
+            t.update(values={'col1':123}, mysql_limit=5),
+            "UPDATE t SET col1=%s LIMIT 5"
+            )
+        self.assert_compile(
+            t.update(values={'col1':123}, mysql_limit=None),
+            "UPDATE t SET col1=%s"
+            )
+        self.assert_compile(
+            t.update(t.c.col2==456, values={'col1':123}, mysql_limit=1),
+            "UPDATE t SET col1=%s WHERE t.col2 = %s LIMIT 1"
+            )
+
+    def test_utc_timestamp(self):
+        self.assert_compile(func.utc_timestamp(), "UTC_TIMESTAMP")
+
+    def test_sysdate(self):
+        self.assert_compile(func.sysdate(), "SYSDATE()")
+
+    def test_cast(self):
+        t = sql.table('t', sql.column('col'))
+        m = mysql
+
+        specs = [
+            (Integer, "CAST(t.col AS SIGNED INTEGER)"),
+            (INT, "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSInteger, "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
+            (SmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSSmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSTinyInteger, "CAST(t.col AS SIGNED INTEGER)"),
+            # 'SIGNED INTEGER' is a bigint, so this is ok.
+            (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
+            (m.MSBigInteger(unsigned=True),
+                            "CAST(t.col AS UNSIGNED INTEGER)"),
+            (m.MSBit, "t.col"),
+
+            # this is kind of sucky.  thank you default arguments!
+            (NUMERIC, "CAST(t.col AS DECIMAL)"),
+            (DECIMAL, "CAST(t.col AS DECIMAL)"),
+            (Numeric, "CAST(t.col AS DECIMAL)"),
+            (m.MSNumeric, "CAST(t.col AS DECIMAL)"),
+            (m.MSDecimal, "CAST(t.col AS DECIMAL)"),
+
+            (FLOAT, "t.col"),
+            (Float, "t.col"),
+            (m.MSFloat, "t.col"),
+            (m.MSDouble, "t.col"),
+            (m.MSReal, "t.col"),
+
+            (TIMESTAMP, "CAST(t.col AS DATETIME)"),
+            (DATETIME, "CAST(t.col AS DATETIME)"),
+            (DATE, "CAST(t.col AS DATE)"),
+            (TIME, "CAST(t.col AS TIME)"),
+            (DateTime, "CAST(t.col AS DATETIME)"),
+            (Date, "CAST(t.col AS DATE)"),
+            (Time, "CAST(t.col AS TIME)"),
+            (DateTime, "CAST(t.col AS DATETIME)"),
+            (Date, "CAST(t.col AS DATE)"),
+            (m.MSTime, "CAST(t.col AS TIME)"),
+            (m.MSTimeStamp, "CAST(t.col AS DATETIME)"),
+            (m.MSYear, "t.col"),
+            (m.MSYear(2), "t.col"),
+            (Interval, "t.col"),
+
+            (String, "CAST(t.col AS CHAR)"),
+            (Unicode, "CAST(t.col AS CHAR)"),
+            (UnicodeText, "CAST(t.col AS CHAR)"),
+            (VARCHAR, "CAST(t.col AS CHAR)"),
+            (NCHAR, "CAST(t.col AS CHAR)"),
+            (CHAR, "CAST(t.col AS CHAR)"),
+            (CLOB, "CAST(t.col AS CHAR)"),
+            (TEXT, "CAST(t.col AS CHAR)"),
+            (String(32), "CAST(t.col AS CHAR(32))"),
+            (Unicode(32), "CAST(t.col AS CHAR(32))"),
+            (CHAR(32), "CAST(t.col AS CHAR(32))"),
+            (m.MSString, "CAST(t.col AS CHAR)"),
+            (m.MSText, "CAST(t.col AS CHAR)"),
+            (m.MSTinyText, "CAST(t.col AS CHAR)"),
+            (m.MSMediumText, "CAST(t.col AS CHAR)"),
+            (m.MSLongText, "CAST(t.col AS CHAR)"),
+            (m.MSNChar, "CAST(t.col AS CHAR)"),
+            (m.MSNVarChar, "CAST(t.col AS CHAR)"),
+
+            (LargeBinary, "CAST(t.col AS BINARY)"),
+            (BLOB, "CAST(t.col AS BINARY)"),
+            (m.MSBlob, "CAST(t.col AS BINARY)"),
+            (m.MSBlob(32), "CAST(t.col AS BINARY)"),
+            (m.MSTinyBlob, "CAST(t.col AS BINARY)"),
+            (m.MSMediumBlob, "CAST(t.col AS BINARY)"),
+            (m.MSLongBlob, "CAST(t.col AS BINARY)"),
+            (m.MSBinary, "CAST(t.col AS BINARY)"),
+            (m.MSBinary(32), "CAST(t.col AS BINARY)"),
+            (m.MSVarBinary, "CAST(t.col AS BINARY)"),
+            (m.MSVarBinary(32), "CAST(t.col AS BINARY)"),
+
+            # maybe this could be changed to something more DWIM, needs
+            # testing
+            (Boolean, "t.col"),
+            (BOOLEAN, "t.col"),
+
+            (m.MSEnum, "t.col"),
+            (m.MSEnum("1", "2"), "t.col"),
+            (m.MSSet, "t.col"),
+            (m.MSSet("1", "2"), "t.col"),
+            ]
+
+        for type_, expected in specs:
+            self.assert_compile(cast(t.c.col, type_), expected)
+
+    def test_no_cast_pre_4(self):
+        self.assert_compile(
+                    cast(Column('foo', Integer), String),
+                    "CAST(foo AS CHAR)",
+            )
+        dialect = mysql.dialect()
+        dialect.server_version_info = (3, 2, 3)
+        self.assert_compile(
+                    cast(Column('foo', Integer), String),
+                    "foo",
+                    dialect=dialect
+            )
+
+    def test_cast_grouped_expression_non_castable(self):
+        self.assert_compile(
+            cast(sql.column('x') + sql.column('y'), Float),
+            "(x + y)"
+        )
+
+    def test_cast_grouped_expression_pre_4(self):
+        dialect = mysql.dialect()
+        dialect.server_version_info = (3, 2, 3)
+        self.assert_compile(
+            cast(sql.column('x') + sql.column('y'), Integer),
+            "(x + y)",
+            dialect=dialect
+        )
+
+    def test_extract(self):
+        t = sql.table('t', sql.column('col1'))
+
+        for field in 'year', 'month', 'day':
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field)
+
+        # "milliseconds" is rewritten to MySQL's singular "millisecond"
+        self.assert_compile(
+            select([extract('milliseconds', t.c.col1)]),
+            "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t")
+
+    def test_too_long_index(self):
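+        # MySQL caps identifier names at 64 characters; the generated index
+        # name should be truncated and suffixed with a deterministic hash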
+        exp = 'ix_zyrenian_zyme_zyzzogeton_zyzzogeton_zyrenian_zyme_zyz_5cd2'
+        tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
+        cname = 'zyrenian_zyme_zyzzogeton_zo'
+
+        t1 = Table(tname, MetaData(),
+                    Column(cname, Integer, index=True),
+                )
+        ix1 = list(t1.indexes)[0]
+
+        self.assert_compile(
+            schema.CreateIndex(ix1),
+            "CREATE INDEX %s "
+            "ON %s (%s)" % (exp, tname, cname)
+        )
+
+    def test_innodb_autoincrement(self):
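+        # InnoDB requires an AUTO_INCREMENT column to be the leading column
+        # of some index; when it is not first in the composite primary key,
+        # the dialect adds an extra KEY for it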
+        t1 = Table('sometable', MetaData(), Column('assigned_id',
+                   Integer(), primary_key=True, autoincrement=False),
+                   Column('id', Integer(), primary_key=True,
+                   autoincrement=True), mysql_engine='InnoDB')
+        self.assert_compile(schema.CreateTable(t1),
+                            'CREATE TABLE sometable (assigned_id '
+                            'INTEGER NOT NULL, id INTEGER NOT NULL '
+                            'AUTO_INCREMENT, PRIMARY KEY (assigned_id, '
+                            'id), KEY idx_autoinc_id (id))ENGINE=Inn'
+                            'oDB')
+
+        t1 = Table('sometable', MetaData(), Column('assigned_id',
+                   Integer(), primary_key=True, autoincrement=True),
+                   Column('id', Integer(), primary_key=True,
+                   autoincrement=False), mysql_engine='InnoDB')
+        self.assert_compile(schema.CreateTable(t1),
+                            'CREATE TABLE sometable (assigned_id '
+                            'INTEGER NOT NULL AUTO_INCREMENT, id '
+                            'INTEGER NOT NULL, PRIMARY KEY '
+                            '(assigned_id, id))ENGINE=InnoDB')
+
+    def test_innodb_autoincrement_reserved_word_column_name(self):
+        t1 = Table(
+            'sometable', MetaData(),
+            Column('id', Integer(), primary_key=True, autoincrement=False),
+            Column('order', Integer(), primary_key=True, autoincrement=True),
+            mysql_engine='InnoDB')
+        self.assert_compile(
+            schema.CreateTable(t1),
+            'CREATE TABLE sometable ('
+            'id INTEGER NOT NULL, '
+            '`order` INTEGER NOT NULL AUTO_INCREMENT, '
+            'PRIMARY KEY (id, `order`), '
+            'KEY idx_autoinc_order (`order`)'
+            ')ENGINE=InnoDB')
+
diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py
new file mode 100644 (file)
index 0000000..62bdfc8
--- /dev/null
@@ -0,0 +1,99 @@
+# coding: utf-8
+
+from sqlalchemy.testing import eq_
+from sqlalchemy import *
+from sqlalchemy.engine.url import make_url
+from sqlalchemy.testing import fixtures
+from sqlalchemy import testing
+from sqlalchemy.testing import engines
+import datetime
+
+class DialectTest(fixtures.TestBase):
+    __only_on__ = 'mysql'
+
+    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
+                    'requires particular SSL arguments')
+    def test_ssl_arguments(self):
+        dialect = testing.db.dialect
+        kwarg = dialect.create_connect_args(
+            make_url("mysql://scott:tiger@localhost:3306/test"
+                "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem")
+        )[1]
+        # args that differ among mysqldb and oursql
+        for k in ('use_unicode', 'found_rows', 'client_flag'):
+            kwarg.pop(k, None)
+        eq_(
+            kwarg,
+            {
+                'passwd': 'tiger', 'db': 'test',
+                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
+                        'key': '/key.pem'},
+                'host': 'localhost', 'user': 'scott',
+                'port': 3306
+            }
+        )
+
+class SQLModeDetectionTest(fixtures.TestBase):
+    __only_on__ = 'mysql'
+
+    def _options(self, modes):
+        def connect(con, record):
+            cursor = con.cursor()
+            print("DOING THiS:", "set sql_mode='%s'" % (",".join(modes)))
+            cursor.execute("set sql_mode='%s'" % (",".join(modes)))
+        e = engines.testing_engine(options={
+            'pool_events':[
+                (connect, 'first_connect'),
+                (connect, 'connect')
+            ]
+        })
+        return e
+
+    def test_backslash_escapes(self):
+        engine = self._options(['NO_BACKSLASH_ESCAPES'])
+        c = engine.connect()
+        assert not engine.dialect._backslash_escapes
+        c.close()
+        engine.dispose()
+
+        engine = self._options([])
+        c = engine.connect()
+        assert engine.dialect._backslash_escapes
+        c.close()
+        engine.dispose()
+
+    def test_ansi_quotes(self):
+        engine = self._options(['ANSI_QUOTES'])
+        c = engine.connect()
+        assert engine.dialect._server_ansiquotes
+        c.close()
+        engine.dispose()
+
+    def test_combination(self):
+        engine = self._options(['ANSI_QUOTES,NO_BACKSLASH_ESCAPES'])
+        c = engine.connect()
+        assert engine.dialect._server_ansiquotes
+        assert not engine.dialect._backslash_escapes
+        c.close()
+        engine.dispose()
+
+
+class ExecutionTest(fixtures.TestBase):
+    """Various MySQL execution special cases."""
+
+    __only_on__ = 'mysql'
+
+    def test_charset_caching(self):
+        engine = engines.testing_engine()
+
+        cx = engine.connect()
+        meta = MetaData()
+        charset = engine.dialect._detect_charset(cx)
+
+        meta.reflect(cx)
+        eq_(cx.dialect._connection_charset, charset)
+        cx.close()
+
+    def test_sysdate(self):
+        d = testing.db.scalar(func.sysdate())
+        assert isinstance(d, datetime.datetime)
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
new file mode 100644 (file)
index 0000000..0400048
--- /dev/null
@@ -0,0 +1,129 @@
+# coding: utf-8
+
+from sqlalchemy.testing import eq_
+from sqlalchemy import *
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy import testing
+
+
+class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
+    __only_on__ = 'mysql'
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, cattable, matchtable
+        metadata = MetaData(testing.db)
+
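+        # full-text MATCH ... AGAINST requires MyISAM tables on the MySQL
+        # versions targeted here (InnoDB gained full-text indexes in 5.6)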
+        cattable = Table('cattable', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('description', String(50)),
+            mysql_engine='MyISAM'
+        )
+        matchtable = Table('matchtable', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('title', String(200)),
+            Column('category_id', Integer, ForeignKey('cattable.id')),
+            mysql_engine='MyISAM'
+        )
+        metadata.create_all()
+
+        cattable.insert().execute([
+            {'id': 1, 'description': 'Python'},
+            {'id': 2, 'description': 'Ruby'},
+        ])
+        matchtable.insert().execute([
+            {'id': 1,
+             'title': 'Agile Web Development with Rails',
+             'category_id': 2},
+            {'id': 2,
+             'title': 'Dive Into Python',
+             'category_id': 1},
+            {'id': 3,
+             'title': "Programming Matz's Ruby",
+             'category_id': 2},
+            {'id': 4,
+             'title': 'The Definitive Guide to Django',
+             'category_id': 1},
+            {'id': 5,
+             'title': 'Python in a Nutshell',
+             'category_id': 1}
+        ])
+
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+
+    @testing.fails_on('mysql+mysqlconnector', 'uses pyformat')
+    def test_expression_format(self):
+        format = testing.db.dialect.paramstyle == 'format' and '%s' or '?'
+        self.assert_compile(
+            matchtable.c.title.match('somstr'),
+            "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
+
+    @testing.fails_on('mysql+mysqldb', 'uses format')
+    @testing.fails_on('mysql+pymysql', 'uses format')
+    @testing.fails_on('mysql+cymysql', 'uses format')
+    @testing.fails_on('mysql+oursql', 'uses format')
+    @testing.fails_on('mysql+pyodbc', 'uses format')
+    @testing.fails_on('mysql+zxjdbc', 'uses format')
+    def test_expression_pyformat(self):
+        format = '%(title_1)s'
+        self.assert_compile(
+            matchtable.c.title.match('somstr'),
+            "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
+
+    def test_simple_match(self):
+        results = (matchtable.select().
+                   where(matchtable.c.title.match('python')).
+                   order_by(matchtable.c.id).
+                   execute().
+                   fetchall())
+        eq_([2, 5], [r.id for r in results])
+
+    def test_simple_match_with_apostrophe(self):
+        results = (matchtable.select().
+                   where(matchtable.c.title.match("Matz's")).
+                   execute().
+                   fetchall())
+        eq_([3], [r.id for r in results])
+
+    def test_or_match(self):
+        results1 = (matchtable.select().
+                    where(or_(matchtable.c.title.match('nutshell'),
+                              matchtable.c.title.match('ruby'))).
+                    order_by(matchtable.c.id).
+                    execute().
+                    fetchall())
+        eq_([3, 5], [r.id for r in results1])
+        results2 = (matchtable.select().
+                    where(matchtable.c.title.match('nutshell ruby')).
+                    order_by(matchtable.c.id).
+                    execute().
+                    fetchall())
+        eq_([3, 5], [r.id for r in results2])
+
+
+    def test_and_match(self):
+        results1 = (matchtable.select().
+                    where(and_(matchtable.c.title.match('python'),
+                               matchtable.c.title.match('nutshell'))).
+                    execute().
+                    fetchall())
+        eq_([5], [r.id for r in results1])
+        results2 = (matchtable.select().
+                    where(matchtable.c.title.match('+python +nutshell')).
+                    execute().
+                    fetchall())
+        eq_([5], [r.id for r in results2])
+
+    def test_match_across_joins(self):
+        results = (matchtable.select().
+                   where(and_(cattable.c.id==matchtable.c.category_id,
+                              or_(cattable.c.description.match('Ruby'),
+                                  matchtable.c.title.match('nutshell')))).
+                   order_by(matchtable.c.id).
+                   execute().
+                   fetchall())
+        eq_([1, 3, 5], [r.id for r in results])
+
+
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
new file mode 100644 (file)
index 0000000..b9e347d
--- /dev/null
@@ -0,0 +1,300 @@
+# coding: utf-8
+
+from sqlalchemy.testing import eq_
+from sqlalchemy import *
+from sqlalchemy import sql
+from sqlalchemy.dialects.mysql import base as mysql
+from sqlalchemy.testing import fixtures, AssertsExecutionResults
+from sqlalchemy import testing
+
+class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
+
+    __only_on__ = 'mysql'
+
+    def test_default_reflection(self):
+        """Test reflection of column defaults."""
+
+        from sqlalchemy.dialects.mysql import VARCHAR
+        def_table = Table(
+            'mysql_def',
+            MetaData(testing.db),
+            Column('c1', VARCHAR(10, collation='utf8_unicode_ci'),
+                   DefaultClause(''), nullable=False),
+            Column('c2', String(10), DefaultClause('0')),
+            Column('c3', String(10), DefaultClause('abc')),
+            Column('c4', TIMESTAMP, DefaultClause('2009-04-05 12:00:00'
+                   )),
+            Column('c5', TIMESTAMP),
+            Column('c6', TIMESTAMP,
+                   DefaultClause(sql.text("CURRENT_TIMESTAMP "
+                                          "ON UPDATE CURRENT_TIMESTAMP"))),
+            )
+        def_table.create()
+        try:
+            reflected = Table('mysql_def', MetaData(testing.db),
+                              autoload=True)
+        finally:
+            def_table.drop()
+        assert def_table.c.c1.server_default.arg == ''
+        assert def_table.c.c2.server_default.arg == '0'
+        assert def_table.c.c3.server_default.arg == 'abc'
+        assert def_table.c.c4.server_default.arg \
+            == '2009-04-05 12:00:00'
+        assert str(reflected.c.c1.server_default.arg) == "''"
+        assert str(reflected.c.c2.server_default.arg) == "'0'"
+        assert str(reflected.c.c3.server_default.arg) == "'abc'"
+        assert str(reflected.c.c4.server_default.arg) \
+            == "'2009-04-05 12:00:00'"
+        assert reflected.c.c5.default is None
+        assert reflected.c.c5.server_default is None
+        assert reflected.c.c6.default is None
+        eq_(
+            str(reflected.c.c6.server_default.arg).upper(),
+            "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+        )
+        reflected.create()
+        try:
+            reflected2 = Table('mysql_def', MetaData(testing.db),
+                               autoload=True)
+        finally:
+            reflected.drop()
+        assert str(reflected2.c.c1.server_default.arg) == "''"
+        assert str(reflected2.c.c2.server_default.arg) == "'0'"
+        assert str(reflected2.c.c3.server_default.arg) == "'abc'"
+        assert str(reflected2.c.c4.server_default.arg) \
+            == "'2009-04-05 12:00:00'"
+        assert reflected2.c.c5.default is None
+        assert reflected2.c.c5.server_default is None
+        assert reflected2.c.c6.default is None
+        eq_(
+            str(reflected2.c.c6.server_default.arg).upper(),
+            "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+        )
+
+    def test_reflection_with_table_options(self):
+        comment = r"""Comment types type speedily ' " \ '' Fun!"""
+
+        def_table = Table('mysql_def', MetaData(testing.db),
+            Column('c1', Integer()),
+            mysql_engine='MEMORY',
+            mysql_comment=comment,
+            mysql_default_charset='utf8',
+            mysql_auto_increment='5',
+            mysql_avg_row_length='3',
+            mysql_password='secret',
+            mysql_connection='fish',
+        )
+
+        def_table.create()
+        try:
+            reflected = Table('mysql_def', MetaData(testing.db),
+                          autoload=True)
+        finally:
+            def_table.drop()
+
+        assert def_table.kwargs['mysql_engine'] == 'MEMORY'
+        assert def_table.kwargs['mysql_comment'] == comment
+        assert def_table.kwargs['mysql_default_charset'] == 'utf8'
+        assert def_table.kwargs['mysql_auto_increment'] == '5'
+        assert def_table.kwargs['mysql_avg_row_length'] == '3'
+        assert def_table.kwargs['mysql_password'] == 'secret'
+        assert def_table.kwargs['mysql_connection'] == 'fish'
+
+        assert reflected.kwargs['mysql_engine'] == 'MEMORY'
+        assert reflected.kwargs['mysql_comment'] == comment
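+        # note the reflected option key is 'mysql_default charset' (with
+        # a space), mirroring how the option is parsed back out of
+        # SHOW CREATE TABLE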
+        assert reflected.kwargs['mysql_default charset'] == 'utf8'
+        assert reflected.kwargs['mysql_avg_row_length'] == '3'
+        assert reflected.kwargs['mysql_connection'] == 'fish'
+
+        # This field doesn't seem to be returned by mysql itself.
+        #assert reflected.kwargs['mysql_password'] == 'secret'
+
+        # This is explicitly ignored when reflecting schema.
+        #assert reflected.kwargs['mysql_auto_increment'] == '5'
+
+    def test_reflection_on_include_columns(self):
+        """Test reflection of include_columns to be sure they respect case."""
+
+        case_table = Table('mysql_case', MetaData(testing.db),
+            Column('c1', String(10)),
+            Column('C2', String(10)),
+            Column('C3', String(10)))
+
+        try:
+            case_table.create()
+            reflected = Table('mysql_case', MetaData(testing.db),
+                              autoload=True, include_columns=['c1', 'C2'])
+            for t in case_table, reflected:
+                assert 'c1' in t.c.keys()
+                assert 'C2' in t.c.keys()
+            reflected2 = Table('mysql_case', MetaData(testing.db),
+                              autoload=True, include_columns=['c1', 'c2'])
+            assert 'c1' in reflected2.c.keys()
+            for c in ['c2', 'C2', 'C3']:
+                assert c not in reflected2.c.keys()
+        finally:
+            case_table.drop()
+
+    @testing.exclude('mysql', '<', (5, 0, 0), 'early types are squirrely')
+    @testing.uses_deprecated('Using String type with no length')
+    @testing.uses_deprecated('Manually quoting ENUM value literals')
+    def test_type_reflection(self):
+        # (ask_for, roundtripped_as_if_different)
+        specs = [( String(1), mysql.MSString(1), ),
+                 ( String(3), mysql.MSString(3), ),
+                 ( Text(), mysql.MSText(), ),
+                 ( Unicode(1), mysql.MSString(1), ),
+                 ( Unicode(3), mysql.MSString(3), ),
+                 ( UnicodeText(), mysql.MSText(), ),
+                 ( mysql.MSChar(1), ),
+                 ( mysql.MSChar(3), ),
+                 ( NCHAR(2), mysql.MSChar(2), ),
+                 ( mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
+                 ( mysql.MSNVarChar(22), mysql.MSString(22), ),
+                 ( SmallInteger(), mysql.MSSmallInteger(), ),
+                 ( SmallInteger(), mysql.MSSmallInteger(4), ),
+                 ( mysql.MSSmallInteger(), ),
+                 ( mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
+                 ( mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
+                 ( mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
+                 ( LargeBinary(3), mysql.TINYBLOB(), ),
+                 ( LargeBinary(), mysql.BLOB() ),
+                 ( mysql.MSBinary(3), mysql.MSBinary(3), ),
+                 ( mysql.MSVarBinary(3),),
+                 ( mysql.MSTinyBlob(),),
+                 ( mysql.MSBlob(),),
+                 ( mysql.MSBlob(1234), mysql.MSBlob()),
+                 ( mysql.MSMediumBlob(),),
+                 ( mysql.MSLongBlob(),),
+                 ( mysql.ENUM("''","'fleem'"), ),
+                 ]
+
+        columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
+
+        db = testing.db
+        m = MetaData(db)
+        t_table = Table('mysql_types', m, *columns)
+        try:
+            m.create_all()
+
+            m2 = MetaData(db)
+            rt = Table('mysql_types', m2, autoload=True)
+            try:
+                db.execute('CREATE OR REPLACE VIEW mysql_types_v '
+                           'AS SELECT * from mysql_types')
+                rv = Table('mysql_types_v', m2, autoload=True)
+
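+                # expect the roundtrip type (second element) when given,
+                # else the original type back unchanged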
+                expected = [len(c) > 1 and c[1] or c[0] for c in specs]
+
+                # Early 5.0 releases seem to report more "general" for columns
+                # in a view, e.g. char -> varchar, tinyblob -> mediumblob
+                #
+                # Not sure exactly which point version has the fix.
+                if db.dialect.server_version_info < (5, 0, 11):
+                    tables = rt,
+                else:
+                    tables = rt, rv
+
+                for table in tables:
+                    for i, reflected in enumerate(table.c):
+                        assert isinstance(reflected.type,
+                                type(expected[i])), \
+                            'element %d: %r not instance of %r' % (i,
+                                reflected.type, type(expected[i]))
+            finally:
+                db.execute('DROP VIEW mysql_types_v')
+        finally:
+            m.drop_all()
+
+    def test_autoincrement(self):
+        meta = MetaData(testing.db)
+        try:
+            Table('ai_1', meta,
+                  Column('int_y', Integer, primary_key=True),
+                  Column('int_n', Integer, DefaultClause('0'),
+                         primary_key=True),
+                         mysql_engine='MyISAM')
+            Table('ai_2', meta,
+                  Column('int_y', Integer, primary_key=True),
+                  Column('int_n', Integer, DefaultClause('0'),
+                         primary_key=True),
+                         mysql_engine='MyISAM')
+            Table('ai_3', meta,
+                  Column('int_n', Integer, DefaultClause('0'),
+                         primary_key=True, autoincrement=False),
+                  Column('int_y', Integer, primary_key=True),
+                         mysql_engine='MyISAM')
+            Table('ai_4', meta,
+                  Column('int_n', Integer, DefaultClause('0'),
+                         primary_key=True, autoincrement=False),
+                  Column('int_n2', Integer, DefaultClause('0'),
+                         primary_key=True, autoincrement=False),
+                         mysql_engine='MyISAM')
+            Table('ai_5', meta,
+                  Column('int_y', Integer, primary_key=True),
+                  Column('int_n', Integer, DefaultClause('0'),
+                         primary_key=True, autoincrement=False),
+                         mysql_engine='MyISAM')
+            Table('ai_6', meta,
+                  Column('o1', String(1), DefaultClause('x'),
+                         primary_key=True),
+                  Column('int_y', Integer, primary_key=True),
+                         mysql_engine='MyISAM')
+            Table('ai_7', meta,
+                  Column('o1', String(1), DefaultClause('x'),
+                         primary_key=True),
+                  Column('o2', String(1), DefaultClause('x'),
+                         primary_key=True),
+                  Column('int_y', Integer, primary_key=True),
+                         mysql_engine='MyISAM')
+            Table('ai_8', meta,
+                  Column('o1', String(1), DefaultClause('x'),
+                         primary_key=True),
+                  Column('o2', String(1), DefaultClause('x'),
+                         primary_key=True),
+                         mysql_engine='MyISAM')
+            meta.create_all()
+
+            table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
+                           'ai_5', 'ai_6', 'ai_7', 'ai_8']
+            mr = MetaData(testing.db)
+            mr.reflect(only=table_names)
+
+            for tbl in [mr.tables[name] for name in table_names]:
+                for c in tbl.c:
+                    if c.name.startswith('int_y'):
+                        assert c.autoincrement
+                    elif c.name.startswith('int_n'):
+                        assert not c.autoincrement
+                tbl.insert().execute()
+                if 'int_y' in tbl.c:
+                    assert select([tbl.c.int_y]).scalar() == 1
+                    assert list(tbl.select().execute().first()).count(1) == 1
+                else:
+                    assert 1 not in list(tbl.select().execute().first())
+        finally:
+            meta.drop_all()
+
+    @testing.exclude('mysql', '<', (5, 0, 0), 'no information_schema support')
+    def test_system_views(self):
+        dialect = testing.db.dialect
+        connection = testing.db.connect()
+        view_names = dialect.get_view_names(connection, "information_schema")
+        self.assert_('TABLES' in view_names)
+
+
+class RawReflectionTest(fixtures.TestBase):
+    def setup(self):
+        dialect = mysql.dialect()
+        self.parser = mysql.MySQLTableDefinitionParser(dialect, dialect.identifier_preparer)
+
+    def test_key_reflection(self):
+        regex = self.parser._re_key
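+        # _re_key is the parser's internal pattern matching KEY /
+        # PRIMARY KEY lines in SHOW CREATE TABLE output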
+
+        assert regex.match('  PRIMARY KEY (`id`),')
+        assert regex.match('  PRIMARY KEY USING BTREE (`id`),')
+        assert regex.match('  PRIMARY KEY (`id`) USING BTREE,')
+        assert regex.match('  PRIMARY KEY (`id`)')
+        assert regex.match('  PRIMARY KEY USING BTREE (`id`)')
+        assert regex.match('  PRIMARY KEY (`id`) USING BTREE')
+
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
new file mode 100644 (file)
index 0000000..fb1de72
--- /dev/null
@@ -0,0 +1,667 @@
+# coding: utf-8
+
+from sqlalchemy.testing import eq_, assert_raises
+from sqlalchemy import *
+from sqlalchemy import sql, exc, schema
+from sqlalchemy.util import u
+from sqlalchemy.dialects.mysql import base as mysql
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
+from sqlalchemy import testing
+from sqlalchemy.testing.engines import utf8_engine
+import datetime
+
+
+class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+    "Test MySQL column types"
+
+    __dialect__ = mysql.dialect()
+
+    def test_numeric(self):
+        "Exercise type specification and options for numeric types."
+
+        columns = [
+            # column type, args, kwargs, expected ddl
+            # e.g. Column(Integer(10, unsigned=True)) ==
+            # 'INTEGER(10) UNSIGNED'
+            (mysql.MSNumeric, [], {},
+             'NUMERIC'),
+            (mysql.MSNumeric, [None], {},
+             'NUMERIC'),
+            (mysql.MSNumeric, [12], {},
+             'NUMERIC(12)'),
+            (mysql.MSNumeric, [12, 4], {'unsigned':True},
+             'NUMERIC(12, 4) UNSIGNED'),
+            (mysql.MSNumeric, [12, 4], {'zerofill':True},
+             'NUMERIC(12, 4) ZEROFILL'),
+            (mysql.MSNumeric, [12, 4], {'zerofill':True, 'unsigned':True},
+             'NUMERIC(12, 4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSDecimal, [], {},
+             'DECIMAL'),
+            (mysql.MSDecimal, [None], {},
+             'DECIMAL'),
+            (mysql.MSDecimal, [12], {},
+             'DECIMAL(12)'),
+            (mysql.MSDecimal, [12, None], {},
+             'DECIMAL(12)'),
+            (mysql.MSDecimal, [12, 4], {'unsigned':True},
+             'DECIMAL(12, 4) UNSIGNED'),
+            (mysql.MSDecimal, [12, 4], {'zerofill':True},
+             'DECIMAL(12, 4) ZEROFILL'),
+            (mysql.MSDecimal, [12, 4], {'zerofill':True, 'unsigned':True},
+             'DECIMAL(12, 4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSDouble, [None, None], {},
+             'DOUBLE'),
+            (mysql.MSDouble, [12, 4], {'unsigned':True},
+             'DOUBLE(12, 4) UNSIGNED'),
+            (mysql.MSDouble, [12, 4], {'zerofill':True},
+             'DOUBLE(12, 4) ZEROFILL'),
+            (mysql.MSDouble, [12, 4], {'zerofill':True, 'unsigned':True},
+             'DOUBLE(12, 4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSReal, [None, None], {},
+             'REAL'),
+            (mysql.MSReal, [12, 4], {'unsigned':True},
+             'REAL(12, 4) UNSIGNED'),
+            (mysql.MSReal, [12, 4], {'zerofill':True},
+             'REAL(12, 4) ZEROFILL'),
+            (mysql.MSReal, [12, 4], {'zerofill':True, 'unsigned':True},
+             'REAL(12, 4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSFloat, [], {},
+             'FLOAT'),
+            (mysql.MSFloat, [None], {},
+             'FLOAT'),
+            (mysql.MSFloat, [12], {},
+             'FLOAT(12)'),
+            (mysql.MSFloat, [12, 4], {},
+             'FLOAT(12, 4)'),
+            (mysql.MSFloat, [12, 4], {'unsigned':True},
+             'FLOAT(12, 4) UNSIGNED'),
+            (mysql.MSFloat, [12, 4], {'zerofill':True},
+             'FLOAT(12, 4) ZEROFILL'),
+            (mysql.MSFloat, [12, 4], {'zerofill':True, 'unsigned':True},
+             'FLOAT(12, 4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSInteger, [], {},
+             'INTEGER'),
+            (mysql.MSInteger, [4], {},
+             'INTEGER(4)'),
+            (mysql.MSInteger, [4], {'unsigned':True},
+             'INTEGER(4) UNSIGNED'),
+            (mysql.MSInteger, [4], {'zerofill':True},
+             'INTEGER(4) ZEROFILL'),
+            (mysql.MSInteger, [4], {'zerofill':True, 'unsigned':True},
+             'INTEGER(4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSBigInteger, [], {},
+             'BIGINT'),
+            (mysql.MSBigInteger, [4], {},
+             'BIGINT(4)'),
+            (mysql.MSBigInteger, [4], {'unsigned':True},
+             'BIGINT(4) UNSIGNED'),
+            (mysql.MSBigInteger, [4], {'zerofill':True},
+             'BIGINT(4) ZEROFILL'),
+            (mysql.MSBigInteger, [4], {'zerofill':True, 'unsigned':True},
+             'BIGINT(4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSMediumInteger, [], {},
+             'MEDIUMINT'),
+            (mysql.MSMediumInteger, [4], {},
+             'MEDIUMINT(4)'),
+            (mysql.MSMediumInteger, [4], {'unsigned':True},
+             'MEDIUMINT(4) UNSIGNED'),
+            (mysql.MSMediumInteger, [4], {'zerofill':True},
+             'MEDIUMINT(4) ZEROFILL'),
+            (mysql.MSMediumInteger, [4], {'zerofill':True, 'unsigned':True},
+             'MEDIUMINT(4) UNSIGNED ZEROFILL'),
+
+            (mysql.MSTinyInteger, [], {},
+             'TINYINT'),
+            (mysql.MSTinyInteger, [1], {},
+             'TINYINT(1)'),
+            (mysql.MSTinyInteger, [1], {'unsigned':True},
+             'TINYINT(1) UNSIGNED'),
+            (mysql.MSTinyInteger, [1], {'zerofill':True},
+             'TINYINT(1) ZEROFILL'),
+            (mysql.MSTinyInteger, [1], {'zerofill':True, 'unsigned':True},
+             'TINYINT(1) UNSIGNED ZEROFILL'),
+
+            (mysql.MSSmallInteger, [], {},
+             'SMALLINT'),
+            (mysql.MSSmallInteger, [4], {},
+             'SMALLINT(4)'),
+            (mysql.MSSmallInteger, [4], {'unsigned':True},
+             'SMALLINT(4) UNSIGNED'),
+            (mysql.MSSmallInteger, [4], {'zerofill':True},
+             'SMALLINT(4) ZEROFILL'),
+            (mysql.MSSmallInteger, [4], {'zerofill':True, 'unsigned':True},
+             'SMALLINT(4) UNSIGNED ZEROFILL'),
+           ]
+
+        for type_, args, kw, res in columns:
+            self.assert_compile(
+                type_(*args, **kw),
+                res
+            )
+
+    @testing.exclude('mysql', '<', (4, 1, 1), 'no charset support')
+    def test_charset(self):
+        """Exercise CHARACTER SET and COLLATE-ish options on string types."""
+
+        columns = [
+            (mysql.MSChar, [1], {},
+             'CHAR(1)'),
+             (mysql.NCHAR, [1], {},
+              'NATIONAL CHAR(1)'),
+            (mysql.MSChar, [1], {'binary':True},
+             'CHAR(1) BINARY'),
+            (mysql.MSChar, [1], {'ascii':True},
+             'CHAR(1) ASCII'),
+            (mysql.MSChar, [1], {'unicode':True},
+             'CHAR(1) UNICODE'),
+            (mysql.MSChar, [1], {'ascii':True, 'binary':True},
+             'CHAR(1) ASCII BINARY'),
+            (mysql.MSChar, [1], {'unicode':True, 'binary':True},
+             'CHAR(1) UNICODE BINARY'),
+            (mysql.MSChar, [1], {'charset':'utf8'},
+             'CHAR(1) CHARACTER SET utf8'),
+            (mysql.MSChar, [1], {'charset':'utf8', 'binary':True},
+             'CHAR(1) CHARACTER SET utf8 BINARY'),
+            (mysql.MSChar, [1], {'charset':'utf8', 'unicode':True},
+             'CHAR(1) CHARACTER SET utf8'),
+            (mysql.MSChar, [1], {'charset':'utf8', 'ascii':True},
+             'CHAR(1) CHARACTER SET utf8'),
+            (mysql.MSChar, [1], {'collation': 'utf8_bin'},
+             'CHAR(1) COLLATE utf8_bin'),
+            (mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin'},
+             'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
+            (mysql.MSChar, [1], {'charset': 'utf8', 'binary': True},
+             'CHAR(1) CHARACTER SET utf8 BINARY'),
+            (mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin',
+                              'binary': True},
+             'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
+            (mysql.MSChar, [1], {'national':True},
+             'NATIONAL CHAR(1)'),
+            (mysql.MSChar, [1], {'national':True, 'charset':'utf8'},
+             'NATIONAL CHAR(1)'),
+            (mysql.MSChar, [1], {'national':True, 'charset':'utf8',
+                                'binary':True},
+             'NATIONAL CHAR(1) BINARY'),
+            (mysql.MSChar, [1], {'national':True, 'binary':True,
+                                'unicode':True},
+             'NATIONAL CHAR(1) BINARY'),
+            (mysql.MSChar, [1], {'national':True, 'collation':'utf8_bin'},
+             'NATIONAL CHAR(1) COLLATE utf8_bin'),
+
+            (mysql.MSString, [1], {'charset':'utf8', 'collation':'utf8_bin'},
+             'VARCHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
+            (mysql.MSString, [1], {'national':True, 'collation':'utf8_bin'},
+             'NATIONAL VARCHAR(1) COLLATE utf8_bin'),
+
+            (mysql.MSTinyText, [], {'charset':'utf8', 'collation':'utf8_bin'},
+             'TINYTEXT CHARACTER SET utf8 COLLATE utf8_bin'),
+
+            (mysql.MSMediumText, [], {'charset':'utf8', 'binary':True},
+             'MEDIUMTEXT CHARACTER SET utf8 BINARY'),
+
+            (mysql.MSLongText, [], {'ascii':True},
+             'LONGTEXT ASCII'),
+
+            (mysql.ENUM, ["foo", "bar"], {'unicode':True},
+             '''ENUM('foo','bar') UNICODE'''),
+
+            (String, [20], {"collation":"utf8"}, 'VARCHAR(20) COLLATE utf8')
+           ]
+
+        for type_, args, kw, res in columns:
+            self.assert_compile(
+                type_(*args, **kw),
+                res
+            )
+
+    @testing.only_if('mysql')
+    @testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
+    @testing.provide_metadata
+    def test_charset_collate_table(self):
+        t = Table('foo', self.metadata,
+            Column('id', Integer),
+            mysql_default_charset='utf8',
+            mysql_collate='utf8_unicode_ci'
+        )
+        t.create()
+        m2 = MetaData(testing.db)
+        t2 = Table('foo', m2, autoload=True)
+        eq_(t2.kwargs['mysql_collate'], 'utf8_unicode_ci')
+        eq_(t2.kwargs['mysql_default charset'], 'utf8')
+
+    def test_bit_50(self):
+        """Exercise BIT types on 5.0+ (not valid for all engine types)"""
+
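+        # MySQL BIT columns hold from 1 up to 64 bits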
+        for type_, expected in [
+            (mysql.MSBit(), "BIT"),
+            (mysql.MSBit(1), "BIT(1)"),
+            (mysql.MSBit(63), "BIT(63)"),
+        ]:
+            self.assert_compile(type_, expected)
+
+    @testing.only_if('mysql')
+    @testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
+    @testing.fails_on('mysql+oursql', 'some round trips fail, oursql bug ?')
+    @testing.provide_metadata
+    def test_bit_50_roundtrip(self):
+        bit_table = Table('mysql_bits', self.metadata,
+                          Column('b1', mysql.MSBit),
+                          Column('b2', mysql.MSBit()),
+                          Column('b3', mysql.MSBit(), nullable=False),
+                          Column('b4', mysql.MSBit(1)),
+                          Column('b5', mysql.MSBit(8)),
+                          Column('b6', mysql.MSBit(32)),
+                          Column('b7', mysql.MSBit(63)),
+                          Column('b8', mysql.MSBit(64)))
+        self.metadata.create_all()
+
+        meta2 = MetaData(testing.db)
+        reflected = Table('mysql_bits', meta2, autoload=True)
+
+        for table in bit_table, reflected:
+
+            def roundtrip(store, expected=None):
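+                # insert one row, select it back, compare against the
+                # expected values, then clear the table for the next pass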
+                expected = expected or store
+                table.insert(store).execute()
+                row = table.select().execute().first()
+                try:
+                    self.assert_(list(row) == expected)
+                except:
+                    print("Storing %s" % store)
+                    print("Expected %s" % expected)
+                    print("Found %s" % list(row))
+                    raise
+                table.delete().execute().close()
+
+            roundtrip([0] * 8)
+            roundtrip([None, None, 0, None, None, None, None, None])
+            roundtrip([1] * 8)
+            roundtrip([sql.text("b'1'")] * 8, [1] * 8)
+
+            i = 255
+            roundtrip([0, 0, 0, 0, i, i, i, i])
+            i = 2 ** 32 - 1
+            roundtrip([0, 0, 0, 0, 0, i, i, i])
+            i = 2 ** 63 - 1
+            roundtrip([0, 0, 0, 0, 0, 0, i, i])
+            i = 2 ** 64 - 1
+            roundtrip([0, 0, 0, 0, 0, 0, 0, i])
+
+    def test_boolean(self):
+        for type_, expected in [
+            (BOOLEAN(), "BOOL"),
+            (Boolean(), "BOOL"),
+            (mysql.TINYINT(1), "TINYINT(1)"),
+            (mysql.TINYINT(1, unsigned=True), "TINYINT(1) UNSIGNED")
+        ]:
+            self.assert_compile(type_, expected)
+
+    @testing.only_if('mysql')
+    @testing.provide_metadata
+    def test_boolean_roundtrip(self):
+        bool_table = Table(
+            'mysql_bool',
+            self.metadata,
+            Column('b1', BOOLEAN),
+            Column('b2', Boolean),
+            Column('b3', mysql.MSTinyInteger(1)),
+            Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
+            Column('b5', mysql.MSTinyInteger),
+            )
+        self.metadata.create_all()
+        table = bool_table
+
+        def roundtrip(store, expected=None):
+            expected = expected or store
+            table.insert(store).execute()
+            row = table.select().execute().first()
+            self.assert_(list(row) == expected)
+            for i, val in enumerate(expected):
+                if isinstance(val, bool):
+                    self.assert_(val is row[i])
+            table.delete().execute()
+
+        roundtrip([None, None, None, None, None])
+        roundtrip([True, True, 1, 1, 1])
+        roundtrip([False, False, 0, 0, 0])
+        roundtrip([True, True, True, True, True], [True, True, 1,
+                  1, 1])
+        roundtrip([False, False, 0, 0, 0], [False, False, 0, 0, 0])
+
+        meta2 = MetaData(testing.db)
+        table = Table('mysql_bool', meta2, autoload=True)
+        eq_(colspec(table.c.b3), 'b3 TINYINT(1)')
+        eq_(colspec(table.c.b4), 'b4 TINYINT(1) UNSIGNED')
+        meta2 = MetaData(testing.db)
+        table = Table(
+            'mysql_bool',
+            meta2,
+            Column('b1', BOOLEAN),
+            Column('b2', Boolean),
+            Column('b3', BOOLEAN),
+            Column('b4', BOOLEAN),
+            autoload=True,
+            )
+        eq_(colspec(table.c.b3), 'b3 BOOL')
+        eq_(colspec(table.c.b4), 'b4 BOOL')
+        roundtrip([None, None, None, None, None])
+        roundtrip([True, True, 1, 1, 1], [True, True, True, True,
+                  1])
+        roundtrip([False, False, 0, 0, 0], [False, False, False,
+                  False, 0])
+        roundtrip([True, True, True, True, True], [True, True,
+                  True, True, 1])
+        roundtrip([False, False, 0, 0, 0], [False, False, False,
+                  False, 0])
+
+    def test_timestamp(self):
+        """Exercise funky TIMESTAMP default syntax."""
+
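+        # traditional MySQL treats a bare TIMESTAMP as NOT NULL with
+        # auto-initialization, so the dialect renders "TIMESTAMP NULL"
+        # explicitly for a nullable column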
+        columns = [
+            ([TIMESTAMP],
+             'TIMESTAMP NULL'),
+            ([mysql.MSTimeStamp],
+             'TIMESTAMP NULL'),
+            ([mysql.MSTimeStamp,
+              DefaultClause(sql.text('CURRENT_TIMESTAMP'))],
+             "TIMESTAMP DEFAULT CURRENT_TIMESTAMP"),
+            ([mysql.MSTimeStamp,
+              DefaultClause(sql.text("'1999-09-09 09:09:09'"))],
+             "TIMESTAMP DEFAULT '1999-09-09 09:09:09'"),
+            ([mysql.MSTimeStamp,
+              DefaultClause(sql.text("'1999-09-09 09:09:09' "
+                                      "ON UPDATE CURRENT_TIMESTAMP"))],
+             "TIMESTAMP DEFAULT '1999-09-09 09:09:09' "
+             "ON UPDATE CURRENT_TIMESTAMP"),
+            ([mysql.MSTimeStamp,
+              DefaultClause(sql.text("CURRENT_TIMESTAMP "
+                                      "ON UPDATE CURRENT_TIMESTAMP"))],
+             "TIMESTAMP DEFAULT CURRENT_TIMESTAMP "
+             "ON UPDATE CURRENT_TIMESTAMP"),
+            ]
+        for spec, expected in columns:
+            c = Column('t', *spec)
+            Table('t', MetaData(), c)
+            self.assert_compile(
+                schema.CreateColumn(c),
+                "t %s" % expected
+
+            )
+
+    @testing.only_if('mysql')
+    @testing.provide_metadata
+    def test_timestamp_nullable(self):
+        ts_table = Table('mysql_timestamp', self.metadata,
+                            Column('t1', TIMESTAMP),
+                            Column('t2', TIMESTAMP, nullable=False),
+                    )
+        self.metadata.create_all()
+
+        now = testing.db.execute("select now()").scalar()
+
+        # TIMESTAMP without NULL inserts current time when passed
+        # NULL.  when not passed, generates 0000-00-00 quite
+        # annoyingly.
+        ts_table.insert().execute({'t1': now, 't2': None})
+        ts_table.insert().execute({'t1': None, 't2': None})
+
+        # normalize dates that are over the second boundary
+        def normalize(dt):
+            if dt is None:
+                return None
+            elif (dt - now).seconds < 5:
+                return now
+            else:
+                return dt
+        eq_(
+            [tuple([normalize(dt) for dt in row])
+            for row in ts_table.select().execute()],
+            [(now, now), (None, now)]
+        )
+
+    def test_time(self):
+        """"Exercise TIME."""
+
+        self.assert_compile(
+                mysql.TIME(),
+                "TIME"
+        )
+
+        self.assert_compile(
+                mysql.TIME(fsp=5),
+                "TIME(5)"
+        )
+
+        eq_(
+            mysql.TIME().result_processor(None, None)(
+                    datetime.timedelta(seconds=35, minutes=517,
+                            microseconds=450
+                    )),
+            datetime.time(8, 37, 35, 450)
+        )
+
+    @testing.only_if('mysql')
+    @testing.provide_metadata
+    def test_year(self):
+        """Exercise YEAR."""
+
+        year_table = Table('mysql_year', self.metadata,
+                           Column('y1', mysql.MSYear),
+                           Column('y2', mysql.MSYear),
+                           Column('y3', mysql.MSYear),
+                           Column('y5', mysql.MSYear(4)))
+
+        for col in year_table.c:
+            self.assert_(repr(col))
+        year_table.create()
+        reflected = Table('mysql_year', MetaData(testing.db),
+                          autoload=True)
+
+        for table in year_table, reflected:
+            table.insert(['1950', '50', None, 1950]).execute()
+            row = table.select().execute().first()
+            eq_(list(row), [1950, 2050, None, 1950])
+            table.delete().execute()
+            self.assert_(colspec(table.c.y1).startswith('y1 YEAR'))
+            eq_(colspec(table.c.y5), 'y5 YEAR(4)')
+
+    @testing.only_if('mysql')
+    @testing.provide_metadata
+    def test_set(self):
+        """Exercise the SET type."""
+
+        set_table = Table('mysql_set', self.metadata,
+                        Column('s1',
+                          mysql.MSSet("'dq'", "'sq'")), Column('s2',
+                          mysql.MSSet("'a'")), Column('s3',
+                          mysql.MSSet("'5'", "'7'", "'9'")))
+        eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
+        eq_(colspec(set_table.c.s2), "s2 SET('a')")
+        eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
+        set_table.create()
+        reflected = Table('mysql_set', MetaData(testing.db),
+                          autoload=True)
+        for table in set_table, reflected:
+
+            def roundtrip(store, expected=None):
+                expected = expected or store
+                table.insert(store).execute()
+                row = table.select().execute().first()
+                self.assert_(list(row) == expected)
+                table.delete().execute()
+
+            roundtrip([None, None, None], [None] * 3)
+            roundtrip(['', '', ''], [set([''])] * 3)
+            roundtrip([set(['dq']), set(['a']), set(['5'])])
+            roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']),
+                      set(['5'])])
+            roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5'
+                      ])])
+            roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'
+                      ])])
+        set_table.insert().execute({'s3': set(['5'])},
+                {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
+                {'s3': set(['7', '9'])})
+        rows = select([set_table.c.s3], set_table.c.s3.in_([set(['5'
+                      ]), set(['5', '7']), set(['7', '5'
+                      ])])).execute().fetchall()
+        found = set([frozenset(row[0]) for row in rows])
+        eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
+
+class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+
+    __only_on__ = 'mysql'
+    __dialect__ = mysql.dialect()
+
+    @testing.uses_deprecated('Manually quoting ENUM value literals')
+    @testing.provide_metadata
+    def test_enum(self):
+        """Exercise the ENUM type."""
+
+        enum_table = Table('mysql_enum', self.metadata,
+            Column('e1', mysql.ENUM("'a'", "'b'")),
+            Column('e2', mysql.ENUM("'a'", "'b'"),
+                   nullable=False),
+            Column('e2generic', Enum("a", "b"),
+                  nullable=False),
+            Column('e3', mysql.ENUM("'a'", "'b'", strict=True)),
+            Column('e4', mysql.ENUM("'a'", "'b'", strict=True),
+                   nullable=False),
+            Column('e5', mysql.ENUM("a", "b")),
+            Column('e5generic', Enum("a", "b")),
+            Column('e6', mysql.ENUM("'a'", "b")),
+            )
+
+        eq_(colspec(enum_table.c.e1),
+                       "e1 ENUM('a','b')")
+        eq_(colspec(enum_table.c.e2),
+                       "e2 ENUM('a','b') NOT NULL")
+        eq_(colspec(enum_table.c.e2generic),
+                      "e2generic ENUM('a','b') NOT NULL")
+        eq_(colspec(enum_table.c.e3),
+                       "e3 ENUM('a','b')")
+        eq_(colspec(enum_table.c.e4),
+                       "e4 ENUM('a','b') NOT NULL")
+        eq_(colspec(enum_table.c.e5),
+                       "e5 ENUM('a','b')")
+        eq_(colspec(enum_table.c.e5generic),
+                      "e5generic ENUM('a','b')")
+        eq_(colspec(enum_table.c.e6),
+                       "e6 ENUM('''a''','b')")
+        enum_table.create()
+
+        assert_raises(exc.DBAPIError, enum_table.insert().execute,
+                        e1=None, e2=None, e3=None, e4=None)
+
+        assert_raises(exc.StatementError, enum_table.insert().execute,
+                                        e1='c', e2='c', e2generic='c', e3='c',
+                                        e4='c', e5='c', e5generic='c', e6='c')
+
+        enum_table.insert().execute()
+        enum_table.insert().execute(e1='a', e2='a', e2generic='a', e3='a',
+                                    e4='a', e5='a', e5generic='a', e6="'a'")
+        enum_table.insert().execute(e1='b', e2='b', e2generic='b', e3='b',
+                                    e4='b', e5='b', e5generic='b', e6='b')
+
+        res = enum_table.select().execute().fetchall()
+
+        expected = [(None, 'a', 'a', None, 'a', None, None, None),
+                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
+                    ('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
+
+        eq_(res, expected)
+
+    def test_unicode_enum(self):
+        unicode_engine = utf8_engine()
+        metadata = MetaData(unicode_engine)
+        t1 = Table('table', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('value', Enum(u('réveillé'), u('drôle'), u('S’il'))),
+            Column('value2', mysql.ENUM(u('réveillé'), u('drôle'), u('S’il')))
+        )
+        metadata.create_all()
+        try:
+            t1.insert().execute(value=u('drôle'), value2=u('drôle'))
+            t1.insert().execute(value=u('réveillé'), value2=u('réveillé'))
+            t1.insert().execute(value=u('S’il'), value2=u('S’il'))
+            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+                [(1, u('drôle'), u('drôle')), (2, u('réveillé'), u('réveillé')),
+                            (3, u('S’il'), u('S’il'))]
+            )
+
+            # test reflection of the enum labels
+
+            m2 = MetaData(testing.db)
+            t2 = Table('table', m2, autoload=True)
+
+            # TODO: what's wrong with the last element ?  is there
+            # latin-1 stuff forcing its way in ?
+
+            assert t2.c.value.type.enums[0:2] == \
+                    (u('réveillé'), u('drôle'))  # u'S’il') # eh ?
+
+            assert t2.c.value2.type.enums[0:2] == \
+                    (u('réveillé'), u('drôle'))  # u'S’il') # eh ?
+        finally:
+            metadata.drop_all()
+
+    def test_enum_compile(self):
+        e1 = Enum('x', 'y', 'z', name='somename')
+        t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
+        self.assert_compile(schema.CreateTable(t1),
+                            "CREATE TABLE sometable (somecolumn "
+                            "ENUM('x','y','z'))")
+        t1 = Table('sometable', MetaData(), Column('somecolumn',
+                   Enum('x', 'y', 'z', native_enum=False)))
+        self.assert_compile(schema.CreateTable(t1),
+                            "CREATE TABLE sometable (somecolumn "
+                            "VARCHAR(1), CHECK (somecolumn IN ('x', "
+                            "'y', 'z')))")
+
+    @testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''")
+    @testing.uses_deprecated('Manually quoting ENUM value literals')
+    def test_enum_parse(self):
+        """More exercises for the ENUM type."""
+
+        # MySQL 3.23 can't handle an ENUM of ''....
+
+        enum_table = Table('mysql_enum', MetaData(testing.db),
+            Column('e1', mysql.ENUM("'a'")),
+            Column('e2', mysql.ENUM("''")),
+            Column('e3', mysql.ENUM('a')),
+            Column('e4', mysql.ENUM('')),
+            Column('e5', mysql.ENUM("'a'", "''")),
+            Column('e6', mysql.ENUM("''", "'a'")),
+            Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''")))
+
+        for col in enum_table.c:
+            self.assert_(repr(col))
+        try:
+            enum_table.create()
+            reflected = Table('mysql_enum', MetaData(testing.db),
+                              autoload=True)
+            for t in enum_table, reflected:
+                eq_(t.c.e1.type.enums, ("a",))
+                eq_(t.c.e2.type.enums, ("",))
+                eq_(t.c.e3.type.enums, ("a",))
+                eq_(t.c.e4.type.enums, ("",))
+                eq_(t.c.e5.type.enums, ("a", ""))
+                eq_(t.c.e6.type.enums, ("", "a"))
+                eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
+        finally:
+            enum_table.drop()
+
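+# render the DDL specification for a single column, e.g.
+# colspec(table.c.b4) -> 'b4 TINYINT(1) UNSIGNED'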
+def colspec(c):
+    return testing.db.dialect.ddl_compiler(
+                    testing.db.dialect, None).get_column_specification(c)
+
diff --git a/test/dialect/postgresql/__init__.py b/test/dialect/postgresql/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
new file mode 100644 (file)
index 0000000..a79c0e7
--- /dev/null
@@ -0,0 +1,589 @@
+# coding: utf-8
+
+from sqlalchemy.testing.assertions import AssertsCompiledSQL, is_, assert_raises
+from sqlalchemy.testing import engines, fixtures
+from sqlalchemy import testing
+import datetime
+from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
+            String, Sequence, ForeignKey, join, Numeric, \
+            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
+            func, literal_column, literal, bindparam, cast, extract, \
+            SmallInteger, Enum, REAL, update, insert, Index, delete, \
+            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+from sqlalchemy.dialects.postgresql import ExcludeConstraint, array
+from sqlalchemy import exc, schema
+from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects.postgresql import TSRANGE
+from sqlalchemy.orm import mapper, aliased, Session
+from sqlalchemy.sql import table, column, operators
+
+class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    def test_format(self):
+        seq = Sequence('my_seq_no_schema')
+        dialect = postgresql.PGDialect()
+        assert dialect.identifier_preparer.format_sequence(seq) \
+            == 'my_seq_no_schema'
+        seq = Sequence('my_seq', schema='some_schema')
+        assert dialect.identifier_preparer.format_sequence(seq) \
+            == 'some_schema.my_seq'
+        seq = Sequence('My_Seq', schema='Some_Schema')
+        assert dialect.identifier_preparer.format_sequence(seq) \
+            == '"Some_Schema"."My_Seq"'
+
+    @testing.only_on('postgresql', 'foo')
+    @testing.provide_metadata
+    def test_reverse_eng_name(self):
+        metadata = self.metadata
+        engine = engines.testing_engine(options=dict(implicit_returning=False))
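+        # PostgreSQL truncates identifiers at 63 characters; these long
+        # names exercise how the implicit sequence name is derived near
+        # that limit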
+        for tname, cname in [
+            ('tb1' * 30, 'abc'),
+            ('tb2', 'abc' * 30),
+            ('tb3' * 30, 'abc' * 30),
+            ('tb4', 'abc'),
+        ]:
+            t = Table(tname[:57],
+                metadata,
+                Column(cname[:57], Integer, primary_key=True)
+            )
+            t.create(engine)
+            r = engine.execute(t.insert())
+            assert r.inserted_primary_key == [1]
+
+class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    __dialect__ = postgresql.dialect()
+
+    def test_update_returning(self):
+        dialect = postgresql.dialect()
+        table1 = table('mytable',
+            column('myid', Integer),
+            column('name', String(128)),
+            column('description', String(128)),
+        )
+        u = update(table1, values=dict(name='foo'
+                   )).returning(table1.c.myid, table1.c.name)
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=%(name)s '
+                            'RETURNING mytable.myid, mytable.name',
+                            dialect=dialect)
+        u = update(table1, values=dict(name='foo')).returning(table1)
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=%(name)s '
+                            'RETURNING mytable.myid, mytable.name, '
+                            'mytable.description', dialect=dialect)
+        u = update(table1, values=dict(name='foo'
+                   )).returning(func.length(table1.c.name))
+        self.assert_compile(u,
+                            'UPDATE mytable SET name=%(name)s '
+                            'RETURNING length(mytable.name) AS length_1',
+                            dialect=dialect)
+
+    def test_insert_returning(self):
+        dialect = postgresql.dialect()
+        table1 = table('mytable',
+            column('myid', Integer),
+            column('name', String(128)),
+            column('description', String(128)),
+        )
+
+        i = insert(table1, values=dict(name='foo'
+                   )).returning(table1.c.myid, table1.c.name)
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) VALUES '
+                            '(%(name)s) RETURNING mytable.myid, '
+                            'mytable.name', dialect=dialect)
+        i = insert(table1, values=dict(name='foo')).returning(table1)
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) VALUES '
+                            '(%(name)s) RETURNING mytable.myid, '
+                            'mytable.name, mytable.description',
+                            dialect=dialect)
+        i = insert(table1, values=dict(name='foo'
+                   )).returning(func.length(table1.c.name))
+        self.assert_compile(i,
+                            'INSERT INTO mytable (name) VALUES '
+                            '(%(name)s) RETURNING length(mytable.name) '
+                            'AS length_1', dialect=dialect)
+
+    def test_create_partial_index(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', Integer))
+        idx = Index('test_idx1', tbl.c.data,
+                    postgresql_where=and_(tbl.c.data > 5, tbl.c.data
+                    < 10))
+
+        # test quoting and all that
+
+        idx2 = Index('test_idx2', tbl.c.data,
+                     postgresql_where=and_(tbl.c.data > 'a', tbl.c.data
+                     < "b's"))
+        self.assert_compile(schema.CreateIndex(idx),
+                            'CREATE INDEX test_idx1 ON testtbl (data) '
+                            'WHERE data > 5 AND data < 10',
+                            dialect=postgresql.dialect())
+        self.assert_compile(schema.CreateIndex(idx2),
+                            "CREATE INDEX test_idx2 ON testtbl (data) "
+                            "WHERE data > 'a' AND data < 'b''s'",
+                            dialect=postgresql.dialect())
+
+    def test_create_index_with_ops(self):
+        m = MetaData()
+        tbl = Table('testtbl', m,
+                    Column('data', String),
+                    Column('data2', Integer, key='d2'))
+
+        idx = Index('test_idx1', tbl.c.data,
+                    postgresql_ops={'data': 'text_pattern_ops'})
+
+        idx2 = Index('test_idx2', tbl.c.data, tbl.c.d2,
+                    postgresql_ops={'data': 'text_pattern_ops',
+                                    'd2': 'int4_ops'})
+
+        self.assert_compile(schema.CreateIndex(idx),
+                            'CREATE INDEX test_idx1 ON testtbl '
+                            '(data text_pattern_ops)',
+                            dialect=postgresql.dialect())
+        self.assert_compile(schema.CreateIndex(idx2),
+                            'CREATE INDEX test_idx2 ON testtbl '
+                            '(data text_pattern_ops, data2 int4_ops)',
+                            dialect=postgresql.dialect())
+
+    def test_create_index_with_using(self):
+        m = MetaData()
+        tbl = Table('testtbl', m, Column('data', String))
+
+        idx1 = Index('test_idx1', tbl.c.data)
+        idx2 = Index('test_idx2', tbl.c.data, postgresql_using='btree')
+        idx3 = Index('test_idx3', tbl.c.data, postgresql_using='hash')
+
+        self.assert_compile(schema.CreateIndex(idx1),
+                            'CREATE INDEX test_idx1 ON testtbl '
+                            '(data)',
+                            dialect=postgresql.dialect())
+        self.assert_compile(schema.CreateIndex(idx2),
+                            'CREATE INDEX test_idx2 ON testtbl '
+                            'USING btree (data)',
+                            dialect=postgresql.dialect())
+        self.assert_compile(schema.CreateIndex(idx3),
+                            'CREATE INDEX test_idx3 ON testtbl '
+                            'USING hash (data)',
+                            dialect=postgresql.dialect())
+
+    def test_exclude_constraint_min(self):
+        m = MetaData()
+        tbl = Table('testtbl', m,
+                    Column('room', Integer, primary_key=True))
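+        # ExcludeConstraint takes (column, operator) pairs; the index
+        # method defaults to gist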
+        cons = ExcludeConstraint(('room', '='))
+        tbl.append_constraint(cons)
+        self.assert_compile(schema.AddConstraint(cons),
+                            'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+                            '(room WITH =)',
+                            dialect=postgresql.dialect())
+
+    def test_exclude_constraint_full(self):
+        m = MetaData()
+        room = Column('room', Integer, primary_key=True)
+        tbl = Table('testtbl', m,
+                    room,
+                    Column('during', TSRANGE))
+        cons = ExcludeConstraint((room, '='), ('during', '&&'),
+                                 name='my_name',
+                                 using='gist',
+                                 where="room > 100",
+                                 deferrable=True,
+                                 initially='immediate')
+        tbl.append_constraint(cons)
+        self.assert_compile(schema.AddConstraint(cons),
+                            'ALTER TABLE testtbl ADD CONSTRAINT my_name '
+                            'EXCLUDE USING gist '
+                            '(room WITH =, during WITH &&) WHERE '
+                            '(room > 100) DEFERRABLE INITIALLY immediate',
+                            dialect=postgresql.dialect())
+
+    def test_exclude_constraint_copy(self):
+        m = MetaData()
+        cons = ExcludeConstraint(('room', '='))
+        tbl = Table('testtbl', m,
+              Column('room', Integer, primary_key=True),
+              cons)
+        # apparently you can't copy a ColumnCollectionConstraint until
+        # after it has been bound to a table...
+        cons_copy = cons.copy()
+        tbl.append_constraint(cons_copy)
+        self.assert_compile(schema.AddConstraint(cons_copy),
+                            'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+                            '(room WITH =)',
+                            dialect=postgresql.dialect())
+
+    def test_substring(self):
+        self.assert_compile(func.substring('abc', 1, 2),
+                            'SUBSTRING(%(substring_1)s FROM %(substring_2)s '
+                            'FOR %(substring_3)s)')
+        self.assert_compile(func.substring('abc', 1),
+                            'SUBSTRING(%(substring_1)s FROM %(substring_2)s)')
+
+    def test_extract(self):
+        t = table('t', column('col1', DateTime), column('col2', Date),
+                  column('col3', Time), column('col4',
+                  postgresql.INTERVAL))
+        for field in 'year', 'month', 'day', 'epoch', 'hour':
+            # each pair is (expression, expected compiled form): plain
+            # text and unsupported operand types get no cast; addition
+            # is commutative while subtraction is not; the compiler
+            # should not crack up on entirely unsupported types
+            for expr, compiled_expr in [
+                (t.c.col1, 't.col1 :: timestamp'),
+                (t.c.col2, 't.col2 :: date'),
+                (t.c.col3, 't.col3 :: time'),
+                (func.current_timestamp() - datetime.timedelta(days=5),
+                 '(CURRENT_TIMESTAMP - %(current_timestamp_1)s) :: '
+                 'timestamp'),
+                (func.current_timestamp() + func.current_timestamp(),
+                 'CURRENT_TIMESTAMP + CURRENT_TIMESTAMP'),
+                (text('foo.date + foo.time'), 'foo.date + foo.time'),
+                (func.current_timestamp() + datetime.timedelta(days=5),
+                 '(CURRENT_TIMESTAMP + %(current_timestamp_1)s) :: '
+                 'timestamp'),
+                (t.c.col2 + t.c.col3, '(t.col2 + t.col3) :: timestamp'
+                 ),
+                (t.c.col2 + datetime.timedelta(days=5),
+                 '(t.col2 + %(col2_1)s) :: timestamp'),
+                (datetime.timedelta(days=5) + t.c.col2,
+                 '(%(col2_1)s + t.col2) :: timestamp'),
+                (t.c.col1 + t.c.col4, '(t.col1 + t.col4) :: timestamp'
+                 ),
+                (t.c.col1 - datetime.timedelta(seconds=30),
+                 '(t.col1 - %(col1_1)s) :: timestamp'),
+                (datetime.timedelta(seconds=30) - t.c.col1,
+                 '%(col1_1)s - t.col1'),
+                (func.coalesce(t.c.col1, func.current_timestamp()),
+                 'coalesce(t.col1, CURRENT_TIMESTAMP) :: timestamp'),
+                (t.c.col3 + datetime.timedelta(seconds=30),
+                 '(t.col3 + %(col3_1)s) :: time'),
+                (func.current_timestamp() - func.coalesce(t.c.col1,
+                 func.current_timestamp()),
+                 '(CURRENT_TIMESTAMP - coalesce(t.col1, '
+                 'CURRENT_TIMESTAMP)) :: interval'),
+                (3 * func.foobar(type_=Interval),
+                 '(%(foobar_1)s * foobar()) :: interval'),
+                (literal(datetime.timedelta(seconds=10))
+                 - literal(datetime.timedelta(seconds=10)),
+                 '(%(param_1)s - %(param_2)s) :: interval'),
+                (t.c.col3 + 'some string', 't.col3 + %(col3_1)s'),
+                ]:
+                self.assert_compile(select([extract(field,
+                                    expr)]).select_from(t),
+                                    'SELECT EXTRACT(%s FROM %s) AS '
+                                    'anon_1 FROM t' % (field,
+                                    compiled_expr))
+
+    def test_reserved_words(self):
+        table = Table("pg_table", MetaData(),
+            Column("col1", Integer),
+            Column("variadic", Integer))
+        x = select([table.c.col1, table.c.variadic])
+
+        self.assert_compile(x,
+            '''SELECT pg_table.col1, pg_table."variadic" FROM pg_table''')
+
+    def test_array(self):
+        c = Column('x', postgresql.ARRAY(Integer))
+
+        self.assert_compile(
+            cast(c, postgresql.ARRAY(Integer)),
+            "CAST(x AS INTEGER[])"
+        )
+        self.assert_compile(
+            c[5],
+            "x[%(x_1)s]",
+            checkparams={'x_1': 5}
+        )
+
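+        # a Python slice on an ARRAY column renders as a PostgreSQL
+        # array slice x[lower:upper]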
+        self.assert_compile(
+            c[5:7],
+            "x[%(x_1)s:%(x_2)s]",
+            checkparams={'x_2': 7, 'x_1': 5}
+        )
+        self.assert_compile(
+            c[5:7][2:3],
+            "x[%(x_1)s:%(x_2)s][%(param_1)s:%(param_2)s]",
+            checkparams={'x_2': 7, 'x_1': 5, 'param_1':2, 'param_2':3}
+        )
+        self.assert_compile(
+            c[5:7][3],
+            "x[%(x_1)s:%(x_2)s][%(param_1)s]",
+            checkparams={'x_2': 7, 'x_1': 5, 'param_1':3}
+        )
+
+        self.assert_compile(
+            c.contains([1]),
+            'x @> %(x_1)s',
+            checkparams={'x_1': [1]}
+        )
+        self.assert_compile(
+            c.contained_by([2]),
+            'x <@ %(x_1)s',
+            checkparams={'x_1': [2]}
+        )
+        self.assert_compile(
+            c.overlap([3]),
+            'x && %(x_1)s',
+            checkparams={'x_1': [3]}
+        )
+        self.assert_compile(
+            postgresql.Any(4, c),
+            '%(param_1)s = ANY (x)',
+            checkparams={'param_1': 4}
+        )
+        self.assert_compile(
+            c.any(5, operator=operators.ne),
+            '%(param_1)s != ANY (x)',
+            checkparams={'param_1': 5}
+        )
+        self.assert_compile(
+            postgresql.All(6, c, operator=operators.gt),
+            '%(param_1)s > ALL (x)',
+            checkparams={'param_1': 6}
+        )
+        self.assert_compile(
+            c.all(7, operator=operators.lt),
+            '%(param_1)s < ALL (x)',
+            checkparams={'param_1': 7}
+        )
+
+    def test_array_literal_type(self):
+        is_(postgresql.array([1, 2]).type._type_affinity, postgresql.ARRAY)
+        is_(postgresql.array([1, 2]).type.item_type._type_affinity, Integer)
+
+        is_(postgresql.array([1, 2], type_=String).
+                    type.item_type._type_affinity, String)
+
+    def test_array_literal(self):
+        self.assert_compile(
+            func.array_dims(postgresql.array([1, 2]) +
+                        postgresql.array([3, 4, 5])),
+            "array_dims(ARRAY[%(param_1)s, %(param_2)s] || "
+                    "ARRAY[%(param_3)s, %(param_4)s, %(param_5)s])",
+            checkparams={'param_5': 5, 'param_4': 4, 'param_1': 1,
+                'param_3': 3, 'param_2': 2}
+        )
+
+    def test_array_literal_insert(self):
+        m = MetaData()
+        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
+        self.assert_compile(
+            t.insert().values(data=array([1, 2, 3])),
+            "INSERT INTO t (data) VALUES (ARRAY[%(param_1)s, "
+                "%(param_2)s, %(param_3)s])"
+        )
+
+    def test_update_array_element(self):
+        m = MetaData()
+        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
+        self.assert_compile(
+            t.update().values({t.c.data[5]: 1}),
+            "UPDATE t SET data[%(data_1)s]=%(param_1)s",
+            checkparams={'data_1': 5, 'param_1': 1}
+        )
+
+    def test_update_array_slice(self):
+        m = MetaData()
+        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
+        self.assert_compile(
+            t.update().values({t.c.data[2:5]: 2}),
+            "UPDATE t SET data[%(data_1)s:%(data_2)s]=%(param_1)s",
+            checkparams={'param_1': 2, 'data_2': 5, 'data_1': 2}
+
+        )
+
+    def test_from_only(self):
+        m = MetaData()
+        tbl1 = Table('testtbl1', m, Column('id', Integer))
+        tbl2 = Table('testtbl2', m, Column('id', Integer))
+
+        stmt = tbl1.select().with_hint(tbl1, 'ONLY', 'postgresql')
+        expected = 'SELECT testtbl1.id FROM ONLY testtbl1'
+        self.assert_compile(stmt, expected)
+
+        talias1 = tbl1.alias('foo')
+        stmt = talias1.select().with_hint(talias1, 'ONLY', 'postgresql')
+        expected = 'SELECT foo.id FROM ONLY testtbl1 AS foo'
+        self.assert_compile(stmt, expected)
+
+        stmt = select([tbl1, tbl2]).with_hint(tbl1, 'ONLY', 'postgresql')
+        expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
+                    'testtbl2')
+        self.assert_compile(stmt, expected)
+
+        stmt = select([tbl1, tbl2]).with_hint(tbl2, 'ONLY', 'postgresql')
+        expected = ('SELECT testtbl1.id, testtbl2.id FROM testtbl1, ONLY '
+                    'testtbl2')
+        self.assert_compile(stmt, expected)
+
+        stmt = select([tbl1, tbl2])
+        stmt = stmt.with_hint(tbl1, 'ONLY', 'postgresql')
+        stmt = stmt.with_hint(tbl2, 'ONLY', 'postgresql')
+        expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
+                    'ONLY testtbl2')
+        self.assert_compile(stmt, expected)
+
+        stmt = update(tbl1, values=dict(id=1))
+        stmt = stmt.with_hint('ONLY', dialect_name='postgresql')
+        expected = 'UPDATE ONLY testtbl1 SET id=%(id)s'
+        self.assert_compile(stmt, expected)
+
+        stmt = delete(tbl1).with_hint('ONLY', selectable=tbl1, dialect_name='postgresql')
+        expected = 'DELETE FROM ONLY testtbl1'
+        self.assert_compile(stmt, expected)
+
+        tbl3 = Table('testtbl3', m, Column('id', Integer), schema='testschema')
+        stmt = tbl3.select().with_hint(tbl3, 'ONLY', 'postgresql')
+        expected = 'SELECT testschema.testtbl3.id FROM ONLY testschema.testtbl3'
+        self.assert_compile(stmt, expected)
+
+        assert_raises(
+            exc.CompileError,
+            tbl3.select().with_hint(tbl3, "FAKE", "postgresql").compile,
+            dialect=postgresql.dialect()
+        )
+
+
+
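
The FROM ONLY hint exercised just above can be sketched outside the test
harness roughly as follows ('parent' is an invented table name):

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects.postgresql import base as postgresql

    m = MetaData()
    parent = Table('parent', m, Column('id', Integer))
    # the hint suppresses PG's implicit scan of inheriting child tables
    stmt = select([parent]).with_hint(parent, 'ONLY', 'postgresql')
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT parent.id FROM ONLY parent
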
+class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
+    """Test 'DISTINCT' with SQL expression language and orm.Query with
+    an emphasis on PG's 'DISTINCT ON' syntax.
+
+    """
+    __dialect__ = postgresql.dialect()
+
+    def setup(self):
+        self.table = Table('t', MetaData(),
+                Column('id', Integer, primary_key=True),
+                Column('a', String),
+                Column('b', String),
+            )
+
+    def test_plain_generative(self):
+        self.assert_compile(
+            select([self.table]).distinct(),
+            "SELECT DISTINCT t.id, t.a, t.b FROM t"
+        )
+
+    def test_on_columns_generative(self):
+        self.assert_compile(
+            select([self.table]).distinct(self.table.c.a),
+            "SELECT DISTINCT ON (t.a) t.id, t.a, t.b FROM t"
+        )
+
+    def test_on_columns_generative_multi_call(self):
+        self.assert_compile(
+            select([self.table]).distinct(self.table.c.a).
+                distinct(self.table.c.b),
+            "SELECT DISTINCT ON (t.a, t.b) t.id, t.a, t.b FROM t"
+        )
+
+    def test_plain_inline(self):
+        self.assert_compile(
+            select([self.table], distinct=True),
+            "SELECT DISTINCT t.id, t.a, t.b FROM t"
+        )
+
+    def test_on_columns_inline_list(self):
+        self.assert_compile(
+            select([self.table],
+                    distinct=[self.table.c.a, self.table.c.b]).
+                    order_by(self.table.c.a, self.table.c.b),
+            "SELECT DISTINCT ON (t.a, t.b) t.id, "
+            "t.a, t.b FROM t ORDER BY t.a, t.b"
+        )
+
+    def test_on_columns_inline_scalar(self):
+        self.assert_compile(
+            select([self.table], distinct=self.table.c.a),
+            "SELECT DISTINCT ON (t.a) t.id, t.a, t.b FROM t"
+        )
+
+    def test_query_plain(self):
+        sess = Session()
+        self.assert_compile(
+            sess.query(self.table).distinct(),
+            "SELECT DISTINCT t.id AS t_id, t.a AS t_a, "
+            "t.b AS t_b FROM t"
+        )
+
+    def test_query_on_columns(self):
+        sess = Session()
+        self.assert_compile(
+            sess.query(self.table).distinct(self.table.c.a),
+            "SELECT DISTINCT ON (t.a) t.id AS t_id, t.a AS t_a, "
+            "t.b AS t_b FROM t"
+        )
+
+    def test_query_on_columns_multi_call(self):
+        sess = Session()
+        self.assert_compile(
+            sess.query(self.table).distinct(self.table.c.a).
+                    distinct(self.table.c.b),
+            "SELECT DISTINCT ON (t.a, t.b) t.id AS t_id, t.a AS t_a, "
+            "t.b AS t_b FROM t"
+        )
+
+    def test_query_on_columns_subquery(self):
+        sess = Session()
+        class Foo(object):
+            pass
+        mapper(Foo, self.table)
+        sess = Session()
+        self.assert_compile(
+            sess.query(Foo).from_self().distinct(Foo.a, Foo.b),
+            "SELECT DISTINCT ON (anon_1.t_a, anon_1.t_b) anon_1.t_id "
+            "AS anon_1_t_id, anon_1.t_a AS anon_1_t_a, anon_1.t_b "
+            "AS anon_1_t_b FROM (SELECT t.id AS t_id, t.a AS t_a, "
+            "t.b AS t_b FROM t) AS anon_1"
+        )
+
+    def test_query_distinct_on_aliased(self):
+        class Foo(object):
+            pass
+        mapper(Foo, self.table)
+        a1 = aliased(Foo)
+        sess = Session()
+        self.assert_compile(
+            sess.query(a1).distinct(a1.a),
+            "SELECT DISTINCT ON (t_1.a) t_1.id AS t_1_id, "
+            "t_1.a AS t_1_a, t_1.b AS t_1_b FROM t AS t_1"
+        )
+
+    def test_distinct_on_subquery_anon(self):
+
+        sq = select([self.table]).alias()
+        q = select([self.table.c.id, sq.c.id]).\
+                    distinct(sq.c.id).\
+                    where(self.table.c.id == sq.c.id)
+
+        self.assert_compile(
+            q,
+            "SELECT DISTINCT ON (anon_1.id) t.id, anon_1.id "
+            "FROM t, (SELECT t.id AS id, t.a AS a, t.b "
+            "AS b FROM t) AS anon_1 WHERE t.id = anon_1.id"
+            )
+
+    def test_distinct_on_subquery_named(self):
+        sq = select([self.table]).alias('sq')
+        q = select([self.table.c.id, sq.c.id]).\
+                    distinct(sq.c.id).\
+                    where(self.table.c.id == sq.c.id)
+        self.assert_compile(
+            q,
+            "SELECT DISTINCT ON (sq.id) t.id, sq.id "
+            "FROM t, (SELECT t.id AS id, t.a AS a, "
+            "t.b AS b FROM t) AS sq WHERE t.id = sq.id"
+            )
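
A rough standalone illustration of the DISTINCT ON compilation covered by the
class above (table and column names are invented):

    from sqlalchemy import Column, Integer, MetaData, String, Table, select
    from sqlalchemy.dialects.postgresql import base as postgresql

    m = MetaData()
    t = Table('t', m, Column('id', Integer), Column('a', String))
    # passing columns to distinct() selects PG's DISTINCT ON form
    stmt = select([t]).distinct(t.c.a).order_by(t.c.a)
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT DISTINCT ON (t.a) t.id, t.a FROM t ORDER BY t.a
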
diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py
new file mode 100644 (file)
index 0000000..86ce91d
--- /dev/null
@@ -0,0 +1,221 @@
+# coding: utf-8
+
+from sqlalchemy.testing.assertions import eq_, assert_raises, \
+                assert_raises_message, AssertsExecutionResults, \
+                AssertsCompiledSQL
+from sqlalchemy.testing import engines, fixtures
+from sqlalchemy import testing
+import datetime
+from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
+            String, Sequence, ForeignKey, join, Numeric, \
+            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
+            func, literal_column, literal, bindparam, cast, extract, \
+            SmallInteger, Enum, REAL, update, insert, Index, delete, \
+            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+from sqlalchemy import exc, schema
+from sqlalchemy.dialects.postgresql import base as postgresql
+import logging
+import logging.handlers
+
+class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+
+    __only_on__ = 'postgresql'
+
+    @testing.provide_metadata
+    def test_date_reflection(self):
+        metadata = self.metadata
+        t1 = Table('pgdate', metadata, Column('date1',
+                   DateTime(timezone=True)), Column('date2',
+                   DateTime(timezone=False)))
+        metadata.create_all()
+        m2 = MetaData(testing.db)
+        t2 = Table('pgdate', m2, autoload=True)
+        assert t2.c.date1.type.timezone is True
+        assert t2.c.date2.type.timezone is False
+
+    @testing.fails_on('+zxjdbc',
+                      'The JDBC driver handles the version parsing')
+    def test_version_parsing(self):
+
+
+        class MockConn(object):
+
+            def __init__(self, res):
+                self.res = res
+
+            def execute(self, stmt):
+                return self
+
+            def scalar(self):
+                return self.res
+
+
+        for string, version in \
+            [('PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by '
+             'GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)', (8, 3,
+             8)),
+             ('PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, '
+             'compiled by GCC gcc (GCC) 4.4.2, 64-bit', (8, 5)),
+             ('EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, '
+             'compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), '
+             '64-bit', (9, 1, 2))]:
+            eq_(testing.db.dialect._get_server_version_info(MockConn(string)),
+                version)
+
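
The banners above are fed to the dialect's _get_server_version_info(), which
pulls the leading dotted digit groups out of the version string. A loose
approximation of that behavior (parse_pg_version_banner is a made-up helper,
not the dialect's own code):

    import re

    def parse_pg_version_banner(banner):
        # grab "8.3.8" / "8.5" / "9.1.2" style prefixes from the banner
        m = re.search(
            r'(?:PostgreSQL|EnterpriseDB) (\d+)\.(\d+)(?:\.(\d+))?', banner)
        return tuple(int(g) for g in m.groups() if g is not None)

    parse_pg_version_banner('PostgreSQL 8.5devel on x86_64 ...')  # (8, 5)
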
+    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+    def test_psycopg2_version(self):
+        v = testing.db.dialect.psycopg2_version
+        assert testing.db.dialect.dbapi.__version__.\
+                    startswith(".".join(str(x) for x in v))
+
+    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+    def test_notice_logging(self):
+        log = logging.getLogger('sqlalchemy.dialects.postgresql')
+        buf = logging.handlers.BufferingHandler(100)
+        lev = log.level
+        log.addHandler(buf)
+        log.setLevel(logging.INFO)
+        try:
+            conn = testing.db.connect()
+            trans = conn.begin()
+            try:
+                conn.execute('create table foo (id serial primary key)')
+            finally:
+                trans.rollback()
+        finally:
+            log.removeHandler(buf)
+            log.setLevel(lev)
+        msgs = ' '.join(b.msg for b in buf.buffer)
+        assert 'will create implicit sequence' in msgs
+        assert 'will create implicit index' in msgs
+
+    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+    @engines.close_open_connections
+    def test_client_encoding(self):
+        c = testing.db.connect()
+        current_encoding = c.connection.connection.encoding
+        c.close()
+
+        # attempt to use an encoding that's not
+        # already set
+        if current_encoding == 'UTF8':
+            test_encoding = 'LATIN1'
+        else:
+            test_encoding = 'UTF8'
+
+        e = engines.testing_engine(
+                        options={'client_encoding':test_encoding}
+                    )
+        c = e.connect()
+        eq_(c.connection.connection.encoding, test_encoding)
+
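
For reference, the client_encoding option tested above would be passed to
create_engine() in application code roughly like this (the URL is a
placeholder):

    from sqlalchemy import create_engine

    # the psycopg2 dialect applies this encoding to each new connection
    e = create_engine('postgresql+psycopg2://scott:tiger@localhost/test',
                      client_encoding='utf8')
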
+    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+    @engines.close_open_connections
+    def test_autocommit_isolation_level(self):
+        extensions = __import__('psycopg2.extensions').extensions
+
+        c = testing.db.connect()
+        c = c.execution_options(isolation_level='AUTOCOMMIT')
+        eq_(c.connection.connection.isolation_level,
+            extensions.ISOLATION_LEVEL_AUTOCOMMIT)
+
+    @testing.fails_on('+zxjdbc',
+                      "Can't infer the SQL type to use for an instance "
+                      "of org.python.core.PyObjectDerived.")
+    @testing.fails_on('+pg8000', "Can't determine correct type.")
+    def test_extract(self):
+        fivedaysago = datetime.datetime.now() \
+            - datetime.timedelta(days=5)
+        for field, exp in ('year', fivedaysago.year), ('month',
+                fivedaysago.month), ('day', fivedaysago.day):
+            r = testing.db.execute(select([extract(field, func.now()
+                                   + datetime.timedelta(days=-5))])).scalar()
+            eq_(r, exp)
+
+    def test_checks_for_sequence(self):
+        meta1 = MetaData(testing.db)
+        seq = Sequence('fooseq')
+        t = Table('mytable', meta1, Column('col1', Integer,
+                  seq))
+        seq.drop()
+        try:
+            testing.db.execute('CREATE SEQUENCE fooseq')
+            t.create(checkfirst=True)
+        finally:
+            t.drop(checkfirst=True)
+
+    def test_schema_roundtrips(self):
+        meta = MetaData(testing.db)
+        users = Table('users', meta, Column('id', Integer,
+                      primary_key=True), Column('name', String(50)),
+                      schema='test_schema')
+        users.create()
+        try:
+            users.insert().execute(id=1, name='name1')
+            users.insert().execute(id=2, name='name2')
+            users.insert().execute(id=3, name='name3')
+            users.insert().execute(id=4, name='name4')
+            eq_(users.select().where(users.c.name == 'name2'
+                ).execute().fetchall(), [(2, 'name2')])
+            eq_(users.select(use_labels=True).where(users.c.name
+                == 'name2').execute().fetchall(), [(2, 'name2')])
+            users.delete().where(users.c.id == 3).execute()
+            eq_(users.select().where(users.c.name == 'name3'
+                ).execute().fetchall(), [])
+            users.update().where(users.c.name == 'name4'
+                                 ).execute(name='newname')
+            eq_(users.select(use_labels=True).where(users.c.id
+                == 4).execute().fetchall(), [(4, 'newname')])
+        finally:
+            users.drop()
+
+    def test_preexecute_passivedefault(self):
+        """test that when we get a primary key column back from
+        reflecting a table which has a default value on it, we pre-
+        execute that DefaultClause upon insert."""
+
+        try:
+            meta = MetaData(testing.db)
+            testing.db.execute("""
+             CREATE TABLE speedy_users
+             (
+                 speedy_user_id   SERIAL     PRIMARY KEY,
+
+                 user_name        VARCHAR    NOT NULL,
+                 user_password    VARCHAR    NOT NULL
+             );
+            """)
+            t = Table('speedy_users', meta, autoload=True)
+            r = t.insert().execute(user_name='user',
+                                   user_password='lala')
+            assert r.inserted_primary_key == [1]
+            result = t.select().execute().fetchall()
+            assert result == [(1, 'user', 'lala')]
+        finally:
+            testing.db.execute('drop table speedy_users')
+
+
+    @testing.fails_on('+zxjdbc', 'psycopg2/pg8000 specific assertion')
+    @testing.fails_on('pypostgresql',
+                      'psycopg2/pg8000 specific assertion')
+    def test_numeric_raise(self):
+        stmt = text("select cast('hi' as char) as hi", typemap={'hi'
+                    : Numeric})
+        assert_raises(exc.InvalidRequestError, testing.db.execute, stmt)
+
+    def test_serial_integer(self):
+        for type_, expected in [
+            (Integer, 'SERIAL'),
+            (BigInteger, 'BIGSERIAL'),
+            (SmallInteger, 'SMALLINT'),
+            (postgresql.INTEGER, 'SERIAL'),
+            (postgresql.BIGINT, 'BIGSERIAL'),
+        ]:
+            m = MetaData()
+
+            t = Table('t', m, Column('c', type_, primary_key=True))
+            ddl_compiler = testing.db.dialect.ddl_compiler(testing.db.dialect, schema.CreateTable(t))
+            eq_(
+                ddl_compiler.get_column_specification(t.c.c),
+                "c %s NOT NULL" % expected
+            )
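
A short sketch of the SERIAL/BIGSERIAL rendering asserted above, compiled
without a live connection (names invented; DDL shown with whitespace
trimmed):

    from sqlalchemy import BigInteger, Column, MetaData, Table
    from sqlalchemy.dialects.postgresql import base as postgresql
    from sqlalchemy.schema import CreateTable

    m = MetaData()
    t = Table('t', m, Column('c', BigInteger, primary_key=True))
    # an integer primary key with no explicit Sequence becomes BIGSERIAL
    print(CreateTable(t).compile(dialect=postgresql.dialect()))
    # CREATE TABLE t (c BIGSERIAL NOT NULL, PRIMARY KEY (c))
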
diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py
new file mode 100644 (file)
index 0000000..a7bcbf3
--- /dev/null
@@ -0,0 +1,723 @@
+# coding: utf-8
+
+from sqlalchemy.testing.assertions import eq_, assert_raises, \
+                assert_raises_message, is_, AssertsExecutionResults, \
+                AssertsCompiledSQL, ComparesTables
+from sqlalchemy.testing import engines, fixtures
+from sqlalchemy import testing
+from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
+            String, Sequence, ForeignKey, join, Numeric, \
+            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
+            func, literal_column, literal, bindparam, cast, extract, \
+            SmallInteger, Enum, REAL, update, insert, Index, delete, \
+            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+from sqlalchemy import exc
+import logging
+
+class InsertTest(fixtures.TestBase, AssertsExecutionResults):
+
+    __only_on__ = 'postgresql'
+
+    @classmethod
+    def setup_class(cls):
+        global metadata
+        cls.engine = testing.db
+        metadata = MetaData(testing.db)
+
+    def teardown(self):
+        metadata.drop_all()
+        metadata.clear()
+        if self.engine is not testing.db:
+            self.engine.dispose()
+
+    def test_compiled_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      primary_key=True), Column('data', String(30)))
+        metadata.create_all()
+        ins = table.insert(inline=True, values={'data': bindparam('x'
+                           )}).compile()
+        ins.execute({'x': 'five'}, {'x': 'seven'})
+        assert table.select().execute().fetchall() == [(1, 'five'), (2,
+                'seven')]
+
+    def test_foreignkey_missing_insert(self):
+        t1 = Table('t1', metadata, Column('id', Integer,
+                   primary_key=True))
+        t2 = Table('t2', metadata, Column('id', Integer,
+                   ForeignKey('t1.id'), primary_key=True))
+        metadata.create_all()
+
+        # want to ensure that "null value in column "id" violates not-
+        # null constraint" is raised (IntegrityError on psycopg2, but
+        # ProgrammingError on pg8000), and not "ProgrammingError:
+        # (ProgrammingError) relation "t2_id_seq" does not exist".
+        # the latter corresponds to autoincrement behavior, which is not
+        # the case here due to the foreign key.
+
+        for eng in [engines.testing_engine(options={'implicit_returning'
+                    : False}),
+                    engines.testing_engine(options={'implicit_returning'
+                    : True})]:
+            assert_raises_message(exc.DBAPIError,
+                                  'violates not-null constraint',
+                                  eng.execute, t2.insert())
+
+    def test_sequence_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      Sequence('my_seq'), primary_key=True),
+                      Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_with_sequence(table, 'my_seq')
+
+    def test_sequence_returning_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      Sequence('my_seq'), primary_key=True),
+                      Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_with_sequence_returning(table, 'my_seq')
+
+    def test_opt_sequence_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      Sequence('my_seq', optional=True),
+                      primary_key=True), Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_autoincrement(table)
+
+    def test_opt_sequence_returning_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      Sequence('my_seq', optional=True),
+                      primary_key=True), Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_autoincrement_returning(table)
+
+    def test_autoincrement_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      primary_key=True), Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_autoincrement(table)
+
+    def test_autoincrement_returning_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      primary_key=True), Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_autoincrement_returning(table)
+
+    def test_noautoincrement_insert(self):
+        table = Table('testtable', metadata, Column('id', Integer,
+                      primary_key=True, autoincrement=False),
+                      Column('data', String(30)))
+        metadata.create_all()
+        self._assert_data_noautoincrement(table)
+
+    def _assert_data_autoincrement(self, table):
+        self.engine = \
+            engines.testing_engine(options={'implicit_returning'
+                                   : False})
+        metadata.bind = self.engine
+
+        def go():
+
+            # execute with explicit id
+
+            r = table.insert().execute({'id': 30, 'data': 'd1'})
+            assert r.inserted_primary_key == [30]
+
+            # execute with prefetch id
+
+            r = table.insert().execute({'data': 'd2'})
+            assert r.inserted_primary_key == [1]
+
+            # executemany with explicit ids
+
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+
+            # executemany, uses SERIAL
+
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+
+            # single execute, explicit id, inline
+
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+
+            # single execute, inline, uses SERIAL
+
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        # note that the test framework doesn't capture the "preexecute"
+        # of a sequence or default.  we just see it in the bind params.
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 1, 'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (1, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (2, 'd5'),
+            (3, 'd6'),
+            (33, 'd7'),
+            (4, 'd8'),
+            ]
+        table.delete().execute()
+
+        # test the same series of events using a reflected version of
+        # the table
+
+        m2 = MetaData(self.engine)
+        table = Table(table.name, m2, autoload=True)
+
+        def go():
+            table.insert().execute({'id': 30, 'data': 'd1'})
+            r = table.insert().execute({'data': 'd2'})
+            assert r.inserted_primary_key == [5]
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 5, 'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (5, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (6, 'd5'),
+            (7, 'd6'),
+            (33, 'd7'),
+            (8, 'd8'),
+            ]
+        table.delete().execute()
+
+    def _assert_data_autoincrement_returning(self, table):
+        self.engine = \
+            engines.testing_engine(options={'implicit_returning': True})
+        metadata.bind = self.engine
+
+        def go():
+
+            # execute with explicit id
+
+            r = table.insert().execute({'id': 30, 'data': 'd1'})
+            assert r.inserted_primary_key == [30]
+
+            # execute with prefetch id
+
+            r = table.insert().execute({'data': 'd2'})
+            assert r.inserted_primary_key == [1]
+
+            # executemany with explicit ids
+
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+
+            # executemany, uses SERIAL
+
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+
+            # single execute, explicit id, inline
+
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+
+            # single execute, inline, uses SERIAL
+
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
+             'testtable.id', {'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (1, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (2, 'd5'),
+            (3, 'd6'),
+            (33, 'd7'),
+            (4, 'd8'),
+            ]
+        table.delete().execute()
+
+        # test the same series of events using a reflected version of
+        # the table
+
+        m2 = MetaData(self.engine)
+        table = Table(table.name, m2, autoload=True)
+
+        def go():
+            table.insert().execute({'id': 30, 'data': 'd1'})
+            r = table.insert().execute({'data': 'd2'})
+            assert r.inserted_primary_key == [5]
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
+             'testtable.id', {'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+             : 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (5, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (6, 'd5'),
+            (7, 'd6'),
+            (33, 'd7'),
+            (8, 'd8'),
+            ]
+        table.delete().execute()
+
+    def _assert_data_with_sequence(self, table, seqname):
+        self.engine = \
+            engines.testing_engine(options={'implicit_returning'
+                                   : False})
+        metadata.bind = self.engine
+
+        def go():
+            table.insert().execute({'id': 30, 'data': 'd1'})
+            table.insert().execute({'data': 'd2'})
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 1, 'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+             ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+             ":data)" % seqname, [{'data': 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (1, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (2, 'd5'),
+            (3, 'd6'),
+            (33, 'd7'),
+            (4, 'd8'),
+            ]
+
+        # can't test reflection here since the Sequence must be
+        # explicitly specified
+
+    def _assert_data_with_sequence_returning(self, table, seqname):
+        self.engine = \
+            engines.testing_engine(options={'implicit_returning': True})
+        metadata.bind = self.engine
+
+        def go():
+            table.insert().execute({'id': 30, 'data': 'd1'})
+            table.insert().execute({'data': 'd2'})
+            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+                                   'data': 'd4'})
+            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+            table.insert(inline=True).execute({'data': 'd8'})
+
+        self.assert_sql(self.engine, go, [], with_sequences=[
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             {'id': 30, 'data': 'd1'}),
+            ("INSERT INTO testtable (id, data) VALUES "
+             "(nextval('my_seq'), :data) RETURNING testtable.id",
+             {'data': 'd2'}),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+             ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+             [{'id': 33, 'data': 'd7'}]),
+            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+             ":data)" % seqname, [{'data': 'd8'}]),
+            ])
+        assert table.select().execute().fetchall() == [
+            (30, 'd1'),
+            (1, 'd2'),
+            (31, 'd3'),
+            (32, 'd4'),
+            (2, 'd5'),
+            (3, 'd6'),
+            (33, 'd7'),
+            (4, 'd8'),
+            ]
+
+        # can't test reflection here since the Sequence must be
+        # explicitly specified
+
+    def _assert_data_noautoincrement(self, table):
+        self.engine = \
+            engines.testing_engine(options={'implicit_returning'
+                                   : False})
+        metadata.bind = self.engine
+        table.insert().execute({'id': 30, 'data': 'd1'})
+        if self.engine.driver == 'pg8000':
+            exception_cls = exc.ProgrammingError
+        elif self.engine.driver == 'pypostgresql':
+            exception_cls = Exception
+        else:
+            exception_cls = exc.IntegrityError
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'})
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'},
+                              {'data': 'd3'})
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'})
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'},
+                              {'data': 'd3'})
+        table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
+                               'data': 'd3'})
+        table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
+        assert table.select().execute().fetchall() == [(30, 'd1'), (31,
+                'd2'), (32, 'd3'), (33, 'd4')]
+        table.delete().execute()
+
+        # test the same series of events using a reflected version of
+        # the table
+
+        m2 = MetaData(self.engine)
+        table = Table(table.name, m2, autoload=True)
+        table.insert().execute({'id': 30, 'data': 'd1'})
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'})
+        assert_raises_message(exception_cls,
+                              'violates not-null constraint',
+                              table.insert().execute, {'data': 'd2'},
+                              {'data': 'd3'})
+        table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
+                               'data': 'd3'})
+        table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
+        assert table.select().execute().fetchall() == [(30, 'd1'), (31,
+                'd2'), (32, 'd3'), (33, 'd4')]
+
+
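
The _assert_data_* variants above differ only in the engine's
implicit_returning flag; in application code the switch looks roughly like
this (placeholder URL):

    from sqlalchemy import create_engine

    # False: the primary key default is pre-executed (e.g.
    # SELECT nextval('testtable_id_seq')) and a plain INSERT is emitted;
    # True (the default): INSERT .. RETURNING testtable.id is used instead
    e = create_engine('postgresql://scott:tiger@localhost/test',
                      implicit_returning=False)
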
+class ServerSideCursorsTest(fixtures.TestBase, AssertsExecutionResults):
+
+    __only_on__ = 'postgresql+psycopg2'
+
+    def _fixture(self, server_side_cursors):
+        self.engine = engines.testing_engine(
+                        options={'server_side_cursors':server_side_cursors}
+                    )
+        return self.engine
+
+    def tearDown(self):
+        engines.testing_reaper.close_all()
+        self.engine.dispose()
+
+    def test_global_string(self):
+        engine = self._fixture(True)
+        result = engine.execute('select 1')
+        assert result.cursor.name
+
+    def test_global_text(self):
+        engine = self._fixture(True)
+        result = engine.execute(text('select 1'))
+        assert result.cursor.name
+
+    def test_global_expr(self):
+        engine = self._fixture(True)
+        result = engine.execute(select([1]))
+        assert result.cursor.name
+
+    def test_global_off_explicit(self):
+        engine = self._fixture(False)
+        result = engine.execute(text('select 1'))
+
+        # It should be off globally ...
+
+        assert not result.cursor.name
+
+    def test_stmt_option(self):
+        engine = self._fixture(False)
+
+        s = select([1]).execution_options(stream_results=True)
+        result = engine.execute(s)
+
+        # ... but enabled for this one.
+
+        assert result.cursor.name
+
+
+    def test_conn_option(self):
+        engine = self._fixture(False)
+
+        # and this one
+        result = \
+            engine.connect().execution_options(stream_results=True).\
+                execute('select 1'
+                )
+        assert result.cursor.name
+
+    def test_stmt_enabled_conn_option_disabled(self):
+        engine = self._fixture(False)
+
+        s = select([1]).execution_options(stream_results=True)
+
+        # not this one
+        result = \
+            engine.connect().execution_options(stream_results=False).\
+                execute(s)
+        assert not result.cursor.name
+
+    def test_stmt_option_disabled(self):
+        engine = self._fixture(True)
+        s = select([1]).execution_options(stream_results=False)
+        result = engine.execute(s)
+        assert not result.cursor.name
+
+    def test_aliases_and_ss(self):
+        engine = self._fixture(False)
+        s1 = select([1]).execution_options(stream_results=True).alias()
+        result = engine.execute(s1)
+        assert result.cursor.name
+
+        # s1's options shouldn't affect s2 when s2 is used as a
+        # from_obj.
+        s2 = select([1], from_obj=s1)
+        result = engine.execute(s2)
+        assert not result.cursor.name
+
+    def test_for_update_expr(self):
+        engine = self._fixture(True)
+        s1 = select([1], for_update=True)
+        result = engine.execute(s1)
+        assert result.cursor.name
+
+    def test_for_update_string(self):
+        engine = self._fixture(True)
+        result = engine.execute('SELECT 1 FOR UPDATE')
+        assert result.cursor.name
+
+    def test_text_no_ss(self):
+        engine = self._fixture(False)
+        s = text('select 42')
+        result = engine.execute(s)
+        assert not result.cursor.name
+
+    def test_text_ss_option(self):
+        engine = self._fixture(False)
+        s = text('select 42').execution_options(stream_results=True)
+        result = engine.execute(s)
+        assert result.cursor.name
+
+    def test_roundtrip(self):
+        engine = self._fixture(True)
+        test_table = Table('test_table', MetaData(engine),
+                           Column('id', Integer, primary_key=True),
+                           Column('data', String(50)))
+        test_table.create(checkfirst=True)
+        try:
+            test_table.insert().execute(data='data1')
+            nextid = engine.execute(Sequence('test_table_id_seq'))
+            test_table.insert().execute(id=nextid, data='data2')
+            eq_(test_table.select().execute().fetchall(), [(1, 'data1'
+                ), (2, 'data2')])
+            test_table.update().where(test_table.c.id
+                    == 2).values(data=test_table.c.data + ' updated'
+                                 ).execute()
+            eq_(test_table.select().execute().fetchall(), [(1, 'data1'
+                ), (2, 'data2 updated')])
+            test_table.delete().execute()
+            eq_(test_table.count().scalar(), 0)
+        finally:
+            test_table.drop(checkfirst=True)
+
+
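
Outside the fixture, the stream_results option tested above is enabled per
connection roughly as follows (placeholder URL and table name):

    from sqlalchemy import create_engine, text

    e = create_engine('postgresql+psycopg2://scott:tiger@localhost/test')
    # stream_results asks psycopg2 for a named (server-side) cursor,
    # so rows arrive incrementally instead of being fully buffered
    conn = e.connect().execution_options(stream_results=True)
    for row in conn.execute(text('select * from some_big_table')):
        pass  # process each row as it is fetched
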
+class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    __only_on__ = 'postgresql'
+    __excluded_on__ = ('postgresql', '<', (8, 3, 0)),
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, cattable, matchtable
+        metadata = MetaData(testing.db)
+        cattable = Table('cattable', metadata, Column('id', Integer,
+                         primary_key=True), Column('description',
+                         String(50)))
+        matchtable = Table('matchtable', metadata, Column('id',
+                           Integer, primary_key=True), Column('title',
+                           String(200)), Column('category_id', Integer,
+                           ForeignKey('cattable.id')))
+        metadata.create_all()
+        cattable.insert().execute([{'id': 1, 'description': 'Python'},
+                                  {'id': 2, 'description': 'Ruby'}])
+        matchtable.insert().execute([{'id': 1, 'title'
+                                    : 'Agile Web Development with Rails'
+                                    , 'category_id': 2},
+                                    {'id': 2,
+                                    'title': 'Dive Into Python',
+                                    'category_id': 1},
+                                    {'id': 3, 'title'
+                                    : "Programming Matz's Ruby",
+                                    'category_id': 2},
+                                    {'id': 4, 'title'
+                                    : 'The Definitive Guide to Django',
+                                    'category_id': 1},
+                                    {'id': 5, 'title'
+                                    : 'Python in a Nutshell',
+                                    'category_id': 1}])
+
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+
+    @testing.fails_on('postgresql+pg8000', 'uses positional')
+    @testing.fails_on('postgresql+zxjdbc', 'uses qmark')
+    def test_expression_pyformat(self):
+        self.assert_compile(matchtable.c.title.match('somstr'),
+                            'matchtable.title @@ to_tsquery(%(title_1)s'
+                            ')')
+
+    @testing.fails_on('postgresql+psycopg2', 'uses pyformat')
+    @testing.fails_on('postgresql+pypostgresql', 'uses pyformat')
+    @testing.fails_on('postgresql+zxjdbc', 'uses qmark')
+    def test_expression_positional(self):
+        self.assert_compile(matchtable.c.title.match('somstr'),
+                            'matchtable.title @@ to_tsquery(%s)')
+
+    def test_simple_match(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match('python'
+                )).order_by(matchtable.c.id).execute().fetchall()
+        eq_([2, 5], [r.id for r in results])
+
+    def test_simple_match_with_apostrophe(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match("Matz's"
+                )).execute().fetchall()
+        eq_([3], [r.id for r in results])
+
+    def test_simple_derivative_match(self):
+        results = \
+            matchtable.select().where(matchtable.c.title.match('nutshells'
+                )).execute().fetchall()
+        eq_([5], [r.id for r in results])
+
+    def test_or_match(self):
+        results1 = \
+            matchtable.select().where(or_(matchtable.c.title.match('nutshells'
+                ), matchtable.c.title.match('rubies'
+                ))).order_by(matchtable.c.id).execute().fetchall()
+        eq_([3, 5], [r.id for r in results1])
+        results2 = \
+            matchtable.select().where(
+                matchtable.c.title.match('nutshells | rubies'
+                )).order_by(matchtable.c.id).execute().fetchall()
+        eq_([3, 5], [r.id for r in results2])
+
+    def test_and_match(self):
+        results1 = \
+            matchtable.select().where(and_(matchtable.c.title.match('python'
+                ), matchtable.c.title.match('nutshells'
+                ))).execute().fetchall()
+        eq_([5], [r.id for r in results1])
+        results2 = \
+            matchtable.select().where(
+                matchtable.c.title.match('python & nutshells'
+                )).execute().fetchall()
+        eq_([5], [r.id for r in results2])
+
+    def test_match_across_joins(self):
+        results = matchtable.select().where(and_(cattable.c.id
+                == matchtable.c.category_id,
+                or_(cattable.c.description.match('Ruby'),
+                matchtable.c.title.match('nutshells'
+                )))).order_by(matchtable.c.id).execute().fetchall()
+        eq_([1, 3, 5], [r.id for r in results])
+
+
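
A connection-free sketch of the full-text match() compilation asserted above
(table name invented):

    from sqlalchemy import Column, MetaData, String, Table
    from sqlalchemy.dialects.postgresql import base as postgresql

    m = MetaData()
    doc = Table('doc', m, Column('title', String(200)))
    # match() renders PG's @@ operator against to_tsquery()
    expr = doc.c.title.match('python & nutshells')
    print(expr.compile(dialect=postgresql.dialect()))
    # doc.title @@ to_tsquery(%(title_1)s)
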
+class TupleTest(fixtures.TestBase):
+    __only_on__ = 'postgresql'
+
+    def test_tuple_containment(self):
+
+        for test, exp in [
+            ([('a', 'b')], True),
+            ([('a', 'c')], False),
+            ([('f', 'q'), ('a', 'b')], True),
+            ([('f', 'q'), ('a', 'c')], False)
+        ]:
+            eq_(
+                testing.db.execute(
+                    select([
+                            tuple_(
+                                literal_column("'a'"),
+                                literal_column("'b'")
+                            ).\
+                                in_([
+                                    tuple_(*[
+                                            literal_column("'%s'" % letter)
+                                            for letter in elem
+                                        ]) for elem in test
+                                ])
+                            ])
+                ).scalar(),
+                exp
+            )
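
The tuple containment test above amounts to compiling a composite IN; a
minimal sketch using bound literals instead of literal_column():

    from sqlalchemy import literal, select, tuple_

    # (1, 'a') IN ((1, 'a'), (2, 'b')) -- evaluated on the server
    expr = tuple_(literal(1), literal('a')).in_(
        [tuple_(literal(1), literal('a')),
         tuple_(literal(2), literal('b'))])
    stmt = select([expr])

Executed with .scalar() on a PostgreSQL connection this returns True, which
is what the test asserts for the matching cases.
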
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
new file mode 100644 (file)
index 0000000..fb399b5
--- /dev/null
@@ -0,0 +1,460 @@
+# coding: utf-8
+
+from sqlalchemy.testing.assertions import eq_, assert_raises, \
+                assert_raises_message, is_, AssertsExecutionResults, \
+                AssertsCompiledSQL, ComparesTables
+from sqlalchemy.testing import engines, fixtures
+from sqlalchemy import testing
+from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
+            String, Sequence, ForeignKey, join, Numeric, \
+            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
+            func, literal_column, literal, bindparam, cast, extract, \
+            SmallInteger, Enum, REAL, update, insert, Index, delete, \
+            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+from sqlalchemy import exc
+import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import base as postgresql
+import logging
+
+class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
+
+    """Test PostgreSQL domains"""
+
+    __only_on__ = 'postgresql'
+
+    @classmethod
+    def setup_class(cls):
+        con = testing.db.connect()
+        for ddl in [
+            'CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42',
+            'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0',
+            "CREATE TYPE testtype AS ENUM ('test')",
+            'CREATE DOMAIN enumdomain AS testtype',
+        ]:
+            try:
+                con.execute(ddl)
+            except exc.DBAPIError as e:
+                if 'already exists' not in str(e):
+                    raise
+        con.execute('CREATE TABLE testtable (question integer, answer '
+                    'testdomain)')
+        con.execute('CREATE TABLE test_schema.testtable(question '
+                    'integer, answer test_schema.testdomain, anything '
+                    'integer)')
+        con.execute('CREATE TABLE crosschema (question integer, answer '
+                    'test_schema.testdomain)')
+
+        con.execute('CREATE TABLE enum_test (id integer, data enumdomain)')
+
+    @classmethod
+    def teardown_class(cls):
+        con = testing.db.connect()
+        con.execute('DROP TABLE testtable')
+        con.execute('DROP TABLE test_schema.testtable')
+        con.execute('DROP TABLE crosschema')
+        con.execute('DROP DOMAIN testdomain')
+        con.execute('DROP DOMAIN test_schema.testdomain')
+        con.execute("DROP TABLE enum_test")
+        con.execute("DROP DOMAIN enumdomain")
+        con.execute("DROP TYPE testtype")
+
+    def test_table_is_reflected(self):
+        metadata = MetaData(testing.db)
+        table = Table('testtable', metadata, autoload=True)
+        eq_(set(table.columns.keys()), set(['question', 'answer']),
+            "Columns of reflected table didn't equal expected columns")
+        assert isinstance(table.c.answer.type, Integer)
+
+    def test_domain_is_reflected(self):
+        metadata = MetaData(testing.db)
+        table = Table('testtable', metadata, autoload=True)
+        eq_(str(table.columns.answer.server_default.arg), '42',
+            "Reflected default value didn't equal expected value")
+        assert not table.columns.answer.nullable, \
+            'Expected reflected column to not be nullable.'
+
+    def test_enum_domain_is_reflected(self):
+        metadata = MetaData(testing.db)
+        table = Table('enum_test', metadata, autoload=True)
+        eq_(
+            table.c.data.type.enums,
+            ('test', )
+        )
+
+    def test_table_is_reflected_test_schema(self):
+        metadata = MetaData(testing.db)
+        table = Table('testtable', metadata, autoload=True,
+                      schema='test_schema')
+        eq_(set(table.columns.keys()), set(['question', 'answer',
+            'anything']),
+            "Columns of reflected table didn't equal expected columns")
+        assert isinstance(table.c.anything.type, Integer)
+
+    def test_schema_domain_is_reflected(self):
+        metadata = MetaData(testing.db)
+        table = Table('testtable', metadata, autoload=True,
+                      schema='test_schema')
+        eq_(str(table.columns.answer.server_default.arg), '0',
+            "Reflected default value didn't equal expected value")
+        assert table.columns.answer.nullable, \
+            'Expected reflected column to be nullable.'
+
+    def test_crosschema_domain_is_reflected(self):
+        metadata = MetaData(testing.db)
+        table = Table('crosschema', metadata, autoload=True)
+        eq_(str(table.columns.answer.server_default.arg), '0',
+            "Reflected default value didn't equal expected value")
+        assert table.columns.answer.nullable, \
+            'Expected reflected column to be nullable.'
+
+    def test_unknown_types(self):
+        from sqlalchemy.databases import postgresql
+        ischema_names = postgresql.PGDialect.ischema_names
+        postgresql.PGDialect.ischema_names = {}
+        try:
+            m2 = MetaData(testing.db)
+            assert_raises(exc.SAWarning, Table, 'testtable', m2,
+                          autoload=True)
+
+            @testing.emits_warning('Did not recognize type')
+            def warns():
+                m3 = MetaData(testing.db)
+                t3 = Table('testtable', m3, autoload=True)
+                assert t3.c.answer.type.__class__ == sa.types.NullType
+            warns()
+        finally:
+            postgresql.PGDialect.ischema_names = ischema_names
+
+
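
For reference, reflecting against the fixture above resolves the DOMAIN to
its base type; a rough sketch (placeholder URL):

    from sqlalchemy import MetaData, Table, create_engine

    e = create_engine('postgresql://scott:tiger@localhost/test')
    t = Table('testtable', MetaData(e), autoload=True)
    # 'answer' reflects as INTEGER, carrying the domain's DEFAULT 42
    assert str(t.c.answer.server_default.arg) == '42'
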
+class ReflectionTest(fixtures.TestBase):
+    __only_on__ = 'postgresql'
+
+    @testing.fails_if(('postgresql', '<', (8, 4)),
+            "newer query is bypassed due to unsupported SQL functions")
+    @testing.provide_metadata
+    def test_reflected_primary_key_order(self):
+        meta1 = self.metadata
+        subject = Table('subject', meta1,
+                        Column('p1', Integer, primary_key=True),
+                        Column('p2', Integer, primary_key=True),
+                        PrimaryKeyConstraint('p2', 'p1')
+                        )
+        meta1.create_all()
+        meta2 = MetaData(testing.db)
+        subject = Table('subject', meta2, autoload=True)
+        eq_(subject.primary_key.columns.keys(), ['p2', 'p1'])
+
+    @testing.provide_metadata
+    def test_pg_weirdchar_reflection(self):
+        meta1 = self.metadata
+        subject = Table('subject', meta1, Column('id$', Integer,
+                        primary_key=True))
+        referer = Table('referer', meta1, Column('id', Integer,
+                        primary_key=True), Column('ref', Integer,
+                        ForeignKey('subject.id$')))
+        meta1.create_all()
+        meta2 = MetaData(testing.db)
+        subject = Table('subject', meta2, autoload=True)
+        referer = Table('referer', meta2, autoload=True)
+        self.assert_((subject.c['id$']
+                     == referer.c.ref).compare(
+                        subject.join(referer).onclause))
+
+    @testing.provide_metadata
+    def test_renamed_sequence_reflection(self):
+        metadata = self.metadata
+        t = Table('t', metadata, Column('id', Integer, primary_key=True))
+        metadata.create_all()
+        m2 = MetaData(testing.db)
+        t2 = Table('t', m2, autoload=True, implicit_returning=False)
+        eq_(t2.c.id.server_default.arg.text,
+            "nextval('t_id_seq'::regclass)")
+        r = t2.insert().execute()
+        eq_(r.inserted_primary_key, [1])
+        testing.db.connect().execution_options(autocommit=True).\
+                execute('alter table t_id_seq rename to foobar_id_seq'
+                )
+        m3 = MetaData(testing.db)
+        t3 = Table('t', m3, autoload=True, implicit_returning=False)
+        eq_(t3.c.id.server_default.arg.text,
+            "nextval('foobar_id_seq'::regclass)")
+        r = t3.insert().execute()
+        eq_(r.inserted_primary_key, [2])
+
+    @testing.provide_metadata
+    def test_renamed_pk_reflection(self):
+        metadata = self.metadata
+        t = Table('t', metadata, Column('id', Integer, primary_key=True))
+        metadata.create_all()
+        testing.db.connect().execution_options(autocommit=True).\
+            execute('alter table t rename id to t_id')
+        m2 = MetaData(testing.db)
+        t2 = Table('t', m2, autoload=True)
+        eq_([c.name for c in t2.primary_key], ['t_id'])
+
+    @testing.provide_metadata
+    def test_schema_reflection(self):
+        """note: this test requires that the 'test_schema' schema be
+        separate and accessible by the test user"""
+
+        meta1 = self.metadata
+
+        users = Table('users', meta1, Column('user_id', Integer,
+                      primary_key=True), Column('user_name',
+                      String(30), nullable=False), schema='test_schema')
+        addresses = Table(
+            'email_addresses',
+            meta1,
+            Column('address_id', Integer, primary_key=True),
+            Column('remote_user_id', Integer,
+                   ForeignKey(users.c.user_id)),
+            Column('email_address', String(20)),
+            schema='test_schema',
+            )
+        meta1.create_all()
+        meta2 = MetaData(testing.db)
+        addresses = Table('email_addresses', meta2, autoload=True,
+                          schema='test_schema')
+        users = Table('users', meta2, mustexist=True,
+                      schema='test_schema')
+        j = join(users, addresses)
+        self.assert_((users.c.user_id
+                     == addresses.c.remote_user_id).compare(j.onclause))
+
+    @testing.provide_metadata
+    def test_schema_reflection_2(self):
+        meta1 = self.metadata
+        subject = Table('subject', meta1, Column('id', Integer,
+                        primary_key=True))
+        referer = Table('referer', meta1, Column('id', Integer,
+                        primary_key=True), Column('ref', Integer,
+                        ForeignKey('subject.id')), schema='test_schema')
+        meta1.create_all()
+        meta2 = MetaData(testing.db)
+        subject = Table('subject', meta2, autoload=True)
+        referer = Table('referer', meta2, schema='test_schema',
+                        autoload=True)
+        self.assert_((subject.c.id
+                     == referer.c.ref).compare(
+                        subject.join(referer).onclause))
+
+    @testing.provide_metadata
+    def test_schema_reflection_3(self):
+        meta1 = self.metadata
+        subject = Table('subject', meta1, Column('id', Integer,
+                        primary_key=True), schema='test_schema_2')
+        referer = Table('referer', meta1, Column('id', Integer,
+                        primary_key=True), Column('ref', Integer,
+                        ForeignKey('test_schema_2.subject.id')),
+                        schema='test_schema')
+        meta1.create_all()
+        meta2 = MetaData(testing.db)
+        subject = Table('subject', meta2, autoload=True,
+                        schema='test_schema_2')
+        referer = Table('referer', meta2, schema='test_schema',
+                        autoload=True)
+        self.assert_((subject.c.id
+                     == referer.c.ref).compare(
+                        subject.join(referer).onclause))
+
+    @testing.provide_metadata
+    def test_uppercase_lowercase_table(self):
+        metadata = self.metadata
+
+        a_table = Table('a', metadata, Column('x', Integer))
+        A_table = Table('A', metadata, Column('x', Integer))
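+        # an all-lowercase name is case-insensitive and rendered
+        # unquoted; "A" forces quoting and is a distinct table in PG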
+
+        a_table.create()
+        assert testing.db.has_table("a")
+        assert not testing.db.has_table("A")
+        A_table.create(checkfirst=True)
+        assert testing.db.has_table("A")
+
+    def test_uppercase_lowercase_sequence(self):
+
+        a_seq = Sequence('a')
+        A_seq = Sequence('A')
+
+        a_seq.create(testing.db)
+        assert testing.db.dialect.has_sequence(testing.db, "a")
+        assert not testing.db.dialect.has_sequence(testing.db, "A")
+        A_seq.create(testing.db, checkfirst=True)
+        assert testing.db.dialect.has_sequence(testing.db, "A")
+
+        a_seq.drop(testing.db)
+        A_seq.drop(testing.db)
+
+    def test_schema_reflection_multi_search_path(self):
+        """test the 'set the same schema' rule when
+        multiple schemas/search paths are in effect."""
+
+        db = engines.testing_engine()
+        conn = db.connect()
+        trans = conn.begin()
+        try:
+            conn.execute("set search_path to test_schema_2, "
+                                "test_schema, public")
+            conn.dialect.default_schema_name = "test_schema_2"
+
+            conn.execute("""
+            CREATE TABLE test_schema.some_table (
+                id SERIAL not null primary key
+            )
+            """)
+
+            conn.execute("""
+            CREATE TABLE test_schema_2.some_other_table (
+                id SERIAL not null primary key,
+                sid INTEGER REFERENCES test_schema.some_table(id)
+            )
+            """)
+
+            m1 = MetaData()
+
+            t2_schema = Table('some_other_table',
+                                m1,
+                                schema="test_schema_2",
+                                autoload=True,
+                                autoload_with=conn)
+            t1_schema = Table('some_table',
+                                m1,
+                                schema="test_schema",
+                                autoload=True,
+                                autoload_with=conn)
+
+            t2_no_schema = Table('some_other_table',
+                                m1,
+                                autoload=True,
+                                autoload_with=conn)
+
+            t1_no_schema = Table('some_table',
+                                m1,
+                                autoload=True,
+                                autoload_with=conn)
+
+            # OK: this works because "test_schema" is in the
+            # search path and might as well be the default too,
+            # so there's no reason to assign a "schema" to the
+            # reflected Table at all
+            assert t2_schema.c.sid.references(
+                                t1_no_schema.c.id)
+
+            assert t2_no_schema.c.sid.references(
+                                t1_no_schema.c.id)
+
+        finally:
+            trans.rollback()
+            conn.close()
+            db.dispose()
+
+    @testing.provide_metadata
+    def test_index_reflection(self):
+        """ Reflecting partial & expression-based indexes should warn
+        """
+
+        metadata = self.metadata
+
+        t1 = Table('party', metadata, Column('id', String(10),
+                   nullable=False), Column('name', String(20),
+                   index=True), Column('aname', String(20)))
+        metadata.create_all()
+        testing.db.execute("""
+          create index idx1 on party ((id || name))
+        """)
+        testing.db.execute("""
+          create unique index idx2 on party (id) where name = 'test'
+        """)
+        testing.db.execute("""
+            create index idx3 on party using btree
+                (lower(name::text), lower(aname::text))
+        """)
+
+        def go():
+            m2 = MetaData(testing.db)
+            t2 = Table('party', m2, autoload=True)
+            assert len(t2.indexes) == 2
+
+            # Make sure indexes are in the order we expect them in
+
+            tmp = [(idx.name, idx) for idx in t2.indexes]
+            tmp.sort()
+            r1, r2 = [idx[1] for idx in tmp]
+            assert r1.name == 'idx2'
+            assert r1.unique == True
+            assert r2.unique == False
+            assert [t2.c.id] == r1.columns
+            assert [t2.c.name] == r2.columns
+
+        testing.assert_warnings(go,
+            [
+                'Skipped unsupported reflection of '
+                'expression-based index idx1',
+                'Predicate of partial index idx2 ignored during '
+                'reflection',
+                'Skipped unsupported reflection of '
+                'expression-based index idx3'
+            ])
+
+    @testing.provide_metadata
+    def test_index_reflection_modified(self):
+        """reflect indexes when a column name has changed - PG 9
+        does not update the name of the column in the index def.
+        [ticket:2141]
+
+        """
+
+        metadata = self.metadata
+
+        t1 = Table('t', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('x', Integer)
+        )
+        metadata.create_all()
+        conn = testing.db.connect().execution_options(autocommit=True)
+        conn.execute("CREATE INDEX idx1 ON t (x)")
+        conn.execute("ALTER TABLE t RENAME COLUMN x to y")
+
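+        # PG keeps the stale column name inside its copy of the index
+        # definition; get_indexes() should still report the current
+        # name "y"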
+        ind = testing.db.dialect.get_indexes(conn, "t", None)
+        eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
+        conn.close()
+
+class CustomTypeReflectionTest(fixtures.TestBase):
+
+    class CustomType(object):
+        def __init__(self, arg1=None, arg2=None):
+            self.arg1 = arg1
+            self.arg2 = arg2
+
+    ischema_names = None
+
+    def setup(self):
+        ischema_names = postgresql.PGDialect.ischema_names
+        postgresql.PGDialect.ischema_names = ischema_names.copy()
+        self.ischema_names = ischema_names
+
+    def teardown(self):
+        postgresql.PGDialect.ischema_names = self.ischema_names
+        self.ischema_names = None
+
+    def _assert_reflected(self, dialect):
+        for sch, args in [
+            ('my_custom_type', (None, None)),
+            ('my_custom_type()', (None, None)),
+            ('my_custom_type(ARG1)', ('ARG1', None)),
+            ('my_custom_type(ARG1, ARG2)', ('ARG1', 'ARG2')),
+        ]:
+            column_info = dialect._get_column_info(
+                'colname', sch, None, False,
+                {}, {}, 'public')
+            assert isinstance(column_info['type'], self.CustomType)
+            eq_(column_info['type'].arg1, args[0])
+            eq_(column_info['type'].arg2, args[1])
+
+    def test_clslevel(self):
+        postgresql.PGDialect.ischema_names['my_custom_type'] = self.CustomType
+        dialect = postgresql.PGDialect()
+        self._assert_reflected(dialect)
+
+    def test_instancelevel(self):
+        dialect = postgresql.PGDialect()
+        dialect.ischema_names = dialect.ischema_names.copy()
+        dialect.ischema_names['my_custom_type'] = self.CustomType
+        self._assert_reflected(dialect)
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
new file mode 100644 (file)
index 0000000..784f8bc
--- /dev/null
@@ -0,0 +1,1679 @@
+# coding: utf-8
+from sqlalchemy.testing.assertions import eq_, assert_raises, \
+                assert_raises_message, is_, AssertsExecutionResults, \
+                AssertsCompiledSQL, ComparesTables
+from sqlalchemy.testing import engines, fixtures
+from sqlalchemy import testing
+import datetime
+from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
+            String, Sequence, ForeignKey, join, Numeric, \
+            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
+            func, literal_column, literal, bindparam, cast, extract, \
+            SmallInteger, Enum, REAL, update, insert, Index, delete, \
+            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+from sqlalchemy.orm import Session, mapper, aliased
+from sqlalchemy import exc, schema, types
+from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
+            INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE
+import decimal
+from sqlalchemy import util
+from sqlalchemy.testing.util import round_decimal
+from sqlalchemy.sql import table, column, operators
+import logging
+import re
+
+class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
+    __only_on__ = 'postgresql'
+    __dialect__ = postgresql.dialect()
+
+    @classmethod
+    def define_tables(cls, metadata):
+        data_table = Table('data_table', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('data', Integer)
+        )
+
+    @classmethod
+    def insert_data(cls):
+        data_table = cls.tables.data_table
+
+        data_table.insert().execute(
+            {'data':3},
+            {'data':5},
+            {'data':7},
+            {'data':2},
+            {'data':15},
+            {'data':12},
+            {'data':6},
+            {'data':478},
+            {'data':52},
+            {'data':9},
+        )
+
+    @testing.fails_on('postgresql+zxjdbc',
+                      'XXX: postgresql+zxjdbc currently returns a Decimal result for Float')
+    def test_float_coercion(self):
+        data_table = self.tables.data_table
+
+        for type_, result in [
+            (Numeric, decimal.Decimal('140.381230939')),
+            (Float, 140.381230939),
+            (Float(asdecimal=True), decimal.Decimal('140.381230939')),
+            (Numeric(asdecimal=False), 140.381230939),
+        ]:
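+            # asdecimal flips the default coercion both ways: Numeric
+            # returns Decimal unless asdecimal=False, and Float returns
+            # float unless asdecimal=True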
+            ret = testing.db.execute(
+                select([
+                    func.stddev_pop(data_table.c.data, type_=type_)
+                ])
+            ).scalar()
+
+            eq_(round_decimal(ret, 9), result)
+
+            ret = testing.db.execute(
+                select([
+                    cast(func.stddev_pop(data_table.c.data), type_)
+                ])
+            ).scalar()
+            eq_(round_decimal(ret, 9), result)
+
+    @testing.fails_on('postgresql+zxjdbc',
+                      'zxjdbc has no support for PG arrays')
+    @testing.provide_metadata
+    def test_arrays(self):
+        metadata = self.metadata
+        t1 = Table('t', metadata,
+            Column('x', postgresql.ARRAY(Float)),
+            Column('y', postgresql.ARRAY(REAL)),
+            Column('z', postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
+            Column('q', postgresql.ARRAY(Numeric))
+        )
+        metadata.create_all()
+        t1.insert().execute(x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")])
+        row = t1.select().execute().first()
+        eq_(
+            row,
+            ([5], [5], [6], [decimal.Decimal("6.4")])
+        )
+
+class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+
+    __only_on__ = 'postgresql'
+    __dialect__ = postgresql.dialect()
+
+    def test_compile(self):
+        e1 = Enum('x', 'y', 'z', name='somename')
+        e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
+        self.assert_compile(postgresql.CreateEnumType(e1),
+                            "CREATE TYPE somename AS ENUM ('x','y','z')"
+                            )
+        self.assert_compile(postgresql.CreateEnumType(e2),
+                            "CREATE TYPE someschema.somename AS ENUM "
+                            "('x','y','z')")
+        self.assert_compile(postgresql.DropEnumType(e1),
+                            'DROP TYPE somename')
+        self.assert_compile(postgresql.DropEnumType(e2),
+                            'DROP TYPE someschema.somename')
+        t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
+        self.assert_compile(schema.CreateTable(t1),
+                            'CREATE TABLE sometable (somecolumn '
+                            'somename)')
+        t1 = Table('sometable', MetaData(), Column('somecolumn',
+                   Enum('x', 'y', 'z', native_enum=False)))
+        self.assert_compile(schema.CreateTable(t1),
+                            "CREATE TABLE sometable (somecolumn "
+                            "VARCHAR(1), CHECK (somecolumn IN ('x', "
+                            "'y', 'z')))")
+
+    @testing.fails_on('postgresql+zxjdbc',
+                      'zxjdbc fails on ENUM: column "XXX" is of type '
+                      'XXX but expression is of type character varying')
+    @testing.fails_on('postgresql+pg8000',
+                      'pg8000 fails on ENUM: column "XXX" is of type '
+                      'XXX but expression is of type text')
+    def test_create_table(self):
+        metadata = MetaData(testing.db)
+        t1 = Table('table', metadata, Column('id', Integer,
+                   primary_key=True), Column('value', Enum('one', 'two',
+                   'three', name='onetwothreetype')))
+        t1.create()
+        t1.create(checkfirst=True)  # check the create
+        try:
+            t1.insert().execute(value='two')
+            t1.insert().execute(value='three')
+            t1.insert().execute(value='three')
+            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+                [(1, 'two'), (2, 'three'), (3, 'three')])
+        finally:
+            metadata.drop_all()
+            metadata.drop_all()  # check the drop
+
+    def test_name_required(self):
+        metadata = MetaData(testing.db)
+        etype = Enum('four', 'five', 'six', metadata=metadata)
+        assert_raises(exc.CompileError, etype.create)
+        assert_raises(exc.CompileError, etype.compile,
+                      dialect=postgresql.dialect())
+
+    @testing.fails_on('postgresql+zxjdbc',
+                      'zxjdbc fails on ENUM: column "XXX" is of type '
+                      'XXX but expression is of type character varying')
+    @testing.fails_on('postgresql+pg8000',
+                      'pg8000 fails on ENUM: column "XXX" is of type '
+                      'XXX but expression is of type text')
+    @testing.provide_metadata
+    def test_unicode_labels(self):
+        metadata = self.metadata
+        t1 = Table('table', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('value',
+                    Enum(util.u('réveillé'), util.u('drôle'), util.u('S’il'),
+                            name='onetwothreetype'))
+        )
+        metadata.create_all()
+        t1.insert().execute(value=util.u('drôle'))
+        t1.insert().execute(value=util.u('réveillé'))
+        t1.insert().execute(value=util.u('S’il'))
+        eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+            [(1, util.u('drôle')), (2, util.u('réveillé')),
+                    (3, util.u('S’il'))]
+        )
+        m2 = MetaData(testing.db)
+        t2 = Table('table', m2, autoload=True)
+        eq_(
+            t2.c.value.type.enums,
+            (util.u('réveillé'), util.u('drôle'), util.u('S’il'))
+        )
+
+    def test_non_native_type(self):
+        metadata = MetaData()
+        t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
+                   'three', name='myenum', native_enum=False)))
+
+        def go():
+            t1.create(testing.db)
+
+        try:
+            self.assert_sql(testing.db, go, [],
+                            with_sequences=[("CREATE TABLE foo (\tbar "
+                            "VARCHAR(5), \tCONSTRAINT myenum CHECK "
+                            "(bar IN ('one', 'two', 'three')))", {})])
+        finally:
+            metadata.drop_all(testing.db)
+
+    @testing.provide_metadata
+    def test_disable_create(self):
+        metadata = self.metadata
+
+        e1 = postgresql.ENUM('one', 'two', 'three',
+                            name="myenum",
+                            create_type=False)
+
+        t1 = Table('e1', metadata,
+            Column('c1', e1)
+        )
+        # with create_type=False, the table's DDL won't try to emit
+        # CREATE TYPE; type and table can be created and dropped
+        # separately without conflict
+        e1.create(bind=testing.db)
+        t1.create(testing.db)
+        t1.drop(testing.db)
+        e1.drop(bind=testing.db)
+
+    @testing.provide_metadata
+    def test_generate_multiple(self):
+        """Test that the same enum twice only generates once
+        for the create_all() call, without using checkfirst.
+
+        A 'memo' collection held by the DDL runner
+        now handles this.
+
+        """
+        metadata = self.metadata
+
+        e1 = Enum('one', 'two', 'three',
+                            name="myenum")
+        t1 = Table('e1', metadata,
+            Column('c1', e1)
+        )
+
+        t2 = Table('e2', metadata,
+            Column('c1', e1)
+        )
+
+        metadata.create_all(checkfirst=False)
+        metadata.drop_all(checkfirst=False)
+
+    def test_non_native_dialect(self):
+        engine = engines.testing_engine()
+        engine.connect()
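+        # connect once so dialect initialization runs before the flag
+        # is overridden below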
+        engine.dialect.supports_native_enum = False
+        metadata = MetaData()
+        t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
+                   'three', name='myenum')))
+
+        def go():
+            t1.create(engine)
+
+        try:
+            self.assert_sql(engine, go, [],
+                            with_sequences=[("CREATE TABLE foo (\tbar "
+                            "VARCHAR(5), \tCONSTRAINT myenum CHECK "
+                            "(bar IN ('one', 'two', 'three')))", {})])
+        finally:
+            metadata.drop_all(engine)
+
+    def test_standalone_enum(self):
+        metadata = MetaData(testing.db)
+        etype = Enum('four', 'five', 'six', name='fourfivesixtype',
+                     metadata=metadata)
+        etype.create()
+        try:
+            assert testing.db.dialect.has_type(testing.db,
+                    'fourfivesixtype')
+        finally:
+            etype.drop()
+            assert not testing.db.dialect.has_type(testing.db,
+                    'fourfivesixtype')
+        metadata.create_all()
+        try:
+            assert testing.db.dialect.has_type(testing.db,
+                    'fourfivesixtype')
+        finally:
+            metadata.drop_all()
+            assert not testing.db.dialect.has_type(testing.db,
+                    'fourfivesixtype')
+
+    def test_no_support(self):
+        def server_version_info(self):
+            return (8, 2)
+
+        e = engines.testing_engine()
+        dialect = e.dialect
+        dialect._get_server_version_info = server_version_info
+
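+        # native ENUM arrived in PG 8.3; the flag defaults to on, but
+        # connecting to the faked 8.2 server should turn it off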
+        assert dialect.supports_native_enum
+        e.connect()
+        assert not dialect.supports_native_enum
+
+        # initialize is called again on new pool
+        e.dispose()
+        e.connect()
+        assert not dialect.supports_native_enum
+
+    def test_reflection(self):
+        metadata = MetaData(testing.db)
+        etype = Enum('four', 'five', 'six', name='fourfivesixtype',
+                     metadata=metadata)
+        t1 = Table('table', metadata, Column('id', Integer,
+                   primary_key=True), Column('value', Enum('one', 'two',
+                   'three', name='onetwothreetype')),
+                   Column('value2', etype))
+        metadata.create_all()
+        try:
+            m2 = MetaData(testing.db)
+            t2 = Table('table', m2, autoload=True)
+            assert t2.c.value.type.enums == ('one', 'two', 'three')
+            assert t2.c.value.type.name == 'onetwothreetype'
+            assert t2.c.value2.type.enums == ('four', 'five', 'six')
+            assert t2.c.value2.type.name == 'fourfivesixtype'
+        finally:
+            metadata.drop_all()
+
+    def test_schema_reflection(self):
+        metadata = MetaData(testing.db)
+        etype = Enum(
+            'four',
+            'five',
+            'six',
+            name='fourfivesixtype',
+            schema='test_schema',
+            metadata=metadata,
+            )
+        t1 = Table('table', metadata, Column('id', Integer,
+                   primary_key=True), Column('value', Enum('one', 'two',
+                   'three', name='onetwothreetype',
+                   schema='test_schema')), Column('value2', etype))
+        metadata.create_all()
+        try:
+            m2 = MetaData(testing.db)
+            t2 = Table('table', m2, autoload=True)
+            assert t2.c.value.type.enums == ('one', 'two', 'three')
+            assert t2.c.value.type.name == 'onetwothreetype'
+            assert t2.c.value2.type.enums == ('four', 'five', 'six')
+            assert t2.c.value2.type.name == 'fourfivesixtype'
+            assert t2.c.value2.type.schema == 'test_schema'
+        finally:
+            metadata.drop_all()
+
+class NumericInterpretationTest(fixtures.TestBase):
+    __only_on__ = 'postgresql'
+
+    def test_numeric_codes(self):
+        from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
+
+        for dialect in (pg8000.dialect(), psycopg2.dialect()):
+
+            typ = Numeric().dialect_impl(dialect)
+            for code in base._INT_TYPES + base._FLOAT_TYPES + \
+                        base._DECIMAL_TYPES:
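+                # each family of PG type codes gets a result processor
+                # (or None, meaning passthrough) that lands the value
+                # on float or Decimal as appropriate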
+                proc = typ.result_processor(dialect, code)
+                val = 23.7
+                if proc is not None:
+                    val = proc(val)
+                assert val in (23.7, decimal.Decimal("23.7"))
+
+    @testing.provide_metadata
+    def test_numeric_default(self):
+        metadata = self.metadata
+        # pg8000 appears to fail when the value is 0,
+        # returns an int instead of decimal.
+        t = Table('t', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('nd', Numeric(asdecimal=True), default=1),
+            Column('nf', Numeric(asdecimal=False), default=1),
+            Column('fd', Float(asdecimal=True), default=1),
+            Column('ff', Float(asdecimal=False), default=1),
+        )
+        metadata.create_all()
+        r = t.insert().execute()
+
+        row = t.select().execute().first()
+        assert isinstance(row[1], decimal.Decimal)
+        assert isinstance(row[2], float)
+        assert isinstance(row[3], decimal.Decimal)
+        assert isinstance(row[4], float)
+        eq_(
+            row,
+            (1, decimal.Decimal("1"), 1, decimal.Decimal("1"), 1)
+        )
+
+class TimezoneTest(fixtures.TestBase):
+
+    """Test timezone-aware datetimes.
+
+    psycopg will return a datetime with a tzinfo attached to it, if
+    postgresql returns it.  python then will not let you compare a
+    datetime with a tzinfo to a datetime that doesn't have one.  this
+    test illustrates two ways to have datetime types with and without
+    timezone info."""
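+
+    # a minimal sketch of the comparison problem described above
+    # (illustrative only, never executed by the suite; SomeTzinfo is
+    # a hypothetical tzinfo implementation such as psycopg2's):
+    #
+    #   naive = datetime.datetime(2007, 12, 25, 0, 0)
+    #   aware = naive.replace(tzinfo=SomeTzinfo())
+    #   aware < naive   # raises TypeError: can't compare offset-naive
+    #                   # and offset-aware datetimes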
+
+    __only_on__ = 'postgresql'
+
+    @classmethod
+    def setup_class(cls):
+        global tztable, notztable, metadata
+        metadata = MetaData(testing.db)
+
+        # current_timestamp() in postgresql is assumed to return
+        # TIMESTAMP WITH TIMEZONE
+
+        tztable = Table('tztable', metadata, Column('id', Integer,
+                        primary_key=True), Column('date',
+                        DateTime(timezone=True),
+                        onupdate=func.current_timestamp()),
+                        Column('name', String(20)))
+        notztable = Table('notztable', metadata, Column('id', Integer,
+                          primary_key=True), Column('date',
+                          DateTime(timezone=False),
+                          onupdate=cast(func.current_timestamp(),
+                          DateTime(timezone=False))), Column('name',
+                          String(20)))
+        metadata.create_all()
+
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+
+    @testing.fails_on('postgresql+zxjdbc',
+                      "XXX: postgresql+zxjdbc doesn't give a tzinfo back")
+    def test_with_timezone(self):
+
+        # get a date with a tzinfo
+
+        somedate = \
+            testing.db.connect().scalar(func.current_timestamp().select())
+        assert somedate.tzinfo
+        tztable.insert().execute(id=1, name='row1', date=somedate)
+        row = select([tztable.c.date], tztable.c.id
+                     == 1).execute().first()
+        eq_(row[0], somedate)
+        eq_(somedate.tzinfo.utcoffset(somedate),
+            row[0].tzinfo.utcoffset(row[0]))
+        result = tztable.update(tztable.c.id == 1).\
+            returning(tztable.c.date).\
+            execute(name='newname')
+        row = result.first()
+        assert row[0] >= somedate
+
+    def test_without_timezone(self):
+
+        # get a date without a tzinfo
+
+        somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
+        assert not somedate.tzinfo
+        notztable.insert().execute(id=1, name='row1', date=somedate)
+        row = select([notztable.c.date], notztable.c.id
+                     == 1).execute().first()
+        eq_(row[0], somedate)
+        eq_(row[0].tzinfo, None)
+        result = notztable.update(notztable.c.id == 1).\
+            returning(notztable.c.date).\
+            execute(name='newname')
+        row = result.first()
+        assert row[0] >= somedate
+
+class TimePrecisionTest(fixtures.TestBase, AssertsCompiledSQL):
+
+    __dialect__ = postgresql.dialect()
+
+    def test_compile(self):
+        for type_, expected in [
+            (postgresql.TIME(), 'TIME WITHOUT TIME ZONE'),
+            (postgresql.TIME(precision=5), 'TIME(5) WITHOUT TIME ZONE'
+             ),
+            (postgresql.TIME(timezone=True, precision=5),
+             'TIME(5) WITH TIME ZONE'),
+            (postgresql.TIMESTAMP(), 'TIMESTAMP WITHOUT TIME ZONE'),
+            (postgresql.TIMESTAMP(precision=5),
+             'TIMESTAMP(5) WITHOUT TIME ZONE'),
+            (postgresql.TIMESTAMP(timezone=True, precision=5),
+             'TIMESTAMP(5) WITH TIME ZONE'),
+            ]:
+            self.assert_compile(type_, expected)
+
+    @testing.only_on('postgresql', 'DB specific feature')
+    @testing.provide_metadata
+    def test_reflection(self):
+        metadata = self.metadata
+        t1 = Table(
+            't1',
+            metadata,
+            Column('c1', postgresql.TIME()),
+            Column('c2', postgresql.TIME(precision=5)),
+            Column('c3', postgresql.TIME(timezone=True, precision=5)),
+            Column('c4', postgresql.TIMESTAMP()),
+            Column('c5', postgresql.TIMESTAMP(precision=5)),
+            Column('c6', postgresql.TIMESTAMP(timezone=True,
+                   precision=5)),
+            )
+        t1.create()
+        m2 = MetaData(testing.db)
+        t2 = Table('t1', m2, autoload=True)
+        eq_(t2.c.c1.type.precision, None)
+        eq_(t2.c.c2.type.precision, 5)
+        eq_(t2.c.c3.type.precision, 5)
+        eq_(t2.c.c4.type.precision, None)
+        eq_(t2.c.c5.type.precision, 5)
+        eq_(t2.c.c6.type.precision, 5)
+        eq_(t2.c.c1.type.timezone, False)
+        eq_(t2.c.c2.type.timezone, False)
+        eq_(t2.c.c3.type.timezone, True)
+        eq_(t2.c.c4.type.timezone, False)
+        eq_(t2.c.c5.type.timezone, False)
+        eq_(t2.c.c6.type.timezone, True)
+
+class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
+
+    __only_on__ = 'postgresql'
+
+    __unsupported_on__ = 'postgresql+pg8000', 'postgresql+zxjdbc'
+
+    @classmethod
+    def define_tables(cls, metadata):
+
+        class ProcValue(TypeDecorator):
+            impl = postgresql.ARRAY(Integer, dimensions=2)
+
+            def process_bind_param(self, value, dialect):
+                if value is None:
+                    return None
+                return [
+                    [x + 5 for x in v]
+                    for v in value
+                ]
+
+            def process_result_value(self, value, dialect):
+                if value is None:
+                    return None
+                return [
+                    [x - 7 for x in v]
+                    for v in value
+                ]
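+
+            # net round-trip effect: +5 on the way in, -7 on the way
+            # out, so each element comes back two less than it went
+            # in, which is what test_dimension asserts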
+
+        Table('arrtable', metadata,
+                        Column('id', Integer, primary_key=True),
+                        Column('intarr', postgresql.ARRAY(Integer)),
+                        Column('strarr', postgresql.ARRAY(Unicode())),
+                        Column('dimarr', ProcValue)
+                    )
+
+        Table('dim_arrtable', metadata,
+                        Column('id', Integer, primary_key=True),
+                        Column('intarr', postgresql.ARRAY(Integer, dimensions=1)),
+                        Column('strarr', postgresql.ARRAY(Unicode(), dimensions=1)),
+                        Column('dimarr', ProcValue)
+                    )
+
+    def _fixture_456(self, table):
+        testing.db.execute(
+                table.insert(),
+                intarr=[4, 5, 6]
+        )
+
+    def test_reflect_array_column(self):
+        metadata2 = MetaData(testing.db)
+        tbl = Table('arrtable', metadata2, autoload=True)
+        assert isinstance(tbl.c.intarr.type, postgresql.ARRAY)
+        assert isinstance(tbl.c.strarr.type, postgresql.ARRAY)
+        assert isinstance(tbl.c.intarr.type.item_type, Integer)
+        assert isinstance(tbl.c.strarr.type.item_type, String)
+
+    def test_insert_array(self):
+        arrtable = self.tables.arrtable
+        arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
+                                  util.u('def')])
+        results = arrtable.select().execute().fetchall()
+        eq_(len(results), 1)
+        eq_(results[0]['intarr'], [1, 2, 3])
+        eq_(results[0]['strarr'], [util.u('abc'), util.u('def')])
+
+    def test_array_where(self):
+        arrtable = self.tables.arrtable
+        arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
+                                  util.u('def')])
+        arrtable.insert().execute(intarr=[4, 5, 6], strarr=util.u('ABC'))
+        results = arrtable.select().where(arrtable.c.intarr == [1, 2,
+                3]).execute().fetchall()
+        eq_(len(results), 1)
+        eq_(results[0]['intarr'], [1, 2, 3])
+
+    def test_array_concat(self):
+        arrtable = self.tables.arrtable
+        arrtable.insert().execute(intarr=[1, 2, 3],
+                    strarr=[util.u('abc'), util.u('def')])
+        results = select([arrtable.c.intarr + [4, 5,
+                         6]]).execute().fetchall()
+        eq_(len(results), 1)
+        eq_(results[0][0], [1, 2, 3, 4, 5, 6])
+
+    def test_array_subtype_resultprocessor(self):
+        arrtable = self.tables.arrtable
+        arrtable.insert().execute(intarr=[4, 5, 6],
+                                  strarr=[[util.ue('m\xe4\xe4')], [
+                                    util.ue('m\xf6\xf6')]])
+        arrtable.insert().execute(intarr=[1, 2, 3], strarr=[
+                        util.ue('m\xe4\xe4'), util.ue('m\xf6\xf6')])
+        results = \
+            arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
+        eq_(len(results), 2)
+        eq_(results[0]['strarr'], [util.ue('m\xe4\xe4'), util.ue('m\xf6\xf6')])
+        eq_(results[1]['strarr'], [[util.ue('m\xe4\xe4')], [util.ue('m\xf6\xf6')]])
+
+    def test_array_literal(self):
+        eq_(
+            testing.db.scalar(
+                select([
+                    postgresql.array([1, 2]) + postgresql.array([3, 4, 5])
+                ])
+                ), [1, 2, 3, 4, 5]
+        )
+
+    def test_array_getitem_single_type(self):
+        arrtable = self.tables.arrtable
+        is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+        is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+    def test_array_getitem_slice_type(self):
+        arrtable = self.tables.arrtable
+        is_(arrtable.c.intarr[1:3].type._type_affinity, postgresql.ARRAY)
+        is_(arrtable.c.strarr[1:3].type._type_affinity, postgresql.ARRAY)
+
+    def test_array_getitem_single_exec(self):
+        arrtable = self.tables.arrtable
+        self._fixture_456(arrtable)
+        eq_(
+            testing.db.scalar(select([arrtable.c.intarr[2]])),
+            5
+        )
+        testing.db.execute(
+            arrtable.update().values({arrtable.c.intarr[2]: 7})
+        )
+        eq_(
+            testing.db.scalar(select([arrtable.c.intarr[2]])),
+            7
+        )
+
+    def test_undim_array_empty(self):
+        arrtable = self.tables.arrtable
+        self._fixture_456(arrtable)
+        eq_(
+            testing.db.scalar(
+                select([arrtable.c.intarr]).
+                    where(arrtable.c.intarr.contains([]))
+            ),
+            [4, 5, 6]
+        )
+
+    def test_array_getitem_slice_exec(self):
+        arrtable = self.tables.arrtable
+        testing.db.execute(
+            arrtable.insert(),
+            intarr=[4, 5, 6],
+            strarr=[util.u('abc'), util.u('def')]
+        )
+        eq_(
+            testing.db.scalar(select([arrtable.c.intarr[2:3]])),
+            [5, 6]
+        )
+        testing.db.execute(
+            arrtable.update().values({arrtable.c.intarr[2:3]: [7, 8]})
+        )
+        eq_(
+            testing.db.scalar(select([arrtable.c.intarr[2:3]])),
+            [7, 8]
+        )
+
+    def _test_undim_array_contains_typed_exec(self, struct):
+        arrtable = self.tables.arrtable
+        self._fixture_456(arrtable)
+        eq_(
+            testing.db.scalar(
+                select([arrtable.c.intarr]).
+                    where(arrtable.c.intarr.contains(struct([4, 5])))
+            ),
+            [4, 5, 6]
+        )
+
+    def test_undim_array_contains_set_exec(self):
+        self._test_undim_array_contains_typed_exec(set)
+
+    def test_undim_array_contains_list_exec(self):
+        self._test_undim_array_contains_typed_exec(list)
+
+    def test_undim_array_contains_generator_exec(self):
+        self._test_undim_array_contains_typed_exec(
+                    lambda elem: (x for x in elem))
+
+    def _test_dim_array_contains_typed_exec(self, struct):
+        dim_arrtable = self.tables.dim_arrtable
+        self._fixture_456(dim_arrtable)
+        eq_(
+            testing.db.scalar(
+                select([dim_arrtable.c.intarr]).
+                    where(dim_arrtable.c.intarr.contains(struct([4, 5])))
+            ),
+            [4, 5, 6]
+        )
+
+    def test_dim_array_contains_set_exec(self):
+        self._test_dim_array_contains_typed_exec(set)
+
+    def test_dim_array_contains_list_exec(self):
+        self._test_dim_array_contains_typed_exec(list)
+
+    def test_dim_array_contains_generator_exec(self):
+        self._test_dim_array_contains_typed_exec(lambda elem: (x for x in elem))
+
+    def test_array_contained_by_exec(self):
+        arrtable = self.tables.arrtable
+        with testing.db.connect() as conn:
+            conn.execute(
+                arrtable.insert(),
+                intarr=[6, 5, 4]
+            )
+            eq_(
+                conn.scalar(
+                    select([arrtable.c.intarr.contained_by([4, 5, 6, 7])])
+                ),
+                True
+            )
+
+    def test_array_overlap_exec(self):
+        arrtable = self.tables.arrtable
+        with testing.db.connect() as conn:
+            conn.execute(
+                arrtable.insert(),
+                intarr=[4, 5, 6]
+            )
+            eq_(
+                conn.scalar(
+                    select([arrtable.c.intarr]).
+                        where(arrtable.c.intarr.overlap([7, 6]))
+                ),
+                [4, 5, 6]
+            )
+
+    def test_array_any_exec(self):
+        arrtable = self.tables.arrtable
+        with testing.db.connect() as conn:
+            conn.execute(
+                arrtable.insert(),
+                intarr=[4, 5, 6]
+            )
+            eq_(
+                conn.scalar(
+                    select([arrtable.c.intarr]).
+                        where(postgresql.Any(5, arrtable.c.intarr))
+                ),
+                [4, 5, 6]
+            )
+
+    def test_array_all_exec(self):
+        arrtable = self.tables.arrtable
+        with testing.db.connect() as conn:
+            conn.execute(
+                arrtable.insert(),
+                intarr=[4, 5, 6]
+            )
+            eq_(
+                conn.scalar(
+                    select([arrtable.c.intarr]).
+                        where(arrtable.c.intarr.all(4, operator=operators.le))
+                ),
+                [4, 5, 6]
+            )
+
+    @testing.provide_metadata
+    def test_tuple_flag(self):
+        metadata = self.metadata
+
+        t1 = Table('t1', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('data', postgresql.ARRAY(String(5), as_tuple=True)),
+            Column('data2', postgresql.ARRAY(Numeric(asdecimal=False), as_tuple=True)),
+        )
+        metadata.create_all()
+        testing.db.execute(t1.insert(), id=1, data=["1","2","3"], data2=[5.4, 5.6])
+        testing.db.execute(t1.insert(), id=2, data=["4", "5", "6"], data2=[1.0])
+        testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]],
+                        data2=[[5.4, 5.6], [1.0, 1.1]])
+
+        r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall()
+        eq_(
+            r,
+            [
+                (1, ('1', '2', '3'), (5.4, 5.6)),
+                (2, ('4', '5', '6'), (1.0,)),
+                (3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1)))
+            ]
+        )
+        # hashable
+        eq_(
+            set(row[1] for row in r),
+            set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
+        )
+
+    def test_dimension(self):
+        arrtable = self.tables.arrtable
+        testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+        eq_(
+            testing.db.scalar(select([arrtable.c.dimarr])),
+            [[-1, 0, 1], [2, 3, 4]]
+        )
+
+class TimestampTest(fixtures.TestBase, AssertsExecutionResults):
+    __only_on__ = 'postgresql'
+
+    def test_timestamp(self):
+        engine = testing.db
+        connection = engine.connect()
+
+        s = select(["timestamp '2007-12-25'"])
+        result = connection.execute(s).first()
+        eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0))
+
+
+class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
+    """test DDL and reflection of PG-specific types """
+
+    __only_on__ = 'postgresql'
+    __excluded_on__ = (('postgresql', '<', (8, 3, 0)),)
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, table
+        metadata = MetaData(testing.db)
+
+        # create these types so that we can issue
+        # special SQL92 INTERVAL syntax
+        class y2m(types.UserDefinedType, postgresql.INTERVAL):
+            def get_col_spec(self):
+                return "INTERVAL YEAR TO MONTH"
+
+        class d2s(types.UserDefinedType, postgresql.INTERVAL):
+            def get_col_spec(self):
+                return "INTERVAL DAY TO SECOND"
+
+        table = Table('sometable', metadata,
+            Column('id', postgresql.UUID, primary_key=True),
+            Column('flag', postgresql.BIT),
+            Column('bitstring', postgresql.BIT(4)),
+            Column('addr', postgresql.INET),
+            Column('addr2', postgresql.MACADDR),
+            Column('addr3', postgresql.CIDR),
+            Column('doubleprec', postgresql.DOUBLE_PRECISION),
+            Column('plain_interval', postgresql.INTERVAL),
+            Column('year_interval', y2m()),
+            Column('month_interval', d2s()),
+            Column('precision_interval', postgresql.INTERVAL(precision=3))
+        )
+
+        metadata.create_all()
+
+        # cheat so that the "strict type check"
+        # works
+        table.c.year_interval.type = postgresql.INTERVAL()
+        table.c.month_interval.type = postgresql.INTERVAL()
+
+    @classmethod
+    def teardown_class(cls):
+        metadata.drop_all()
+
+    def test_reflection(self):
+        m = MetaData(testing.db)
+        t = Table('sometable', m, autoload=True)
+
+        self.assert_tables_equal(table, t, strict_types=True)
+        assert t.c.plain_interval.type.precision is None
+        assert t.c.precision_interval.type.precision == 3
+        assert t.c.bitstring.type.length == 4
+
+    def test_bit_compile(self):
+        pairs = [(postgresql.BIT(), 'BIT(1)'),
+                 (postgresql.BIT(5), 'BIT(5)'),
+                 (postgresql.BIT(varying=True), 'BIT VARYING'),
+                 (postgresql.BIT(5, varying=True), 'BIT VARYING(5)'),
+                ]
+        for type_, expected in pairs:
+            self.assert_compile(type_, expected)
+
+    @testing.provide_metadata
+    def test_bit_reflection(self):
+        metadata = self.metadata
+        t1 = Table('t1', metadata,
+        Column('bit1', postgresql.BIT()),
+        Column('bit5', postgresql.BIT(5)),
+        Column('bitvarying', postgresql.BIT(varying=True)),
+        Column('bitvarying5', postgresql.BIT(5, varying=True)),
+        )
+        t1.create()
+        m2 = MetaData(testing.db)
+        t2 = Table('t1', m2, autoload=True)
+        eq_(t2.c.bit1.type.length, 1)
+        eq_(t2.c.bit1.type.varying, False)
+        eq_(t2.c.bit5.type.length, 5)
+        eq_(t2.c.bit5.type.varying, False)
+        eq_(t2.c.bitvarying.type.length, None)
+        eq_(t2.c.bitvarying.type.varying, True)
+        eq_(t2.c.bitvarying5.type.length, 5)
+        eq_(t2.c.bitvarying5.type.varying, True)
+
+class UUIDTest(fixtures.TestBase):
+    """Test the bind/return values of the UUID type."""
+
+    __only_on__ = 'postgresql'
+
+    @testing.requires.python25
+    @testing.fails_on('postgresql+zxjdbc',
+                      'column "data" is of type uuid but expression is of type character varying')
+    @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+    def test_uuid_string(self):
+        import uuid
+        self._test_round_trip(
+            Table('utable', MetaData(),
+                Column('data', postgresql.UUID())
+            ),
+            str(uuid.uuid4()),
+            str(uuid.uuid4())
+        )
+
+    @testing.requires.python25
+    @testing.fails_on('postgresql+zxjdbc',
+                      'column "data" is of type uuid but expression is of type character varying')
+    @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+    def test_uuid_uuid(self):
+        import uuid
+        self._test_round_trip(
+            Table('utable', MetaData(),
+                Column('data', postgresql.UUID(as_uuid=True))
+            ),
+            uuid.uuid4(),
+            uuid.uuid4()
+        )
+
+    def test_no_uuid_available(self):
+        from sqlalchemy.dialects.postgresql import base
+        uuid_type = base._python_UUID
+        base._python_UUID = None
+        try:
+            assert_raises(
+                NotImplementedError,
+                postgresql.UUID, as_uuid=True
+            )
+        finally:
+            base._python_UUID = uuid_type
+
+    def setup(self):
+        self.conn = testing.db.connect()
+        trans = self.conn.begin()
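+        # the transaction is left open; closing the connection in
+        # teardown() rolls it back, discarding rows from each test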
+
+    def teardown(self):
+        self.conn.close()
+
+    def _test_round_trip(self, utable, value1, value2):
+        utable.create(self.conn)
+        self.conn.execute(utable.insert(), {'data':value1})
+        self.conn.execute(utable.insert(), {'data':value2})
+        r = self.conn.execute(
+                select([utable.c.data]).
+                    where(utable.c.data != value1)
+                )
+        eq_(r.fetchone()[0], value2)
+        eq_(r.fetchone(), None)
+
+
+class HStoreTest(fixtures.TestBase):
+    def _assert_sql(self, construct, expected):
+        dialect = postgresql.dialect()
+        compiled = str(construct.compile(dialect=dialect))
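+        # collapse runs of whitespace so multi-line statements compare
+        # against the single-line expected strings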
+        compiled = re.sub(r'\s+', ' ', compiled)
+        expected = re.sub(r'\s+', ' ', expected)
+        eq_(compiled, expected)
+
+    def setup(self):
+        metadata = MetaData()
+        self.test_table = Table('test_table', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('hash', HSTORE)
+        )
+        self.hashcol = self.test_table.c.hash
+
+    def _test_where(self, whereclause, expected):
+        stmt = select([self.test_table]).where(whereclause)
+        self._assert_sql(
+            stmt,
+            "SELECT test_table.id, test_table.hash FROM test_table "
+            "WHERE %s" % expected
+        )
+
+    def _test_cols(self, colclause, expected, from_=True):
+        stmt = select([colclause])
+        self._assert_sql(
+            stmt,
+            (
+                "SELECT %s" +
+                (" FROM test_table" if from_ else "")
+            ) % expected
+        )
+
+    def test_bind_serialize_default(self):
+        from sqlalchemy.engine import default
+
+        dialect = default.DefaultDialect()
+        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
+        eq_(
+            proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])),
+            '"key1"=>"value1", "key2"=>"value2"'
+        )
+
+    def test_bind_serialize_with_slashes_and_quotes(self):
+        from sqlalchemy.engine import default
+
+        dialect = default.DefaultDialect()
+        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
+        eq_(
+            proc({'\\"a': '\\"1'}),
+            '"\\\\\\"a"=>"\\\\\\"1"'
+        )
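+        # hstore's text format escapes each backslash as \\ and each
+        # double quote as \", hence the pile-up once Python string
+        # quoting is layered on top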
+
+    def test_parse_error(self):
+        from sqlalchemy.engine import default
+
+        dialect = default.DefaultDialect()
+        proc = self.test_table.c.hash.type._cached_result_processor(
+                    dialect, None)
+        assert_raises_message(
+            ValueError,
+            r'''After u?'\[\.\.\.\], "key1"=>"value1", ', could not parse '''
+            r'''residual at position 36: u?'crapcrapcrap, "key3"\[\.\.\.\]''',
+            proc,
+            '"key2"=>"value2", "key1"=>"value1", '
+                        'crapcrapcrap, "key3"=>"value3"'
+        )
+
+    def test_result_deserialize_default(self):
+        from sqlalchemy.engine import default
+
+        dialect = default.DefaultDialect()
+        proc = self.test_table.c.hash.type._cached_result_processor(
+                    dialect, None)
+        eq_(
+            proc('"key2"=>"value2", "key1"=>"value1"'),
+            {"key1": "value1", "key2": "value2"}
+        )
+
+    def test_result_deserialize_with_slashes_and_quotes(self):
+        from sqlalchemy.engine import default
+
+        dialect = default.DefaultDialect()
+        proc = self.test_table.c.hash.type._cached_result_processor(
+                    dialect, None)
+        eq_(
+            proc('"\\\\\\"a"=>"\\\\\\"1"'),
+            {'\\"a': '\\"1'}
+        )
+
+    def test_bind_serialize_psycopg2(self):
+        from sqlalchemy.dialects.postgresql import psycopg2
+
+        dialect = psycopg2.PGDialect_psycopg2()
+        dialect._has_native_hstore = True
+        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
+        is_(proc, None)
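+        # with native hstore, psycopg2 adapts the dict itself, so no
+        # SQLAlchemy-level bind processor is needed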
+
+        dialect = psycopg2.PGDialect_psycopg2()
+        dialect._has_native_hstore = False
+        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
+        eq_(
+            proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])),
+            '"key1"=>"value1", "key2"=>"value2"'
+        )
+
+    def test_result_deserialize_psycopg2(self):
+        from sqlalchemy.dialects.postgresql import psycopg2
+
+        dialect = psycopg2.PGDialect_psycopg2()
+        dialect._has_native_hstore = True
+        proc = self.test_table.c.hash.type._cached_result_processor(
+                    dialect, None)
+        is_(proc, None)
+
+        dialect = psycopg2.PGDialect_psycopg2()
+        dialect._has_native_hstore = False
+        proc = self.test_table.c.hash.type._cached_result_processor(
+                    dialect, None)
+        eq_(
+            proc('"key2"=>"value2", "key1"=>"value1"'),
+            {"key1": "value1", "key2": "value2"}
+        )
+
+    def test_where_has_key(self):
+        self._test_where(
+            # hide from 2to3
+            getattr(self.hashcol, 'has_key')('foo'),
+            "test_table.hash ? %(hash_1)s"
+        )
+
+    def test_where_has_all(self):
+        self._test_where(
+            self.hashcol.has_all(postgresql.array(['1', '2'])),
+            "test_table.hash ?& ARRAY[%(param_1)s, %(param_2)s]"
+        )
+
+    def test_where_has_any(self):
+        self._test_where(
+            self.hashcol.has_any(postgresql.array(['1', '2'])),
+            "test_table.hash ?| ARRAY[%(param_1)s, %(param_2)s]"
+        )
+
+    def test_where_defined(self):
+        self._test_where(
+            self.hashcol.defined('foo'),
+            "defined(test_table.hash, %(param_1)s)"
+        )
+
+    def test_where_contains(self):
+        self._test_where(
+            self.hashcol.contains({'foo': '1'}),
+            "test_table.hash @> %(hash_1)s"
+        )
+
+    def test_where_contained_by(self):
+        self._test_where(
+            self.hashcol.contained_by({'foo': '1', 'bar': None}),
+            "test_table.hash <@ %(hash_1)s"
+        )
+
+    def test_where_getitem(self):
+        self._test_where(
+            self.hashcol['bar'] == None,
+            "(test_table.hash -> %(hash_1)s) IS NULL"
+        )
+
+    def test_cols_get(self):
+        self._test_cols(
+            self.hashcol['foo'],
+            "test_table.hash -> %(hash_1)s AS anon_1",
+            True
+        )
+
+    def test_cols_delete_single_key(self):
+        self._test_cols(
+            self.hashcol.delete('foo'),
+            "delete(test_table.hash, %(param_1)s) AS delete_1",
+            True
+        )
+
+    def test_cols_delete_array_of_keys(self):
+        self._test_cols(
+            self.hashcol.delete(postgresql.array(['foo', 'bar'])),
+            ("delete(test_table.hash, ARRAY[%(param_1)s, %(param_2)s]) "
+             "AS delete_1"),
+            True
+        )
+
+    def test_cols_delete_matching_pairs(self):
+        self._test_cols(
+            self.hashcol.delete(hstore('1', '2')),
+            ("delete(test_table.hash, hstore(%(param_1)s, %(param_2)s)) "
+             "AS delete_1"),
+            True
+        )
+
+    def test_cols_slice(self):
+        self._test_cols(
+            self.hashcol.slice(postgresql.array(['1', '2'])),
+            ("slice(test_table.hash, ARRAY[%(param_1)s, %(param_2)s]) "
+             "AS slice_1"),
+            True
+        )
+
+    def test_cols_hstore_pair_text(self):
+        self._test_cols(
+            hstore('foo', '3')['foo'],
+            "hstore(%(param_1)s, %(param_2)s) -> %(hstore_1)s AS anon_1",
+            False
+        )
+
+    def test_cols_hstore_pair_array(self):
+        self._test_cols(
+            hstore(postgresql.array(['1', '2']),
+                   postgresql.array(['3', None]))['1'],
+            ("hstore(ARRAY[%(param_1)s, %(param_2)s], "
+             "ARRAY[%(param_3)s, NULL]) -> %(hstore_1)s AS anon_1"),
+            False
+        )
+
+    def test_cols_hstore_single_array(self):
+        self._test_cols(
+            hstore(postgresql.array(['1', '2', '3', None]))['3'],
+            ("hstore(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s, NULL]) "
+             "-> %(hstore_1)s AS anon_1"),
+            False
+        )
+
+    def test_cols_concat(self):
+        self._test_cols(
+            self.hashcol.concat(hstore(cast(self.test_table.c.id, Text), '3')),
+            ("test_table.hash || hstore(CAST(test_table.id AS TEXT), "
+             "%(param_1)s) AS anon_1"),
+            True
+        )
+
+    def test_cols_concat_op(self):
+        self._test_cols(
+            hstore('foo', 'bar') + self.hashcol,
+            "hstore(%(param_1)s, %(param_2)s) || test_table.hash AS anon_1",
+            True
+        )
+
+    def test_cols_concat_get(self):
+        self._test_cols(
+            (self.hashcol + self.hashcol)['foo'],
+            "test_table.hash || test_table.hash -> %(param_1)s AS anon_1"
+        )
+
+    def test_cols_keys(self):
+        self._test_cols(
+            # hide from 2to3
+            getattr(self.hashcol, 'keys')(),
+            "akeys(test_table.hash) AS akeys_1",
+            True
+        )
+
+    def test_cols_vals(self):
+        self._test_cols(
+            self.hashcol.vals(),
+            "avals(test_table.hash) AS avals_1",
+            True
+        )
+
+    def test_cols_array(self):
+        self._test_cols(
+            self.hashcol.array(),
+            "hstore_to_array(test_table.hash) AS hstore_to_array_1",
+            True
+        )
+
+    def test_cols_matrix(self):
+        self._test_cols(
+            self.hashcol.matrix(),
+            "hstore_to_matrix(test_table.hash) AS hstore_to_matrix_1",
+            True
+        )
+
+
+class HStoreRoundTripTest(fixtures.TablesTest):
+    __requires__ = 'hstore',
+    __dialect__ = 'postgresql'
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table('data_table', metadata,
+            Column('id', Integer, primary_key=True),
+            Column('name', String(30), nullable=False),
+            Column('data', HSTORE)
+        )
+
+    def _fixture_data(self, engine):
+        data_table = self.tables.data_table
+        engine.execute(
+                data_table.insert(),
+                {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}},
+                {'name': 'r2', 'data': {"k1": "r2v1", "k2": "r2v2"}},
+                {'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
+                {'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
+                {'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2"}},
+        )
+
+    def _assert_data(self, compare):
+        data = testing.db.execute(
+            select([self.tables.data_table.c.data]).
+                order_by(self.tables.data_table.c.name)
+        ).fetchall()
+        eq_([d for d, in data], compare)
+
+    def _test_insert(self, engine):
+        engine.execute(
+            self.tables.data_table.insert(),
+            {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}}
+        )
+        self._assert_data([{"k1": "r1v1", "k2": "r1v2"}])
+
+    def _non_native_engine(self):
+        if testing.against("postgresql+psycopg2"):
+            engine = engines.testing_engine(options=dict(use_native_hstore=False))
+        else:
+            engine = testing.db
+        engine.connect()
+        return engine
+
+    def test_reflect(self):
+        from sqlalchemy import inspect
+        insp = inspect(testing.db)
+        cols = insp.get_columns('data_table')
+        assert isinstance(cols[2]['type'], HSTORE)
+
+    @testing.only_on("postgresql+psycopg2")
+    def test_insert_native(self):
+        engine = testing.db
+        self._test_insert(engine)
+
+    def test_insert_python(self):
+        engine = self._non_native_engine()
+        self._test_insert(engine)
+
+    @testing.only_on("postgresql+psycopg2")
+    def test_criterion_native(self):
+        engine = testing.db
+        self._fixture_data(engine)
+        self._test_criterion(engine)
+
+    def test_criterion_python(self):
+        engine = self._non_native_engine()
+        self._fixture_data(engine)
+        self._test_criterion(engine)
+
+    def _test_criterion(self, engine):
+        data_table = self.tables.data_table
+        result = engine.execute(
+            select([data_table.c.data]).where(data_table.c.data['k1'] == 'r3v1')
+        ).first()
+        eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+
+    def _test_fixed_round_trip(self, engine):
+        s = select([
+                hstore(
+                    array(['key1', 'key2', 'key3']),
+                    array(['value1', 'value2', 'value3'])
+                )
+            ])
+        eq_(
+            engine.scalar(s),
+            {"key1": "value1", "key2": "value2", "key3": "value3"}
+        )
+
+    def test_fixed_round_trip_python(self):
+        engine = self._non_native_engine()
+        self._test_fixed_round_trip(engine)
+
+    @testing.only_on("postgresql+psycopg2")
+    def test_fixed_round_trip_native(self):
+        engine = testing.db
+        self._test_fixed_round_trip(engine)
+
+    def _test_unicode_round_trip(self, engine):
+        s = select([
+            hstore(
+                array([util.u('réveillé'), util.u('drôle'), util.u('S’il')]),
+                array([util.u('réveillé'), util.u('drôle'), util.u('S’il')])
+            )
+        ])
+        eq_(
+            engine.scalar(s),
+            {
+                util.u('réveillé'): util.u('réveillé'),
+                util.u('drôle'): util.u('drôle'),
+                util.u('S’il'): util.u('S’il')
+            }
+        )
+
+    def test_unicode_round_trip_python(self):
+        engine = self._non_native_engine()
+        self._test_unicode_round_trip(engine)
+
+    @testing.only_on("postgresql+psycopg2")
+    def test_unicode_round_trip_native(self):
+        engine = testing.db
+        self._test_unicode_round_trip(engine)
+
+    def test_escaped_quotes_round_trip_python(self):
+        engine = self._non_native_engine()
+        self._test_escaped_quotes_round_trip(engine)
+
+    @testing.only_on("postgresql+psycopg2")
+    def test_escaped_quotes_round_trip_native(self):
+        engine = testing.db
+        self._test_escaped_quotes_round_trip(engine)
+
+    def _test_escaped_quotes_round_trip(self, engine):
+        engine.execute(
+            self.tables.data_table.insert(),
+            {'name': 'r1', 'data': {r'key \"foo\"': r'value \"bar"\ xyz'}}
+        )
+        self._assert_data([{r'key \"foo\"': r'value \"bar"\ xyz'}])
+
+class _RangeTypeMixin(object):
+    __requires__ = 'range_types',
+    __dialect__ = 'postgresql+psycopg2'
+
+    @property
+    def extras(self):
+        # done this way so we don't get ImportErrors with
+        # older psycopg2 versions.
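+        # (the range helpers these tests rely on are a relatively
+        # recent addition to psycopg2.extras, hence the lazy import)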
+        from psycopg2 import extras
+        return extras
+
+    @classmethod
+    def define_tables(cls, metadata):
+        # no reason ranges shouldn't be primary keys,
+        # so let's just use them as such
+        table = Table('data_table', metadata,
+            Column('range', cls._col_type, primary_key=True),
+        )
+        cls.col = table.c.range
+
+    def test_actual_type(self):
+        eq_(str(self._col_type()), self._col_str)
+
+    def test_reflect(self):
+        from sqlalchemy import inspect
+        insp = inspect(testing.db)
+        cols = insp.get_columns('data_table')
+        assert isinstance(cols[0]['type'], self._col_type)
+
+    def _assert_data(self):
+        data = testing.db.execute(
+            select([self.tables.data_table.c.range])
+        ).fetchall()
+        eq_(data, [(self._data_obj(), )])
+
+    def test_insert_obj(self):
+        testing.db.engine.execute(
+            self.tables.data_table.insert(),
+            {'range': self._data_obj()}
+        )
+        self._assert_data()
+
+    def test_insert_text(self):
+        testing.db.engine.execute(
+            self.tables.data_table.insert(),
+            {'range': self._data_str}
+        )
+        self._assert_data()
+
+    # operator tests
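+    # (compile-only: each clause below is rendered against a bare
+    # postgresql dialect and compared as a string, with no database
+    # round trip involved)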
+
+    def _test_clause(self, colclause, expected):
+        dialect = postgresql.dialect()
+        compiled = str(colclause.compile(dialect=dialect))
+        eq_(compiled, expected)
+
+    def test_where_equal(self):
+        self._test_clause(
+            self.col==self._data_str,
+            "data_table.range = %(range_1)s"
+        )
+
+    def test_where_not_equal(self):
+        self._test_clause(
+            self.col!=self._data_str,
+            "data_table.range <> %(range_1)s"
+        )
+
+    def test_where_less_than(self):
+        self._test_clause(
+            self.col < self._data_str,
+            "data_table.range < %(range_1)s"
+        )
+
+    def test_where_greater_than(self):
+        self._test_clause(
+            self.col > self._data_str,
+            "data_table.range > %(range_1)s"
+        )
+
+    def test_where_less_than_or_equal(self):
+        self._test_clause(
+            self.col <= self._data_str,
+            "data_table.range <= %(range_1)s"
+        )
+
+    def test_where_greater_than_or_equal(self):
+        self._test_clause(
+            self.col >= self._data_str,
+            "data_table.range >= %(range_1)s"
+        )
+
+    def test_contains(self):
+        self._test_clause(
+            self.col.contains(self._data_str),
+            "data_table.range @> %(range_1)s"
+        )
+
+    def test_contained_by(self):
+        self._test_clause(
+            self.col.contained_by(self._data_str),
+            "data_table.range <@ %(range_1)s"
+        )
+
+    def test_overlaps(self):
+        self._test_clause(
+            self.col.overlaps(self._data_str),
+            "data_table.range && %(range_1)s"
+        )
+
+    def test_strictly_left_of(self):
+        self._test_clause(
+            self.col << self._data_str,
+            "data_table.range << %(range_1)s"
+        )
+        self._test_clause(
+            self.col.strictly_left_of(self._data_str),
+            "data_table.range << %(range_1)s"
+        )
+
+    def test_strictly_right_of(self):
+        self._test_clause(
+            self.col >> self._data_str,
+            "data_table.range >> %(range_1)s"
+        )
+        self._test_clause(
+            self.col.strictly_right_of(self._data_str),
+            "data_table.range >> %(range_1)s"
+        )
+
+    def test_not_extend_right_of(self):
+        self._test_clause(
+            self.col.not_extend_right_of(self._data_str),
+            "data_table.range &< %(range_1)s"
+        )
+
+    def test_not_extend_left_of(self):
+        self._test_clause(
+            self.col.not_extend_left_of(self._data_str),
+            "data_table.range &> %(range_1)s"
+        )
+
+    def test_adjacent_to(self):
+        self._test_clause(
+            self.col.adjacent_to(self._data_str),
+            "data_table.range -|- %(range_1)s"
+        )
+
+    def test_union(self):
+        self._test_clause(
+            self.col + self.col,
+            "data_table.range + data_table.range"
+        )
+
+    def test_union_result(self):
+        # insert
+        testing.db.engine.execute(
+            self.tables.data_table.insert(),
+            {'range': self._data_str}
+        )
+        # select
+        range = self.tables.data_table.c.range
+        data = testing.db.execute(
+            select([range + range])
+            ).fetchall()
+        eq_(data, [(self._data_obj(), )])
+
+    def test_intersection(self):
+        self._test_clause(
+            self.col * self.col,
+            "data_table.range * data_table.range"
+        )
+
+    def test_intersection_result(self):
+        # insert
+        testing.db.engine.execute(
+            self.tables.data_table.insert(),
+            {'range': self._data_str}
+        )
+        # select
+        range = self.tables.data_table.c.range
+        data = testing.db.execute(
+            select([range * range])
+            ).fetchall()
+        eq_(data, [(self._data_obj(), )])
+
+    def test_difference(self):
+        self._test_clause(
+            self.col - self.col,
+            "data_table.range - data_table.range"
+        )
+
+    def test_difference_result(self):
+        # insert
+        testing.db.engine.execute(
+            self.tables.data_table.insert(),
+            {'range': self._data_str}
+        )
+        # select
+        range = self.tables.data_table.c.range
+        data = testing.db.execute(
+            select([range - range])
+            ).fetchall()
+        eq_(data, [(self._data_obj().__class__(empty=True), )])
+
+class Int4RangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = INT4RANGE
+    _col_str = 'INT4RANGE'
+    _data_str = '[1,2)'
+    def _data_obj(self):
+        return self.extras.NumericRange(1, 2)
+
+class Int8RangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = INT8RANGE
+    _col_str = 'INT8RANGE'
+    _data_str = '[9223372036854775806,9223372036854775807)'
+    def _data_obj(self):
+        return self.extras.NumericRange(
+            9223372036854775806, 9223372036854775807
+            )
+
+class NumRangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = NUMRANGE
+    _col_str = 'NUMRANGE'
+    _data_str = '[1.0,2.0)'
+    def _data_obj(self):
+        return self.extras.NumericRange(
+            decimal.Decimal('1.0'), decimal.Decimal('2.0')
+            )
+
+class DateRangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = DATERANGE
+    _col_str = 'DATERANGE'
+    _data_str = '[2013-03-23,2013-03-24)'
+    def _data_obj(self):
+        return self.extras.DateRange(
+            datetime.date(2013, 3, 23), datetime.date(2013, 3, 24)
+            )
+
+class DateTimeRangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = TSRANGE
+    _col_str = 'TSRANGE'
+    _data_str = '[2013-03-23 14:30,2013-03-23 23:30)'
+    def _data_obj(self):
+        return self.extras.DateTimeRange(
+            datetime.datetime(2013, 3, 23, 14, 30),
+            datetime.datetime(2013, 3, 23, 23, 30)
+            )
+
+class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest):
+
+    _col_type = TSTZRANGE
+    _col_str = 'TSTZRANGE'
+
+    # make sure we use one, steady timestamp with timezone pair
+    # for all parts of all these tests
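+    # (an uncached call would open a new connection each time and could
+    # return a different pair, breaking the _data_str/_data_obj match)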
+    _tstzs = None
+    def tstzs(self):
+        if self._tstzs is None:
+            lower = testing.db.connect().scalar(
+                func.current_timestamp().select()
+                )
+            upper = lower + datetime.timedelta(1)
+            self._tstzs = (lower, upper)
+        return self._tstzs
+
+    @property
+    def _data_str(self):
+        return '[%s,%s)' % self.tstzs()
+
+    def _data_obj(self):
+        return self.extras.DateTimeTZRange(*self.tstzs())
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
deleted file mode 100644 (file)
index 28dcd1d..0000000
+++ /dev/null
@@ -1,2143 +0,0 @@
-# -*- encoding: utf-8
-from sqlalchemy.testing import eq_, engines, pickleable
-import datetime
-import os
-from sqlalchemy import *
-from sqlalchemy import types, exc, schema, event
-from sqlalchemy.orm import *
-from sqlalchemy.sql import table, column
-from sqlalchemy.databases import mssql
-from sqlalchemy.dialects.mssql import pyodbc, mxodbc, pymssql
-from sqlalchemy.dialects.mssql.base import TIME
-from sqlalchemy.engine import url
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
-        AssertsExecutionResults, ComparesTables
-from sqlalchemy import testing
-from sqlalchemy.testing import emits_warning_on, assert_raises_message
-import decimal
-from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.util.compat import b
-from sqlalchemy import sql
-
-
-class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-    __dialect__ = mssql.dialect()
-
-    def test_true_false(self):
-        self.assert_compile(
-            sql.false(), "0"
-        )
-        self.assert_compile(
-            sql.true(), "1"
-        )
-
-    def test_select(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.select(),
-                            'SELECT sometable.somecolumn FROM sometable')
-
-    def test_select_with_nolock(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.select().with_hint(t, 'WITH (NOLOCK)'),
-                            'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')
-
-    def test_join_with_hint(self):
-        t1 = table('t1',
-            column('a', Integer),
-            column('b', String),
-            column('c', String),
-        )
-        t2 = table('t2',
-            column("a", Integer),
-            column("b", Integer),
-            column("c", Integer),
-        )
-        join = t1.join(t2, t1.c.a==t2.c.a).\
-                        select().with_hint(t1, 'WITH (NOLOCK)')
-        self.assert_compile(
-            join,
-            'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c '
-            'FROM t1 WITH (NOLOCK) JOIN t2 ON t1.a = t2.a'
-        )
-
-    def test_insert(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.insert(),
-                            'INSERT INTO sometable (somecolumn) VALUES '
-                            '(:somecolumn)')
-
-    def test_update(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.update(t.c.somecolumn == 7),
-                            'UPDATE sometable SET somecolumn=:somecolum'
-                            'n WHERE sometable.somecolumn = '
-                            ':somecolumn_1', dict(somecolumn=10))
-
-    def test_insert_hint(self):
-        t = table('sometable', column('somecolumn'))
-        for targ in (None, t):
-            for darg in ("*", "mssql"):
-                self.assert_compile(
-                    t.insert().
-                        values(somecolumn="x").
-                        with_hint("WITH (PAGLOCK)",
-                            selectable=targ,
-                            dialect_name=darg),
-                    "INSERT INTO sometable WITH (PAGLOCK) "
-                    "(somecolumn) VALUES (:somecolumn)"
-                )
-
-    def test_update_hint(self):
-        t = table('sometable', column('somecolumn'))
-        for targ in (None, t):
-            for darg in ("*", "mssql"):
-                self.assert_compile(
-                    t.update().where(t.c.somecolumn=="q").
-                            values(somecolumn="x").
-                            with_hint("WITH (PAGLOCK)",
-                                    selectable=targ,
-                                    dialect_name=darg),
-                    "UPDATE sometable WITH (PAGLOCK) "
-                    "SET somecolumn=:somecolumn "
-                    "WHERE sometable.somecolumn = :somecolumn_1"
-                )
-
-    def test_update_exclude_hint(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(
-            t.update().where(t.c.somecolumn=="q").
-                values(somecolumn="x").
-                with_hint("XYZ", "mysql"),
-            "UPDATE sometable SET somecolumn=:somecolumn "
-            "WHERE sometable.somecolumn = :somecolumn_1"
-        )
-
-    def test_delete_hint(self):
-        t = table('sometable', column('somecolumn'))
-        for targ in (None, t):
-            for darg in ("*", "mssql"):
-                self.assert_compile(
-                    t.delete().where(t.c.somecolumn=="q").
-                            with_hint("WITH (PAGLOCK)",
-                                    selectable=targ,
-                                    dialect_name=darg),
-                    "DELETE FROM sometable WITH (PAGLOCK) "
-                    "WHERE sometable.somecolumn = :somecolumn_1"
-                )
-
-    def test_delete_exclude_hint(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(
-            t.delete().\
-                where(t.c.somecolumn=="q").\
-                with_hint("XYZ", dialect_name="mysql"),
-            "DELETE FROM sometable WHERE "
-            "sometable.somecolumn = :somecolumn_1"
-        )
-
-    def test_update_from_hint(self):
-        t = table('sometable', column('somecolumn'))
-        t2 = table('othertable', column('somecolumn'))
-        for darg in ("*", "mssql"):
-            self.assert_compile(
-                t.update().where(t.c.somecolumn==t2.c.somecolumn).
-                        values(somecolumn="x").
-                        with_hint("WITH (PAGLOCK)",
-                                selectable=t2,
-                                dialect_name=darg),
-                "UPDATE sometable SET somecolumn=:somecolumn "
-                "FROM sometable, othertable WITH (PAGLOCK) "
-                "WHERE sometable.somecolumn = othertable.somecolumn"
-            )
-
-    # TODO: not supported yet.
-    #def test_delete_from_hint(self):
-    #    t = table('sometable', column('somecolumn'))
-    #    t2 = table('othertable', column('somecolumn'))
-    #    for darg in ("*", "mssql"):
-    #        self.assert_compile(
-    #            t.delete().where(t.c.somecolumn==t2.c.somecolumn).
-    #                    with_hint("WITH (PAGLOCK)",
-    #                            selectable=t2,
-    #                            dialect_name=darg),
-    #            ""
-    #        )
-
-    def test_strict_binds(self):
-        """test the 'strict' compiler binds."""
-
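-        # "strict" here: string bind parameters such as those below are
-        # rendered inline as SQL literals rather than being passed as
-        # driver-level parameters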
-        from sqlalchemy.dialects.mssql.base import MSSQLStrictCompiler
-        mxodbc_dialect = mxodbc.dialect()
-        mxodbc_dialect.statement_compiler = MSSQLStrictCompiler
-
-        t = table('sometable', column('foo'))
-
-        for expr, compile in [
-            (
-                select([literal("x"), literal("y")]),
-                "SELECT 'x' AS anon_1, 'y' AS anon_2",
-            ),
-            (
-                select([t]).where(t.c.foo.in_(['x', 'y', 'z'])),
-                "SELECT sometable.foo FROM sometable WHERE sometable.foo "
-                "IN ('x', 'y', 'z')",
-            ),
-            (
-                    t.c.foo.in_([None]),
-                    "sometable.foo IN (NULL)"
-            )
-        ]:
-            self.assert_compile(expr, compile, dialect=mxodbc_dialect)
-
-    def test_in_with_subqueries(self):
-        """Test removal of legacy behavior that converted "x==subquery"
-        to use IN.
-
-        """
-
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.select().where(t.c.somecolumn
-                            == t.select()),
-                            'SELECT sometable.somecolumn FROM '
-                            'sometable WHERE sometable.somecolumn = '
-                            '(SELECT sometable.somecolumn FROM '
-                            'sometable)')
-        self.assert_compile(t.select().where(t.c.somecolumn
-                            != t.select()),
-                            'SELECT sometable.somecolumn FROM '
-                            'sometable WHERE sometable.somecolumn != '
-                            '(SELECT sometable.somecolumn FROM '
-                            'sometable)')
-
-    def test_count(self):
-        t = table('sometable', column('somecolumn'))
-        self.assert_compile(t.count(),
-                            'SELECT count(sometable.somecolumn) AS '
-                            'tbl_row_count FROM sometable')
-
-    def test_noorderby_insubquery(self):
-        """test that the ms-sql dialect removes ORDER BY clauses from
-        subqueries"""
-
-        table1 = table('mytable',
-            column('myid', Integer),
-            column('name', String),
-            column('description', String),
-        )
-
-        q = select([table1.c.myid],
-                   order_by=[table1.c.myid]).alias('foo')
-        crit = q.c.myid == table1.c.myid
-        self.assert_compile(select(['*'], crit),
-                            "SELECT * FROM (SELECT mytable.myid AS "
-                            "myid FROM mytable) AS foo, mytable WHERE "
-                            "foo.myid = mytable.myid")
-
-
-
-    def test_delete_schema(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata, Column('id', Integer,
-                    primary_key=True), schema='paj')
-        self.assert_compile(tbl.delete(tbl.c.id == 1),
-                            'DELETE FROM paj.test WHERE paj.test.id = '
-                            ':id_1')
-        s = select([tbl.c.id]).where(tbl.c.id == 1)
-        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
-                            'DELETE FROM paj.test WHERE paj.test.id IN '
-                            '(SELECT test_1.id FROM paj.test AS test_1 '
-                            'WHERE test_1.id = :id_1)')
-
-    def test_delete_schema_multipart(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata, Column('id', Integer,
-                    primary_key=True), schema='banana.paj')
-        self.assert_compile(tbl.delete(tbl.c.id == 1),
-                            'DELETE FROM banana.paj.test WHERE '
-                            'banana.paj.test.id = :id_1')
-        s = select([tbl.c.id]).where(tbl.c.id == 1)
-        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
-                            'DELETE FROM banana.paj.test WHERE '
-                            'banana.paj.test.id IN (SELECT test_1.id '
-                            'FROM banana.paj.test AS test_1 WHERE '
-                            'test_1.id = :id_1)')
-
-    def test_delete_schema_multipart_needs_quoting(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata, Column('id', Integer,
-                    primary_key=True), schema='banana split.paj')
-        self.assert_compile(tbl.delete(tbl.c.id == 1),
-                            'DELETE FROM [banana split].paj.test WHERE '
-                            '[banana split].paj.test.id = :id_1')
-        s = select([tbl.c.id]).where(tbl.c.id == 1)
-        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
-                            'DELETE FROM [banana split].paj.test WHERE '
-                            '[banana split].paj.test.id IN (SELECT '
-                            'test_1.id FROM [banana split].paj.test AS '
-                            'test_1 WHERE test_1.id = :id_1)')
-
-    def test_delete_schema_multipart_both_need_quoting(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata, Column('id', Integer,
-                    primary_key=True),
-                    schema='banana split.paj with a space')
-        self.assert_compile(tbl.delete(tbl.c.id == 1),
-                            'DELETE FROM [banana split].[paj with a '
-                            'space].test WHERE [banana split].[paj '
-                            'with a space].test.id = :id_1')
-        s = select([tbl.c.id]).where(tbl.c.id == 1)
-        self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
-                            'DELETE FROM [banana split].[paj with a '
-                            'space].test WHERE [banana split].[paj '
-                            'with a space].test.id IN (SELECT '
-                            'test_1.id FROM [banana split].[paj with a '
-                            'space].test AS test_1 WHERE test_1.id = '
-                            ':id_1)')
-
-    def test_union(self):
-        t1 = table('t1', column('col1'), column('col2'), column('col3'
-                   ), column('col4'))
-        t2 = table('t2', column('col1'), column('col2'), column('col3'
-                   ), column('col4'))
-        s1, s2 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4'
-                        )], t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
-            select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
-                   t2.c.col2.in_(['t2col2r2', 't2col2r3']))
-        u = union(s1, s2, order_by=['col3', 'col4'])
-        self.assert_compile(u,
-                            'SELECT t1.col3 AS col3, t1.col4 AS col4 '
-                            'FROM t1 WHERE t1.col2 IN (:col2_1, '
-                            ':col2_2) UNION SELECT t2.col3 AS col3, '
-                            't2.col4 AS col4 FROM t2 WHERE t2.col2 IN '
-                            '(:col2_3, :col2_4) ORDER BY col3, col4')
-        self.assert_compile(u.alias('bar').select(),
-                            'SELECT bar.col3, bar.col4 FROM (SELECT '
-                            't1.col3 AS col3, t1.col4 AS col4 FROM t1 '
-                            'WHERE t1.col2 IN (:col2_1, :col2_2) UNION '
-                            'SELECT t2.col3 AS col3, t2.col4 AS col4 '
-                            'FROM t2 WHERE t2.col2 IN (:col2_3, '
-                            ':col2_4)) AS bar')
-
-    def test_function(self):
-        self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
-        self.assert_compile(func.current_time(), 'CURRENT_TIME')
-        self.assert_compile(func.foo(), 'foo()')
-        m = MetaData()
-        t = Table('sometable', m, Column('col1', Integer), Column('col2'
-                  , Integer))
-        self.assert_compile(select([func.max(t.c.col1)]),
-                            'SELECT max(sometable.col1) AS max_1 FROM '
-                            'sometable')
-
-    def test_function_overrides(self):
-        self.assert_compile(func.current_date(), "GETDATE()")
-        self.assert_compile(func.length(3), "LEN(:length_1)")
-
-    def test_extract(self):
-        t = table('t', column('col1'))
-
-        for field in 'day', 'month', 'year':
-            self.assert_compile(
-                select([extract(field, t.c.col1)]),
-                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
-
-    def test_update_returning(self):
-        table1 = table('mytable', column('myid', Integer), column('name'
-                       , String(128)), column('description',
-                       String(128)))
-        u = update(table1, values=dict(name='foo'
-                   )).returning(table1.c.myid, table1.c.name)
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=:name OUTPUT '
-                            'inserted.myid, inserted.name')
-        u = update(table1, values=dict(name='foo')).returning(table1)
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=:name OUTPUT '
-                            'inserted.myid, inserted.name, '
-                            'inserted.description')
-        u = update(table1, values=dict(name='foo'
-                   )).returning(table1).where(table1.c.name == 'bar')
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=:name OUTPUT '
-                            'inserted.myid, inserted.name, '
-                            'inserted.description WHERE mytable.name = '
-                            ':name_1')
-        u = update(table1, values=dict(name='foo'
-                   )).returning(func.length(table1.c.name))
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=:name OUTPUT '
-                            'LEN(inserted.name) AS length_1')
-
-    def test_delete_returning(self):
-        table1 = table('mytable', column('myid', Integer), column('name'
-                       , String(128)), column('description',
-                       String(128)))
-        d = delete(table1).returning(table1.c.myid, table1.c.name)
-        self.assert_compile(d,
-                            'DELETE FROM mytable OUTPUT deleted.myid, '
-                            'deleted.name')
-        d = delete(table1).where(table1.c.name == 'bar'
-                                 ).returning(table1.c.myid,
-                table1.c.name)
-        self.assert_compile(d,
-                            'DELETE FROM mytable OUTPUT deleted.myid, '
-                            'deleted.name WHERE mytable.name = :name_1')
-
-    def test_insert_returning(self):
-        table1 = table('mytable', column('myid', Integer), column('name'
-                       , String(128)), column('description',
-                       String(128)))
-        i = insert(table1, values=dict(name='foo'
-                   )).returning(table1.c.myid, table1.c.name)
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) OUTPUT '
-                            'inserted.myid, inserted.name VALUES '
-                            '(:name)')
-        i = insert(table1, values=dict(name='foo')).returning(table1)
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) OUTPUT '
-                            'inserted.myid, inserted.name, '
-                            'inserted.description VALUES (:name)')
-        i = insert(table1, values=dict(name='foo'
-                   )).returning(func.length(table1.c.name))
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) OUTPUT '
-                            'LEN(inserted.name) AS length_1 VALUES '
-                            '(:name)')
-
-    def test_limit_using_top(self):
-        t = table('t', column('x', Integer), column('y', Integer))
-
-        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10)
-
-        self.assert_compile(
-            s,
-            "SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
-            checkparams={u'x_1': 5}
-        )
-
-    def test_limit_zero_using_top(self):
-        t = table('t', column('x', Integer), column('y', Integer))
-
-        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0)
-
-        self.assert_compile(
-            s,
-            "SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
-            checkparams={u'x_1': 5}
-        )
-
-    def test_offset_using_window(self):
-        t = table('t', column('x', Integer), column('y', Integer))
-
-        s = select([t]).where(t.c.x==5).order_by(t.c.y).offset(20)
-
-        # test that the select is not altered with subsequent compile
-        # calls
-        for i in xrange(2):
-            self.assert_compile(
-                s,
-                "SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
-                "AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
-                "mssql_rn FROM t WHERE t.x = :x_1) AS "
-                "anon_1 WHERE mssql_rn > :mssql_rn_1",
-                checkparams={u'mssql_rn_1': 20, u'x_1': 5}
-            )
-
-    def test_limit_offset_using_window(self):
-        t = table('t', column('x', Integer), column('y', Integer))
-
-        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10).offset(20)
-
-        self.assert_compile(
-            s,
-            "SELECT anon_1.x, anon_1.y "
-            "FROM (SELECT t.x AS x, t.y AS y, "
-            "ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
-            "FROM t "
-            "WHERE t.x = :x_1) AS anon_1 "
-            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
-            checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
-        )
-
-    def test_limit_offset_with_correlated_order_by(self):
-        t1 = table('t1', column('x', Integer), column('y', Integer))
-        t2 = table('t2', column('x', Integer), column('y', Integer))
-
-        order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
-        s = select([t1]).where(t1.c.x == 5).order_by(order_by) \
-            .limit(10).offset(20)
-
-        self.assert_compile(
-            s,
-            "SELECT anon_1.x, anon_1.y "
-            "FROM (SELECT t1.x AS x, t1.y AS y, "
-            "ROW_NUMBER() OVER (ORDER BY "
-            "(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
-            ") AS mssql_rn "
-            "FROM t1 "
-            "WHERE t1.x = :x_1) AS anon_1 "
-            "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
-            checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
-        )
-
-    def test_limit_zero_offset_using_window(self):
-        t = table('t', column('x', Integer), column('y', Integer))
-
-        s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0).offset(0)
-
-        # render the LIMIT of zero, but not the OFFSET
-        # of zero, so produces TOP 0
-        self.assert_compile(
-            s,
-            "SELECT TOP 0 t.x, t.y FROM t "
-            "WHERE t.x = :x_1 ORDER BY t.y",
-            checkparams={u'x_1': 5}
-        )
-
-    def test_sequence_start_0(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
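-                    # Sequence('', 0): the start value of 0 carries
-                    # through to IDENTITY(0,1) in the DDL below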
-                    Column('id', Integer, Sequence('', 0), primary_key=True))
-        self.assert_compile(schema.CreateTable(tbl),
-                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
-                            "PRIMARY KEY (id))"
-                            )
-
-    def test_sequence_non_primary_key(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('id', Integer, Sequence(''), primary_key=False))
-        self.assert_compile(schema.CreateTable(tbl),
-                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
-                            )
-
-    def test_sequence_ignore_nullability(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('id', Integer, Sequence(''), nullable=True))
-        self.assert_compile(schema.CreateTable(tbl),
-                            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
-                            )
-
-    def test_index_clustering(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('id', Integer))
-        idx = Index("foo", tbl.c.id, mssql_clustered=True)
-        self.assert_compile(schema.CreateIndex(idx),
-                            "CREATE CLUSTERED INDEX foo ON test (id)"
-                            )
-
-    def test_index_ordering(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
-        idx = Index("foo", tbl.c.x.desc(), "y")
-        self.assert_compile(schema.CreateIndex(idx),
-                            "CREATE INDEX foo ON test (x DESC, y)"
-                            )
-
-    def test_index_extra_include_1(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
-        idx = Index("foo", tbl.c.x, mssql_include=['y'])
-        self.assert_compile(schema.CreateIndex(idx),
-                            "CREATE INDEX foo ON test (x) INCLUDE (y)"
-                            )
-
-    def test_index_extra_include_2(self):
-        metadata = MetaData()
-        tbl = Table('test', metadata,
-                    Column('x', Integer), Column('y', Integer), Column('z', Integer))
-        idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
-        self.assert_compile(schema.CreateIndex(idx),
-                            "CREATE INDEX foo ON test (x) INCLUDE (y)"
-                            )
-
-class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
-    """SQL server cannot reference schema-qualified tables in a SELECT statement, they
-    must be aliased.
-    """
-    __dialect__ = mssql.dialect()
-
-    def setup(self):
-        metadata = MetaData()
-        self.t1 = table('t1',
-            column('a', Integer),
-            column('b', String),
-            column('c', String),
-        )
-        self.t2 = Table(
-            't2', metadata,
-            Column("a", Integer),
-            Column("b", Integer),
-            Column("c", Integer),
-            schema='schema'
-        )
-
-    def test_result_map(self):
-        s = self.t2.select()
-        c = s.compile(dialect=self.__dialect__)
-        assert self.t2.c.a in set(c.result_map['a'][1])
-
-    def test_result_map_use_labels(self):
-        s = self.t2.select(use_labels=True)
-        c = s.compile(dialect=self.__dialect__)
-        assert self.t2.c.a in set(c.result_map['schema_t2_a'][1])
-
-    def test_straight_select(self):
-        self.assert_compile(self.t2.select(),
-            "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1"
-        )
-
-    def test_straight_select_use_labels(self):
-        self.assert_compile(
-            self.t2.select(use_labels=True),
-            "SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, "
-            "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1"
-        )
-
-    def test_join_to_schema(self):
-        t1, t2 = self.t1, self.t2
-        self.assert_compile(
-            t1.join(t2, t1.c.a==t2.c.a).select(),
-            "SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 "
-            "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a"
-        )
-
-    def test_union_schema_to_non(self):
-        t1, t2 = self.t1, self.t2
-        s = select([t2.c.a, t2.c.b]).apply_labels().\
-                union(
-                    select([t1.c.a, t1.c.b]).apply_labels()
-                ).alias().select()
-        self.assert_compile(
-            s,
-            "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
-            "(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b "
-            "FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, "
-            "t1.b AS t1_b FROM t1) AS anon_1"
-        )
-
-    def test_column_subquery_to_alias(self):
-        a1 = self.t2.alias('a1')
-        s = select([self.t2, select([a1.c.a]).as_scalar()])
-        self.assert_compile(
-            s,
-            "SELECT t2_1.a, t2_1.b, t2_1.c, "
-            "(SELECT a1.a FROM [schema].t2 AS a1) "
-            "AS anon_1 FROM [schema].t2 AS t2_1"
-        )
-
-class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
-    __only_on__ = 'mssql'
-    __dialect__ = mssql.MSDialect()
-
-    @classmethod
-    def setup_class(cls):
-        global metadata, cattable
-        metadata = MetaData(testing.db)
-
-        cattable = Table('cattable', metadata,
-            Column('id', Integer),
-            Column('description', String(50)),
-            PrimaryKeyConstraint('id', name='PK_cattable'),
-        )
-
-    def setup(self):
-        metadata.create_all()
-
-    def teardown(self):
-        metadata.drop_all()
-
-    def test_compiled(self):
-        self.assert_compile(cattable.insert().values(id=9,
-                            description='Python'),
-                            'INSERT INTO cattable (id, description) '
-                            'VALUES (:id, :description)')
-
-    def test_execute(self):
-        cattable.insert().values(id=9, description='Python').execute()
-
-        cats = cattable.select().order_by(cattable.c.id).execute()
-        eq_([(9, 'Python')], list(cats))
-
-        result = cattable.insert().values(description='PHP').execute()
-        eq_([10], result.inserted_primary_key)
-        lastcat = cattable.select().order_by(desc(cattable.c.id)).execute()
-        eq_((10, 'PHP'), lastcat.first())
-
-    def test_executemany(self):
-        cattable.insert().execute([{'id': 89, 'description': 'Python'},
-                                  {'id': 8, 'description': 'Ruby'},
-                                  {'id': 3, 'description': 'Perl'},
-                                  {'id': 1, 'description': 'Java'}])
-        cats = cattable.select().order_by(cattable.c.id).execute()
-        eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')],
-            list(cats))
-        cattable.insert().execute([{'description': 'PHP'},
-                                  {'description': 'Smalltalk'}])
-        lastcats = \
-            cattable.select().order_by(desc(cattable.c.id)).limit(2).execute()
-        eq_([(91, 'Smalltalk'), (90, 'PHP')], list(lastcats))
-
-
-class ReflectionTest(fixtures.TestBase, ComparesTables):
-    __only_on__ = 'mssql'
-
-    @testing.provide_metadata
-    def test_basic_reflection(self):
-        meta = self.metadata
-
-        users = Table(
-            'engine_users',
-            meta,
-            Column('user_id', types.INT, primary_key=True),
-            Column('user_name', types.VARCHAR(20), nullable=False),
-            Column('test1', types.CHAR(5), nullable=False),
-            Column('test2', types.Float(5), nullable=False),
-            Column('test3', types.Text),
-            Column('test4', types.Numeric, nullable=False),
-            Column('test5', types.DateTime),
-            Column('parent_user_id', types.Integer,
-                   ForeignKey('engine_users.user_id')),
-            Column('test6', types.DateTime, nullable=False),
-            Column('test7', types.Text),
-            Column('test8', types.LargeBinary),
-            Column('test_passivedefault2', types.Integer,
-                   server_default='5'),
-            Column('test9', types.BINARY(100)),
-            Column('test_numeric', types.Numeric()),
-            )
-
-        addresses = Table(
-            'engine_email_addresses',
-            meta,
-            Column('address_id', types.Integer, primary_key=True),
-            Column('remote_user_id', types.Integer,
-                   ForeignKey(users.c.user_id)),
-            Column('email_address', types.String(20)),
-            )
-        meta.create_all()
-
-        meta2 = MetaData()
-        reflected_users = Table('engine_users', meta2,
-                                autoload=True,
-                                autoload_with=testing.db)
-        reflected_addresses = Table('engine_email_addresses',
-                meta2, autoload=True, autoload_with=testing.db)
-        self.assert_tables_equal(users, reflected_users)
-        self.assert_tables_equal(addresses, reflected_addresses)
-
-    @testing.provide_metadata
-    def test_identity(self):
-        metadata = self.metadata
-        table = Table(
-            'identity_test', metadata,
-            Column('col1', Integer, Sequence('fred', 2, 3), primary_key=True)
-        )
-        table.create()
-
-        meta2 = MetaData(testing.db)
-        table2 = Table('identity_test', meta2, autoload=True)
-        sequence = isinstance(table2.c['col1'].default, schema.Sequence) \
-                                and table2.c['col1'].default
-        assert sequence.start == 2
-        assert sequence.increment == 3
-
-    @testing.emits_warning("Did not recognize")
-    @testing.provide_metadata
-    def test_skip_types(self):
-        metadata = self.metadata
-        testing.db.execute("""
-            create table foo (id integer primary key, data xml)
-        """)
-        t1 = Table('foo', metadata, autoload=True)
-        assert isinstance(t1.c.id.type, Integer)
-        assert isinstance(t1.c.data.type, types.NullType)
-
-
-    @testing.provide_metadata
-    def test_db_qualified_items(self):
-        metadata = self.metadata
-        Table('foo', metadata, Column('id', Integer, primary_key=True))
-        Table('bar', metadata,
-                Column('id', Integer, primary_key=True),
-                Column('foo_id', Integer, ForeignKey('foo.id', name="fkfoo"))
-            )
-        metadata.create_all()
-
-        dbname = testing.db.scalar("select db_name()")
-        owner = testing.db.scalar("SELECT user_name()")
-
-        inspector = inspect(testing.db)
-        bar_via_db = inspector.get_foreign_keys(
-                            "bar", schema="%s.%s" % (dbname, owner))
-        eq_(
-            bar_via_db,
-            [{
-                'referred_table': 'foo',
-                'referred_columns': ['id'],
-                'referred_schema': 'test.dbo',
-                'name': 'fkfoo',
-                'constrained_columns': ['foo_id']}]
-        )
-
-        assert testing.db.has_table("bar", schema="test.dbo")
-
-        m2 = MetaData()
-        Table('bar', m2, schema="test.dbo", autoload=True,
-                                autoload_with=testing.db)
-        eq_(m2.tables["test.dbo.foo"].schema, "test.dbo")
-
-
-    @testing.provide_metadata
-    def test_indexes_cols(self):
-        metadata = self.metadata
-
-        t1 = Table('t', metadata, Column('x', Integer), Column('y', Integer))
-        Index('foo', t1.c.x, t1.c.y)
-        metadata.create_all()
-
-        m2 = MetaData()
-        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
-
-        eq_(
-            set(list(t2.indexes)[0].columns),
-            set([t2.c['x'], t2.c.y])
-        )
-
-    @testing.provide_metadata
-    def test_indexes_cols_with_commas(self):
-        metadata = self.metadata
-
-        t1 = Table('t', metadata,
-                        Column('x, col', Integer, key='x'),
-                        Column('y', Integer)
-                    )
-        Index('foo', t1.c.x, t1.c.y)
-        metadata.create_all()
-
-        m2 = MetaData()
-        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
-
-        eq_(
-            set(list(t2.indexes)[0].columns),
-            set([t2.c['x, col'], t2.c.y])
-        )
-
-    @testing.provide_metadata
-    def test_indexes_cols_with_spaces(self):
-        metadata = self.metadata
-
-        t1 = Table('t', metadata, Column('x col', Integer, key='x'),
-                                    Column('y', Integer))
-        Index('foo', t1.c.x, t1.c.y)
-        metadata.create_all()
-
-        m2 = MetaData()
-        t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
-
-        eq_(
-            set(list(t2.indexes)[0].columns),
-            set([t2.c['x col'], t2.c.y])
-        )
-
-class QueryUnicodeTest(fixtures.TestBase):
-
-    __only_on__ = 'mssql'
-
-    def test_convert_unicode(self):
-        meta = MetaData(testing.db)
-        t1 = Table('unitest_table', meta, Column('id', Integer,
-                   primary_key=True), Column('descr',
-                   mssql.MSText(convert_unicode=True)))
-        meta.create_all()
-        con = testing.db.connect()
-
-        # encode in UTF-8 (string object) because this is the default
-        # dialect encoding
-
-        con.execute(u"insert into unitest_table values ('bien u\
-                    umang\xc3\xa9')".encode('UTF-8'))
-        try:
-            r = t1.select().execute().first()
-            assert isinstance(r[1], unicode), \
-                '%s is %s instead of unicode, working on %s' % (r[1],
-                    type(r[1]), meta.bind)
-        finally:
-            meta.drop_all()
-
-from sqlalchemy.testing.assertsql import ExactSQL
-class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
-    __only_on__ = 'mssql'
-
-    def test_fetchid_trigger(self):
-        """
-        Verify identity return value on inserting to a trigger table.
-
-        MSSQL's OUTPUT INSERTED clause does not work for the
-        case of a table having an identity (autoincrement)
-        primary key column, and which also has a trigger configured
-        to fire upon each insert and subsequently perform an
-        insert into a different table.
-
-        SQLAlchemy's MSSQL dialect by default will attempt to
-        use an OUTPUT INSERTED clause, which in this case will
-        raise the following error:
-
-        ProgrammingError: (ProgrammingError) ('42000', 334,
-        "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
-        target table 't1' of the DML statement cannot have any enabled
-        triggers if the statement contains an OUTPUT clause without
-        INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id
-        VALUES (?)' ('hello',)
-
-        This test verifies a workaround, which is to rely on the
-        older SCOPE_IDENTITY() call, which still works for this scenario.
-        To enable the workaround, the Table must be instantiated
-        with the init parameter 'implicit_returning = False'.
-        """
-
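-        # in outline, the workaround is simply:
-        #     Table('t1', meta, ..., implicit_returning=False)
-        # which causes the dialect to fall back to SELECT
-        # SCOPE_IDENTITY() instead of emitting an OUTPUT clause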
-        # TODO: this same test needs to be tried in a multithreaded context
-        #       with multiple threads inserting to the same table.
-        # TODO: check whether this error also occurs with clients other
-        #       than the SQL Server Native Client. Maybe an assert_raises
-        #       test should be written.
-        meta = MetaData(testing.db)
-        t1 = Table('t1', meta,
-                Column('id', Integer, Sequence('fred', 100, 1),
-                                primary_key=True),
-                Column('descr', String(200)),
-                # the following flag will prevent the
-                # MSSQLCompiler.returning_clause from getting called,
-                # though the ExecutionContext will still have a
-                # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
-                # hopefully be called instead.
-                implicit_returning=False
-                )
-        t2 = Table('t2', meta,
-                Column('id', Integer, Sequence('fred', 200, 1),
-                                primary_key=True),
-                Column('descr', String(200)))
-        meta.create_all()
-        con = testing.db.connect()
-        con.execute("""create trigger paj on t1 for insert as
-            insert into t2 (descr) select descr from inserted""")
-
-        try:
-            tr = con.begin()
-            r = con.execute(t2.insert(), descr='hello')
-            self.assert_(r.inserted_primary_key == [200])
-            r = con.execute(t1.insert(), descr='hello')
-            self.assert_(r.inserted_primary_key == [100])
-
-        finally:
-            tr.commit()
-            con.execute("""drop trigger paj""")
-            meta.drop_all()
-
-    @testing.provide_metadata
-    def test_disable_scope_identity(self):
-        engine = engines.testing_engine(options={"use_scope_identity":False})
-        metadata = self.metadata
-        metadata.bind = engine
-        t1 = Table('t1', metadata,
-                Column('id', Integer, primary_key=True),
-                implicit_returning=False
-        )
-        metadata.create_all()
-
-        self.assert_sql_execution(
-                testing.db,
-                lambda: engine.execute(t1.insert()),
-                ExactSQL("INSERT INTO t1 DEFAULT VALUES"),
-                # we don't have an event for the
-                # "SELECT @@IDENTITY" part here.
-                # this will be in 0.8 with #2459
-        )
-        assert not engine.dialect.use_scope_identity
-
-    def test_insertid_schema(self):
-        meta = MetaData(testing.db)
-        con = testing.db.connect()
-        con.execute('create schema paj')
-        tbl = Table('test', meta,
-                    Column('id', Integer, primary_key=True), schema='paj')
-        tbl.create()
-        try:
-            tbl.insert().execute({'id':1})
-        finally:
-            tbl.drop()
-            con.execute('drop schema paj')
-
-    def test_returning_no_autoinc(self):
-        meta = MetaData(testing.db)
-        table = Table('t1', meta, Column('id', Integer,
-                      primary_key=True), Column('data', String(50)))
-        table.create()
-        try:
-            result = table.insert().values(id=1,
-                    data=func.lower('SomeString'
-                    )).returning(table.c.id, table.c.data).execute()
-            eq_(result.fetchall(), [(1, 'somestring')])
-        finally:
-
-            # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs
-            # before the result is fetched
-
-            table.drop()
-
-    def test_delete_schema(self):
-        meta = MetaData(testing.db)
-        con = testing.db.connect()
-        con.execute('create schema paj')
-        tbl = Table('test', meta, Column('id', Integer,
-                    primary_key=True), schema='paj')
-        tbl.create()
-        try:
-            tbl.insert().execute({'id': 1})
-            tbl.delete(tbl.c.id == 1).execute()
-        finally:
-            tbl.drop()
-            con.execute('drop schema paj')
-
-    def test_insertid_reserved(self):
-        meta = MetaData(testing.db)
-        table = Table(
-            'select', meta,
-            Column('col', Integer, primary_key=True)
-        )
-        table.create()
-
-        meta2 = MetaData(testing.db)
-        try:
-            table.insert().execute(col=7)
-        finally:
-            table.drop()
-
-
-class Foo(object):
-    def __init__(self, **kw):
-        for k in kw:
-            setattr(self, k, kw[k])
-
-class GenerativeQueryTest(fixtures.TestBase):
-    __only_on__ = 'mssql'
-
-    @classmethod
-    def setup_class(cls):
-        global foo, metadata
-        metadata = MetaData(testing.db)
-        foo = Table('foo', metadata,
-                    Column('id', Integer, Sequence('foo_id_seq'),
-                           primary_key=True),
-                    Column('bar', Integer),
-                    Column('range', Integer))
-
-        mapper(Foo, foo)
-        metadata.create_all()
-
-        sess = create_session(bind=testing.db)
-        for i in range(100):
-            sess.add(Foo(bar=i, range=i%10))
-        sess.flush()
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-        clear_mappers()
-
-    def test_slice_mssql(self):
-        sess = create_session(bind=testing.db)
-        query = sess.query(Foo)
-        orig = query.all()
-        assert list(query[:10]) == orig[:10]
-        assert list(query[:10]) == orig[:10]
-
-
-class SchemaTest(fixtures.TestBase):
-
-    def setup(self):
-        t = Table('sometable', MetaData(),
-            Column('pk_column', Integer),
-            Column('test_column', String)
-        )
-        self.column = t.c.test_column
-
-        dialect = mssql.dialect()
-        self.ddl_compiler = dialect.ddl_compiler(dialect,
-                schema.CreateTable(t))
-
-    def _column_spec(self):
-        return self.ddl_compiler.get_column_specification(self.column)
-
-    def test_that_mssql_default_nullability_emits_null(self):
-        eq_("test_column VARCHAR(max) NULL", self._column_spec())
-
-    def test_that_mssql_none_nullability_does_not_emit_nullability(self):
-        self.column.nullable = None
-        eq_("test_column VARCHAR(max)", self._column_spec())
-
-    def test_that_mssql_specified_nullable_emits_null(self):
-        self.column.nullable = True
-        eq_("test_column VARCHAR(max) NULL", self._column_spec())
-
-    def test_that_mssql_specified_not_nullable_emits_not_null(self):
-        self.column.nullable = False
-        eq_("test_column VARCHAR(max) NOT NULL", self._column_spec())
-
-
-def full_text_search_missing():
-    """Test if full text search is not implemented and return False if
-    it is and True otherwise."""
-
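-    # probe by attempting to create a fulltext catalog; any failure is
-    # taken to mean the feature is unavailable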
-    connection = testing.db.connect()
-    try:
-        connection.execute('CREATE FULLTEXT CATALOG Catalog AS '
-                           'DEFAULT')
-        return False
-    except:
-        return True
-    finally:
-        connection.close()
-
-class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __only_on__ = 'mssql'
-    __skip_if__ = full_text_search_missing,
-
-    @classmethod
-    def setup_class(cls):
-        global metadata, cattable, matchtable
-        metadata = MetaData(testing.db)
-        cattable = Table('cattable', metadata, Column('id', Integer),
-                         Column('description', String(50)),
-                         PrimaryKeyConstraint('id', name='PK_cattable'))
-        matchtable = Table(
-            'matchtable',
-            metadata,
-            Column('id', Integer),
-            Column('title', String(200)),
-            Column('category_id', Integer, ForeignKey('cattable.id')),
-            PrimaryKeyConstraint('id', name='PK_matchtable'),
-            )
-        DDL("""CREATE FULLTEXT INDEX
-                       ON cattable (description)
-                       KEY INDEX PK_cattable""").execute_at('after-create'
-                , matchtable)
-        DDL("""CREATE FULLTEXT INDEX
-                       ON matchtable (title)
-                       KEY INDEX PK_matchtable""").execute_at('after-create'
-                , matchtable)
-        metadata.create_all()
-        cattable.insert().execute([
-            {'id': 1, 'description': 'Python'},
-            {'id': 2, 'description': 'Ruby'}])
-        matchtable.insert().execute([
-            {'id': 1, 'title': 'Agile Web Development with Rails',
-             'category_id': 2},
-            {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
-            {'id': 3, 'title': "Programming Matz's Ruby",
-             'category_id': 2},
-            {'id': 4, 'title': 'The Definitive Guide to Django',
-             'category_id': 1},
-            {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
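-        # give the asynchronous full text index population a moment to
-        # complete before the match tests run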
-        DDL("WAITFOR DELAY '00:00:05'"
-            ).execute(bind=engines.testing_engine())
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-        connection = testing.db.connect()
-        connection.execute("DROP FULLTEXT CATALOG Catalog")
-        connection.close()
-
-    def test_expression(self):
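-        # SQL Server full text predicates render as CONTAINS(), not
-        # MySQL's MATCH ... AGAINST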
-        self.assert_compile(matchtable.c.title.match('somstr'),
-                            'CONTAINS (matchtable.title, ?)')
-
-    def test_simple_match(self):
-        results = matchtable.select().where(
-            matchtable.c.title.match('python')).order_by(
-            matchtable.c.id).execute().fetchall()
-        eq_([2, 5], [r.id for r in results])
-
-    def test_simple_match_with_apostrophe(self):
-        results = matchtable.select().where(
-            matchtable.c.title.match("Matz's")).execute().fetchall()
-        eq_([3], [r.id for r in results])
-
-    def test_simple_prefix_match(self):
-        results = matchtable.select().where(
-            matchtable.c.title.match('"nut*"')).execute().fetchall()
-        eq_([5], [r.id for r in results])
-
-    def test_simple_inflectional_match(self):
-        results = matchtable.select().where(
-            matchtable.c.title.match(
-                'FORMSOF(INFLECTIONAL, "dives")')).execute().fetchall()
-        eq_([2], [r.id for r in results])
-
-    def test_or_match(self):
-        results1 = matchtable.select().where(
-            or_(matchtable.c.title.match('nutshell'),
-                matchtable.c.title.match('ruby'))).order_by(
-            matchtable.c.id).execute().fetchall()
-        eq_([3, 5], [r.id for r in results1])
-        results2 = matchtable.select().where(
-            matchtable.c.title.match('nutshell OR ruby')).order_by(
-            matchtable.c.id).execute().fetchall()
-        eq_([3, 5], [r.id for r in results2])
-
-    def test_and_match(self):
-        results1 = matchtable.select().where(
-            and_(matchtable.c.title.match('python'),
-                 matchtable.c.title.match('nutshell'))).execute().fetchall()
-        eq_([5], [r.id for r in results1])
-        results2 = matchtable.select().where(
-            matchtable.c.title.match(
-                'python AND nutshell')).execute().fetchall()
-        eq_([5], [r.id for r in results2])
-
-    def test_match_across_joins(self):
-        results = matchtable.select().where(
-            and_(cattable.c.id == matchtable.c.category_id,
-                 or_(cattable.c.description.match('Ruby'),
-                     matchtable.c.title.match('nutshell')))).order_by(
-            matchtable.c.id).execute().fetchall()
-        eq_([1, 3, 5], [r.id for r in results])
-
-
-class ParseConnectTest(fixtures.TestBase):
-
-    def test_pyodbc_connect_dsn_trusted(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url('mssql://mydsn')
-        connection = dialect.create_connect_args(u)
-        eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
-
-    def test_pyodbc_connect_old_style_dsn_trusted(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url('mssql:///?dsn=mydsn')
-        connection = dialect.create_connect_args(u)
-        eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
-
-    def test_pyodbc_connect_dsn_non_trusted(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url('mssql://username:password@mydsn')
-        connection = dialect.create_connect_args(u)
-        eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
-
-    def test_pyodbc_connect_dsn_extra(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url(
-            'mssql://username:password@mydsn/?LANGUAGE=us_english&foo=bar')
-        connection = dialect.create_connect_args(u)
-        dsn_string = connection[0][0]
-        assert ";LANGUAGE=us_english" in dsn_string
-        assert ";foo=bar" in dsn_string
-
-    def test_pyodbc_connect(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url('mssql://username:password@hostspec/database')
-        connection = dialect.create_connect_args(u)
-        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
-            'D=username;PWD=password'], {}], connection)
-
-    def test_pyodbc_connect_comma_port(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url(
-            'mssql://username:password@hostspec:12345/database')
-        connection = dialect.create_connect_args(u)
-        eq_([['DRIVER={SQL Server};Server=hostspec,12345;'
-            'Database=database;UID=username;PWD=password'], {}], connection)
-
-    def test_pyodbc_connect_config_port(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url(
-            'mssql://username:password@hostspec/database?port=12345')
-        connection = dialect.create_connect_args(u)
-        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;'
-            'UID=username;PWD=password;port=12345'], {}], connection)
-
-    def test_pyodbc_extra_connect(self):
-        dialect = pyodbc.dialect()
-        u = url.make_url(
-            'mssql://username:password@hostspec/database'
-            '?LANGUAGE=us_english&foo=bar')
-        connection = dialect.create_connect_args(u)
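-        # the extra query string arguments travel in a dict, so either
-        # ordering may appear in the rendered connection string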
-        eq_(connection[1], {})
-        eq_(connection[0][0]
-            in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
-            'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
-            'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
-            'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
-
-    def test_pyodbc_odbc_connect(self):
-        dialect = pyodbc.dialect()
-        u = \
-            url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
-                         '%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
-                         '%3BUID%3Dusername%3BPWD%3Dpassword')
-        connection = dialect.create_connect_args(u)
-        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
-            'D=username;PWD=password'], {}], connection)
-
-    def test_pyodbc_odbc_connect_with_dsn(self):
-        dialect = pyodbc.dialect()
-        u = \
-            url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
-                         '%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
-                         )
-        connection = dialect.create_connect_args(u)
-        eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
-            {}], connection)
-
-    def test_pyodbc_odbc_connect_ignores_other_values(self):
-        dialect = pyodbc.dialect()
-        u = \
-            url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
-                         'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
-                         '%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
-                         'rname%3BPWD%3Dpassword')
-        connection = dialect.create_connect_args(u)
-        eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
-            'D=username;PWD=password'], {}], connection)
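-
-    # Illustrative sketch (not part of the original suite): a raw ODBC
-    # connection string can be handed to odbc_connect by url-encoding
-    # it first, e.g. with the Python 2 stdlib:
-    #
-    #   import urllib
-    #   raw = ('DRIVER={SQL Server};Server=hostspec;'
-    #          'Database=database;UID=username;PWD=password')
-    #   url_str = 'mssql:///?odbc_connect=' + urllib.quote_plus(raw)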
-
-    def test_pymssql_port_setting(self):
-        dialect = pymssql.dialect()
-
-        u = \
-            url.make_url('mssql+pymssql://scott:tiger@somehost/test')
-        connection = dialect.create_connect_args(u)
-        eq_(
-            [[], {'host': 'somehost', 'password': 'tiger',
-                    'user': 'scott', 'database': 'test'}], connection
-        )
-
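-        # pymssql receives the port appended to the host value
-        # ("host:port") rather than as a separate connect argument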
-        u = \
-            url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
-        connection = dialect.create_connect_args(u)
-        eq_(
-            [[], {'host': 'somehost:5000', 'password': 'tiger',
-                    'user': 'scott', 'database': 'test'}], connection
-        )
-
-    def test_pymssql_disconnect(self):
-        dialect = pymssql.dialect()
-
-        for error in [
-                'Adaptive Server connection timed out',
-                'message 20003',
-                "Error 10054",
-                "Not connected to any MS SQL server",
-                "Connection is closed"
-                ]:
-            eq_(dialect.is_disconnect(error, None, None), True)
-
-        eq_(dialect.is_disconnect("not an error", None, None), False)
-
-    @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
-                            "FreeTDS specific test")
-    def test_bad_freetds_warning(self):
-        engine = engines.testing_engine()
-
-        def _bad_version(connection):
-            return 95, 10, 255
-
-        engine.dialect._get_server_version_info = _bad_version
-        assert_raises_message(exc.SAWarning,
-                              'Unrecognized server version info',
-                              engine.connect)
-
-class TimeTypeTest(fixtures.TestBase):
-
-    def test_result_processor_no_microseconds(self):
-        expected = datetime.time(12, 34, 56)
-        self._assert_result_processor(expected, '12:34:56')
-
-    def test_result_processor_too_many_microseconds(self):
-        # microsecond must be in 0..999999, should truncate (6 vs 7 digits)
-        expected = datetime.time(12, 34, 56, 123456)
-        self._assert_result_processor(expected, '12:34:56.1234567')
-
-    def _assert_result_processor(self, expected, value):
-        mssql_time_type = TIME()
-        result_processor = mssql_time_type.result_processor(None, None)
-        eq_(expected, result_processor(value))
-
-
-class TypeDDLTest(fixtures.TestBase):
-    def test_boolean(self):
-        "Exercise type specification for boolean type."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            (Boolean, [], {},
-             'BIT'),
-           ]
-
-        metadata = MetaData()
-        table_args = ['test_mssql_boolean', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(
-                Column('c%s' % index, type_(*args, **kw), nullable=None))
-
-        boolean_table = Table(*table_args)
-        dialect = mssql.dialect()
-        gen = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))
-
-        for col in boolean_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col),
-                           "%s %s" % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-
-
-    def test_numeric(self):
-        "Exercise type specification and options for numeric types."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            (types.NUMERIC, [], {},
-             'NUMERIC'),
-            (types.NUMERIC, [None], {},
-             'NUMERIC'),
-            (types.NUMERIC, [12, 4], {},
-             'NUMERIC(12, 4)'),
-
-            (types.Float, [], {},
-             'FLOAT'),
-            (types.Float, [None], {},
-             'FLOAT'),
-            (types.Float, [12], {},
-             'FLOAT(12)'),
-            (mssql.MSReal, [], {},
-             'REAL'),
-
-            (types.Integer, [], {},
-             'INTEGER'),
-            (types.BigInteger, [], {},
-             'BIGINT'),
-            (mssql.MSTinyInteger, [], {},
-             'TINYINT'),
-            (types.SmallInteger, [], {},
-             'SMALLINT'),
-           ]
-
-        metadata = MetaData()
-        table_args = ['test_mssql_numeric', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(
-                Column('c%s' % index, type_(*args, **kw), nullable=None))
-
-        numeric_table = Table(*table_args)
-        dialect = mssql.dialect()
-        gen = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))
-
-        for col in numeric_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col),
-                           "%s %s" % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-
-
-    def test_char(self):
-        """Exercise COLLATE-ish options on string types."""
-
-        columns = [
-            (mssql.MSChar, [], {},
-             'CHAR'),
-            (mssql.MSChar, [1], {},
-             'CHAR(1)'),
-            (mssql.MSChar, [1], {'collation': 'Latin1_General_CI_AS'},
-             'CHAR(1) COLLATE Latin1_General_CI_AS'),
-
-            (mssql.MSNChar, [], {},
-             'NCHAR'),
-            (mssql.MSNChar, [1], {},
-             'NCHAR(1)'),
-            (mssql.MSNChar, [1], {'collation': 'Latin1_General_CI_AS'},
-             'NCHAR(1) COLLATE Latin1_General_CI_AS'),
-
-            (mssql.MSString, [], {},
-             'VARCHAR(max)'),
-            (mssql.MSString, [1], {},
-             'VARCHAR(1)'),
-            (mssql.MSString, [1], {'collation': 'Latin1_General_CI_AS'},
-             'VARCHAR(1) COLLATE Latin1_General_CI_AS'),
-
-            (mssql.MSNVarchar, [], {},
-             'NVARCHAR(max)'),
-            (mssql.MSNVarchar, [1], {},
-             'NVARCHAR(1)'),
-            (mssql.MSNVarchar, [1], {'collation': 'Latin1_General_CI_AS'},
-             'NVARCHAR(1) COLLATE Latin1_General_CI_AS'),
-
-            (mssql.MSText, [], {},
-             'TEXT'),
-            (mssql.MSText, [], {'collation': 'Latin1_General_CI_AS'},
-             'TEXT COLLATE Latin1_General_CI_AS'),
-
-            (mssql.MSNText, [], {},
-             'NTEXT'),
-            (mssql.MSNText, [], {'collation': 'Latin1_General_CI_AS'},
-             'NTEXT COLLATE Latin1_General_CI_AS'),
-           ]
-
-        metadata = MetaData()
-        table_args = ['test_mssql_charset', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(
-                Column('c%s' % index, type_(*args, **kw), nullable=None))
-
-        charset_table = Table(*table_args)
-        dialect = mssql.dialect()
-        gen = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))
-
-        for col in charset_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col),
-                           "%s %s" % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-
-
-    def test_timestamp(self):
-        """Exercise TIMESTAMP column."""
-
-        dialect = mssql.dialect()
-
-        metadata = MetaData()
-        spec, expected = (TIMESTAMP, 'TIMESTAMP')
-        t = Table('mssql_ts', metadata,
-                   Column('id', Integer, primary_key=True),
-                   Column('t', spec, nullable=None))
-        gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
-        testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
-        self.assert_(repr(t.c.t))
-
-    def test_money(self):
-        """Exercise type specification for money types."""
-
-        columns = [(mssql.MSMoney, [], {}, 'MONEY'),
-                   (mssql.MSSmallMoney, [], {}, 'SMALLMONEY')]
-        metadata = MetaData()
-        table_args = ['test_mssql_money', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(Column('c%s' % index, type_(*args, **kw),
-                              nullable=None))
-        money_table = Table(*table_args)
-        dialect = mssql.dialect()
-        gen = dialect.ddl_compiler(dialect,
-                                   schema.CreateTable(money_table))
-        for col in money_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col), '%s %s'
-                        % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-
-    def test_binary(self):
-        "Exercise type specification for binary types."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            (mssql.MSBinary, [], {},
-             'BINARY'),
-            (mssql.MSBinary, [10], {},
-             'BINARY(10)'),
-
-            (types.BINARY, [], {},
-             'BINARY'),
-            (types.BINARY, [10], {},
-             'BINARY(10)'),
-
-            (mssql.MSVarBinary, [], {},
-             'VARBINARY(max)'),
-            (mssql.MSVarBinary, [10], {},
-             'VARBINARY(10)'),
-
-            (types.VARBINARY, [10], {},
-             'VARBINARY(10)'),
-            (types.VARBINARY, [], {},
-             'VARBINARY(max)'),
-
-            (mssql.MSImage, [], {},
-             'IMAGE'),
-
-            (mssql.IMAGE, [], {},
-             'IMAGE'),
-
-            (types.LargeBinary, [], {},
-             'IMAGE'),
-        ]
-
-        metadata = MetaData()
-        table_args = ['test_mssql_binary', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(Column('c%s' % index, type_(*args, **kw),
-                              nullable=None))
-        binary_table = Table(*table_args)
-        dialect = mssql.dialect()
-        gen = dialect.ddl_compiler(dialect,
-                                   schema.CreateTable(binary_table))
-        for col in binary_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col), '%s %s'
-                        % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-
-class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTables):
-    __only_on__ = 'mssql'
-
-    @classmethod
-    def setup_class(cls):
-        global metadata
-        metadata = MetaData(testing.db)
-
-    def teardown(self):
-        metadata.drop_all()
-
-    @testing.fails_on_everything_except('mssql+pyodbc',
-            'this is some pyodbc-specific feature')
-    def test_decimal_notation(self):
-        numeric_table = Table(
-            'numeric_table', metadata,
-            Column('id', Integer,
-                   Sequence('numeric_id_seq', optional=True),
-                   primary_key=True),
-            Column('numericcol',
-                   Numeric(precision=38, scale=20, asdecimal=True)))
-        metadata.create_all()
-        test_items = [decimal.Decimal(d) for d in (
-            '1500000.00000000000000000000',
-            '-1500000.00000000000000000000',
-            '1500000',
-            '0.0000000000000000002',
-            '0.2',
-            '-0.0000000000000000002',
-            '-2E-2',
-            '156666.458923543',
-            '-156666.458923543',
-            '1',
-            '-1',
-            '-1234',
-            '1234',
-            '2E-12',
-            '4E8',
-            '3E-6',
-            '3E-7',
-            '4.1',
-            '1E-1',
-            '1E-2',
-            '1E-3',
-            '1E-4',
-            '1E-5',
-            '1E-6',
-            '1E-7',
-            '1E-1',
-            '1E-8',
-            '0.2732E2',
-            '-0.2432E2',
-            '4.35656E2',
-            '-02452E-2',
-            '45125E-2',
-            '1234.58965E-2',
-            '1.521E+15',
-            '-1E-25',
-            '1E-25',
-            '1254E-25',
-            '-1203E-25',
-            '0',
-            '-0.00',
-            '-0',
-            '4585E12',
-            '000000000000000000012',
-            '000000000000.32E12',
-            '00000000000000.1E+12',
-            '000000000000.2E-32',
-            )]
-
-        for value in test_items:
-            numeric_table.insert().execute(numericcol=value)
-
-        for value in select([numeric_table.c.numericcol]).execute():
-            assert value[0] in test_items, "%r not in test_items" % value[0]
-
-    def test_float(self):
-        float_table = Table(
-            'float_table', metadata,
-            Column('id', Integer,
-                   Sequence('numeric_id_seq', optional=True),
-                   primary_key=True),
-            Column('floatcol', Float()))
-        metadata.create_all()
-        test_items = [float(d) for d in (
-            '1500000.00000000000000000000',
-            '-1500000.00000000000000000000',
-            '1500000',
-            '0.0000000000000000002',
-            '0.2',
-            '-0.0000000000000000002',
-            '156666.458923543',
-            '-156666.458923543',
-            '1',
-            '-1',
-            '1234',
-            '2E-12',
-            '4E8',
-            '3E-6',
-            '3E-7',
-            '4.1',
-            '1E-1',
-            '1E-2',
-            '1E-3',
-            '1E-4',
-            '1E-5',
-            '1E-6',
-            '1E-7',
-            '1E-8',
-            )]
-        for value in test_items:
-            float_table.insert().execute(floatcol=value)
-
-
-    # TODO: this should suppress warnings, but it does not
-    @emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
-    def test_dates(self):
-        "Exercise type specification for date types."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            (mssql.MSDateTime, [], {},
-             'DATETIME', []),
-
-            (types.DATE, [], {},
-             'DATE', ['>=', (10,)]),
-            (types.Date, [], {},
-             'DATE', ['>=', (10,)]),
-            (types.Date, [], {},
-             'DATETIME', ['<', (10,)], mssql.MSDateTime),
-            (mssql.MSDate, [], {},
-             'DATE', ['>=', (10,)]),
-            (mssql.MSDate, [], {},
-             'DATETIME', ['<', (10,)], mssql.MSDateTime),
-
-            (types.TIME, [], {},
-             'TIME', ['>=', (10,)]),
-            (types.Time, [], {},
-             'TIME', ['>=', (10,)]),
-            (mssql.MSTime, [], {},
-             'TIME', ['>=', (10,)]),
-            (mssql.MSTime, [1], {},
-             'TIME(1)', ['>=', (10,)]),
-            (types.Time, [], {},
-             'DATETIME', ['<', (10,)], mssql.MSDateTime),
-            (mssql.MSTime, [], {},
-             'TIME', ['>=', (10,)]),
-
-            (mssql.MSSmallDateTime, [], {},
-             'SMALLDATETIME', []),
-
-            (mssql.MSDateTimeOffset, [], {},
-             'DATETIMEOFFSET', ['>=', (10,)]),
-            (mssql.MSDateTimeOffset, [1], {},
-             'DATETIMEOFFSET(1)', ['>=', (10,)]),
-
-            (mssql.MSDateTime2, [], {},
-             'DATETIME2', ['>=', (10,)]),
-            (mssql.MSDateTime2, [1], {},
-             'DATETIME2(1)', ['>=', (10,)]),
-
-            ]
-
-        table_args = ['test_mssql_dates', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res, requires = spec[0:5]
-            if requires and testing._is_excluded('mssql', *requires) \
-                or not requires:
-                c = Column('c%s' % index, type_(*args,
-                                  **kw), nullable=None)
-                testing.db.dialect.type_descriptor(c.type)
-                table_args.append(c)
-        dates_table = Table(*table_args)
-        gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
-                schema.CreateTable(dates_table))
-        for col in dates_table.c:
-            index = int(col.name[1:])
-            testing.eq_(gen.get_column_specification(col), '%s %s'
-                        % (col.name, columns[index][3]))
-            self.assert_(repr(col))
-        dates_table.create(checkfirst=True)
-        reflected_dates = Table('test_mssql_dates',
-                                MetaData(testing.db), autoload=True)
-        for col in reflected_dates.c:
-            self.assert_types_base(col, dates_table.c[col.key])
-
-    def test_date_roundtrip(self):
-        t = Table('test_dates', metadata,
-                    Column('id', Integer,
-                           Sequence('datetest_id_seq', optional=True),
-                           primary_key=True),
-                    Column('adate', Date),
-                    Column('atime', Time),
-                    Column('adatetime', DateTime))
-        metadata.create_all()
-        d1 = datetime.date(2007, 10, 30)
-        t1 = datetime.time(11, 2, 32)
-        d2 = datetime.datetime(2007, 10, 30, 11, 2, 32)
-        t.insert().execute(adate=d1, adatetime=d2, atime=t1)
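-        # the second row passes a datetime where the date and time
-        # columns expect date/time values, exercising coercion on the
-        # bind side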
-        t.insert().execute(adate=d2, adatetime=d2, atime=d2)
-
-        x = t.select().execute().fetchall()[0]
-        self.assert_(x.adate.__class__ == datetime.date)
-        self.assert_(x.atime.__class__ == datetime.time)
-        self.assert_(x.adatetime.__class__ == datetime.datetime)
-
-        t.delete().execute()
-
-        t.insert().execute(adate=d1, adatetime=d2, atime=t1)
-
-        eq_(select([t.c.adate, t.c.atime, t.c.adatetime], t.c.adate
-            == d1).execute().fetchall(), [(d1, t1, d2)])
-
-    @emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
-    @testing.provide_metadata
-    def test_binary_reflection(self):
-        "Exercise type specification for binary types."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            (mssql.MSBinary, [], {},
-             'BINARY'),
-            (mssql.MSBinary, [10], {},
-             'BINARY(10)'),
-
-            (types.BINARY, [], {},
-             'BINARY'),
-            (types.BINARY, [10], {},
-             'BINARY(10)'),
-
-            (mssql.MSVarBinary, [], {},
-             'VARBINARY(max)'),
-            (mssql.MSVarBinary, [10], {},
-             'VARBINARY(10)'),
-
-            (types.VARBINARY, [10], {},
-             'VARBINARY(10)'),
-            (types.VARBINARY, [], {},
-             'VARBINARY(max)'),
-
-            (mssql.MSImage, [], {},
-             'IMAGE'),
-
-            (mssql.IMAGE, [], {},
-             'IMAGE'),
-
-            (types.LargeBinary, [], {},
-             'IMAGE'),
-        ]
-
-        metadata = self.metadata
-        table_args = ['test_mssql_binary', metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(Column('c%s' % index, type_(*args, **kw),
-                              nullable=None))
-        binary_table = Table(*table_args)
-        metadata.create_all()
-        reflected_binary = Table('test_mssql_binary',
-                                 MetaData(testing.db), autoload=True)
-        for col in reflected_binary.c:
-            c1 = testing.db.dialect.type_descriptor(col.type).__class__
-            c2 = \
-                testing.db.dialect.type_descriptor(
-                    binary_table.c[col.name].type).__class__
-            assert issubclass(c1, c2), '%r is not a subclass of %r' \
-                % (c1, c2)
-            if binary_table.c[col.name].type.length:
-                testing.eq_(col.type.length,
-                            binary_table.c[col.name].type.length)
-
-
-    def test_autoincrement(self):
-        Table('ai_1', metadata,
-               Column('int_y', Integer, primary_key=True),
-               Column('int_n', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False))
-        Table('ai_2', metadata,
-               Column('int_y', Integer, primary_key=True),
-               Column('int_n', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False))
-        Table('ai_3', metadata,
-               Column('int_n', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False),
-               Column('int_y', Integer, primary_key=True))
-        Table('ai_4', metadata,
-               Column('int_n', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False),
-               Column('int_n2', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False))
-        Table('ai_5', metadata,
-               Column('int_y', Integer, primary_key=True),
-               Column('int_n', Integer, DefaultClause('0'),
-                      primary_key=True, autoincrement=False))
-        Table('ai_6', metadata,
-               Column('o1', String(1), DefaultClause('x'),
-                      primary_key=True),
-               Column('int_y', Integer, primary_key=True))
-        Table('ai_7', metadata,
-               Column('o1', String(1), DefaultClause('x'),
-                      primary_key=True),
-               Column('o2', String(1), DefaultClause('x'),
-                      primary_key=True),
-               Column('int_y', Integer, primary_key=True))
-        Table('ai_8', metadata,
-               Column('o1', String(1), DefaultClause('x'),
-                      primary_key=True),
-               Column('o2', String(1), DefaultClause('x'),
-                      primary_key=True))
-        metadata.create_all()
-
-        table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
-                        'ai_5', 'ai_6', 'ai_7', 'ai_8']
-        mr = MetaData(testing.db)
-
-        for name in table_names:
-            tbl = Table(name, mr, autoload=True)
-            tbl = metadata.tables[name]
-            for c in tbl.c:
-                if c.name.startswith('int_y'):
-                    assert c.autoincrement, name
-                    assert tbl._autoincrement_column is c, name
-                elif c.name.startswith('int_n'):
-                    assert not c.autoincrement, name
-                    assert tbl._autoincrement_column is not c, name
-
-            # mxodbc can't handle scope_identity() with DEFAULT VALUES
-
-            if testing.db.driver == 'mxodbc':
-                eng = [
-                    engines.testing_engine(
-                        options={'implicit_returning': True})]
-            else:
-                eng = [
-                    engines.testing_engine(
-                        options={'implicit_returning': False}),
-                    engines.testing_engine(
-                        options={'implicit_returning': True})]
-
-            for counter, engine in enumerate(eng):
-                engine.execute(tbl.insert())
-                if 'int_y' in tbl.c:
-                    assert engine.scalar(select([tbl.c.int_y])) \
-                        == counter + 1
-                    assert list(engine.execute(tbl.select()).first()).\
-                            count(counter + 1) == 1
-                else:
-                    assert 1 \
-                        not in list(engine.execute(tbl.select()).first())
-                engine.execute(tbl.delete())
-
-class MonkeyPatchedBinaryTest(fixtures.TestBase):
-    __only_on__ = 'mssql+pymssql'
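-
-    # the dialect monkeypatches pymssql.Binary; these tests expect
-    # string and byte values to pass through unchanged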
-
-    def test_unicode(self):
-        module = __import__('pymssql')
-        result = module.Binary(u'foo')
-        eq_(result, u'foo')
-
-    def test_bytes(self):
-        module = __import__('pymssql')
-        input = b('\x80\x03]q\x00X\x03\x00\x00\x00oneq\x01a.')
-        expected_result = input
-        result = module.Binary(input)
-        eq_(result, expected_result)
-
-class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
-    """Test the Binary and VarBinary types"""
-
-    __only_on__ = 'mssql'
-
-    @classmethod
-    def setup_class(cls):
-        global binary_table, MyPickleType
-
-        class MyPickleType(types.TypeDecorator):
-            impl = PickleType
-
-            def process_bind_param(self, value, dialect):
-                if value:
-                    value.stuff = 'this is modified stuff'
-                return value
-
-            def process_result_value(self, value, dialect):
-                if value:
-                    value.stuff = 'this is the right stuff'
-                return value
-
-        binary_table = Table(
-            'binary_table',
-            MetaData(testing.db),
-            Column('primary_id', Integer, Sequence('binary_id_seq',
-                   optional=True), primary_key=True),
-            Column('data', mssql.MSVarBinary(8000)),
-            Column('data_image', mssql.MSImage),
-            Column('data_slice', types.BINARY(100)),
-            Column('misc', String(30)),
-            Column('pickled', PickleType),
-            Column('mypickle', MyPickleType),
-            )
-        binary_table.create()
-
-    def teardown(self):
-        binary_table.delete().execute()
-
-    @classmethod
-    def teardown_class(cls):
-        binary_table.drop()
-
-    def test_binary(self):
-        testobj1 = pickleable.Foo('im foo 1')
-        testobj2 = pickleable.Foo('im foo 2')
-        testobj3 = pickleable.Foo('im foo 3')
-        stream1 = self.load_stream('binary_data_one.dat')
-        stream2 = self.load_stream('binary_data_two.dat')
-        binary_table.insert().execute(
-            primary_id=1,
-            misc='binary_data_one.dat',
-            data=stream1,
-            data_image=stream1,
-            data_slice=stream1[0:100],
-            pickled=testobj1,
-            mypickle=testobj3,
-            )
-        binary_table.insert().execute(
-            primary_id=2,
-            misc='binary_data_two.dat',
-            data=stream2,
-            data_image=stream2,
-            data_slice=stream2[0:99],
-            pickled=testobj2,
-            )
-
-        # TODO: pyodbc does not seem to accept "None" for a VARBINARY
-        # column (data=None).  error:
-        #   [Microsoft][ODBC SQL Server Driver][SQL Server]Implicit
-        #   conversion from data type varchar to varbinary is not
-        #   allowed. Use the CONVERT function to run this query. (257)
-        #
-        # binary_table.insert().execute(
-        #     primary_id=3, misc='binary_data_two.dat', data=None,
-        #     data_image=None, data_slice=stream2[0:99], pickled=None)
-
-        binary_table.insert().execute(primary_id=3,
-                misc='binary_data_two.dat', data_image=None,
-                data_slice=stream2[0:99], pickled=None)
-        for stmt in \
-            binary_table.select(order_by=binary_table.c.primary_id), \
-            text('select * from binary_table order by '
-                 'binary_table.primary_id',
-                 typemap=dict(data=mssql.MSVarBinary(8000),
-                 data_image=mssql.MSImage,
-                 data_slice=types.BINARY(100), pickled=PickleType,
-                 mypickle=MyPickleType), bind=testing.db):
-            l = stmt.execute().fetchall()
-            eq_(list(stream1), list(l[0]['data']))
-            paddedstream = list(stream1[0:100])
-            paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
-            eq_(paddedstream, list(l[0]['data_slice']))
-            eq_(list(stream2), list(l[1]['data']))
-            eq_(list(stream2), list(l[1]['data_image']))
-            eq_(testobj1, l[0]['pickled'])
-            eq_(testobj2, l[1]['pickled'])
-            eq_(testobj3.moredata, l[0]['mypickle'].moredata)
-            eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
-
-    def load_stream(self, name, length=3000):
-        fp = open(os.path.join(os.path.dirname(__file__), "..", name), 'rb')
-        stream = fp.read(length)
-        fp.close()
-        return stream
-
-from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
-from sqlalchemy.dialects.mssql import base
-
-class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
-    def test_info_unicode_coercion(self):
-
-        dialect = mssql.dialect()
-        value = CoerceUnicode().bind_processor(dialect)('a string')
-        assert isinstance(value, unicode)
-
-    def test_info_unicode_cast_no_2000(self):
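-        # SQL Server 2000 has no NVARCHAR(max) type, so no CAST should
-        # be rendered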
-        dialect = mssql.dialect()
-        dialect.server_version_info = base.MS_2000_VERSION
-        stmt = tables.c.table_name == 'somename'
-        self.assert_compile(
-            stmt,
-            "[TABLES_1].[TABLE_NAME] = :TABLE_NAME_1",
-            dialect=dialect
-        )
-
-    def test_info_unicode_cast(self):
-        dialect = mssql.dialect()
-        dialect.server_version_info = base.MS_2005_VERSION
-        stmt = tables.c.table_name == 'somename'
-        self.assert_compile(
-            stmt,
-            "[TABLES_1].[TABLE_NAME] = CAST(:TABLE_NAME_1 AS NVARCHAR(max))",
-            dialect=dialect
-        )
-
-class ReflectHugeViewTest(fixtures.TestBase):
-    __only_on__ = 'mssql'
-
-    def setup(self):
-        self.col_num = 150
-
-        self.metadata = MetaData(testing.db)
-        t = Table('base_table', self.metadata,
-                *[
-                    Column("long_named_column_number_%d" % i, Integer)
-                    for i in xrange(self.col_num)
-                ]
-        )
-        self.view_str = view_str = \
-            "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
-            ",".join("long_named_column_number_%d" % i
-                        for i in xrange(self.col_num))
-            )
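-        # deliberately longer than 4000 characters, past the
-        # NVARCHAR(4000) boundary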
-        assert len(view_str) > 4000
-
-        event.listen(t, 'after_create', DDL(view_str))
-        event.listen(t, 'before_drop', DDL("DROP VIEW huge_named_view"))
-
-        self.metadata.create_all()
-
-    def teardown(self):
-        self.metadata.drop_all()
-
-    def test_inspect_view_definition(self):
-        inspector = Inspector.from_engine(testing.db)
-        view_def = inspector.get_view_definition("huge_named_view")
-        eq_(view_def, self.view_str)
-
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
deleted file mode 100644 (file)
index ab2a286..0000000
+++ /dev/null
@@ -1,1603 +0,0 @@
-# coding: utf-8
-
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
-
-
-from sqlalchemy import *
-from sqlalchemy import sql, exc, schema, types as sqltypes
-from sqlalchemy.dialects.mysql import base as mysql
-from sqlalchemy.engine.url import make_url
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
-from sqlalchemy import testing
-from sqlalchemy.testing import engines
-from sqlalchemy.testing.engines import utf8_engine
-import datetime
-
-class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __dialect__ = mysql.dialect()
-
-    def test_reserved_words(self):
-        table = Table("mysql_table", MetaData(),
-            Column("col1", Integer),
-            Column("master_ssl_verify_server_cert", Integer))
-        x = select([table.c.col1, table.c.master_ssl_verify_server_cert])
-
-        self.assert_compile(x,
-            "SELECT mysql_table.col1, "
-            "mysql_table.`master_ssl_verify_server_cert` FROM mysql_table")
-
-    def test_create_index_simple(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)))
-        idx = Index('test_idx1', tbl.c.data)
-
-        self.assert_compile(schema.CreateIndex(idx),
-            'CREATE INDEX test_idx1 ON testtbl (data)')
-
-    def test_create_index_with_length(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)))
-        idx1 = Index('test_idx1', tbl.c.data, mysql_length=10)
-        idx2 = Index('test_idx2', tbl.c.data, mysql_length=5)
-
-        self.assert_compile(schema.CreateIndex(idx1),
-            'CREATE INDEX test_idx1 ON testtbl (data(10))')
-        self.assert_compile(schema.CreateIndex(idx2),
-            'CREATE INDEX test_idx2 ON testtbl (data(5))')
-
-    def test_create_composite_index_with_length(self):
-        m = MetaData()
-        tbl = Table('testtbl', m,
-                    Column('a', String(255)),
-                    Column('b', String(255)))
-
-        idx1 = Index('test_idx1', tbl.c.a, tbl.c.b,
-                     mysql_length={'a': 10, 'b': 20})
-        idx2 = Index('test_idx2', tbl.c.a, tbl.c.b,
-                     mysql_length={'a': 15})
-        idx3 = Index('test_idx3', tbl.c.a, tbl.c.b,
-                     mysql_length=30)
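-        # a plain integer mysql_length applies to every column of the
-        # index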
-
-        self.assert_compile(
-            schema.CreateIndex(idx1),
-            'CREATE INDEX test_idx1 ON testtbl (a(10), b(20))'
-        )
-        self.assert_compile(
-            schema.CreateIndex(idx2),
-            'CREATE INDEX test_idx2 ON testtbl (a(15), b)'
-        )
-        self.assert_compile(
-            schema.CreateIndex(idx3),
-            'CREATE INDEX test_idx3 ON testtbl (a(30), b(30))'
-        )
-
-    def test_create_index_with_using(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)))
-        idx1 = Index('test_idx1', tbl.c.data, mysql_using='btree')
-        idx2 = Index('test_idx2', tbl.c.data, mysql_using='hash')
-
-        self.assert_compile(schema.CreateIndex(idx1),
-            'CREATE INDEX test_idx1 ON testtbl (data) USING btree')
-        self.assert_compile(schema.CreateIndex(idx2),
-            'CREATE INDEX test_idx2 ON testtbl (data) USING hash')
-
-    def test_create_pk_plain(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)),
-            PrimaryKeyConstraint('data'))
-
-        self.assert_compile(schema.CreateTable(tbl),
-            "CREATE TABLE testtbl (data VARCHAR(255), PRIMARY KEY (data))")
-
-    def test_create_pk_with_using(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)),
-            PrimaryKeyConstraint('data', mysql_using='btree'))
-
-        self.assert_compile(schema.CreateTable(tbl),
-            "CREATE TABLE testtbl (data VARCHAR(255), "
-            "PRIMARY KEY (data) USING btree)")
-
-    def test_skip_deferrable_kw(self):
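-        # MySQL doesn't support DEFERRABLE; the keyword should be
-        # omitted from the rendered FOREIGN KEY clause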
-        m = MetaData()
-        t1 = Table('t1', m, Column('id', Integer, primary_key=True))
-        t2 = Table('t2', m, Column('id', Integer,
-                        ForeignKey('t1.id', deferrable=True),
-                            primary_key=True))
-
-        self.assert_compile(
-            schema.CreateTable(t2),
-            "CREATE TABLE t2 (id INTEGER NOT NULL, "
-            "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id))"
-        )
-
-class DialectTest(fixtures.TestBase):
-    __only_on__ = 'mysql'
-
-    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
-                    'requires particular SSL arguments')
-    def test_ssl_arguments(self):
-        dialect = testing.db.dialect
-        kwarg = dialect.create_connect_args(
-            make_url("mysql://scott:tiger@localhost:3306/test"
-                "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem")
-        )[1]
-        # args that differ among mysqldb and oursql
-        for k in ('use_unicode', 'found_rows', 'client_flag'):
-            kwarg.pop(k, None)
-        eq_(
-            kwarg,
-            {
-                'passwd': 'tiger', 'db': 'test',
-                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
-                        'key': '/key.pem'},
-                'host': 'localhost', 'user': 'scott',
-                'port': 3306
-            }
-        )
-
-
-class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
-    "Test MySQL column types"
-
-    __dialect__ = mysql.dialect()
-
-    def test_numeric(self):
-        "Exercise type specification and options for numeric types."
-
-        columns = [
-            # column type, args, kwargs, expected ddl
-            # e.g. Column(Integer(10, unsigned=True)) ==
-            # 'INTEGER(10) UNSIGNED'
-            (mysql.MSNumeric, [], {},
-             'NUMERIC'),
-            (mysql.MSNumeric, [None], {},
-             'NUMERIC'),
-            (mysql.MSNumeric, [12], {},
-             'NUMERIC(12)'),
-            (mysql.MSNumeric, [12, 4], {'unsigned':True},
-             'NUMERIC(12, 4) UNSIGNED'),
-            (mysql.MSNumeric, [12, 4], {'zerofill':True},
-             'NUMERIC(12, 4) ZEROFILL'),
-            (mysql.MSNumeric, [12, 4], {'zerofill':True, 'unsigned':True},
-             'NUMERIC(12, 4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSDecimal, [], {},
-             'DECIMAL'),
-            (mysql.MSDecimal, [None], {},
-             'DECIMAL'),
-            (mysql.MSDecimal, [12], {},
-             'DECIMAL(12)'),
-            (mysql.MSDecimal, [12, None], {},
-             'DECIMAL(12)'),
-            (mysql.MSDecimal, [12, 4], {'unsigned':True},
-             'DECIMAL(12, 4) UNSIGNED'),
-            (mysql.MSDecimal, [12, 4], {'zerofill':True},
-             'DECIMAL(12, 4) ZEROFILL'),
-            (mysql.MSDecimal, [12, 4], {'zerofill':True, 'unsigned':True},
-             'DECIMAL(12, 4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSDouble, [None, None], {},
-             'DOUBLE'),
-            (mysql.MSDouble, [12, 4], {'unsigned':True},
-             'DOUBLE(12, 4) UNSIGNED'),
-            (mysql.MSDouble, [12, 4], {'zerofill':True},
-             'DOUBLE(12, 4) ZEROFILL'),
-            (mysql.MSDouble, [12, 4], {'zerofill':True, 'unsigned':True},
-             'DOUBLE(12, 4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSReal, [None, None], {},
-             'REAL'),
-            (mysql.MSReal, [12, 4], {'unsigned':True},
-             'REAL(12, 4) UNSIGNED'),
-            (mysql.MSReal, [12, 4], {'zerofill':True},
-             'REAL(12, 4) ZEROFILL'),
-            (mysql.MSReal, [12, 4], {'zerofill':True, 'unsigned':True},
-             'REAL(12, 4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSFloat, [], {},
-             'FLOAT'),
-            (mysql.MSFloat, [None], {},
-             'FLOAT'),
-            (mysql.MSFloat, [12], {},
-             'FLOAT(12)'),
-            (mysql.MSFloat, [12, 4], {},
-             'FLOAT(12, 4)'),
-            (mysql.MSFloat, [12, 4], {'unsigned':True},
-             'FLOAT(12, 4) UNSIGNED'),
-            (mysql.MSFloat, [12, 4], {'zerofill':True},
-             'FLOAT(12, 4) ZEROFILL'),
-            (mysql.MSFloat, [12, 4], {'zerofill':True, 'unsigned':True},
-             'FLOAT(12, 4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSInteger, [], {},
-             'INTEGER'),
-            (mysql.MSInteger, [4], {},
-             'INTEGER(4)'),
-            (mysql.MSInteger, [4], {'unsigned':True},
-             'INTEGER(4) UNSIGNED'),
-            (mysql.MSInteger, [4], {'zerofill':True},
-             'INTEGER(4) ZEROFILL'),
-            (mysql.MSInteger, [4], {'zerofill':True, 'unsigned':True},
-             'INTEGER(4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSBigInteger, [], {},
-             'BIGINT'),
-            (mysql.MSBigInteger, [4], {},
-             'BIGINT(4)'),
-            (mysql.MSBigInteger, [4], {'unsigned':True},
-             'BIGINT(4) UNSIGNED'),
-            (mysql.MSBigInteger, [4], {'zerofill':True},
-             'BIGINT(4) ZEROFILL'),
-            (mysql.MSBigInteger, [4], {'zerofill':True, 'unsigned':True},
-             'BIGINT(4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSMediumInteger, [], {},
-             'MEDIUMINT'),
-            (mysql.MSMediumInteger, [4], {},
-             'MEDIUMINT(4)'),
-            (mysql.MSMediumInteger, [4], {'unsigned':True},
-             'MEDIUMINT(4) UNSIGNED'),
-            (mysql.MSMediumInteger, [4], {'zerofill':True},
-             'MEDIUMINT(4) ZEROFILL'),
-            (mysql.MSMediumInteger, [4], {'zerofill':True, 'unsigned':True},
-             'MEDIUMINT(4) UNSIGNED ZEROFILL'),
-
-            (mysql.MSTinyInteger, [], {},
-             'TINYINT'),
-            (mysql.MSTinyInteger, [1], {},
-             'TINYINT(1)'),
-            (mysql.MSTinyInteger, [1], {'unsigned':True},
-             'TINYINT(1) UNSIGNED'),
-            (mysql.MSTinyInteger, [1], {'zerofill':True},
-             'TINYINT(1) ZEROFILL'),
-            (mysql.MSTinyInteger, [1], {'zerofill':True, 'unsigned':True},
-             'TINYINT(1) UNSIGNED ZEROFILL'),
-
-            (mysql.MSSmallInteger, [], {},
-             'SMALLINT'),
-            (mysql.MSSmallInteger, [4], {},
-             'SMALLINT(4)'),
-            (mysql.MSSmallInteger, [4], {'unsigned':True},
-             'SMALLINT(4) UNSIGNED'),
-            (mysql.MSSmallInteger, [4], {'zerofill':True},
-             'SMALLINT(4) ZEROFILL'),
-            (mysql.MSSmallInteger, [4], {'zerofill':True, 'unsigned':True},
-             'SMALLINT(4) UNSIGNED ZEROFILL'),
-           ]
-
-        for type_, args, kw, res in columns:
-            self.assert_compile(
-                type_(*args, **kw),
-                res
-            )
-
-    @testing.exclude('mysql', '<', (4, 1, 1), 'no charset support')
-    def test_charset(self):
-        """Exercise CHARACTER SET and COLLATE-ish options on string types."""
-
-        columns = [
-            (mysql.MSChar, [1], {},
-             'CHAR(1)'),
-             (mysql.NCHAR, [1], {},
-              'NATIONAL CHAR(1)'),
-            (mysql.MSChar, [1], {'binary':True},
-             'CHAR(1) BINARY'),
-            (mysql.MSChar, [1], {'ascii':True},
-             'CHAR(1) ASCII'),
-            (mysql.MSChar, [1], {'unicode':True},
-             'CHAR(1) UNICODE'),
-            (mysql.MSChar, [1], {'ascii':True, 'binary':True},
-             'CHAR(1) ASCII BINARY'),
-            (mysql.MSChar, [1], {'unicode':True, 'binary':True},
-             'CHAR(1) UNICODE BINARY'),
-            (mysql.MSChar, [1], {'charset':'utf8'},
-             'CHAR(1) CHARACTER SET utf8'),
-            (mysql.MSChar, [1], {'charset':'utf8', 'binary':True},
-             'CHAR(1) CHARACTER SET utf8 BINARY'),
-            (mysql.MSChar, [1], {'charset':'utf8', 'unicode':True},
-             'CHAR(1) CHARACTER SET utf8'),
-            (mysql.MSChar, [1], {'charset':'utf8', 'ascii':True},
-             'CHAR(1) CHARACTER SET utf8'),
-            (mysql.MSChar, [1], {'collation': 'utf8_bin'},
-             'CHAR(1) COLLATE utf8_bin'),
-            (mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin'},
-             'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
-            (mysql.MSChar, [1], {'charset': 'utf8', 'binary': True},
-             'CHAR(1) CHARACTER SET utf8 BINARY'),
-            (mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin',
-                              'binary': True},
-             'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
-            (mysql.MSChar, [1], {'national':True},
-             'NATIONAL CHAR(1)'),
-            (mysql.MSChar, [1], {'national':True, 'charset':'utf8'},
-             'NATIONAL CHAR(1)'),
-            (mysql.MSChar, [1], {'national':True, 'charset':'utf8',
-                                'binary':True},
-             'NATIONAL CHAR(1) BINARY'),
-            (mysql.MSChar, [1], {'national':True, 'binary':True,
-                                'unicode':True},
-             'NATIONAL CHAR(1) BINARY'),
-            (mysql.MSChar, [1], {'national':True, 'collation':'utf8_bin'},
-             'NATIONAL CHAR(1) COLLATE utf8_bin'),
-
-            (mysql.MSString, [1], {'charset':'utf8', 'collation':'utf8_bin'},
-             'VARCHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
-            (mysql.MSString, [1], {'national':True, 'collation':'utf8_bin'},
-             'NATIONAL VARCHAR(1) COLLATE utf8_bin'),
-
-            (mysql.MSTinyText, [], {'charset':'utf8', 'collation':'utf8_bin'},
-             'TINYTEXT CHARACTER SET utf8 COLLATE utf8_bin'),
-
-            (mysql.MSMediumText, [], {'charset':'utf8', 'binary':True},
-             'MEDIUMTEXT CHARACTER SET utf8 BINARY'),
-
-            (mysql.MSLongText, [], {'ascii':True},
-             'LONGTEXT ASCII'),
-
-            (mysql.ENUM, ["foo", "bar"], {'unicode':True},
-             '''ENUM('foo','bar') UNICODE'''),
-
-            (String, [20], {"collation":"utf8"}, 'VARCHAR(20) COLLATE utf8'),
-           ]
-
-        for type_, args, kw, res in columns:
-            self.assert_compile(
-                type_(*args, **kw),
-                res
-            )
-
-    @testing.only_if('mysql')
-    @testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
-    @testing.provide_metadata
-    def test_charset_collate_table(self):
-        t = Table('foo', self.metadata,
-            Column('id', Integer),
-            mysql_default_charset='utf8',
-            mysql_collate='utf8_unicode_ci'
-        )
-        t.create()
-        m2 = MetaData(testing.db)
-        t2 = Table('foo', m2, autoload=True)
-        eq_(t2.kwargs['mysql_collate'], 'utf8_unicode_ci')
-        eq_(t2.kwargs['mysql_default charset'], 'utf8')
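-        # note: the reflected option key really does contain a space
-        # ("default charset"), mirroring the SHOW CREATE TABLE keywords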
-
-    def test_bit_50(self):
-        """Exercise BIT types on 5.0+ (not valid for all engine types)"""
-
-        for type_, expected in [
-            (mysql.MSBit(), "BIT"),
-            (mysql.MSBit(1), "BIT(1)"),
-            (mysql.MSBit(63), "BIT(63)"),
-        ]:
-            self.assert_compile(type_, expected)
-
-    @testing.only_if('mysql')
-    @testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
-    @testing.fails_on('mysql+oursql', 'some round trips fail, oursql bug ?')
-    @testing.provide_metadata
-    def test_bit_50_roundtrip(self):
-        bit_table = Table('mysql_bits', self.metadata,
-                          Column('b1', mysql.MSBit),
-                          Column('b2', mysql.MSBit()),
-                          Column('b3', mysql.MSBit(), nullable=False),
-                          Column('b4', mysql.MSBit(1)),
-                          Column('b5', mysql.MSBit(8)),
-                          Column('b6', mysql.MSBit(32)),
-                          Column('b7', mysql.MSBit(63)),
-                          Column('b8', mysql.MSBit(64)))
-        self.metadata.create_all()
-
-        meta2 = MetaData(testing.db)
-        reflected = Table('mysql_bits', meta2, autoload=True)
-
-        for table in bit_table, reflected:
-
-            def roundtrip(store, expected=None):
-                expected = expected or store
-                table.insert(store).execute()
-                row = table.select().execute().first()
-                try:
-                    self.assert_(list(row) == expected)
-                except:
-                    print "Storing %s" % store
-                    print "Expected %s" % expected
-                    print "Found %s" % list(row)
-                    raise
-                table.delete().execute().close()
-
-            roundtrip([0] * 8)
-            roundtrip([None, None, 0, None, None, None, None, None])
-            roundtrip([1] * 8)
-            roundtrip([sql.text("b'1'")] * 8, [1] * 8)
-
-            i = 255
-            roundtrip([0, 0, 0, 0, i, i, i, i])
-            i = 2 ** 32 - 1
-            roundtrip([0, 0, 0, 0, 0, i, i, i])
-            i = 2 ** 63 - 1
-            roundtrip([0, 0, 0, 0, 0, 0, i, i])
-            i = 2 ** 64 - 1
-            roundtrip([0, 0, 0, 0, 0, 0, 0, i])
-
-    def test_boolean(self):
-        for type_, expected in [
-            (BOOLEAN(), "BOOL"),
-            (Boolean(), "BOOL"),
-            (mysql.TINYINT(1), "TINYINT(1)"),
-            (mysql.TINYINT(1, unsigned=True), "TINYINT(1) UNSIGNED")
-        ]:
-            self.assert_compile(type_, expected)
-
-    @testing.only_if('mysql')
-    @testing.provide_metadata
-    def test_boolean_roundtrip(self):
-        bool_table = Table(
-            'mysql_bool',
-            self.metadata,
-            Column('b1', BOOLEAN),
-            Column('b2', Boolean),
-            Column('b3', mysql.MSTinyInteger(1)),
-            Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
-            Column('b5', mysql.MSTinyInteger),
-            )
-        self.metadata.create_all()
-        table = bool_table
-
-        def roundtrip(store, expected=None):
-            expected = expected or store
-            table.insert(store).execute()
-            row = table.select().execute().first()
-            self.assert_(list(row) == expected)
-            for i, val in enumerate(expected):
-                if isinstance(val, bool):
-                    self.assert_(val is row[i])
-            table.delete().execute()
-
-        roundtrip([None, None, None, None, None])
-        roundtrip([True, True, 1, 1, 1])
-        roundtrip([False, False, 0, 0, 0])
-        roundtrip([True, True, True, True, True], [True, True, 1, 1, 1])
-        roundtrip([False, False, 0, 0, 0], [False, False, 0, 0, 0])
-
-        meta2 = MetaData(testing.db)
-        table = Table('mysql_bool', meta2, autoload=True)
-        eq_(colspec(table.c.b3), 'b3 TINYINT(1)')
-        eq_(colspec(table.c.b4), 'b4 TINYINT(1) UNSIGNED')
-        meta2 = MetaData(testing.db)
-        table = Table(
-            'mysql_bool',
-            meta2,
-            Column('b1', BOOLEAN),
-            Column('b2', Boolean),
-            Column('b3', BOOLEAN),
-            Column('b4', BOOLEAN),
-            autoload=True,
-            )
-        eq_(colspec(table.c.b3), 'b3 BOOL')
-        eq_(colspec(table.c.b4), 'b4 BOOL')
-        roundtrip([None, None, None, None, None])
-        roundtrip([True, True, 1, 1, 1], [True, True, True, True, 1])
-        roundtrip([False, False, 0, 0, 0], [False, False, False, False, 0])
-        roundtrip([True, True, True, True, True], [True, True, True, True, 1])
-        roundtrip([False, False, 0, 0, 0], [False, False, False, False, 0])
-
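An aside on the type equivalence the boolean tests above rely on: MySQL's BOOL and BOOLEAN are aliases for TINYINT(1), which is why a column created as BOOL reflects back as TINYINT(1) unless the reflecting Table overrides the column type, exactly as the second half of test_boolean_roundtrip does. A minimal sketch using the public type-compilation API of this era:

    from sqlalchemy import Boolean
    from sqlalchemy.dialects import mysql

    # Boolean renders as BOOL in DDL...
    print(Boolean().compile(dialect=mysql.dialect()))        # BOOL
    # ...but server-side, BOOL is just TINYINT(1)
    print(mysql.TINYINT(1).compile(dialect=mysql.dialect())) # TINYINT(1)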
-    def test_timestamp(self):
-        """Exercise funky TIMESTAMP default syntax."""
-
-        columns = [
-            ([TIMESTAMP],
-             'TIMESTAMP NULL'),
-            ([mysql.MSTimeStamp],
-             'TIMESTAMP NULL'),
-            ([mysql.MSTimeStamp,
-              DefaultClause(sql.text('CURRENT_TIMESTAMP'))],
-             "TIMESTAMP DEFAULT CURRENT_TIMESTAMP"),
-            ([mysql.MSTimeStamp,
-              DefaultClause(sql.text("'1999-09-09 09:09:09'"))],
-             "TIMESTAMP DEFAULT '1999-09-09 09:09:09'"),
-            ([mysql.MSTimeStamp,
-              DefaultClause(sql.text("'1999-09-09 09:09:09' "
-                                      "ON UPDATE CURRENT_TIMESTAMP"))],
-             "TIMESTAMP DEFAULT '1999-09-09 09:09:09' "
-             "ON UPDATE CURRENT_TIMESTAMP"),
-            ([mysql.MSTimeStamp,
-              DefaultClause(sql.text("CURRENT_TIMESTAMP "
-                                      "ON UPDATE CURRENT_TIMESTAMP"))],
-             "TIMESTAMP DEFAULT CURRENT_TIMESTAMP "
-             "ON UPDATE CURRENT_TIMESTAMP"),
-            ]
-        for spec, expected in columns:
-            c = Column('t', *spec)
-            Table('t', MetaData(), c)
-            self.assert_compile(
-                schema.CreateColumn(c),
-                "t %s" % expected
-
-            )
-
-    @testing.only_if('mysql')
-    @testing.provide_metadata
-    def test_timestamp_nullable(self):
-        ts_table = Table('mysql_timestamp', self.metadata,
-                            Column('t1', TIMESTAMP),
-                            Column('t2', TIMESTAMP, nullable=False),
-                    )
-        self.metadata.create_all()
-
-        now = testing.db.execute("select now()").scalar()
-
-        # TIMESTAMP without NULL inserts current time when passed
-        # NULL.  when not passed, generates 0000-00-00 quite
-        # annoyingly.
-        ts_table.insert().execute({'t1': now, 't2': None})
-        ts_table.insert().execute({'t1': None, 't2': None})
-
-        # normalize dates that are over the second boundary
-        def normalize(dt):
-            if dt is None:
-                return None
-            elif (dt - now).seconds < 5:
-                return now
-            else:
-                return dt
-        eq_(
-            [tuple([normalize(dt) for dt in row])
-            for row in ts_table.select().execute()],
-            [(now, now), (None, now)]
-        )
-
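The implicit-default behavior exercised by test_timestamp_nullable is a MySQL server behavior, not a SQLAlchemy one: inserting NULL into a TIMESTAMP column declared NOT NULL stores the current time rather than failing, while a TIMESTAMP NULL column keeps the NULL. A minimal sketch against a live server, assuming a reachable MySQL database (the URL and table name here are hypothetical):

    from sqlalchemy import create_engine

    engine = create_engine("mysql+mysqldb://scott:tiger@localhost/test")
    conn = engine.connect()
    conn.execute("CREATE TABLE ts_demo (t1 TIMESTAMP NULL, "
                 "t2 TIMESTAMP NOT NULL)")
    # NULL into the NOT NULL column becomes the current time;
    # NULL into the nullable column stays NULL
    conn.execute("INSERT INTO ts_demo (t1, t2) VALUES (NULL, NULL)")
    print(conn.execute("SELECT t1, t2 FROM ts_demo").fetchall())
    conn.close()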
-    def test_time(self):
-        """"Exercise TIME."""
-
-        self.assert_compile(
-                mysql.TIME(),
-                "TIME"
-        )
-
-        self.assert_compile(
-                mysql.TIME(fsp=5),
-                "TIME(5)"
-        )
-
-        eq_(
-            mysql.TIME().result_processor(None, None)(
-                    datetime.timedelta(seconds=35, minutes=517,
-                            microseconds=450
-                    )),
-            datetime.time(8, 37, 35, 450)
-        )
-
-    @testing.only_if('mysql')
-    @testing.provide_metadata
-    def test_year(self):
-        """Exercise YEAR."""
-
-        year_table = Table('mysql_year', self.metadata,
-                           Column('y1', mysql.MSYear),
-                           Column('y2', mysql.MSYear),
-                           Column('y3', mysql.MSYear),
-                           Column('y4', mysql.MSYear(2)),
-                           Column('y5', mysql.MSYear(4)))
-
-        for col in year_table.c:
-            self.assert_(repr(col))
-        year_table.create()
-        reflected = Table('mysql_year', MetaData(testing.db),
-                          autoload=True)
-
-        for table in year_table, reflected:
-            table.insert(['1950', '50', None, 50, 1950]).execute()
-            row = table.select().execute().first()
-            eq_(list(row), [1950, 2050, None, 50, 1950])
-            table.delete().execute()
-            self.assert_(colspec(table.c.y1).startswith('y1 YEAR'))
-            eq_(colspec(table.c.y4), 'y4 YEAR(2)')
-            eq_(colspec(table.c.y5), 'y5 YEAR(4)')
-
-    @testing.only_if('mysql')
-    @testing.provide_metadata
-    def test_set(self):
-        """Exercise the SET type."""
-
-        set_table = Table('mysql_set', self.metadata,
-                          Column('s1', mysql.MSSet("'dq'", "'sq'")),
-                          Column('s2', mysql.MSSet("'a'")),
-                          Column('s3', mysql.MSSet("'5'", "'7'", "'9'")))
-        eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
-        eq_(colspec(set_table.c.s2), "s2 SET('a')")
-        eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
-        set_table.create()
-        reflected = Table('mysql_set', MetaData(testing.db),
-                          autoload=True)
-        for table in set_table, reflected:
-
-            def roundtrip(store, expected=None):
-                expected = expected or store
-                table.insert(store).execute()
-                row = table.select().execute().first()
-                self.assert_(list(row) == expected)
-                table.delete().execute()
-
-            roundtrip([None, None, None], [None] * 3)
-            roundtrip(['', '', ''], [set([''])] * 3)
-            roundtrip([set(['dq']), set(['a']), set(['5'])])
-            roundtrip(['dq', 'a', '5'],
-                      [set(['dq']), set(['a']), set(['5'])])
-            roundtrip([1, 1, 1],
-                      [set(['dq']), set(['a']), set(['5'])])
-            roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'])])
-        set_table.insert().execute({'s3': set(['5'])},
-                {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
-                {'s3': set(['7', '9'])})
-        rows = select([set_table.c.s3],
-                      set_table.c.s3.in_([set(['5']), set(['5', '7']),
-                                          set(['7', '5'])])).execute().fetchall()
-        found = set([frozenset(row[0]) for row in rows])
-        eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
-
-class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
-
-    __only_on__ = 'mysql'
-    __dialect__ = mysql.dialect()
-
-
-    @testing.uses_deprecated('Manually quoting ENUM value literals')
-    @testing.provide_metadata
-    def test_enum(self):
-        """Exercise the ENUM type."""
-
-        enum_table = Table('mysql_enum', self.metadata,
-            Column('e1', mysql.ENUM("'a'", "'b'")),
-            Column('e2', mysql.ENUM("'a'", "'b'"),
-                   nullable=False),
-            Column('e2generic', Enum("a", "b"),
-                  nullable=False),
-            Column('e3', mysql.ENUM("'a'", "'b'", strict=True)),
-            Column('e4', mysql.ENUM("'a'", "'b'", strict=True),
-                   nullable=False),
-            Column('e5', mysql.ENUM("a", "b")),
-            Column('e5generic', Enum("a", "b")),
-            Column('e6', mysql.ENUM("'a'", "b")),
-            )
-
-        eq_(colspec(enum_table.c.e1),
-                       "e1 ENUM('a','b')")
-        eq_(colspec(enum_table.c.e2),
-                       "e2 ENUM('a','b') NOT NULL")
-        eq_(colspec(enum_table.c.e2generic),
-                      "e2generic ENUM('a','b') NOT NULL")
-        eq_(colspec(enum_table.c.e3),
-                       "e3 ENUM('a','b')")
-        eq_(colspec(enum_table.c.e4),
-                       "e4 ENUM('a','b') NOT NULL")
-        eq_(colspec(enum_table.c.e5),
-                       "e5 ENUM('a','b')")
-        eq_(colspec(enum_table.c.e5generic),
-                      "e5generic ENUM('a','b')")
-        eq_(colspec(enum_table.c.e6),
-                       "e6 ENUM('''a''','b')")
-        enum_table.create()
-
-        assert_raises(exc.DBAPIError, enum_table.insert().execute,
-                        e1=None, e2=None, e3=None, e4=None)
-
-        assert_raises(exc.StatementError, enum_table.insert().execute,
-                                        e1='c', e2='c', e2generic='c', e3='c',
-                                        e4='c', e5='c', e5generic='c', e6='c')
-
-        enum_table.insert().execute()
-        enum_table.insert().execute(e1='a', e2='a', e2generic='a', e3='a',
-                                    e4='a', e5='a', e5generic='a', e6="'a'")
-        enum_table.insert().execute(e1='b', e2='b', e2generic='b', e3='b',
-                                    e4='b', e5='b', e5generic='b', e6='b')
-
-        res = enum_table.select().execute().fetchall()
-
-        expected = [(None, 'a', 'a', None, 'a', None, None, None),
-                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
-                    ('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
-
-        eq_(res, expected)
-
-    def test_unicode_enum(self):
-        unicode_engine = utf8_engine()
-        metadata = MetaData(unicode_engine)
-        t1 = Table('table', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('value', Enum(u'réveillé', u'drôle', u'S’il')),
-            Column('value2', mysql.ENUM(u'réveillé', u'drôle', u'S’il'))
-        )
-        metadata.create_all()
-        try:
-            t1.insert().execute(value=u'drôle', value2=u'drôle')
-            t1.insert().execute(value=u'réveillé', value2=u'réveillé')
-            t1.insert().execute(value=u'S’il', value2=u'S’il')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-                [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
-                            (3, u'S’il', u'S’il')]
-            )
-
-            # test reflection of the enum labels
-
-            m2 = MetaData(testing.db)
-            t2 = Table('table', m2, autoload=True)
-
-            # TODO: what's wrong with the last element ?  is there
-            # latin-1 stuff forcing its way in ?
-
-            assert t2.c.value.type.enums[0:2] == \
-                    (u'réveillé', u'drôle')  # u'S’il') # eh ?
-
-            assert t2.c.value2.type.enums[0:2] == \
-                    (u'réveillé', u'drôle')  # u'S’il') # eh ?
-        finally:
-            metadata.drop_all()
-
-    def test_enum_compile(self):
-        e1 = Enum('x', 'y', 'z', name='somename')
-        t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
-        self.assert_compile(schema.CreateTable(t1),
-                            "CREATE TABLE sometable (somecolumn "
-                            "ENUM('x','y','z'))")
-        t1 = Table('sometable', MetaData(), Column('somecolumn',
-                   Enum('x', 'y', 'z', native_enum=False)))
-        self.assert_compile(schema.CreateTable(t1),
-                            "CREATE TABLE sometable (somecolumn "
-                            "VARCHAR(1), CHECK (somecolumn IN ('x', "
-                            "'y', 'z')))")
-
-    @testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''")
-    @testing.uses_deprecated('Manually quoting ENUM value literals')
-    def test_enum_parse(self):
-        """More exercises for the ENUM type."""
-
-        # MySQL 3.23 can't handle an ENUM of ''....
-
-        enum_table = Table('mysql_enum', MetaData(testing.db),
-            Column('e1', mysql.ENUM("'a'")),
-            Column('e2', mysql.ENUM("''")),
-            Column('e3', mysql.ENUM('a')),
-            Column('e4', mysql.ENUM('')),
-            Column('e5', mysql.ENUM("'a'", "''")),
-            Column('e6', mysql.ENUM("''", "'a'")),
-            Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''")))
-
-        for col in enum_table.c:
-            self.assert_(repr(col))
-        try:
-            enum_table.create()
-            reflected = Table('mysql_enum', MetaData(testing.db),
-                              autoload=True)
-            for t in enum_table, reflected:
-                eq_(t.c.e1.type.enums, ("a",))
-                eq_(t.c.e2.type.enums, ("",))
-                eq_(t.c.e3.type.enums, ("a",))
-                eq_(t.c.e4.type.enums, ("",))
-                eq_(t.c.e5.type.enums, ("a", ""))
-                eq_(t.c.e6.type.enums, ("", "a"))
-                eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
-        finally:
-            enum_table.drop()
-
-class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
-
-    __only_on__ = 'mysql'
-
-    def test_default_reflection(self):
-        """Test reflection of column defaults."""
-
-        from sqlalchemy.dialects.mysql import VARCHAR
-        def_table = Table(
-            'mysql_def',
-            MetaData(testing.db),
-            Column('c1', VARCHAR(10, collation='utf8_unicode_ci'),
-                   DefaultClause(''), nullable=False),
-            Column('c2', String(10), DefaultClause('0')),
-            Column('c3', String(10), DefaultClause('abc')),
-            Column('c4', TIMESTAMP,
-                   DefaultClause('2009-04-05 12:00:00')),
-            Column('c5', TIMESTAMP),
-            Column('c6', TIMESTAMP,
-                   DefaultClause(sql.text("CURRENT_TIMESTAMP "
-                                          "ON UPDATE CURRENT_TIMESTAMP"))),
-            )
-        def_table.create()
-        try:
-            reflected = Table('mysql_def', MetaData(testing.db),
-                              autoload=True)
-        finally:
-            def_table.drop()
-        assert def_table.c.c1.server_default.arg == ''
-        assert def_table.c.c2.server_default.arg == '0'
-        assert def_table.c.c3.server_default.arg == 'abc'
-        assert def_table.c.c4.server_default.arg \
-            == '2009-04-05 12:00:00'
-        assert str(reflected.c.c1.server_default.arg) == "''"
-        assert str(reflected.c.c2.server_default.arg) == "'0'"
-        assert str(reflected.c.c3.server_default.arg) == "'abc'"
-        assert str(reflected.c.c4.server_default.arg) \
-            == "'2009-04-05 12:00:00'"
-        assert reflected.c.c5.default is None
-        assert reflected.c.c5.server_default is None
-        assert reflected.c.c6.default is None
-        eq_(
-            str(reflected.c.c6.server_default.arg).upper(),
-            "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
-        )
-        reflected.create()
-        try:
-            reflected2 = Table('mysql_def', MetaData(testing.db),
-                               autoload=True)
-        finally:
-            reflected.drop()
-        assert str(reflected2.c.c1.server_default.arg) == "''"
-        assert str(reflected2.c.c2.server_default.arg) == "'0'"
-        assert str(reflected2.c.c3.server_default.arg) == "'abc'"
-        assert str(reflected2.c.c4.server_default.arg) \
-            == "'2009-04-05 12:00:00'"
-        assert reflected2.c.c5.default is None
-        assert reflected2.c.c5.server_default is None
-        assert reflected2.c.c6.default is None
-        eq_(
-            str(reflected2.c.c6.server_default.arg).upper(),
-            "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
-        )
-
-    def test_reflection_with_table_options(self):
-        comment = r"""Comment types type speedily ' " \ '' Fun!"""
-
-        def_table = Table('mysql_def', MetaData(testing.db),
-            Column('c1', Integer()),
-            mysql_engine='MEMORY',
-            mysql_comment=comment,
-            mysql_default_charset='utf8',
-            mysql_auto_increment='5',
-            mysql_avg_row_length='3',
-            mysql_password='secret',
-            mysql_connection='fish',
-        )
-
-        def_table.create()
-        try:
-            reflected = Table('mysql_def', MetaData(testing.db),
-                          autoload=True)
-        finally:
-            def_table.drop()
-
-        assert def_table.kwargs['mysql_engine'] == 'MEMORY'
-        assert def_table.kwargs['mysql_comment'] == comment
-        assert def_table.kwargs['mysql_default_charset'] == 'utf8'
-        assert def_table.kwargs['mysql_auto_increment'] == '5'
-        assert def_table.kwargs['mysql_avg_row_length'] == '3'
-        assert def_table.kwargs['mysql_password'] == 'secret'
-        assert def_table.kwargs['mysql_connection'] == 'fish'
-
-        assert reflected.kwargs['mysql_engine'] == 'MEMORY'
-        assert reflected.kwargs['mysql_comment'] == comment
-        assert reflected.kwargs['mysql_default charset'] == 'utf8'
-        assert reflected.kwargs['mysql_avg_row_length'] == '3'
-        assert reflected.kwargs['mysql_connection'] == 'fish'
-
-        # This field doesn't seem to be returned by mysql itself.
-        #assert reflected.kwargs['mysql_password'] == 'secret'
-
-        # This is explicitly ignored when reflecting schema.
-        #assert reflected.kwargs['mysql_auto_increment'] == '5'
-
-    def test_reflection_on_include_columns(self):
-        """Test reflection of include_columns to be sure they respect case."""
-
-        case_table = Table('mysql_case', MetaData(testing.db),
-            Column('c1', String(10)),
-            Column('C2', String(10)),
-            Column('C3', String(10)))
-
-        try:
-            case_table.create()
-            reflected = Table('mysql_case', MetaData(testing.db),
-                              autoload=True, include_columns=['c1', 'C2'])
-            for t in case_table, reflected:
-                assert 'c1' in t.c.keys()
-                assert 'C2' in t.c.keys()
-            reflected2 = Table('mysql_case', MetaData(testing.db),
-                              autoload=True, include_columns=['c1', 'c2'])
-            assert 'c1' in reflected2.c.keys()
-            for c in ['c2', 'C2', 'C3']:
-                assert c not in reflected2.c.keys()
-        finally:
-            case_table.drop()
-
-    @testing.exclude('mysql', '<', (5, 0, 0), 'early types are squirrely')
-    @testing.uses_deprecated('Using String type with no length')
-    @testing.uses_deprecated('Manually quoting ENUM value literals')
-    def test_type_reflection(self):
-        # (ask_for, roundtripped_as_if_different)
-        specs = [( String(1), mysql.MSString(1), ),
-                 ( String(3), mysql.MSString(3), ),
-                 ( Text(), mysql.MSText(), ),
-                 ( Unicode(1), mysql.MSString(1), ),
-                 ( Unicode(3), mysql.MSString(3), ),
-                 ( UnicodeText(), mysql.MSText(), ),
-                 ( mysql.MSChar(1), ),
-                 ( mysql.MSChar(3), ),
-                 ( NCHAR(2), mysql.MSChar(2), ),
-                 ( mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
-                 ( mysql.MSNVarChar(22), mysql.MSString(22), ),
-                 ( SmallInteger(), mysql.MSSmallInteger(), ),
-                 ( SmallInteger(), mysql.MSSmallInteger(4), ),
-                 ( mysql.MSSmallInteger(), ),
-                 ( mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
-                 ( mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
-                 ( mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
-                 ( LargeBinary(3), mysql.TINYBLOB(), ),
-                 ( LargeBinary(), mysql.BLOB() ),
-                 ( mysql.MSBinary(3), mysql.MSBinary(3), ),
-                 ( mysql.MSVarBinary(3),),
-                 ( mysql.MSTinyBlob(),),
-                 ( mysql.MSBlob(),),
-                 ( mysql.MSBlob(1234), mysql.MSBlob()),
-                 ( mysql.MSMediumBlob(),),
-                 ( mysql.MSLongBlob(),),
-                 ( mysql.ENUM("''","'fleem'"), ),
-                 ]
-
-        columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
-
-        db = testing.db
-        m = MetaData(db)
-        t_table = Table('mysql_types', m, *columns)
-        try:
-            m.create_all()
-
-            m2 = MetaData(db)
-            rt = Table('mysql_types', m2, autoload=True)
-            try:
-                db.execute('CREATE OR REPLACE VIEW mysql_types_v '
-                           'AS SELECT * from mysql_types')
-                rv = Table('mysql_types_v', m2, autoload=True)
-
-                expected = [len(c) > 1 and c[1] or c[0] for c in specs]
-
-                # Early 5.0 releases seem to report more "general" for columns
-                # in a view, e.g. char -> varchar, tinyblob -> mediumblob
-                #
-                # Not sure exactly which point version has the fix.
-                if db.dialect.server_version_info < (5, 0, 11):
-                    tables = rt,
-                else:
-                    tables = rt, rv
-
-                for table in tables:
-                    for i, reflected in enumerate(table.c):
-                        assert isinstance(reflected.type,
-                                type(expected[i])), \
-                            'element %d: %r not instance of %r' % (i,
-                                reflected.type, type(expected[i]))
-            finally:
-                db.execute('DROP VIEW mysql_types_v')
-        finally:
-            m.drop_all()
-
-    def test_autoincrement(self):
-        meta = MetaData(testing.db)
-        try:
-            Table('ai_1', meta,
-                  Column('int_y', Integer, primary_key=True),
-                  Column('int_n', Integer, DefaultClause('0'),
-                         primary_key=True),
-                         mysql_engine='MyISAM')
-            Table('ai_2', meta,
-                  Column('int_y', Integer, primary_key=True),
-                  Column('int_n', Integer, DefaultClause('0'),
-                         primary_key=True),
-                         mysql_engine='MyISAM')
-            Table('ai_3', meta,
-                  Column('int_n', Integer, DefaultClause('0'),
-                         primary_key=True, autoincrement=False),
-                  Column('int_y', Integer, primary_key=True),
-                         mysql_engine='MyISAM')
-            Table('ai_4', meta,
-                  Column('int_n', Integer, DefaultClause('0'),
-                         primary_key=True, autoincrement=False),
-                  Column('int_n2', Integer, DefaultClause('0'),
-                         primary_key=True, autoincrement=False),
-                         mysql_engine='MyISAM')
-            Table('ai_5', meta,
-                  Column('int_y', Integer, primary_key=True),
-                  Column('int_n', Integer, DefaultClause('0'),
-                         primary_key=True, autoincrement=False),
-                         mysql_engine='MyISAM')
-            Table('ai_6', meta,
-                  Column('o1', String(1), DefaultClause('x'),
-                         primary_key=True),
-                  Column('int_y', Integer, primary_key=True),
-                         mysql_engine='MyISAM')
-            Table('ai_7', meta,
-                  Column('o1', String(1), DefaultClause('x'),
-                         primary_key=True),
-                  Column('o2', String(1), DefaultClause('x'),
-                         primary_key=True),
-                  Column('int_y', Integer, primary_key=True),
-                         mysql_engine='MyISAM')
-            Table('ai_8', meta,
-                  Column('o1', String(1), DefaultClause('x'),
-                         primary_key=True),
-                  Column('o2', String(1), DefaultClause('x'),
-                         primary_key=True),
-                         mysql_engine='MyISAM')
-            meta.create_all()
-
-            table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
-                           'ai_5', 'ai_6', 'ai_7', 'ai_8']
-            mr = MetaData(testing.db)
-            mr.reflect(only=table_names)
-
-            for tbl in [mr.tables[name] for name in table_names]:
-                for c in tbl.c:
-                    if c.name.startswith('int_y'):
-                        assert c.autoincrement
-                    elif c.name.startswith('int_n'):
-                        assert not c.autoincrement
-                tbl.insert().execute()
-                if 'int_y' in tbl.c:
-                    assert select([tbl.c.int_y]).scalar() == 1
-                    assert list(tbl.select().execute().first()).count(1) == 1
-                else:
-                    assert 1 not in list(tbl.select().execute().first())
-        finally:
-            meta.drop_all()
-
-    @testing.exclude('mysql', '<', (5, 0, 0), 'no information_schema support')
-    def test_system_views(self):
-        dialect = testing.db.dialect
-        connection = testing.db.connect()
-        view_names = dialect.get_view_names(connection, "information_schema")
-        self.assert_('TABLES' in view_names)
-
-
-class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
-    """Tests MySQL-dialect specific compilation."""
-
-    __dialect__ = mysql.dialect()
-
-    def test_precolumns(self):
-        dialect = self.__dialect__
-
-        def gen(distinct=None, prefixes=None):
-            kw = {}
-            if distinct is not None:
-                kw['distinct'] = distinct
-            if prefixes is not None:
-                kw['prefixes'] = prefixes
-            return str(select(['q'], **kw).compile(dialect=dialect))
-
-        eq_(gen(None), 'SELECT q')
-        eq_(gen(True), 'SELECT DISTINCT q')
-
-        eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
-        eq_(gen(prefixes=['DISTINCTROW']),
-                'SELECT DISTINCTROW q')
-
-        # Interaction with MySQL prefix extensions
-        eq_(
-            gen(None, ['straight_join']),
-            'SELECT straight_join q')
-        eq_(
-            gen(False, ['HIGH_PRIORITY', 'SQL_SMALL_RESULT', 'ALL']),
-            'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q')
-        eq_(
-            gen(True, ['high_priority', sql.text('sql_cache')]),
-            'SELECT high_priority sql_cache DISTINCT q')
-
-    @testing.uses_deprecated
-    def test_deprecated_distinct(self):
-        dialect = self.__dialect__
-
-        self.assert_compile(
-            select(['q'], distinct='ALL'),
-            'SELECT ALL q',
-        )
-
-        self.assert_compile(
-            select(['q'], distinct='distinctROW'),
-            'SELECT DISTINCTROW q',
-        )
-
-        self.assert_compile(
-            select(['q'], distinct='ALL',
-                    prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']),
-            'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q'
-        )
-
-    def test_backslash_escaping(self):
-        self.assert_compile(
-            sql.column('foo').like('bar', escape='\\'),
-            "foo LIKE %s ESCAPE '\\\\'"
-        )
-
-        dialect = mysql.dialect()
-        dialect._backslash_escapes=False
-        self.assert_compile(
-            sql.column('foo').like('bar', escape='\\'),
-            "foo LIKE %s ESCAPE '\\'",
-            dialect=dialect
-        )
-
-    def test_limit(self):
-        t = sql.table('t', sql.column('col1'), sql.column('col2'))
-
-        self.assert_compile(
-            select([t]).limit(10).offset(20),
-            "SELECT t.col1, t.col2 FROM t  LIMIT %s, %s",
-            {'param_1':20, 'param_2':10}
-            )
-        self.assert_compile(
-            select([t]).limit(10),
-            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
-            {'param_1':10})
-
-        self.assert_compile(
-            select([t]).offset(10),
-            "SELECT t.col1, t.col2 FROM t  LIMIT %s, 18446744073709551615",
-            {'param_1':10}
-            )
-
-    def test_varchar_raise(self):
-        for type_ in (
-            String,
-            VARCHAR,
-            String(),
-            VARCHAR(),
-            NVARCHAR(),
-            Unicode,
-            Unicode(),
-        ):
-            type_ = sqltypes.to_instance(type_)
-            assert_raises_message(
-                exc.CompileError,
-                "VARCHAR requires a length on dialect mysql",
-                type_.compile,
-                dialect=mysql.dialect()
-            )
-
-            t1 = Table('sometable', MetaData(),
-                Column('somecolumn', type_)
-            )
-            assert_raises_message(
-                exc.CompileError,
-                r"\(in table 'sometable', column 'somecolumn'\)\: "
-                r"(?:N)?VARCHAR requires a length on dialect mysql",
-                schema.CreateTable(t1).compile,
-                dialect=mysql.dialect()
-            )
-
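The rule test_varchar_raise asserts cuts the other way too: supply a length, or use a length-less type such as TEXT, and compilation succeeds. A quick compile-only sketch:

    from sqlalchemy import String
    from sqlalchemy.dialects import mysql

    print(String(50).compile(dialect=mysql.dialect()))    # VARCHAR(50)
    print(mysql.TEXT().compile(dialect=mysql.dialect()))  # TEXT
    # String() with no length raises CompileError on this dialect,
    # as asserted above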
-    def test_update_limit(self):
-        t = sql.table('t', sql.column('col1'), sql.column('col2'))
-
-        self.assert_compile(
-            t.update(values={'col1':123}),
-            "UPDATE t SET col1=%s"
-            )
-        self.assert_compile(
-            t.update(values={'col1':123}, mysql_limit=5),
-            "UPDATE t SET col1=%s LIMIT 5"
-            )
-        self.assert_compile(
-            t.update(values={'col1':123}, mysql_limit=None),
-            "UPDATE t SET col1=%s"
-            )
-        self.assert_compile(
-            t.update(t.c.col2==456, values={'col1':123}, mysql_limit=1),
-            "UPDATE t SET col1=%s WHERE t.col2 = %s LIMIT 1"
-            )
-
-    def test_utc_timestamp(self):
-        self.assert_compile(func.utc_timestamp(), "UTC_TIMESTAMP")
-
-    def test_sysdate(self):
-        self.assert_compile(func.sysdate(), "SYSDATE()")
-
-    def test_cast(self):
-        t = sql.table('t', sql.column('col'))
-        m = mysql
-
-        specs = [
-            (Integer, "CAST(t.col AS SIGNED INTEGER)"),
-            (INT, "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSInteger, "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
-            (SmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSSmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSTinyInteger, "CAST(t.col AS SIGNED INTEGER)"),
-            # 'SIGNED INTEGER' is a bigint, so this is ok.
-            (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSBigInteger(unsigned=True),
-                            "CAST(t.col AS UNSIGNED INTEGER)"),
-            (m.MSBit, "t.col"),
-
-            # this is kind of sucky.  thank you default arguments!
-            (NUMERIC, "CAST(t.col AS DECIMAL)"),
-            (DECIMAL, "CAST(t.col AS DECIMAL)"),
-            (Numeric, "CAST(t.col AS DECIMAL)"),
-            (m.MSNumeric, "CAST(t.col AS DECIMAL)"),
-            (m.MSDecimal, "CAST(t.col AS DECIMAL)"),
-
-            (FLOAT, "t.col"),
-            (Float, "t.col"),
-            (m.MSFloat, "t.col"),
-            (m.MSDouble, "t.col"),
-            (m.MSReal, "t.col"),
-
-            (TIMESTAMP, "CAST(t.col AS DATETIME)"),
-            (DATETIME, "CAST(t.col AS DATETIME)"),
-            (DATE, "CAST(t.col AS DATE)"),
-            (TIME, "CAST(t.col AS TIME)"),
-            (DateTime, "CAST(t.col AS DATETIME)"),
-            (Date, "CAST(t.col AS DATE)"),
-            (Time, "CAST(t.col AS TIME)"),
-            (DateTime, "CAST(t.col AS DATETIME)"),
-            (Date, "CAST(t.col AS DATE)"),
-            (m.MSTime, "CAST(t.col AS TIME)"),
-            (m.MSTimeStamp, "CAST(t.col AS DATETIME)"),
-            (m.MSYear, "t.col"),
-            (m.MSYear(2), "t.col"),
-            (Interval, "t.col"),
-
-            (String, "CAST(t.col AS CHAR)"),
-            (Unicode, "CAST(t.col AS CHAR)"),
-            (UnicodeText, "CAST(t.col AS CHAR)"),
-            (VARCHAR, "CAST(t.col AS CHAR)"),
-            (NCHAR, "CAST(t.col AS CHAR)"),
-            (CHAR, "CAST(t.col AS CHAR)"),
-            (CLOB, "CAST(t.col AS CHAR)"),
-            (TEXT, "CAST(t.col AS CHAR)"),
-            (String(32), "CAST(t.col AS CHAR(32))"),
-            (Unicode(32), "CAST(t.col AS CHAR(32))"),
-            (CHAR(32), "CAST(t.col AS CHAR(32))"),
-            (m.MSString, "CAST(t.col AS CHAR)"),
-            (m.MSText, "CAST(t.col AS CHAR)"),
-            (m.MSTinyText, "CAST(t.col AS CHAR)"),
-            (m.MSMediumText, "CAST(t.col AS CHAR)"),
-            (m.MSLongText, "CAST(t.col AS CHAR)"),
-            (m.MSNChar, "CAST(t.col AS CHAR)"),
-            (m.MSNVarChar, "CAST(t.col AS CHAR)"),
-
-            (LargeBinary, "CAST(t.col AS BINARY)"),
-            (BLOB, "CAST(t.col AS BINARY)"),
-            (m.MSBlob, "CAST(t.col AS BINARY)"),
-            (m.MSBlob(32), "CAST(t.col AS BINARY)"),
-            (m.MSTinyBlob, "CAST(t.col AS BINARY)"),
-            (m.MSMediumBlob, "CAST(t.col AS BINARY)"),
-            (m.MSLongBlob, "CAST(t.col AS BINARY)"),
-            (m.MSBinary, "CAST(t.col AS BINARY)"),
-            (m.MSBinary(32), "CAST(t.col AS BINARY)"),
-            (m.MSVarBinary, "CAST(t.col AS BINARY)"),
-            (m.MSVarBinary(32), "CAST(t.col AS BINARY)"),
-
-            # maybe this could be changed to something more DWIM, needs
-            # testing
-            (Boolean, "t.col"),
-            (BOOLEAN, "t.col"),
-
-            (m.MSEnum, "t.col"),
-            (m.MSEnum("1", "2"), "t.col"),
-            (m.MSSet, "t.col"),
-            (m.MSSet("1", "2"), "t.col"),
-            ]
-
-        for type_, expected in specs:
-            self.assert_compile(cast(t.c.col, type_), expected)
-
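Background for the bare "t.col" entries in the spec table above: MySQL's CAST accepts only a limited set of target types (SIGNED/UNSIGNED, CHAR, BINARY, DATE, DATETIME, TIME, DECIMAL), so for targets it cannot express, such as FLOAT or BOOLEAN, the dialect renders the plain column rather than an invalid CAST. A small sketch:

    from sqlalchemy import Float, Integer, cast
    from sqlalchemy.sql import column
    from sqlalchemy.dialects import mysql

    d = mysql.dialect()
    print(cast(column('col'), Integer).compile(dialect=d))
    # CAST(col AS SIGNED INTEGER)
    print(cast(column('col'), Float).compile(dialect=d))
    # col  -- no FLOAT target exists in MySQL's CAST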
-    def test_no_cast_pre_4(self):
-        self.assert_compile(
-                    cast(Column('foo', Integer), String),
-                    "CAST(foo AS CHAR)",
-            )
-        dialect = mysql.dialect()
-        dialect.server_version_info = (3, 2, 3)
-        self.assert_compile(
-                    cast(Column('foo', Integer), String),
-                    "foo",
-                    dialect=dialect
-            )
-
-    def test_cast_grouped_expression_non_castable(self):
-        self.assert_compile(
-            cast(sql.column('x') + sql.column('y'), Float),
-            "(x + y)"
-        )
-
-    def test_cast_grouped_expression_pre_4(self):
-        dialect = mysql.dialect()
-        dialect.server_version_info = (3, 2, 3)
-        self.assert_compile(
-            cast(sql.column('x') + sql.column('y'), Integer),
-            "(x + y)",
-            dialect=dialect
-        )
-
-    def test_extract(self):
-        t = sql.table('t', sql.column('col1'))
-
-        for field in 'year', 'month', 'day':
-            self.assert_compile(
-                select([extract(field, t.c.col1)]),
-                "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field)
-
-        # 'milliseconds' is rendered as MySQL's 'millisecond'
-        self.assert_compile(
-            select([extract('milliseconds', t.c.col1)]),
-            "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t")
-
-    def test_too_long_index(self):
-        exp = 'ix_zyrenian_zyme_zyzzogeton_zyzzogeton_zyrenian_zyme_zyz_5cd2'
-        tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
-        cname = 'zyrenian_zyme_zyzzogeton_zo'
-
-        t1 = Table(tname, MetaData(),
-                    Column(cname, Integer, index=True),
-                )
-        ix1 = list(t1.indexes)[0]
-
-        self.assert_compile(
-            schema.CreateIndex(ix1),
-            "CREATE INDEX %s "
-            "ON %s (%s)" % (exp, tname, cname)
-        )
-
-    def test_innodb_autoincrement(self):
-        t1 = Table('sometable', MetaData(), Column('assigned_id',
-                   Integer(), primary_key=True, autoincrement=False),
-                   Column('id', Integer(), primary_key=True,
-                   autoincrement=True), mysql_engine='InnoDB')
-        self.assert_compile(schema.CreateTable(t1),
-                            'CREATE TABLE sometable (assigned_id '
-                            'INTEGER NOT NULL, id INTEGER NOT NULL '
-                            'AUTO_INCREMENT, PRIMARY KEY (assigned_id, '
-                            'id), KEY idx_autoinc_id (id))'
-                            'ENGINE=InnoDB')
-
-        t1 = Table('sometable', MetaData(), Column('assigned_id',
-                   Integer(), primary_key=True, autoincrement=True),
-                   Column('id', Integer(), primary_key=True,
-                   autoincrement=False), mysql_engine='InnoDB')
-        self.assert_compile(schema.CreateTable(t1),
-                            'CREATE TABLE sometable (assigned_id '
-                            'INTEGER NOT NULL AUTO_INCREMENT, id '
-                            'INTEGER NOT NULL, PRIMARY KEY '
-                            '(assigned_id, id))ENGINE=InnoDB')
-
-    def test_innodb_autoincrement_reserved_word_column_name(self):
-        t1 = Table(
-            'sometable', MetaData(),
-            Column('id', Integer(), primary_key=True, autoincrement=False),
-            Column('order', Integer(), primary_key=True, autoincrement=True),
-            mysql_engine='InnoDB')
-        self.assert_compile(
-            schema.CreateTable(t1),
-            'CREATE TABLE sometable ('
-            'id INTEGER NOT NULL, '
-            '`order` INTEGER NOT NULL AUTO_INCREMENT, '
-            'PRIMARY KEY (id, `order`), '
-            'KEY idx_autoinc_order (`order`)'
-            ')ENGINE=InnoDB')
-
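The extra KEY idx_autoinc_... in the two InnoDB tests above is a workaround, not decoration: MySQL requires an AUTO_INCREMENT column to be the leading column of some index, so when it is not first in a composite primary key the dialect emits an auxiliary key for it. A compile-only sketch with hypothetical table and column names:

    from sqlalchemy import Column, Integer, MetaData, Table, schema
    from sqlalchemy.dialects import mysql

    t = Table('st', MetaData(),
              Column('oid', Integer, primary_key=True, autoincrement=False),
              Column('id', Integer, primary_key=True, autoincrement=True),
              mysql_engine='InnoDB')
    # 'id' is AUTO_INCREMENT but second in the PK, so the DDL includes
    # KEY idx_autoinc_id (id)
    print(schema.CreateTable(t).compile(dialect=mysql.dialect()))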
-
-class SQLModeDetectionTest(fixtures.TestBase):
-    __only_on__ = 'mysql'
-
-    def _options(self, modes):
-        def connect(con, record):
-            cursor = con.cursor()
-            cursor.execute("set sql_mode='%s'" % (",".join(modes)))
-        e = engines.testing_engine(options={
-            'pool_events':[
-                (connect, 'first_connect'),
-                (connect, 'connect')
-            ]
-        })
-        return e
-
-    def test_backslash_escapes(self):
-        engine = self._options(['NO_BACKSLASH_ESCAPES'])
-        c = engine.connect()
-        assert not engine.dialect._backslash_escapes
-        c.close()
-        engine.dispose()
-
-        engine = self._options([])
-        c = engine.connect()
-        assert engine.dialect._backslash_escapes
-        c.close()
-        engine.dispose()
-
-    def test_ansi_quotes(self):
-        engine = self._options(['ANSI_QUOTES'])
-        c = engine.connect()
-        assert engine.dialect._server_ansiquotes
-        c.close()
-        engine.dispose()
-
-    def test_combination(self):
-        engine = self._options(['ANSI_QUOTES,NO_BACKSLASH_ESCAPES'])
-        c = engine.connect()
-        assert engine.dialect._server_ansiquotes
-        assert not engine.dialect._backslash_escapes
-        c.close()
-        engine.dispose()
-
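The _options helper above wires the listener in through the test harness; in application code the equivalent is the public event API. The harness registers the setter for both 'first_connect' and 'connect' so the mode is already in place when the dialect runs its first-connect detection; a hedged sketch doing the same (the URL is hypothetical):

    from sqlalchemy import create_engine, event

    engine = create_engine("mysql+mysqldb://scott:tiger@localhost/test")

    def set_sql_mode(dbapi_connection, connection_record):
        cursor = dbapi_connection.cursor()
        cursor.execute("SET sql_mode = 'ANSI_QUOTES,NO_BACKSLASH_ESCAPES'")
        cursor.close()

    # register for both events, as the test harness does
    event.listen(engine, "first_connect", set_sql_mode)
    event.listen(engine, "connect", set_sql_mode)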
-class RawReflectionTest(fixtures.TestBase):
-    def setup(self):
-        dialect = mysql.dialect()
-        self.parser = mysql.MySQLTableDefinitionParser(dialect, dialect.identifier_preparer)
-
-    def test_key_reflection(self):
-        regex = self.parser._re_key
-
-        assert regex.match('  PRIMARY KEY (`id`),')
-        assert regex.match('  PRIMARY KEY USING BTREE (`id`),')
-        assert regex.match('  PRIMARY KEY (`id`) USING BTREE,')
-        assert regex.match('  PRIMARY KEY (`id`)')
-        assert regex.match('  PRIMARY KEY USING BTREE (`id`)')
-        assert regex.match('  PRIMARY KEY (`id`) USING BTREE')
-
-
-class ExecutionTest(fixtures.TestBase):
-    """Various MySQL execution special cases."""
-
-    __only_on__ = 'mysql'
-
-    def test_charset_caching(self):
-        engine = engines.testing_engine()
-
-        cx = engine.connect()
-        meta = MetaData()
-        charset = engine.dialect._detect_charset(cx)
-
-        meta.reflect(cx)
-        eq_(cx.dialect._connection_charset, charset)
-        cx.close()
-
-    def test_sysdate(self):
-        d = testing.db.scalar(func.sysdate())
-        assert isinstance(d, datetime.datetime)
-
-class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
-    __only_on__ = 'mysql'
-
-    @classmethod
-    def setup_class(cls):
-        global metadata, cattable, matchtable
-        metadata = MetaData(testing.db)
-
-        cattable = Table('cattable', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('description', String(50)),
-            mysql_engine='MyISAM'
-        )
-        matchtable = Table('matchtable', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('title', String(200)),
-            Column('category_id', Integer, ForeignKey('cattable.id')),
-            mysql_engine='MyISAM'
-        )
-        metadata.create_all()
-
-        cattable.insert().execute([
-            {'id': 1, 'description': 'Python'},
-            {'id': 2, 'description': 'Ruby'},
-        ])
-        matchtable.insert().execute([
-            {'id': 1,
-             'title': 'Agile Web Development with Rails',
-             'category_id': 2},
-            {'id': 2,
-             'title': 'Dive Into Python',
-             'category_id': 1},
-            {'id': 3,
-             'title': "Programming Matz's Ruby",
-             'category_id': 2},
-            {'id': 4,
-             'title': 'The Definitive Guide to Django',
-             'category_id': 1},
-            {'id': 5,
-             'title': 'Python in a Nutshell',
-             'category_id': 1}
-        ])
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-
-    @testing.fails_on('mysql+mysqlconnector', 'uses pyformat')
-    def test_expression(self):
-        format = '%s' if testing.db.dialect.paramstyle == 'format' else '?'
-        self.assert_compile(
-            matchtable.c.title.match('somstr'),
-            "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
-    @testing.fails_on('mysql+mysqldb', 'uses format')
-    @testing.fails_on('mysql+pymysql', 'uses format')
-    @testing.fails_on('mysql+cymysql', 'uses format')
-    @testing.fails_on('mysql+oursql', 'uses format')
-    @testing.fails_on('mysql+pyodbc', 'uses format')
-    @testing.fails_on('mysql+zxjdbc', 'uses format')
-    def test_expression_pyformat(self):
-        format = '%(title_1)s'
-        self.assert_compile(
-            matchtable.c.title.match('somstr'),
-            "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
-    def test_simple_match(self):
-        results = (matchtable.select().
-                   where(matchtable.c.title.match('python')).
-                   order_by(matchtable.c.id).
-                   execute().
-                   fetchall())
-        eq_([2, 5], [r.id for r in results])
-
-    def test_simple_match_with_apostrophe(self):
-        results = (matchtable.select().
-                   where(matchtable.c.title.match("Matz's")).
-                   execute().
-                   fetchall())
-        eq_([3], [r.id for r in results])
-
-    def test_or_match(self):
-        results1 = (matchtable.select().
-                    where(or_(matchtable.c.title.match('nutshell'),
-                              matchtable.c.title.match('ruby'))).
-                    order_by(matchtable.c.id).
-                    execute().
-                    fetchall())
-        eq_([3, 5], [r.id for r in results1])
-        results2 = (matchtable.select().
-                    where(matchtable.c.title.match('nutshell ruby')).
-                    order_by(matchtable.c.id).
-                    execute().
-                    fetchall())
-        eq_([3, 5], [r.id for r in results2])
-
-
-    def test_and_match(self):
-        results1 = (matchtable.select().
-                    where(and_(matchtable.c.title.match('python'),
-                               matchtable.c.title.match('nutshell'))).
-                    execute().
-                    fetchall())
-        eq_([5], [r.id for r in results1])
-        results2 = (matchtable.select().
-                    where(matchtable.c.title.match('+python +nutshell')).
-                    execute().
-                    fetchall())
-        eq_([5], [r.id for r in results2])
-
-    def test_match_across_joins(self):
-        results = (matchtable.select().
-                   where(and_(cattable.c.id==matchtable.c.category_id,
-                              or_(cattable.c.description.match('Ruby'),
-                                  matchtable.c.title.match('nutshell')))).
-                   order_by(matchtable.c.id).
-                   execute().
-                   fetchall())
-        eq_([1, 3, 5], [r.id for r in results])
-
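For reference alongside the round-trip tests above, match() on the MySQL dialect compiles to full-text MATCH ... AGAINST in boolean mode, which is what makes operators such as '+python +nutshell' work. A compile-only sketch (table and column names are hypothetical):

    from sqlalchemy import Column, MetaData, String, Table
    from sqlalchemy.dialects import mysql

    t = Table('doc', MetaData(), Column('title', String(200)))
    expr = t.c.title.match('+python +nutshell')
    print(expr.compile(dialect=mysql.dialect()))
    # MATCH (doc.title) AGAINST (%s IN BOOLEAN MODE)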
-
-def colspec(c):
-    return testing.db.dialect.ddl_compiler(
-                    testing.db.dialect, None).get_column_specification(c)
-
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
deleted file mode 100644 (file)
index 986e575..0000000
+++ /dev/null
@@ -1,3608 +0,0 @@
-# coding: utf-8
-
-from __future__ import with_statement
-
-from sqlalchemy.testing.assertions import eq_, assert_raises, \
-                assert_raises_message, is_, AssertsExecutionResults, \
-                AssertsCompiledSQL, ComparesTables
-from sqlalchemy.testing import engines, fixtures
-from sqlalchemy import testing
-import datetime
-from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
-            String, Sequence, ForeignKey, join, Numeric, \
-            PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
-            func, literal_column, literal, bindparam, cast, extract, \
-            SmallInteger, Enum, REAL, update, insert, Index, delete, \
-            and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
-from sqlalchemy.orm import Session, mapper, aliased
-from sqlalchemy import exc, schema, types
-from sqlalchemy.dialects.postgresql import base as postgresql
-from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
-            INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \
-            ExcludeConstraint
-import decimal
-from sqlalchemy import util
-from sqlalchemy.testing.util import round_decimal
-from sqlalchemy.sql import table, column, operators
-import logging
-import logging.handlers
-import re
-
-class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    def test_format(self):
-        seq = Sequence('my_seq_no_schema')
-        dialect = postgresql.PGDialect()
-        assert dialect.identifier_preparer.format_sequence(seq) \
-            == 'my_seq_no_schema'
-        seq = Sequence('my_seq', schema='some_schema')
-        assert dialect.identifier_preparer.format_sequence(seq) \
-            == 'some_schema.my_seq'
-        seq = Sequence('My_Seq', schema='Some_Schema')
-        assert dialect.identifier_preparer.format_sequence(seq) \
-            == '"Some_Schema"."My_Seq"'
-
-    @testing.only_on('postgresql', 'foo')
-    @testing.provide_metadata
-    def test_reverse_eng_name(self):
-        metadata = self.metadata
-        engine = engines.testing_engine(options=dict(implicit_returning=False))
-        for tname, cname in [
-            ('tb1' * 30, 'abc'),
-            ('tb2', 'abc' * 30),
-            ('tb3' * 30, 'abc' * 30),
-            ('tb4', 'abc'),
-        ]:
-            t = Table(tname[:57],
-                metadata,
-                Column(cname[:57], Integer, primary_key=True)
-            )
-            t.create(engine)
-            r = engine.execute(t.insert())
-            assert r.inserted_primary_key == [1]
-
-class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __dialect__ = postgresql.dialect()
-
-    def test_update_returning(self):
-        dialect = postgresql.dialect()
-        table1 = table('mytable', column('myid', Integer),
-                       column('name', String(128)),
-                       column('description', String(128)))
-        u = update(table1, values=dict(name='foo')).returning(
-            table1.c.myid, table1.c.name)
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=%(name)s '
-                            'RETURNING mytable.myid, mytable.name',
-                            dialect=dialect)
-        u = update(table1, values=dict(name='foo')).returning(table1)
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=%(name)s '
-                            'RETURNING mytable.myid, mytable.name, '
-                            'mytable.description', dialect=dialect)
-        u = update(table1, values=dict(name='foo')).returning(
-            func.length(table1.c.name))
-        self.assert_compile(u,
-                            'UPDATE mytable SET name=%(name)s '
-                            'RETURNING length(mytable.name) AS length_1',
-                            dialect=dialect)
-
-
-    def test_insert_returning(self):
-        dialect = postgresql.dialect()
-        table1 = table('mytable',
-            column('myid', Integer),
-            column('name', String(128)),
-            column('description', String(128)),
-        )
-
-        i = insert(table1, values=dict(name='foo')).returning(
-            table1.c.myid, table1.c.name)
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) VALUES '
-                            '(%(name)s) RETURNING mytable.myid, '
-                            'mytable.name', dialect=dialect)
-        i = insert(table1, values=dict(name='foo')).returning(table1)
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) VALUES '
-                            '(%(name)s) RETURNING mytable.myid, '
-                            'mytable.name, mytable.description',
-                            dialect=dialect)
-        i = insert(table1, values=dict(name='foo')).returning(
-            func.length(table1.c.name))
-        self.assert_compile(i,
-                            'INSERT INTO mytable (name) VALUES '
-                            '(%(name)s) RETURNING length(mytable.name) '
-                            'AS length_1', dialect=dialect)
-
-
-    def test_create_partial_index(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', Integer))
-        idx = Index('test_idx1', tbl.c.data,
-                    postgresql_where=and_(tbl.c.data > 5,
-                                          tbl.c.data < 10))
-
-        # test quoting and all that
-
-        idx2 = Index('test_idx2', tbl.c.data,
-                     postgresql_where=and_(tbl.c.data > 'a',
-                                           tbl.c.data < "b's"))
-        self.assert_compile(schema.CreateIndex(idx),
-                            'CREATE INDEX test_idx1 ON testtbl (data) '
-                            'WHERE data > 5 AND data < 10',
-                            dialect=postgresql.dialect())
-        self.assert_compile(schema.CreateIndex(idx2),
-                            "CREATE INDEX test_idx2 ON testtbl (data) "
-                            "WHERE data > 'a' AND data < 'b''s'",
-                            dialect=postgresql.dialect())
-
-    def test_create_index_with_ops(self):
-        m = MetaData()
-        tbl = Table('testtbl', m,
-                    Column('data', String),
-                    Column('data2', Integer, key='d2'))
-
-        idx = Index('test_idx1', tbl.c.data,
-                    postgresql_ops={'data': 'text_pattern_ops'})
-
-        idx2 = Index('test_idx2', tbl.c.data, tbl.c.d2,
-                    postgresql_ops={'data': 'text_pattern_ops',
-                                    'd2': 'int4_ops'})
-
-        self.assert_compile(schema.CreateIndex(idx),
-                            'CREATE INDEX test_idx1 ON testtbl '
-                            '(data text_pattern_ops)',
-                            dialect=postgresql.dialect())
-        self.assert_compile(schema.CreateIndex(idx2),
-                            'CREATE INDEX test_idx2 ON testtbl '
-                            '(data text_pattern_ops, data2 int4_ops)',
-                            dialect=postgresql.dialect())
-
-    def test_create_index_with_using(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String))
-
-        idx1 = Index('test_idx1', tbl.c.data)
-        idx2 = Index('test_idx2', tbl.c.data, postgresql_using='btree')
-        idx3 = Index('test_idx3', tbl.c.data, postgresql_using='hash')
-
-        self.assert_compile(schema.CreateIndex(idx1),
-                            'CREATE INDEX test_idx1 ON testtbl '
-                            '(data)',
-                            dialect=postgresql.dialect())
-        self.assert_compile(schema.CreateIndex(idx2),
-                            'CREATE INDEX test_idx2 ON testtbl '
-                            'USING btree (data)',
-                            dialect=postgresql.dialect())
-        self.assert_compile(schema.CreateIndex(idx3),
-                            'CREATE INDEX test_idx3 ON testtbl '
-                            'USING hash (data)',
-                            dialect=postgresql.dialect())
-
-    def test_exclude_constraint_min(self):
-        m = MetaData()
-        tbl = Table('testtbl', m, 
-                    Column('room', Integer, primary_key=True))
-        cons = ExcludeConstraint(('room', '='))
-        tbl.append_constraint(cons)
-        self.assert_compile(schema.AddConstraint(cons),
-                            'ALTER TABLE testtbl ADD EXCLUDE USING gist '
-                            '(room WITH =)',
-                            dialect=postgresql.dialect())
-
-    def test_exclude_constraint_full(self):
-        m = MetaData()
-        room = Column('room', Integer, primary_key=True)
-        tbl = Table('testtbl', m,
-                    room,
-                    Column('during', TSRANGE))
-        room = Column('room', Integer, primary_key=True)
-        cons = ExcludeConstraint((room, '='), ('during', '&&'),
-                                 name='my_name',
-                                 using='gist',
-                                 where="room > 100",
-                                 deferrable=True,
-                                 initially='immediate')
-        tbl.append_constraint(cons)
-        self.assert_compile(schema.AddConstraint(cons),
-                            'ALTER TABLE testtbl ADD CONSTRAINT my_name '
-                            'EXCLUDE USING gist '
-                            '(room WITH =, during WITH &&) WHERE '
-                            '(room > 100) DEFERRABLE INITIALLY immediate',
-                            dialect=postgresql.dialect())
-
-    def test_exclude_constraint_copy(self):
-        m = MetaData()
-        cons = ExcludeConstraint(('room', '='))
-        tbl = Table('testtbl', m, 
-              Column('room', Integer, primary_key=True),
-              cons)
-        # apparently you can't copy a ColumnCollectionConstraint until
-        # after it has been bound to a table...
-        cons_copy = cons.copy()
-        tbl.append_constraint(cons_copy)
-        self.assert_compile(schema.AddConstraint(cons_copy),
-                            'ALTER TABLE testtbl ADD EXCLUDE USING gist '
-                            '(room WITH =)',
-                            dialect=postgresql.dialect())
-
-    def test_substring(self):
-        self.assert_compile(func.substring('abc', 1, 2),
-                            'SUBSTRING(%(substring_1)s FROM %(substring_2)s '
-                            'FOR %(substring_3)s)')
-        self.assert_compile(func.substring('abc', 1),
-                            'SUBSTRING(%(substring_1)s FROM %(substring_2)s)')
-
-
-
-    def test_extract(self):
-        t = table('t', column('col1', DateTime), column('col2', Date),
-                  column('col3', Time), column('col4',
-                  postgresql.INTERVAL))
-        for field in 'year', 'month', 'day', 'epoch', 'hour':
-            # cases below: plain text gets no cast; addition is commutative
-            # while subtraction is not; invalid operand combinations get no
-            # cast; entirely unsupported types must not crack up the compiler
-            for expr, compiled_expr in [
-                (t.c.col1, 't.col1 :: timestamp'),
-                (t.c.col2, 't.col2 :: date'),
-                (t.c.col3, 't.col3 :: time'),
-                (func.current_timestamp() - datetime.timedelta(days=5),
-                 '(CURRENT_TIMESTAMP - %(current_timestamp_1)s) :: '
-                 'timestamp'),
-                (func.current_timestamp() + func.current_timestamp(),
-                 'CURRENT_TIMESTAMP + CURRENT_TIMESTAMP'),
-                (text('foo.date + foo.time'), 'foo.date + foo.time'),
-                (func.current_timestamp() + datetime.timedelta(days=5),
-                 '(CURRENT_TIMESTAMP + %(current_timestamp_1)s) :: '
-                 'timestamp'),
-                (t.c.col2 + t.c.col3, '(t.col2 + t.col3) :: timestamp'
-                 ),
-                (t.c.col2 + datetime.timedelta(days=5),
-                 '(t.col2 + %(col2_1)s) :: timestamp'),
-                (datetime.timedelta(days=5) + t.c.col2,
-                 '(%(col2_1)s + t.col2) :: timestamp'),
-                (t.c.col1 + t.c.col4, '(t.col1 + t.col4) :: timestamp'
-                 ),
-                (t.c.col1 - datetime.timedelta(seconds=30),
-                 '(t.col1 - %(col1_1)s) :: timestamp'),
-                (datetime.timedelta(seconds=30) - t.c.col1,
-                 '%(col1_1)s - t.col1'),
-                (func.coalesce(t.c.col1, func.current_timestamp()),
-                 'coalesce(t.col1, CURRENT_TIMESTAMP) :: timestamp'),
-                (t.c.col3 + datetime.timedelta(seconds=30),
-                 '(t.col3 + %(col3_1)s) :: time'),
-                (func.current_timestamp() - func.coalesce(t.c.col1,
-                 func.current_timestamp()),
-                 '(CURRENT_TIMESTAMP - coalesce(t.col1, '
-                 'CURRENT_TIMESTAMP)) :: interval'),
-                (3 * func.foobar(type_=Interval),
-                 '(%(foobar_1)s * foobar()) :: interval'),
-                (literal(datetime.timedelta(seconds=10))
-                 - literal(datetime.timedelta(seconds=10)),
-                 '(%(param_1)s - %(param_2)s) :: interval'),
-                (t.c.col3 + 'some string', 't.col3 + %(col3_1)s'),
-                ]:
-                self.assert_compile(select([extract(field,
-                                    expr)]).select_from(t),
-                                    'SELECT EXTRACT(%s FROM %s) AS '
-                                    'anon_1 FROM t' % (field,
-                                    compiled_expr))
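-        # (net effect asserted above: datetime-affinity operands of EXTRACT
-        # get an explicit :: timestamp / date / time / interval cast, while
-        # plain text and unknown types are left untouched)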
-
-    def test_reserved_words(self):
-        table = Table("pg_table", MetaData(),
-            Column("col1", Integer),
-            Column("variadic", Integer))
-        x = select([table.c.col1, table.c.variadic])
-
-        self.assert_compile(x,
-            '''SELECT pg_table.col1, pg_table."variadic" FROM pg_table''')
-
-    def test_array(self):
-        c = Column('x', postgresql.ARRAY(Integer))
-
-        self.assert_compile(
-            cast(c, postgresql.ARRAY(Integer)),
-            "CAST(x AS INTEGER[])"
-        )
-        self.assert_compile(
-            c[5],
-            "x[%(x_1)s]",
-            checkparams={'x_1': 5}
-        )
-
-        self.assert_compile(
-            c[5:7],
-            "x[%(x_1)s:%(x_2)s]",
-            checkparams={'x_2': 7, 'x_1': 5}
-        )
-        self.assert_compile(
-            c[5:7][2:3],
-            "x[%(x_1)s:%(x_2)s][%(param_1)s:%(param_2)s]",
-            checkparams={'x_2': 7, 'x_1': 5, 'param_1':2, 'param_2':3}
-        )
-        self.assert_compile(
-            c[5:7][3],
-            "x[%(x_1)s:%(x_2)s][%(param_1)s]",
-            checkparams={'x_2': 7, 'x_1': 5, 'param_1':3}
-        )
-
-        self.assert_compile(
-            c.contains([1]),
-            'x @> %(x_1)s',
-            checkparams={'x_1': [1]}
-        )
-        self.assert_compile(
-            c.contained_by([2]),
-            'x <@ %(x_1)s',
-            checkparams={'x_1': [2]}
-        )
-        self.assert_compile(
-            c.overlap([3]),
-            'x && %(x_1)s',
-            checkparams={'x_1': [3]}
-        )
-        self.assert_compile(
-            postgresql.Any(4, c),
-            '%(param_1)s = ANY (x)',
-            checkparams={'param_1': 4}
-        )
-        self.assert_compile(
-            c.any(5, operator=operators.ne),
-            '%(param_1)s != ANY (x)',
-            checkparams={'param_1': 5}
-        )
-        self.assert_compile(
-            postgresql.All(6, c, operator=operators.gt),
-            '%(param_1)s > ALL (x)',
-            checkparams={'param_1': 6}
-        )
-        self.assert_compile(
-            c.all(7, operator=operators.lt),
-            '%(param_1)s < ALL (x)',
-            checkparams={'param_1': 7}
-        )
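-        # (ANY/ALL compare a scalar to each array element: "= ANY (x)" is
-        # true if any element of x equals the parameter, "< ALL (x)" only
-        # if every element exceeds it)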
-
-    def test_array_literal_type(self):
-        is_(postgresql.array([1, 2]).type._type_affinity, postgresql.ARRAY)
-        is_(postgresql.array([1, 2]).type.item_type._type_affinity, Integer)
-
-        is_(postgresql.array([1, 2], type_=String).
-                    type.item_type._type_affinity, String)
-
-    def test_array_literal(self):
-        self.assert_compile(
-            func.array_dims(postgresql.array([1, 2]) +
-                        postgresql.array([3, 4, 5])),
-            "array_dims(ARRAY[%(param_1)s, %(param_2)s] || "
-                    "ARRAY[%(param_3)s, %(param_4)s, %(param_5)s])",
-            checkparams={'param_5': 5, 'param_4': 4, 'param_1': 1,
-                'param_3': 3, 'param_2': 2}
-        )
-
-    def test_array_literal_insert(self):
-        m = MetaData()
-        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
-        self.assert_compile(
-            t.insert().values(data=array([1, 2, 3])),
-            "INSERT INTO t (data) VALUES (ARRAY[%(param_1)s, "
-                "%(param_2)s, %(param_3)s])"
-        )
-
-    def test_update_array_element(self):
-        m = MetaData()
-        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
-        self.assert_compile(
-            t.update().values({t.c.data[5]: 1}),
-            "UPDATE t SET data[%(data_1)s]=%(param_1)s",
-            checkparams={'data_1': 5, 'param_1': 1}
-        )
-
-    def test_update_array_slice(self):
-        m = MetaData()
-        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
-        self.assert_compile(
-            t.update().values({t.c.data[2:5]: 2}),
-            "UPDATE t SET data[%(data_1)s:%(data_2)s]=%(param_1)s",
-            checkparams={'param_1': 2, 'data_2': 5, 'data_1': 2}
-
-        )
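-        # (data_1/data_2 above are the slice bounds on the left side of
-        # SET; param_1 is the assigned value)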
-
-    def test_from_only(self):
-        m = MetaData()
-        tbl1 = Table('testtbl1', m, Column('id', Integer))
-        tbl2 = Table('testtbl2', m, Column('id', Integer))
-
-        stmt = tbl1.select().with_hint(tbl1, 'ONLY', 'postgresql')
-        expected = 'SELECT testtbl1.id FROM ONLY testtbl1'
-        self.assert_compile(stmt, expected)
-
-        talias1 = tbl1.alias('foo')
-        stmt = talias1.select().with_hint(talias1, 'ONLY', 'postgresql')
-        expected = 'SELECT foo.id FROM ONLY testtbl1 AS foo'
-        self.assert_compile(stmt, expected)
-
-        stmt = select([tbl1, tbl2]).with_hint(tbl1, 'ONLY', 'postgresql')
-        expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
-                    'testtbl2')
-        self.assert_compile(stmt, expected)
-
-        stmt = select([tbl1, tbl2]).with_hint(tbl2, 'ONLY', 'postgresql')
-        expected = ('SELECT testtbl1.id, testtbl2.id FROM testtbl1, ONLY '
-                    'testtbl2')
-        self.assert_compile(stmt, expected)
-
-        stmt = select([tbl1, tbl2])
-        stmt = stmt.with_hint(tbl1, 'ONLY', 'postgresql')
-        stmt = stmt.with_hint(tbl2, 'ONLY', 'postgresql')
-        expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
-                    'ONLY testtbl2')
-        self.assert_compile(stmt, expected)
-
-        stmt = update(tbl1, values=dict(id=1))
-        stmt = stmt.with_hint('ONLY', dialect_name='postgresql')
-        expected = 'UPDATE ONLY testtbl1 SET id=%(id)s'
-        self.assert_compile(stmt, expected)
-
-        stmt = delete(tbl1).with_hint('ONLY', selectable=tbl1, dialect_name='postgresql')
-        expected = 'DELETE FROM ONLY testtbl1'
-        self.assert_compile(stmt, expected)
-
-        tbl3 = Table('testtbl3', m, Column('id', Integer), schema='testschema')
-        stmt = tbl3.select().with_hint(tbl3, 'ONLY', 'postgresql')
-        expected = 'SELECT testschema.testtbl3.id FROM ONLY testschema.testtbl3'
-        self.assert_compile(stmt, expected)
-
-        assert_raises(
-            exc.CompileError,
-            tbl3.select().with_hint(tbl3, "FAKE", "postgresql").compile,
-            dialect=postgresql.dialect()
-        )
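-        # (FROM ONLY restricts the statement to the named table itself,
-        # skipping any child tables that inherit from it)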
-
-class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
-    __only_on__ = 'postgresql'
-    __dialect__ = postgresql.dialect()
-
-    @classmethod
-    def define_tables(cls, metadata):
-        data_table = Table('data_table', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('data', Integer)
-        )
-
-    @classmethod
-    def insert_data(cls):
-        data_table = cls.tables.data_table
-
-        data_table.insert().execute(
-            {'data':3},
-            {'data':5},
-            {'data':7},
-            {'data':2},
-            {'data':15},
-            {'data':12},
-            {'data':6},
-            {'data':478},
-            {'data':52},
-            {'data':9},
-        )
-
-    @testing.fails_on('postgresql+zxjdbc',
-                      'XXX: postgresql+zxjdbc currently returns a Decimal result for Float')
-    def test_float_coercion(self):
-        data_table = self.tables.data_table
-
-        for type_, result in [
-            (Numeric, decimal.Decimal('140.381230939')),
-            (Float, 140.381230939),
-            (Float(asdecimal=True), decimal.Decimal('140.381230939')),
-            (Numeric(asdecimal=False), 140.381230939),
-        ]:
-            ret = testing.db.execute(
-                select([
-                    func.stddev_pop(data_table.c.data, type_=type_)
-                ])
-            ).scalar()
-
-            eq_(round_decimal(ret, 9), result)
-
-            ret = testing.db.execute(
-                select([
-                    cast(func.stddev_pop(data_table.c.data), type_)
-                ])
-            ).scalar()
-            eq_(round_decimal(ret, 9), result)
-
-    @testing.fails_on('postgresql+zxjdbc',
-                      'zxjdbc has no support for PG arrays')
-    @testing.provide_metadata
-    def test_arrays(self):
-        metadata = self.metadata
-        t1 = Table('t', metadata,
-            Column('x', postgresql.ARRAY(Float)),
-            Column('y', postgresql.ARRAY(REAL)),
-            Column('z', postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
-            Column('q', postgresql.ARRAY(Numeric))
-        )
-        metadata.create_all()
-        t1.insert().execute(x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")])
-        row = t1.select().execute().first()
-        eq_(
-            row,
-            ([5], [5], [6], [decimal.Decimal("6.4")])
-        )
-
-class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
-
-    __only_on__ = 'postgresql'
-    __dialect__ = postgresql.dialect()
-
-    def test_compile(self):
-        e1 = Enum('x', 'y', 'z', name='somename')
-        e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
-        self.assert_compile(postgresql.CreateEnumType(e1),
-                            "CREATE TYPE somename AS ENUM ('x','y','z')"
-                            )
-        self.assert_compile(postgresql.CreateEnumType(e2),
-                            "CREATE TYPE someschema.somename AS ENUM "
-                            "('x','y','z')")
-        self.assert_compile(postgresql.DropEnumType(e1),
-                            'DROP TYPE somename')
-        self.assert_compile(postgresql.DropEnumType(e2),
-                            'DROP TYPE someschema.somename')
-        t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
-        self.assert_compile(schema.CreateTable(t1),
-                            'CREATE TABLE sometable (somecolumn '
-                            'somename)')
-        t1 = Table('sometable', MetaData(), Column('somecolumn',
-                   Enum('x', 'y', 'z', native_enum=False)))
-        self.assert_compile(schema.CreateTable(t1),
-                            "CREATE TABLE sometable (somecolumn "
-                            "VARCHAR(1), CHECK (somecolumn IN ('x', "
-                            "'y', 'z')))")
-
-    @testing.fails_on('postgresql+zxjdbc',
-                      'zxjdbc fails on ENUM: column "XXX" is of type '
-                      'XXX but expression is of type character varying')
-    @testing.fails_on('postgresql+pg8000',
-                      'pg8000 fails on ENUM: column "XXX" is of type '
-                      'XXX but expression is of type text')
-    def test_create_table(self):
-        metadata = MetaData(testing.db)
-        t1 = Table('table', metadata, Column('id', Integer,
-                   primary_key=True), Column('value', Enum('one', 'two',
-                   'three', name='onetwothreetype')))
-        t1.create()
-        t1.create(checkfirst=True)  # check the create
-        try:
-            t1.insert().execute(value='two')
-            t1.insert().execute(value='three')
-            t1.insert().execute(value='three')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-                [(1, 'two'), (2, 'three'), (3, 'three')])
-        finally:
-            metadata.drop_all()
-            metadata.drop_all()  # second call is a no-op; checkfirst defaults to True
-
-    def test_name_required(self):
-        metadata = MetaData(testing.db)
-        etype = Enum('four', 'five', 'six', metadata=metadata)
-        assert_raises(exc.CompileError, etype.create)
-        assert_raises(exc.CompileError, etype.compile,
-                      dialect=postgresql.dialect())
-
-    @testing.fails_on('postgresql+zxjdbc',
-                      'zxjdbc fails on ENUM: column "XXX" is of type '
-                      'XXX but expression is of type character varying')
-    @testing.fails_on('postgresql+pg8000',
-                      'pg8000 fails on ENUM: column "XXX" is of type '
-                      'XXX but expression is of type text')
-    def test_unicode_labels(self):
-        metadata = MetaData(testing.db)
-        t1 = Table('table', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('value',
-                    Enum(u'réveillé', u'drôle', u'S’il',
-                            name='onetwothreetype'))
-        )
-
-        metadata.create_all()
-        try:
-            t1.insert().execute(value=u'drôle')
-            t1.insert().execute(value=u'réveillé')
-            t1.insert().execute(value=u'S’il')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-                [(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
-            )
-            m2 = MetaData(testing.db)
-            t2 = Table('table', m2, autoload=True)
-            assert t2.c.value.type.enums == (u'réveillé', u'drôle', u'S’il')
-        finally:
-            metadata.drop_all()
-
-    def test_non_native_type(self):
-        metadata = MetaData()
-        t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
-                   'three', name='myenum', native_enum=False)))
-
-        def go():
-            t1.create(testing.db)
-
-        try:
-            self.assert_sql(testing.db, go, [],
-                            with_sequences=[("CREATE TABLE foo (\tbar "
-                            "VARCHAR(5), \tCONSTRAINT myenum CHECK "
-                            "(bar IN ('one', 'two', 'three')))", {})])
-        finally:
-            metadata.drop_all(testing.db)
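-        # (with native_enum=False the type degrades to VARCHAR plus a named
-        # CHECK constraint; the VARCHAR is sized to the longest label, hence
-        # VARCHAR(5) for 'three')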
-
-    @testing.provide_metadata
-    def test_disable_create(self):
-        metadata = self.metadata
-
-        e1 = postgresql.ENUM('one', 'two', 'three',
-                            name="myenum",
-                            create_type=False)
-
-        t1 = Table('e1', metadata,
-            Column('c1', e1)
-        )
-        # table can be created separately
-        # without conflict
-        e1.create(bind=testing.db)
-        t1.create(testing.db)
-        t1.drop(testing.db)
-        e1.drop(bind=testing.db)
-
-    @testing.provide_metadata
-    def test_generate_multiple(self):
-        """Test that the same enum twice only generates once
-        for the create_all() call, without using checkfirst.
-
-        A 'memo' collection held by the DDL runner
-        now handles this.
-
-        """
-        metadata = self.metadata
-
-        e1 = Enum('one', 'two', 'three',
-                            name="myenum")
-        t1 = Table('e1', metadata,
-            Column('c1', e1)
-        )
-
-        t2 = Table('e2', metadata,
-            Column('c1', e1)
-        )
-
-        metadata.create_all(checkfirst=False)
-        metadata.drop_all(checkfirst=False)
-
-    def test_non_native_dialect(self):
-        engine = engines.testing_engine()
-        engine.connect()
-        engine.dialect.supports_native_enum = False
-        metadata = MetaData()
-        t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
-                   'three', name='myenum')))
-
-        def go():
-            t1.create(engine)
-
-        try:
-            self.assert_sql(engine, go, [],
-                            with_sequences=[("CREATE TABLE foo (\tbar "
-                            "VARCHAR(5), \tCONSTRAINT myenum CHECK "
-                            "(bar IN ('one', 'two', 'three')))", {})])
-        finally:
-            metadata.drop_all(engine)
-
-    def test_standalone_enum(self):
-        metadata = MetaData(testing.db)
-        etype = Enum('four', 'five', 'six', name='fourfivesixtype',
-                     metadata=metadata)
-        etype.create()
-        try:
-            assert testing.db.dialect.has_type(testing.db,
-                    'fourfivesixtype')
-        finally:
-            etype.drop()
-            assert not testing.db.dialect.has_type(testing.db,
-                    'fourfivesixtype')
-        metadata.create_all()
-        try:
-            assert testing.db.dialect.has_type(testing.db,
-                    'fourfivesixtype')
-        finally:
-            metadata.drop_all()
-            assert not testing.db.dialect.has_type(testing.db,
-                    'fourfivesixtype')
-
-    def test_no_support(self):
-        def server_version_info(self):
-            return (8, 2)
-
-        e = engines.testing_engine()
-        dialect = e.dialect
-        dialect._get_server_version_info = server_version_info
-
-        assert dialect.supports_native_enum
-        e.connect()
-        assert not dialect.supports_native_enum
-
-        # initialize is called again on new pool
-        e.dispose()
-        e.connect()
-        assert not dialect.supports_native_enum
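-        # (native ENUM arrived in PostgreSQL 8.3; stubbing the version at
-        # (8, 2) forces the non-native path each time initialize runs on a
-        # fresh connect)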
-
-    def test_reflection(self):
-        metadata = MetaData(testing.db)
-        etype = Enum('four', 'five', 'six', name='fourfivesixtype',
-                     metadata=metadata)
-        t1 = Table('table', metadata, Column('id', Integer,
-                   primary_key=True), Column('value', Enum('one', 'two',
-                   'three', name='onetwothreetype')), Column('value2',
-                   etype))
-        metadata.create_all()
-        try:
-            m2 = MetaData(testing.db)
-            t2 = Table('table', m2, autoload=True)
-            assert t2.c.value.type.enums == ('one', 'two', 'three')
-            assert t2.c.value.type.name == 'onetwothreetype'
-            assert t2.c.value2.type.enums == ('four', 'five', 'six')
-            assert t2.c.value2.type.name == 'fourfivesixtype'
-        finally:
-            metadata.drop_all()
-
-    def test_schema_reflection(self):
-        metadata = MetaData(testing.db)
-        etype = Enum(
-            'four',
-            'five',
-            'six',
-            name='fourfivesixtype',
-            schema='test_schema',
-            metadata=metadata,
-            )
-        t1 = Table('table', metadata, Column('id', Integer,
-                   primary_key=True), Column('value', Enum('one', 'two',
-                   'three', name='onetwothreetype',
-                   schema='test_schema')), Column('value2', etype))
-        metadata.create_all()
-        try:
-            m2 = MetaData(testing.db)
-            t2 = Table('table', m2, autoload=True)
-            assert t2.c.value.type.enums == ('one', 'two', 'three')
-            assert t2.c.value.type.name == 'onetwothreetype'
-            assert t2.c.value2.type.enums == ('four', 'five', 'six')
-            assert t2.c.value2.type.name == 'fourfivesixtype'
-            assert t2.c.value2.type.schema == 'test_schema'
-        finally:
-            metadata.drop_all()
-
-class NumericInterpretationTest(fixtures.TestBase):
-    __only_on__ = 'postgresql'
-
-    def test_numeric_codes(self):
-        from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
-
-        for dialect in (pg8000.dialect(), psycopg2.dialect()):
-
-            typ = Numeric().dialect_impl(dialect)
-            for code in base._INT_TYPES + base._FLOAT_TYPES + \
-                        base._DECIMAL_TYPES:
-                proc = typ.result_processor(dialect, code)
-                val = 23.7
-                if proc is not None:
-                    val = proc(val)
-                assert val in (23.7, decimal.Decimal("23.7"))
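-        # (each DBAPI type code gets its own result processor; the loop
-        # checks that every INT/FLOAT/DECIMAL code yields either a float or
-        # a Decimal)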
-
-    @testing.provide_metadata
-    def test_numeric_default(self):
-        metadata = self.metadata
-        # pg8000 appears to fail when the value is 0,
-        # returns an int instead of decimal.
-        t = Table('t', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('nd', Numeric(asdecimal=True), default=1),
-            Column('nf', Numeric(asdecimal=False), default=1),
-            Column('fd', Float(asdecimal=True), default=1),
-            Column('ff', Float(asdecimal=False), default=1),
-        )
-        metadata.create_all()
-        r = t.insert().execute()
-
-        row = t.select().execute().first()
-        assert isinstance(row[1], decimal.Decimal)
-        assert isinstance(row[2], float)
-        assert isinstance(row[3], decimal.Decimal)
-        assert isinstance(row[4], float)
-        eq_(
-            row,
-            (1, decimal.Decimal("1"), 1, decimal.Decimal("1"), 1)
-        )
-
-class InsertTest(fixtures.TestBase, AssertsExecutionResults):
-
-    __only_on__ = 'postgresql'
-
-    @classmethod
-    def setup_class(cls):
-        global metadata
-        cls.engine = testing.db
-        metadata = MetaData(testing.db)
-
-    def teardown(self):
-        metadata.drop_all()
-        metadata.clear()
-        if self.engine is not testing.db:
-            self.engine.dispose()
-
-    def test_compiled_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      primary_key=True), Column('data', String(30)))
-        metadata.create_all()
-        ins = table.insert(inline=True,
-                           values={'data': bindparam('x')}).compile()
-        ins.execute({'x': 'five'}, {'x': 'seven'})
-        assert table.select().execute().fetchall() == [(1, 'five'), (2,
-                'seven')]
-
-    def test_foreignkey_missing_insert(self):
-        t1 = Table('t1', metadata, Column('id', Integer,
-                   primary_key=True))
-        t2 = Table('t2', metadata, Column('id', Integer,
-                   ForeignKey('t1.id'), primary_key=True))
-        metadata.create_all()
-
-        # want to ensure that "null value in column "id" violates not-
-        # null constraint" is raised (IntegrityError on psycopg2, but
-        # ProgrammingError on pg8000), and not "ProgrammingError:
-        # (ProgrammingError) relation "t2_id_seq" does not exist".
-        # the latter corresponds to autoincrement behavior, which is not
-        # the case here due to the foreign key.
-
-        for eng in [
-                engines.testing_engine(options={'implicit_returning': False}),
-                engines.testing_engine(options={'implicit_returning': True})]:
-            assert_raises_message(exc.DBAPIError,
-                                  'violates not-null constraint',
-                                  eng.execute, t2.insert())
-
-    def test_sequence_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      Sequence('my_seq'), primary_key=True),
-                      Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_with_sequence(table, 'my_seq')
-
-    def test_sequence_returning_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      Sequence('my_seq'), primary_key=True),
-                      Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_with_sequence_returning(table, 'my_seq')
-
-    def test_opt_sequence_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      Sequence('my_seq', optional=True),
-                      primary_key=True), Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_autoincrement(table)
-
-    def test_opt_sequence_returning_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      Sequence('my_seq', optional=True),
-                      primary_key=True), Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_autoincrement_returning(table)
-
-    def test_autoincrement_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      primary_key=True), Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_autoincrement(table)
-
-    def test_autoincrement_returning_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      primary_key=True), Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_autoincrement_returning(table)
-
-    def test_noautoincrement_insert(self):
-        table = Table('testtable', metadata, Column('id', Integer,
-                      primary_key=True, autoincrement=False),
-                      Column('data', String(30)))
-        metadata.create_all()
-        self._assert_data_noautoincrement(table)
-
-    def _assert_data_autoincrement(self, table):
-        self.engine = engines.testing_engine(
-            options={'implicit_returning': False})
-        metadata.bind = self.engine
-
-        def go():
-
-            # execute with explicit id
-
-            r = table.insert().execute({'id': 30, 'data': 'd1'})
-            assert r.inserted_primary_key == [30]
-
-            # execute with prefetch id
-
-            r = table.insert().execute({'data': 'd2'})
-            assert r.inserted_primary_key == [1]
-
-            # executemany with explicit ids
-
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-
-            # executemany, uses SERIAL
-
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-
-            # single execute, explicit id, inline
-
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-
-            # single execute, inline, uses SERIAL
-
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        # note that the test framework doesn't capture the "preexecute"
-        # of a sequence or default.  we just see it in the bind params.
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 1, 'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (1, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (2, 'd5'),
-            (3, 'd6'),
-            (33, 'd7'),
-            (4, 'd8'),
-            ]
-        table.delete().execute()
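-        # (ids 1-4 above were generated or prefetched for the rows without
-        # an explicit id; the explicit ids 30-33 passed through untouched)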
-
-        # test the same series of events using a reflected version of
-        # the table
-
-        m2 = MetaData(self.engine)
-        table = Table(table.name, m2, autoload=True)
-
-        def go():
-            table.insert().execute({'id': 30, 'data': 'd1'})
-            r = table.insert().execute({'data': 'd2'})
-            assert r.inserted_primary_key == [5]
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 5, 'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (5, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (6, 'd5'),
-            (7, 'd6'),
-            (33, 'd7'),
-            (8, 'd8'),
-            ]
-        table.delete().execute()
-
-    def _assert_data_autoincrement_returning(self, table):
-        self.engine = \
-            engines.testing_engine(options={'implicit_returning': True})
-        metadata.bind = self.engine
-
-        def go():
-
-            # execute with explicit id
-
-            r = table.insert().execute({'id': 30, 'data': 'd1'})
-            assert r.inserted_primary_key == [30]
-
-            # execute with prefetch id
-
-            r = table.insert().execute({'data': 'd2'})
-            assert r.inserted_primary_key == [1]
-
-            # executemany with explicit ids
-
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-
-            # executemany, uses SERIAL
-
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-
-            # single execute, explicit id, inline
-
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-
-            # single execute, inline, uses SERIAL
-
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
-             'testtable.id', {'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (1, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (2, 'd5'),
-            (3, 'd6'),
-            (33, 'd7'),
-            (4, 'd8'),
-            ]
-        table.delete().execute()
-
-        # test the same series of events using a reflected version of
-        # the table
-
-        m2 = MetaData(self.engine)
-        table = Table(table.name, m2, autoload=True)
-
-        def go():
-            table.insert().execute({'id': 30, 'data': 'd1'})
-            r = table.insert().execute({'data': 'd2'})
-            assert r.inserted_primary_key == [5]
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
-             'testtable.id', {'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
-             : 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (5, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (6, 'd5'),
-            (7, 'd6'),
-            (33, 'd7'),
-            (8, 'd8'),
-            ]
-        table.delete().execute()
-
-    def _assert_data_with_sequence(self, table, seqname):
-        self.engine = engines.testing_engine(
-            options={'implicit_returning': False})
-        metadata.bind = self.engine
-
-        def go():
-            table.insert().execute({'id': 30, 'data': 'd1'})
-            table.insert().execute({'data': 'd2'})
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 1, 'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
-             ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
-             ":data)" % seqname, [{'data': 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (1, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (2, 'd5'),
-            (3, 'd6'),
-            (33, 'd7'),
-            (4, 'd8'),
-            ]
-
-        # can't test reflection here since the Sequence must be
-        # explicitly specified
-
-    def _assert_data_with_sequence_returning(self, table, seqname):
-        self.engine = \
-            engines.testing_engine(options={'implicit_returning': True})
-        metadata.bind = self.engine
-
-        def go():
-            table.insert().execute({'id': 30, 'data': 'd1'})
-            table.insert().execute({'data': 'd2'})
-            table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
-                                   'data': 'd4'})
-            table.insert().execute({'data': 'd5'}, {'data': 'd6'})
-            table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
-            table.insert(inline=True).execute({'data': 'd8'})
-
-        self.assert_sql(self.engine, go, [], with_sequences=[
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             {'id': 30, 'data': 'd1'}),
-            ("INSERT INTO testtable (id, data) VALUES "
-             "(nextval('my_seq'), :data) RETURNING testtable.id",
-             {'data': 'd2'}),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
-            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
-             ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
-            ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
-             [{'id': 33, 'data': 'd7'}]),
-            ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
-             ":data)" % seqname, [{'data': 'd8'}]),
-            ])
-        assert table.select().execute().fetchall() == [
-            (30, 'd1'),
-            (1, 'd2'),
-            (31, 'd3'),
-            (32, 'd4'),
-            (2, 'd5'),
-            (3, 'd6'),
-            (33, 'd7'),
-            (4, 'd8'),
-            ]
-
-        # can't test reflection here since the Sequence must be
-        # explicitly specified
-
-    def _assert_data_noautoincrement(self, table):
-        self.engine = engines.testing_engine(
-            options={'implicit_returning': False})
-        metadata.bind = self.engine
-        table.insert().execute({'id': 30, 'data': 'd1'})
-        if self.engine.driver == 'pg8000':
-            exception_cls = exc.ProgrammingError
-        elif self.engine.driver == 'pypostgresql':
-            exception_cls = Exception
-        else:
-            exception_cls = exc.IntegrityError
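-        # (the drivers report the NOT NULL violation differently, hence the
-        # per-driver exception class selected above)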
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'})
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'},
-                              {'data': 'd3'})
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'})
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'},
-                              {'data': 'd3'})
-        table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
-                               'data': 'd3'})
-        table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
-        assert table.select().execute().fetchall() == [(30, 'd1'), (31,
-                'd2'), (32, 'd3'), (33, 'd4')]
-        table.delete().execute()
-
-        # test the same series of events using a reflected version of
-        # the table
-
-        m2 = MetaData(self.engine)
-        table = Table(table.name, m2, autoload=True)
-        table.insert().execute({'id': 30, 'data': 'd1'})
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'})
-        assert_raises_message(exception_cls,
-                              'violates not-null constraint',
-                              table.insert().execute, {'data': 'd2'},
-                              {'data': 'd3'})
-        table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
-                               'data': 'd3'})
-        table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
-        assert table.select().execute().fetchall() == [(30, 'd1'), (31,
-                'd2'), (32, 'd3'), (33, 'd4')]
-
-class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
-
-    """Test PostgreSQL domains"""
-
-    __only_on__ = 'postgresql'
-
-    @classmethod
-    def setup_class(cls):
-        con = testing.db.connect()
-        for ddl in (
-            'CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42',
-            'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0',
-            "CREATE TYPE testtype AS ENUM ('test')",
-            'CREATE DOMAIN enumdomain AS testtype',
-        ):
-            try:
-                con.execute(ddl)
-            except exc.DBAPIError as e:
-                if 'already exists' not in str(e):
-                    raise
-        con.execute('CREATE TABLE testtable (question integer, answer '
-                    'testdomain)')
-        con.execute('CREATE TABLE test_schema.testtable(question '
-                    'integer, answer test_schema.testdomain, anything '
-                    'integer)')
-        con.execute('CREATE TABLE crosschema (question integer, answer '
-                    'test_schema.testdomain)')
-
-        con.execute('CREATE TABLE enum_test (id integer, data enumdomain)')
-
-    @classmethod
-    def teardown_class(cls):
-        con = testing.db.connect()
-        con.execute('DROP TABLE testtable')
-        con.execute('DROP TABLE test_schema.testtable')
-        con.execute('DROP TABLE crosschema')
-        con.execute('DROP DOMAIN testdomain')
-        con.execute('DROP DOMAIN test_schema.testdomain')
-        con.execute("DROP TABLE enum_test")
-        con.execute("DROP DOMAIN enumdomain")
-        con.execute("DROP TYPE testtype")
-
-    def test_table_is_reflected(self):
-        metadata = MetaData(testing.db)
-        table = Table('testtable', metadata, autoload=True)
-        eq_(set(table.columns.keys()), set(['question', 'answer']),
-            "Columns of reflected table didn't equal expected columns")
-        assert isinstance(table.c.answer.type, Integer)
-
-    def test_domain_is_reflected(self):
-        metadata = MetaData(testing.db)
-        table = Table('testtable', metadata, autoload=True)
-        eq_(str(table.columns.answer.server_default.arg), '42',
-            "Reflected default value didn't equal expected value")
-        assert not table.columns.answer.nullable, \
-            'Expected reflected column to not be nullable.'
-
-    def test_enum_domain_is_reflected(self):
-        metadata = MetaData(testing.db)
-        table = Table('enum_test', metadata, autoload=True)
-        eq_(
-            table.c.data.type.enums,
-            ('test', )
-        )
-
-    def test_table_is_reflected_test_schema(self):
-        metadata = MetaData(testing.db)
-        table = Table('testtable', metadata, autoload=True,
-                      schema='test_schema')
-        eq_(set(table.columns.keys()), set(['question', 'answer',
-            'anything']),
-            "Columns of reflected table didn't equal expected columns")
-        assert isinstance(table.c.anything.type, Integer)
-
-    def test_schema_domain_is_reflected(self):
-        metadata = MetaData(testing.db)
-        table = Table('testtable', metadata, autoload=True,
-                      schema='test_schema')
-        eq_(str(table.columns.answer.server_default.arg), '0',
-            "Reflected default value didn't equal expected value")
-        assert table.columns.answer.nullable, \
-            'Expected reflected column to be nullable.'
-
-    def test_crosschema_domain_is_reflected(self):
-        metadata = MetaData(testing.db)
-        table = Table('crosschema', metadata, autoload=True)
-        eq_(str(table.columns.answer.server_default.arg), '0',
-            "Reflected default value didn't equal expected value")
-        assert table.columns.answer.nullable, \
-            'Expected reflected column to be nullable.'
-
-    def test_unknown_types(self):
-        from sqlalchemy.databases import postgresql
-        ischema_names = postgresql.PGDialect.ischema_names
-        postgresql.PGDialect.ischema_names = {}
-        try:
-            m2 = MetaData(testing.db)
-            assert_raises(exc.SAWarning, Table, 'testtable', m2,
-                          autoload=True)
-
-            @testing.emits_warning('Did not recognize type')
-            def warns():
-                m3 = MetaData(testing.db)
-                t3 = Table('testtable', m3, autoload=True)
-                assert t3.c.answer.type.__class__ == sa.types.NullType
-        finally:
-            postgresql.PGDialect.ischema_names = ischema_names
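-        # (with ischema_names emptied, every reflected type is unrecognized;
-        # reflection warns and falls back to NullType)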
-
-class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
-    """Test 'DISTINCT' with SQL expression language and orm.Query with
-    an emphasis on PG's 'DISTINCT ON' syntax.
-
-    """
-    __dialect__ = postgresql.dialect()
-
-    def setup(self):
-        self.table = Table('t', MetaData(),
-                Column('id', Integer, primary_key=True),
-                Column('a', String),
-                Column('b', String),
-            )
-
-    def test_plain_generative(self):
-        self.assert_compile(
-            select([self.table]).distinct(),
-            "SELECT DISTINCT t.id, t.a, t.b FROM t"
-        )
-
-    def test_on_columns_generative(self):
-        self.assert_compile(
-            select([self.table]).distinct(self.table.c.a),
-            "SELECT DISTINCT ON (t.a) t.id, t.a, t.b FROM t"
-        )
-
-    def test_on_columns_generative_multi_call(self):
-        self.assert_compile(
-            select([self.table]).distinct(self.table.c.a).
-                distinct(self.table.c.b),
-            "SELECT DISTINCT ON (t.a, t.b) t.id, t.a, t.b FROM t"
-        )
-
-    def test_plain_inline(self):
-        self.assert_compile(
-            select([self.table], distinct=True),
-            "SELECT DISTINCT t.id, t.a, t.b FROM t"
-        )
-
-    def test_on_columns_inline_list(self):
-        self.assert_compile(
-            select([self.table],
-                    distinct=[self.table.c.a, self.table.c.b]).
-                    order_by(self.table.c.a, self.table.c.b),
-            "SELECT DISTINCT ON (t.a, t.b) t.id, "
-            "t.a, t.b FROM t ORDER BY t.a, t.b"
-        )
-
-    def test_on_columns_inline_scalar(self):
-        self.assert_compile(
-            select([self.table], distinct=self.table.c.a),
-            "SELECT DISTINCT ON (t.a) t.id, t.a, t.b FROM t"
-        )
-
-    def test_query_plain(self):
-        sess = Session()
-        self.assert_compile(
-            sess.query(self.table).distinct(),
-            "SELECT DISTINCT t.id AS t_id, t.a AS t_a, "
-            "t.b AS t_b FROM t"
-        )
-
-    def test_query_on_columns(self):
-        sess = Session()
-        self.assert_compile(
-            sess.query(self.table).distinct(self.table.c.a),
-            "SELECT DISTINCT ON (t.a) t.id AS t_id, t.a AS t_a, "
-            "t.b AS t_b FROM t"
-        )
-
-    def test_query_on_columns_multi_call(self):
-        sess = Session()
-        self.assert_compile(
-            sess.query(self.table).distinct(self.table.c.a).
-                    distinct(self.table.c.b),
-            "SELECT DISTINCT ON (t.a, t.b) t.id AS t_id, t.a AS t_a, "
-            "t.b AS t_b FROM t"
-        )
-
-    def test_query_on_columns_subquery(self):
-        class Foo(object):
-            pass
-        mapper(Foo, self.table)
-        sess = Session()
-        self.assert_compile(
-            sess.query(Foo).from_self().distinct(Foo.a, Foo.b),
-            "SELECT DISTINCT ON (anon_1.t_a, anon_1.t_b) anon_1.t_id "
-            "AS anon_1_t_id, anon_1.t_a AS anon_1_t_a, anon_1.t_b "
-            "AS anon_1_t_b FROM (SELECT t.id AS t_id, t.a AS t_a, "
-            "t.b AS t_b FROM t) AS anon_1"
-        )
-
-    def test_query_distinct_on_aliased(self):
-        class Foo(object):
-            pass
-        mapper(Foo, self.table)
-        a1 = aliased(Foo)
-        sess = Session()
-        self.assert_compile(
-            sess.query(a1).distinct(a1.a),
-            "SELECT DISTINCT ON (t_1.a) t_1.id AS t_1_id, "
-            "t_1.a AS t_1_a, t_1.b AS t_1_b FROM t AS t_1"
-        )
-
-    def test_distinct_on_subquery_anon(self):
-
-        sq = select([self.table]).alias()
-        q = select([self.table.c.id, sq.c.id]).\
-                    distinct(sq.c.id).\
-                    where(self.table.c.id == sq.c.id)
-
-        self.assert_compile(
-            q,
-            "SELECT DISTINCT ON (anon_1.id) t.id, anon_1.id "
-            "FROM t, (SELECT t.id AS id, t.a AS a, t.b "
-            "AS b FROM t) AS anon_1 WHERE t.id = anon_1.id"
-            )
-
-    def test_distinct_on_subquery_named(self):
-        sq = select([self.table]).alias('sq')
-        q = select([self.table.c.id, sq.c.id]).\
-                    distinct(sq.c.id).\
-                    where(self.table.c.id == sq.c.id)
-        self.assert_compile(
-            q,
-            "SELECT DISTINCT ON (sq.id) t.id, sq.id "
-            "FROM t, (SELECT t.id AS id, t.a AS a, "
-            "t.b AS b FROM t) AS sq WHERE t.id = sq.id"
-            )
-
-class ReflectionTest(fixtures.TestBase):
-    __only_on__ = 'postgresql'
-
-    @testing.fails_if(('postgresql', '<', (8, 4)),
-            "newer query is bypassed due to unsupported SQL functions")
-    @testing.provide_metadata
-    def test_reflected_primary_key_order(self):
-        meta1 = self.metadata
-        subject = Table('subject', meta1,
-                        Column('p1', Integer, primary_key=True),
-                        Column('p2', Integer, primary_key=True),
-                        PrimaryKeyConstraint('p2', 'p1')
-                        )
-        meta1.create_all()
-        meta2 = MetaData(testing.db)
-        subject = Table('subject', meta2, autoload=True)
-        eq_(subject.primary_key.columns.keys(), [u'p2', u'p1'])
-
-    @testing.provide_metadata
-    def test_pg_weirdchar_reflection(self):
-        meta1 = self.metadata
-        subject = Table('subject', meta1, Column('id$', Integer,
-                        primary_key=True))
-        referer = Table('referer', meta1, Column('id', Integer,
-                        primary_key=True), Column('ref', Integer,
-                        ForeignKey('subject.id$')))
-        meta1.create_all()
-        meta2 = MetaData(testing.db)
-        subject = Table('subject', meta2, autoload=True)
-        referer = Table('referer', meta2, autoload=True)
-        self.assert_((subject.c['id$']
-                     == referer.c.ref).compare(
-                        subject.join(referer).onclause))
-
-    @testing.provide_metadata
-    def test_renamed_sequence_reflection(self):
-        metadata = self.metadata
-        t = Table('t', metadata, Column('id', Integer, primary_key=True))
-        metadata.create_all()
-        m2 = MetaData(testing.db)
-        t2 = Table('t', m2, autoload=True, implicit_returning=False)
-        eq_(t2.c.id.server_default.arg.text,
-            "nextval('t_id_seq'::regclass)")
-        r = t2.insert().execute()
-        eq_(r.inserted_primary_key, [1])
-        testing.db.connect().execution_options(autocommit=True).\
-                execute('alter table t_id_seq rename to foobar_id_seq')
-        m3 = MetaData(testing.db)
-        t3 = Table('t', m3, autoload=True, implicit_returning=False)
-        eq_(t3.c.id.server_default.arg.text,
-            "nextval('foobar_id_seq'::regclass)")
-        r = t3.insert().execute()
-        eq_(r.inserted_primary_key, [2])
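-        # (the server default is reflected via ::regclass, so it tracks the
-        # sequence under its current name after the rename)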
-
-    @testing.provide_metadata
-    def test_renamed_pk_reflection(self):
-        metadata = self.metadata
-        t = Table('t', metadata, Column('id', Integer, primary_key=True))
-        metadata.create_all()
-        testing.db.connect().execution_options(autocommit=True).\
-            execute('alter table t rename id to t_id')
-        m2 = MetaData(testing.db)
-        t2 = Table('t', m2, autoload=True)
-        eq_([c.name for c in t2.primary_key], ['t_id'])
-
-    @testing.provide_metadata
-    def test_schema_reflection(self):
-        """note: this test requires that the 'test_schema' schema be
-        separate and accessible by the test user"""
-
-        meta1 = self.metadata
-
-        users = Table('users', meta1, Column('user_id', Integer,
-                      primary_key=True), Column('user_name',
-                      String(30), nullable=False), schema='test_schema')
-        addresses = Table(
-            'email_addresses',
-            meta1,
-            Column('address_id', Integer, primary_key=True),
-            Column('remote_user_id', Integer,
-                   ForeignKey(users.c.user_id)),
-            Column('email_address', String(20)),
-            schema='test_schema',
-            )
-        meta1.create_all()
-        meta2 = MetaData(testing.db)
-        addresses = Table('email_addresses', meta2, autoload=True,
-                          schema='test_schema')
-        users = Table('users', meta2, mustexist=True,
-                      schema='test_schema')
-        j = join(users, addresses)
-        self.assert_((users.c.user_id
-                     == addresses.c.remote_user_id).compare(j.onclause))
-
-    @testing.provide_metadata
-    def test_schema_reflection_2(self):
-        meta1 = self.metadata
-        subject = Table('subject', meta1, Column('id', Integer,
-                        primary_key=True))
-        referer = Table('referer', meta1, Column('id', Integer,
-                        primary_key=True), Column('ref', Integer,
-                        ForeignKey('subject.id')), schema='test_schema')
-        meta1.create_all()
-        meta2 = MetaData(testing.db)
-        subject = Table('subject', meta2, autoload=True)
-        referer = Table('referer', meta2, schema='test_schema',
-                        autoload=True)
-        self.assert_((subject.c.id
-                     == referer.c.ref).compare(
-                        subject.join(referer).onclause))
-
-    @testing.provide_metadata
-    def test_schema_reflection_3(self):
-        meta1 = self.metadata
-        subject = Table('subject', meta1, Column('id', Integer,
-                        primary_key=True), schema='test_schema_2')
-        referer = Table('referer', meta1, Column('id', Integer,
-                        primary_key=True), Column('ref', Integer,
-                        ForeignKey('test_schema_2.subject.id')),
-                        schema='test_schema')
-        meta1.create_all()
-        meta2 = MetaData(testing.db)
-        subject = Table('subject', meta2, autoload=True,
-                        schema='test_schema_2')
-        referer = Table('referer', meta2, schema='test_schema',
-                        autoload=True)
-        self.assert_((subject.c.id
-                     == referer.c.ref).compare(
-                        subject.join(referer).onclause))
-
-    @testing.provide_metadata
-    def test_uppercase_lowercase_table(self):
-        metadata = self.metadata
-
-        a_table = Table('a', metadata, Column('x', Integer))
-        A_table = Table('A', metadata, Column('x', Integer))
-
-        a_table.create()
-        assert testing.db.has_table("a")
-        assert not testing.db.has_table("A")
-        A_table.create(checkfirst=True)
-        assert testing.db.has_table("A")
-
-    def test_uppercase_lowercase_sequence(self):
-
-        a_seq = Sequence('a')
-        A_seq = Sequence('A')
-
-        a_seq.create(testing.db)
-        assert testing.db.dialect.has_sequence(testing.db, "a")
-        assert not testing.db.dialect.has_sequence(testing.db, "A")
-        A_seq.create(testing.db, checkfirst=True)
-        assert testing.db.dialect.has_sequence(testing.db, "A")
-
-        a_seq.drop(testing.db)
-        A_seq.drop(testing.db)
-
-    def test_schema_reflection_multi_search_path(self):
-        """test the 'set the same schema' rule when
-        multiple schemas/search paths are in effect."""
-
-        db = engines.testing_engine()
-        conn = db.connect()
-        trans = conn.begin()
-        try:
-            conn.execute("set search_path to test_schema_2, "
-                                "test_schema, public")
-            conn.dialect.default_schema_name = "test_schema_2"
-
-            conn.execute("""
-            CREATE TABLE test_schema.some_table (
-                id SERIAL not null primary key
-            )
-            """)
-
-            conn.execute("""
-            CREATE TABLE test_schema_2.some_other_table (
-                id SERIAL not null primary key,
-                sid INTEGER REFERENCES test_schema.some_table(id)
-            )
-            """)
-
-            m1 = MetaData()
-
-            t2_schema = Table('some_other_table',
-                                m1,
-                                schema="test_schema_2",
-                                autoload=True,
-                                autoload_with=conn)
-            t1_schema = Table('some_table',
-                                m1,
-                                schema="test_schema",
-                                autoload=True,
-                                autoload_with=conn)
-
-            t2_no_schema = Table('some_other_table',
-                                m1,
-                                autoload=True,
-                                autoload_with=conn)
-
-            t1_no_schema = Table('some_table',
-                                m1,
-                                autoload=True,
-                                autoload_with=conn)
-
-            # this is expected: "test_schema" is in the search path
-            # and might as well be the default too, so there is no
-            # reason to assign a "schema" to the Table.
-            assert t2_schema.c.sid.references(
-                                t1_no_schema.c.id)
-
-            assert t2_no_schema.c.sid.references(
-                                t1_no_schema.c.id)
-
-        finally:
-            trans.rollback()
-            conn.close()
-            db.dispose()
-
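-    # A sketch of the rule exercised above: when the referenced schema
-    # ("test_schema" here) is on the search path, reflection treats it
-    # as schema-less, so schema-qualified and unqualified reflection
-    # both resolve foreign keys to the same schema-less Table.
-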
-    @testing.provide_metadata
-    def test_index_reflection(self):
-        """ Reflecting partial & expression-based indexes should warn
-        """
-
-        metadata = self.metadata
-
-        t1 = Table('party', metadata, Column('id', String(10),
-                   nullable=False), Column('name', String(20),
-                   index=True), Column('aname', String(20)))
-        metadata.create_all()
-        testing.db.execute("""
-          create index idx1 on party ((id || name))
-        """)
-        testing.db.execute("""
-          create unique index idx2 on party (id) where name = 'test'
-        """)
-        testing.db.execute("""
-            create index idx3 on party using btree
-                (lower(name::text), lower(aname::text))
-        """)
-
-        def go():
-            m2 = MetaData(testing.db)
-            t2 = Table('party', m2, autoload=True)
-            assert len(t2.indexes) == 2
-
-            # Make sure the indexes are in the order we expect
-
-            tmp = [(idx.name, idx) for idx in t2.indexes]
-            tmp.sort()
-            r1, r2 = [idx[1] for idx in tmp]
-            assert r1.name == 'idx2'
-            assert r1.unique is True
-            assert r2.unique is False
-            assert [t2.c.id] == r1.columns
-            assert [t2.c.name] == r2.columns
-
-        testing.assert_warnings(go,
-            [
-                'Skipped unsupported reflection of '
-                'expression-based index idx1',
-                'Predicate of partial index idx2 ignored during '
-                'reflection',
-                'Skipped unsupported reflection of '
-                'expression-based index idx3'
-            ])
-
-    @testing.provide_metadata
-    def test_index_reflection_modified(self):
-        """reflect indexes when a column name has changed - PG 9
-        does not update the name of the column in the index def.
-        [ticket:2141]
-
-        """
-
-        metadata = self.metadata
-
-        t1 = Table('t', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('x', Integer)
-        )
-        metadata.create_all()
-        conn = testing.db.connect().execution_options(autocommit=True)
-        conn.execute("CREATE INDEX idx1 ON t (x)")
-        conn.execute("ALTER TABLE t RENAME COLUMN x to y")
-
-        ind = testing.db.dialect.get_indexes(conn, "t", None)
-        eq_(ind, [{'unique': False, 'column_names': [u'y'], 'name': u'idx1'}])
-        conn.close()
-
-class CustomTypeReflectionTest(fixtures.TestBase):
-
-    class CustomType(object):
-        def __init__(self, arg1=None, arg2=None):
-            self.arg1 = arg1
-            self.arg2 = arg2
-
-    ischema_names = None
-
-    def setup(self):
-        ischema_names = postgresql.PGDialect.ischema_names
-        postgresql.PGDialect.ischema_names = ischema_names.copy()
-        self.ischema_names = ischema_names
-
-    def teardown(self):
-        postgresql.PGDialect.ischema_names = self.ischema_names
-        self.ischema_names = None
-
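-    # _get_column_info() is an internal dialect hook (note the leading
-    # underscore); it parses a type string such as
-    # "my_custom_type(ARG1, ARG2)" into the registered type plus its
-    # arguments, which is what the loop below asserts.
-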
-    def _assert_reflected(self, dialect):
-        for sch, args in [
-            ('my_custom_type', (None, None)),
-            ('my_custom_type()', (None, None)),
-            ('my_custom_type(ARG1)', ('ARG1', None)),
-            ('my_custom_type(ARG1, ARG2)', ('ARG1', 'ARG2')),
-        ]:
-            column_info = dialect._get_column_info(
-                'colname', sch, None, False,
-                {}, {}, 'public')
-            assert isinstance(column_info['type'], self.CustomType)
-            eq_(column_info['type'].arg1, args[0])
-            eq_(column_info['type'].arg2, args[1])
-
-    def test_clslevel(self):
-        postgresql.PGDialect.ischema_names['my_custom_type'] = self.CustomType
-        dialect = postgresql.PGDialect()
-        self._assert_reflected(dialect)
-
-    def test_instancelevel(self):
-        dialect = postgresql.PGDialect()
-        dialect.ischema_names = dialect.ischema_names.copy()
-        dialect.ischema_names['my_custom_type'] = self.CustomType
-        self._assert_reflected(dialect)
-
-
-class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
-
-    __only_on__ = 'postgresql'
-
-    @testing.provide_metadata
-    def test_date_reflection(self):
-        metadata = self.metadata
-        t1 = Table('pgdate', metadata, Column('date1',
-                   DateTime(timezone=True)), Column('date2',
-                   DateTime(timezone=False)))
-        metadata.create_all()
-        m2 = MetaData(testing.db)
-        t2 = Table('pgdate', m2, autoload=True)
-        assert t2.c.date1.type.timezone is True
-        assert t2.c.date2.type.timezone is False
-
-    @testing.fails_on('+zxjdbc',
-                      'The JDBC driver handles the version parsing')
-    def test_version_parsing(self):
-
-        class MockConn(object):
-
-            def __init__(self, res):
-                self.res = res
-
-            def execute(self, stmt):
-                return self
-
-            def scalar(self):
-                return self.res
-
-        for string, version in \
-            [('PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by '
-             'GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)', (8, 3,
-             8)),
-             ('PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, '
-             'compiled by GCC gcc (GCC) 4.4.2, 64-bit', (8, 5)),
-             ('EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, '
-             'compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), '
-             '64-bit', (9, 1, 2))]:
-            eq_(testing.db.dialect._get_server_version_info(MockConn(string)),
-                version)
-
-    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
-    def test_psycopg2_version(self):
-        v = testing.db.dialect.psycopg2_version
-        assert testing.db.dialect.dbapi.__version__.\
-                    startswith(".".join(str(x) for x in v))
-
-    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
-    def test_notice_logging(self):
-        log = logging.getLogger('sqlalchemy.dialects.postgresql')
-        buf = logging.handlers.BufferingHandler(100)
-        lev = log.level
-        log.addHandler(buf)
-        log.setLevel(logging.INFO)
-        try:
-            conn = testing.db.connect()
-            trans = conn.begin()
-            try:
-                conn.execute('create table foo (id serial primary key)')
-            finally:
-                trans.rollback()
-        finally:
-            log.removeHandler(buf)
-            log.setLevel(lev)
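-        # PostgreSQL emits NOTICE messages for the implicit sequence and
-        # index created behind a SERIAL primary key; the psycopg2 dialect
-        # relays them to the "sqlalchemy.dialects.postgresql" logger,
-        # which is what the buffering handler captures here.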
-        msgs = ' '.join(b.msg for b in buf.buffer)
-        assert 'will create implicit sequence' in msgs
-        assert 'will create implicit index' in msgs
-
-    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
-    @engines.close_open_connections
-    def test_client_encoding(self):
-        c = testing.db.connect()
-        current_encoding = c.connection.connection.encoding
-        c.close()
-
-        # attempt to use an encoding that's not
-        # already set
-        if current_encoding == 'UTF8':
-            test_encoding = 'LATIN1'
-        else:
-            test_encoding = 'UTF8'
-
-        e = engines.testing_engine(
-                        options={'client_encoding': test_encoding}
-                    )
-        c = e.connect()
-        eq_(c.connection.connection.encoding, test_encoding)
-
-    @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
-    @engines.close_open_connections
-    def test_autocommit_isolation_level(self):
-        extensions = __import__('psycopg2.extensions').extensions
-
-        c = testing.db.connect()
-        c = c.execution_options(isolation_level='AUTOCOMMIT')
-        eq_(c.connection.connection.isolation_level,
-            extensions.ISOLATION_LEVEL_AUTOCOMMIT)
-
-    @testing.fails_on('+zxjdbc',
-                      "Can't infer the SQL type to use for an instance "
-                      "of org.python.core.PyObjectDerived.")
-    @testing.fails_on('+pg8000', "Can't determine correct type.")
-    def test_extract(self):
-        fivedaysago = datetime.datetime.now() \
-            - datetime.timedelta(days=5)
-        for field, exp in [('year', fivedaysago.year),
-                           ('month', fivedaysago.month),
-                           ('day', fivedaysago.day)]:
-            r = testing.db.execute(select([extract(field, func.now()
-                                   + datetime.timedelta(days=-5))])).scalar()
-            eq_(r, exp)
-
-    def test_checksfor_sequence(self):
-        meta1 = MetaData(testing.db)
-        seq = Sequence('fooseq')
-        t = Table('mytable', meta1, Column('col1', Integer,
-                  seq))
-        seq.drop()
-        try:
-            testing.db.execute('CREATE SEQUENCE fooseq')
-            t.create(checkfirst=True)
-        finally:
-            t.drop(checkfirst=True)
-
-    def test_schema_roundtrips(self):
-        meta = MetaData(testing.db)
-        users = Table('users', meta, Column('id', Integer,
-                      primary_key=True), Column('name', String(50)),
-                      schema='test_schema')
-        users.create()
-        try:
-            users.insert().execute(id=1, name='name1')
-            users.insert().execute(id=2, name='name2')
-            users.insert().execute(id=3, name='name3')
-            users.insert().execute(id=4, name='name4')
-            eq_(users.select().where(users.c.name == 'name2').
-                execute().fetchall(), [(2, 'name2')])
-            eq_(users.select(use_labels=True).
-                where(users.c.name == 'name2').execute().fetchall(),
-                [(2, 'name2')])
-            users.delete().where(users.c.id == 3).execute()
-            eq_(users.select().where(users.c.name == 'name3').
-                execute().fetchall(), [])
-            users.update().where(users.c.name == 'name4').\
-                execute(name='newname')
-            eq_(users.select(use_labels=True).where(users.c.id == 4).
-                execute().fetchall(), [(4, 'newname')])
-        finally:
-            users.drop()
-
-    def test_preexecute_passivedefault(self):
-        """test that when we get a primary key column back from
-        reflecting a table which has a default value on it, we pre-
-        execute that DefaultClause upon insert."""
-
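-        # Reflection picks up the SERIAL column's sequence default;
-        # roughly, the dialect pre-executes something like
-        #   select nextval('speedy_users_speedy_user_id_seq')
-        # (name assumed per PG's SERIAL naming convention) to obtain
-        # the primary key value before the INSERT.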
-        try:
-            meta = MetaData(testing.db)
-            testing.db.execute("""
-             CREATE TABLE speedy_users
-             (
-                 speedy_user_id   SERIAL     PRIMARY KEY,
-
-                 user_name        VARCHAR    NOT NULL,
-                 user_password    VARCHAR    NOT NULL
-             );
-            """)
-            t = Table('speedy_users', meta, autoload=True)
-            r = t.insert().execute(user_name='user',
-                                   user_password='lala')
-            assert r.inserted_primary_key == [1]
-            l = t.select().execute().fetchall()
-            assert l == [(1, 'user', 'lala')]
-        finally:
-            testing.db.execute('drop table speedy_users')
-
-
-    @testing.fails_on('+zxjdbc', 'psycopg2/pg8000 specific assertion')
-    @testing.fails_on('pypostgresql',
-                      'psycopg2/pg8000 specific assertion')
-    def test_numeric_raise(self):
-        stmt = text("select cast('hi' as char) as hi",
-                    typemap={'hi': Numeric})
-        assert_raises(exc.InvalidRequestError, testing.db.execute, stmt)
-
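-    # Note: SmallInteger stays plain SMALLINT below; PostgreSQL only
-    # introduced SMALLSERIAL in 9.2, so no serial form is emitted for it.
-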
-    def test_serial_integer(self):
-        for type_, expected in [
-            (Integer, 'SERIAL'),
-            (BigInteger, 'BIGSERIAL'),
-            (SmallInteger, 'SMALLINT'),
-            (postgresql.INTEGER, 'SERIAL'),
-            (postgresql.BIGINT, 'BIGSERIAL'),
-        ]:
-            m = MetaData()
-
-            t = Table('t', m, Column('c', type_, primary_key=True))
-            ddl_compiler = testing.db.dialect.ddl_compiler(testing.db.dialect, schema.CreateTable(t))
-            eq_(
-                ddl_compiler.get_column_specification(t.c.c),
-                "c %s NOT NULL" % expected
-            )
-
-class TimezoneTest(fixtures.TestBase):
-
-    """Test timezone-aware datetimes.
-
-    psycopg will return a datetime with a tzinfo attached to it, if
-    postgresql returns it.  python then will not let you compare a
-    datetime with a tzinfo to a datetime that doesnt have one.  this
-    test illustrates two ways to have datetime types with and without
-    timezone info. """
-
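-    # A sketch of the limitation (not executed here): ordering
-    # comparisons such as "aware_dt < naive_dt" raise TypeError in
-    # Python, hence the timezone=True / timezone=False split below.
-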
-    __only_on__ = 'postgresql'
-
-    @classmethod
-    def setup_class(cls):
-        global tztable, notztable, metadata
-        metadata = MetaData(testing.db)
-
-        # current_timestamp() in postgresql is assumed to return
-        # TIMESTAMP WITH TIME ZONE
-
-        tztable = Table('tztable', metadata, Column('id', Integer,
-                        primary_key=True), Column('date',
-                        DateTime(timezone=True),
-                        onupdate=func.current_timestamp()),
-                        Column('name', String(20)))
-        notztable = Table('notztable', metadata, Column('id', Integer,
-                          primary_key=True), Column('date',
-                          DateTime(timezone=False),
-                          onupdate=cast(func.current_timestamp(),
-                          DateTime(timezone=False))), Column('name',
-                          String(20)))
-        metadata.create_all()
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-
-    @testing.fails_on('postgresql+zxjdbc',
-                      "XXX: postgresql+zxjdbc doesn't give a tzinfo back")
-    def test_with_timezone(self):
-
-        # get a date with a tzinfo
-
-        somedate = \
-            testing.db.connect().scalar(func.current_timestamp().select())
-        assert somedate.tzinfo
-        tztable.insert().execute(id=1, name='row1', date=somedate)
-        row = select([tztable.c.date],
-                     tztable.c.id == 1).execute().first()
-        eq_(row[0], somedate)
-        eq_(somedate.tzinfo.utcoffset(somedate),
-            row[0].tzinfo.utcoffset(row[0]))
-        result = tztable.update(tztable.c.id == 1).\
-            returning(tztable.c.date).execute(name='newname')
-        row = result.first()
-        assert row[0] >= somedate
-
-    def test_without_timezone(self):
-
-        # get a date without a tzinfo
-
-        somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
-        assert not somedate.tzinfo
-        notztable.insert().execute(id=1, name='row1', date=somedate)
-        row = select([notztable.c.date],
-                     notztable.c.id == 1).execute().first()
-        eq_(row[0], somedate)
-        eq_(row[0].tzinfo, None)
-        result = notztable.update(notztable.c.id == 1).\
-            returning(notztable.c.date).execute(name='newname')
-        row = result.first()
-        assert row[0] >= somedate
-
-class TimePrecisionTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __dialect__ = postgresql.dialect()
-
-    def test_compile(self):
-        for type_, expected in [
-            (postgresql.TIME(), 'TIME WITHOUT TIME ZONE'),
-            (postgresql.TIME(precision=5), 'TIME(5) WITHOUT TIME ZONE'
-             ),
-            (postgresql.TIME(timezone=True, precision=5),
-             'TIME(5) WITH TIME ZONE'),
-            (postgresql.TIMESTAMP(), 'TIMESTAMP WITHOUT TIME ZONE'),
-            (postgresql.TIMESTAMP(precision=5),
-             'TIMESTAMP(5) WITHOUT TIME ZONE'),
-            (postgresql.TIMESTAMP(timezone=True, precision=5),
-             'TIMESTAMP(5) WITH TIME ZONE'),
-            ]:
-            self.assert_compile(type_, expected)
-
-    @testing.only_on('postgresql', 'DB specific feature')
-    @testing.provide_metadata
-    def test_reflection(self):
-        metadata = self.metadata
-        t1 = Table(
-            't1',
-            metadata,
-            Column('c1', postgresql.TIME()),
-            Column('c2', postgresql.TIME(precision=5)),
-            Column('c3', postgresql.TIME(timezone=True, precision=5)),
-            Column('c4', postgresql.TIMESTAMP()),
-            Column('c5', postgresql.TIMESTAMP(precision=5)),
-            Column('c6', postgresql.TIMESTAMP(timezone=True,
-                   precision=5)),
-            )
-        t1.create()
-        m2 = MetaData(testing.db)
-        t2 = Table('t1', m2, autoload=True)
-        eq_(t2.c.c1.type.precision, None)
-        eq_(t2.c.c2.type.precision, 5)
-        eq_(t2.c.c3.type.precision, 5)
-        eq_(t2.c.c4.type.precision, None)
-        eq_(t2.c.c5.type.precision, 5)
-        eq_(t2.c.c6.type.precision, 5)
-        eq_(t2.c.c1.type.timezone, False)
-        eq_(t2.c.c2.type.timezone, False)
-        eq_(t2.c.c3.type.timezone, True)
-        eq_(t2.c.c4.type.timezone, False)
-        eq_(t2.c.c5.type.timezone, False)
-        eq_(t2.c.c6.type.timezone, True)
-
-class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
-
-    __only_on__ = 'postgresql'
-
-    __unsupported_on__ = 'postgresql+pg8000', 'postgresql+zxjdbc'
-
-    @classmethod
-    def define_tables(cls, metadata):
-
-        class ProcValue(TypeDecorator):
-            impl = postgresql.ARRAY(Integer, dimensions=2)
-
-            def process_bind_param(self, value, dialect):
-                if value is None:
-                    return None
-                return [
-                    [x + 5 for x in v]
-                    for v in value
-                ]
-
-            def process_result_value(self, value, dialect):
-                if value is None:
-                    return None
-                return [
-                    [x - 7 for x in v]
-                    for v in value
-                ]
-
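-        # Net effect of ProcValue: each element gains 5 going in and
-        # loses 7 coming out, so a round trip shifts values by -2
-        # (exercised by test_dimension below).
-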
-        Table('arrtable', metadata,
-              Column('id', Integer, primary_key=True),
-              Column('intarr', postgresql.ARRAY(Integer)),
-              Column('strarr', postgresql.ARRAY(Unicode())),
-              Column('dimarr', ProcValue)
-              )
-
-        Table('dim_arrtable', metadata,
-              Column('id', Integer, primary_key=True),
-              Column('intarr', postgresql.ARRAY(Integer, dimensions=1)),
-              Column('strarr', postgresql.ARRAY(Unicode(), dimensions=1)),
-              Column('dimarr', ProcValue)
-              )
-
-    def _fixture_456(self, table):
-        testing.db.execute(
-                table.insert(),
-                intarr=[4, 5, 6]
-        )
-
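-    # PostgreSQL array subscripts are 1-based: with the [4, 5, 6]
-    # fixture above, intarr[2] addresses 5 and intarr[2:3] addresses
-    # [5, 6] in the getitem tests below.
-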
-    def test_reflect_array_column(self):
-        metadata2 = MetaData(testing.db)
-        tbl = Table('arrtable', metadata2, autoload=True)
-        assert isinstance(tbl.c.intarr.type, postgresql.ARRAY)
-        assert isinstance(tbl.c.strarr.type, postgresql.ARRAY)
-        assert isinstance(tbl.c.intarr.type.item_type, Integer)
-        assert isinstance(tbl.c.strarr.type.item_type, String)
-
-    def test_insert_array(self):
-        arrtable = self.tables.arrtable
-        arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
-                                  u'def'])
-        results = arrtable.select().execute().fetchall()
-        eq_(len(results), 1)
-        eq_(results[0]['intarr'], [1, 2, 3])
-        eq_(results[0]['strarr'], ['abc', 'def'])
-
-    def test_array_where(self):
-        arrtable = self.tables.arrtable
-        arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
-                                  u'def'])
-        arrtable.insert().execute(intarr=[4, 5, 6], strarr=u'ABC')
-        results = arrtable.select().where(arrtable.c.intarr == [1, 2,
-                3]).execute().fetchall()
-        eq_(len(results), 1)
-        eq_(results[0]['intarr'], [1, 2, 3])
-
-    def test_array_concat(self):
-        arrtable = self.tables.arrtable
-        arrtable.insert().execute(intarr=[1, 2, 3],
-                    strarr=[u'abc', u'def'])
-        results = select([arrtable.c.intarr + [4, 5,
-                         6]]).execute().fetchall()
-        eq_(len(results), 1)
-        eq_(results[0][0], [1, 2, 3, 4, 5, 6])
-
-    def test_array_subtype_resultprocessor(self):
-        arrtable = self.tables.arrtable
-        arrtable.insert().execute(intarr=[4, 5, 6],
-                                  strarr=[[u'm\xe4\xe4'], [u'm\xf6\xf6']])
-        arrtable.insert().execute(intarr=[1, 2, 3],
-                                  strarr=[u'm\xe4\xe4', u'm\xf6\xf6'])
-        results = \
-            arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
-        eq_(len(results), 2)
-        eq_(results[0]['strarr'], [u'm\xe4\xe4', u'm\xf6\xf6'])
-        eq_(results[1]['strarr'], [[u'm\xe4\xe4'], [u'm\xf6\xf6']])
-
-    def test_array_literal(self):
-        eq_(
-            testing.db.scalar(
-                select([
-                    postgresql.array([1, 2]) + postgresql.array([3, 4, 5])
-                ])
-                ), [1, 2, 3, 4, 5]
-        )
-
-    def test_array_getitem_single_type(self):
-        arrtable = self.tables.arrtable
-        is_(arrtable.c.intarr[1].type._type_affinity, Integer)
-        is_(arrtable.c.strarr[1].type._type_affinity, String)
-
-    def test_array_getitem_slice_type(self):
-        arrtable = self.tables.arrtable
-        is_(arrtable.c.intarr[1:3].type._type_affinity, postgresql.ARRAY)
-        is_(arrtable.c.strarr[1:3].type._type_affinity, postgresql.ARRAY)
-
-    def test_array_getitem_single_exec(self):
-        arrtable = self.tables.arrtable
-        self._fixture_456(arrtable)
-        eq_(
-            testing.db.scalar(select([arrtable.c.intarr[2]])),
-            5
-        )
-        testing.db.execute(
-            arrtable.update().values({arrtable.c.intarr[2]: 7})
-        )
-        eq_(
-            testing.db.scalar(select([arrtable.c.intarr[2]])),
-            7
-        )
-
-    def test_undim_array_empty(self):
-        arrtable = self.tables.arrtable
-        self._fixture_456(arrtable)
-        eq_(
-            testing.db.scalar(
-                select([arrtable.c.intarr]).
-                    where(arrtable.c.intarr.contains([]))
-            ),
-            [4, 5, 6]
-        )
-
-    def test_array_getitem_slice_exec(self):
-        arrtable = self.tables.arrtable
-        testing.db.execute(
-            arrtable.insert(),
-            intarr=[4, 5, 6],
-            strarr=[u'abc', u'def']
-        )
-        eq_(
-            testing.db.scalar(select([arrtable.c.intarr[2:3]])),
-            [5, 6]
-        )
-        testing.db.execute(
-            arrtable.update().values({arrtable.c.intarr[2:3]: [7, 8]})
-        )
-        eq_(
-            testing.db.scalar(select([arrtable.c.intarr[2:3]])),
-            [7, 8]
-        )
-
-
-    def _test_undim_array_contains_typed_exec(self, struct):
-        arrtable = self.tables.arrtable
-        self._fixture_456(arrtable)
-        eq_(
-            testing.db.scalar(
-                select([arrtable.c.intarr]).
-                    where(arrtable.c.intarr.contains(struct([4, 5])))
-            ),
-            [4, 5, 6]
-        )
-
-    def test_undim_array_contains_set_exec(self):
-        self._test_undim_array_contains_typed_exec(set)
-
-    def test_undim_array_contains_list_exec(self):
-        self._test_undim_array_contains_typed_exec(list)
-
-    def test_undim_array_contains_generator_exec(self):
-        self._test_undim_array_contains_typed_exec(
-                    lambda elem: (x for x in elem))
-
-    def _test_dim_array_contains_typed_exec(self, struct):
-        dim_arrtable = self.tables.dim_arrtable
-        self._fixture_456(dim_arrtable)
-        eq_(
-            testing.db.scalar(
-                select([dim_arrtable.c.intarr]).
-                    where(dim_arrtable.c.intarr.contains(struct([4, 5])))
-            ),
-            [4, 5, 6]
-        )
-
-    def test_dim_array_contains_set_exec(self):
-        self._test_dim_array_contains_typed_exec(set)
-
-    def test_dim_array_contains_list_exec(self):
-        self._test_dim_array_contains_typed_exec(list)
-
-    def test_dim_array_contains_generator_exec(self):
-        self._test_dim_array_contains_typed_exec(lambda elem: (x for x in elem))
-
-    def test_array_contained_by_exec(self):
-        arrtable = self.tables.arrtable
-        with testing.db.connect() as conn:
-            conn.execute(
-                arrtable.insert(),
-                intarr=[6, 5, 4]
-            )
-            eq_(
-                conn.scalar(
-                    select([arrtable.c.intarr.contained_by([4, 5, 6, 7])])
-                ),
-                True
-            )
-
-    def test_array_overlap_exec(self):
-        arrtable = self.tables.arrtable
-        with testing.db.connect() as conn:
-            conn.execute(
-                arrtable.insert(),
-                intarr=[4, 5, 6]
-            )
-            eq_(
-                conn.scalar(
-                    select([arrtable.c.intarr]).
-                        where(arrtable.c.intarr.overlap([7, 6]))
-                ),
-                [4, 5, 6]
-            )
-
-    def test_array_any_exec(self):
-        arrtable = self.tables.arrtable
-        with testing.db.connect() as conn:
-            conn.execute(
-                arrtable.insert(),
-                intarr=[4, 5, 6]
-            )
-            eq_(
-                conn.scalar(
-                    select([arrtable.c.intarr]).
-                        where(postgresql.Any(5, arrtable.c.intarr))
-                ),
-                [4, 5, 6]
-            )
-
-    def test_array_all_exec(self):
-        arrtable = self.tables.arrtable
-        with testing.db.connect() as conn:
-            conn.execute(
-                arrtable.insert(),
-                intarr=[4, 5, 6]
-            )
-            eq_(
-                conn.scalar(
-                    select([arrtable.c.intarr]).
-                        where(arrtable.c.intarr.all(4, operator=operators.le))
-                ),
-                [4, 5, 6]
-            )
-
-
-    @testing.provide_metadata
-    def test_tuple_flag(self):
-        metadata = self.metadata
-
-        t1 = Table('t1', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('data', postgresql.ARRAY(String(5), as_tuple=True)),
-            Column('data2', postgresql.ARRAY(Numeric(asdecimal=False), as_tuple=True)),
-        )
-        metadata.create_all()
-        testing.db.execute(t1.insert(), id=1, data=["1", "2", "3"],
-                           data2=[5.4, 5.6])
-        testing.db.execute(t1.insert(), id=2, data=["4", "5", "6"], data2=[1.0])
-        testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]],
-                        data2=[[5.4, 5.6], [1.0, 1.1]])
-
-        r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall()
-        eq_(
-            r,
-            [
-                (1, ('1', '2', '3'), (5.4, 5.6)),
-                (2, ('4', '5', '6'), (1.0,)),
-                (3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1)))
-            ]
-        )
-        # hashable
-        eq_(
-            set(row[1] for row in r),
-            set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
-        )
-
-    def test_dimension(self):
-        arrtable = self.tables.arrtable
-        testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
-        eq_(
-            testing.db.scalar(select([arrtable.c.dimarr])),
-            [[-1, 0, 1], [2, 3, 4]]
-        )
-
-class TimestampTest(fixtures.TestBase, AssertsExecutionResults):
-    __only_on__ = 'postgresql'
-
-    def test_timestamp(self):
-        engine = testing.db
-        connection = engine.connect()
-
-        s = select(["timestamp '2007-12-25'"])
-        result = connection.execute(s).first()
-        eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0))
-
-class ServerSideCursorsTest(fixtures.TestBase, AssertsExecutionResults):
-
-    __only_on__ = 'postgresql+psycopg2'
-
-    def _fixture(self, server_side_cursors):
-        self.engine = engines.testing_engine(
-                        options={'server_side_cursors': server_side_cursors}
-                    )
-        return self.engine
-
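-    # Precedence exercised below: a statement- or connection-level
-    # stream_results execution option overrides the engine-wide
-    # server_side_cursors flag in either direction.
-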
-    def tearDown(self):
-        engines.testing_reaper.close_all()
-        self.engine.dispose()
-
-    def test_global_string(self):
-        engine = self._fixture(True)
-        result = engine.execute('select 1')
-        assert result.cursor.name
-
-    def test_global_text(self):
-        engine = self._fixture(True)
-        result = engine.execute(text('select 1'))
-        assert result.cursor.name
-
-    def test_global_expr(self):
-        engine = self._fixture(True)
-        result = engine.execute(select([1]))
-        assert result.cursor.name
-
-    def test_global_off_explicit(self):
-        engine = self._fixture(False)
-        result = engine.execute(text('select 1'))
-
-        # It should be off globally ...
-
-        assert not result.cursor.name
-
-    def test_stmt_option(self):
-        engine = self._fixture(False)
-
-        s = select([1]).execution_options(stream_results=True)
-        result = engine.execute(s)
-
-        # ... but enabled for this one.
-
-        assert result.cursor.name
-
-
-    def test_conn_option(self):
-        engine = self._fixture(False)
-
-        # and this one
-        result = \
-            engine.connect().execution_options(stream_results=True).\
-                execute('select 1')
-        assert result.cursor.name
-
-    def test_stmt_enabled_conn_option_disabled(self):
-        engine = self._fixture(False)
-
-        s = select([1]).execution_options(stream_results=True)
-
-        # not this one
-        result = \
-            engine.connect().execution_options(stream_results=False).\
-                execute(s)
-        assert not result.cursor.name
-
-    def test_stmt_option_disabled(self):
-        engine = self._fixture(True)
-        s = select([1]).execution_options(stream_results=False)
-        result = engine.execute(s)
-        assert not result.cursor.name
-
-    def test_aliases_and_ss(self):
-        engine = self._fixture(False)
-        s1 = select([1]).execution_options(stream_results=True).alias()
-        result = engine.execute(s1)
-        assert result.cursor.name
-
-        # s1's options shouldn't affect s2 when s2 is used as a
-        # from_obj.
-        s2 = select([1], from_obj=s1)
-        result = engine.execute(s2)
-        assert not result.cursor.name
-
-    def test_for_update_expr(self):
-        engine = self._fixture(True)
-        s1 = select([1], for_update=True)
-        result = engine.execute(s1)
-        assert result.cursor.name
-
-    def test_for_update_string(self):
-        engine = self._fixture(True)
-        result = engine.execute('SELECT 1 FOR UPDATE')
-        assert result.cursor.name
-
-    def test_text_no_ss(self):
-        engine = self._fixture(False)
-        s = text('select 42')
-        result = engine.execute(s)
-        assert not result.cursor.name
-
-    def test_text_ss_option(self):
-        engine = self._fixture(False)
-        s = text('select 42').execution_options(stream_results=True)
-        result = engine.execute(s)
-        assert result.cursor.name
-
-    def test_roundtrip(self):
-        engine = self._fixture(True)
-        test_table = Table('test_table', MetaData(engine),
-                           Column('id', Integer, primary_key=True),
-                           Column('data', String(50)))
-        test_table.create(checkfirst=True)
-        try:
-            test_table.insert().execute(data='data1')
-            nextid = engine.execute(Sequence('test_table_id_seq'))
-            test_table.insert().execute(id=nextid, data='data2')
-            eq_(test_table.select().execute().fetchall(),
-                [(1, 'data1'), (2, 'data2')])
-            test_table.update().where(test_table.c.id == 2).\
-                values(data=test_table.c.data + ' updated').execute()
-            eq_(test_table.select().execute().fetchall(),
-                [(1, 'data1'), (2, 'data2 updated')])
-            test_table.delete().execute()
-            eq_(test_table.count().scalar(), 0)
-        finally:
-            test_table.drop(checkfirst=True)
-
-class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
-    """test DDL and reflection of PG-specific types """
-
-    __only_on__ = 'postgresql'
-    __excluded_on__ = (('postgresql', '<', (8, 3, 0)),)
-
-    @classmethod
-    def setup_class(cls):
-        global metadata, table
-        metadata = MetaData(testing.db)
-
-        # create these types so that we can issue
-        # special SQL92 INTERVAL syntax
-        class y2m(types.UserDefinedType, postgresql.INTERVAL):
-            def get_col_spec(self):
-                return "INTERVAL YEAR TO MONTH"
-
-        class d2s(types.UserDefinedType, postgresql.INTERVAL):
-            def get_col_spec(self):
-                return "INTERVAL DAY TO SECOND"
-
-        table = Table('sometable', metadata,
-            Column('id', postgresql.UUID, primary_key=True),
-            Column('flag', postgresql.BIT),
-            Column('bitstring', postgresql.BIT(4)),
-            Column('addr', postgresql.INET),
-            Column('addr2', postgresql.MACADDR),
-            Column('addr3', postgresql.CIDR),
-            Column('doubleprec', postgresql.DOUBLE_PRECISION),
-            Column('plain_interval', postgresql.INTERVAL),
-            Column('year_interval', y2m()),
-            Column('month_interval', d2s()),
-            Column('precision_interval', postgresql.INTERVAL(precision=3))
-        )
-
-        metadata.create_all()
-
-        # cheat so that the "strict type check"
-        # works
-        table.c.year_interval.type = postgresql.INTERVAL()
-        table.c.month_interval.type = postgresql.INTERVAL()
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-
-    def test_reflection(self):
-        m = MetaData(testing.db)
-        t = Table('sometable', m, autoload=True)
-
-        self.assert_tables_equal(table, t, strict_types=True)
-        assert t.c.plain_interval.type.precision is None
-        assert t.c.precision_interval.type.precision == 3
-        assert t.c.bitstring.type.length == 4
-
-    def test_bit_compile(self):
-        pairs = [(postgresql.BIT(), 'BIT(1)'),
-                 (postgresql.BIT(5), 'BIT(5)'),
-                 (postgresql.BIT(varying=True), 'BIT VARYING'),
-                 (postgresql.BIT(5, varying=True), 'BIT VARYING(5)'),
-                ]
-        for type_, expected in pairs:
-            self.assert_compile(type_, expected)
-
-    @testing.provide_metadata
-    def test_bit_reflection(self):
-        metadata = self.metadata
-        t1 = Table('t1', metadata,
-        Column('bit1', postgresql.BIT()),
-        Column('bit5', postgresql.BIT(5)),
-        Column('bitvarying', postgresql.BIT(varying=True)),
-        Column('bitvarying5', postgresql.BIT(5, varying=True)),
-        )
-        t1.create()
-        m2 = MetaData(testing.db)
-        t2 = Table('t1', m2, autoload=True)
-        eq_(t2.c.bit1.type.length, 1)
-        eq_(t2.c.bit1.type.varying, False)
-        eq_(t2.c.bit5.type.length, 5)
-        eq_(t2.c.bit5.type.varying, False)
-        eq_(t2.c.bitvarying.type.length, None)
-        eq_(t2.c.bitvarying.type.varying, True)
-        eq_(t2.c.bitvarying5.type.length, 5)
-        eq_(t2.c.bitvarying5.type.varying, True)
-
-class UUIDTest(fixtures.TestBase):
-    """Test the bind/return values of the UUID type."""
-
-    __only_on__ = 'postgresql'
-
-    @testing.requires.python25
-    @testing.fails_on('postgresql+zxjdbc',
-                      'column "data" is of type uuid but expression is of type character varying')
-    @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
-    def test_uuid_string(self):
-        import uuid
-        self._test_round_trip(
-            Table('utable', MetaData(),
-                Column('data', postgresql.UUID())
-            ),
-            str(uuid.uuid4()),
-            str(uuid.uuid4())
-        )
-
-    @testing.requires.python25
-    @testing.fails_on('postgresql+zxjdbc',
-                      'column "data" is of type uuid but expression is of type character varying')
-    @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
-    def test_uuid_uuid(self):
-        import uuid
-        self._test_round_trip(
-            Table('utable', MetaData(),
-                Column('data', postgresql.UUID(as_uuid=True))
-            ),
-            uuid.uuid4(),
-            uuid.uuid4()
-        )
-
-    def test_no_uuid_available(self):
-        from sqlalchemy.dialects.postgresql import base
-        uuid_type = base._python_UUID
-        base._python_UUID = None
-        try:
-            assert_raises(
-                NotImplementedError,
-                postgresql.UUID, as_uuid=True
-            )
-        finally:
-            base._python_UUID = uuid_type
-
-    def setup(self):
-        self.conn = testing.db.connect()
-        self.trans = self.conn.begin()
-
-    def teardown(self):
-        self.trans.rollback()
-        self.conn.close()
-
-    def _test_round_trip(self, utable, value1, value2):
-        utable.create(self.conn)
-        self.conn.execute(utable.insert(), {'data': value1})
-        self.conn.execute(utable.insert(), {'data': value2})
-        r = self.conn.execute(
-                select([utable.c.data]).
-                    where(utable.c.data != value1)
-                )
-        eq_(r.fetchone()[0], value2)
-        eq_(r.fetchone(), None)
-
-
-class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __only_on__ = 'postgresql'
-    __excluded_on__ = ('postgresql', '<', (8, 3, 0)),
-
-    @classmethod
-    def setup_class(cls):
-        global metadata, cattable, matchtable
-        metadata = MetaData(testing.db)
-        cattable = Table('cattable', metadata, Column('id', Integer,
-                         primary_key=True), Column('description',
-                         String(50)))
-        matchtable = Table('matchtable', metadata, Column('id',
-                           Integer, primary_key=True), Column('title',
-                           String(200)), Column('category_id', Integer,
-                           ForeignKey('cattable.id')))
-        metadata.create_all()
-        cattable.insert().execute([{'id': 1, 'description': 'Python'},
-                                  {'id': 2, 'description': 'Ruby'}])
-        matchtable.insert().execute([
-            {'id': 1, 'title': 'Agile Web Development with Rails',
-             'category_id': 2},
-            {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
-            {'id': 3, 'title': "Programming Matz's Ruby",
-             'category_id': 2},
-            {'id': 4, 'title': 'The Definitive Guide to Django',
-             'category_id': 1},
-            {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-
-    @testing.fails_on('postgresql+pg8000', 'uses positional')
-    @testing.fails_on('postgresql+zxjdbc', 'uses qmark')
-    def test_expression_pyformat(self):
-        self.assert_compile(matchtable.c.title.match('somstr'),
-                            'matchtable.title @@ to_tsquery(%(title_1)s)')
-
-    @testing.fails_on('postgresql+psycopg2', 'uses pyformat')
-    @testing.fails_on('postgresql+pypostgresql', 'uses pyformat')
-    @testing.fails_on('postgresql+zxjdbc', 'uses qmark')
-    def test_expression_positional(self):
-        self.assert_compile(matchtable.c.title.match('somstr'),
-                            'matchtable.title @@ to_tsquery(%s)')
-
-    def test_simple_match(self):
-        results = matchtable.select().\
-            where(matchtable.c.title.match('python')).\
-            order_by(matchtable.c.id).execute().fetchall()
-        eq_([2, 5], [r.id for r in results])
-
-    def test_simple_match_with_apostrophe(self):
-        results = matchtable.select().\
-            where(matchtable.c.title.match("Matz's")).execute().fetchall()
-        eq_([3], [r.id for r in results])
-
-    @testing.requires.english_locale_on_postgresql
-    def test_simple_derivative_match(self):
-        results = matchtable.select().\
-            where(matchtable.c.title.match('nutshells')).\
-            execute().fetchall()
-        eq_([5], [r.id for r in results])
-
-    @testing.requires.english_locale_on_postgresql
-    def test_or_match(self):
-        results1 = matchtable.select().\
-            where(or_(matchtable.c.title.match('nutshells'),
-                      matchtable.c.title.match('rubies'))).\
-            order_by(matchtable.c.id).execute().fetchall()
-        eq_([3, 5], [r.id for r in results1])
-        results2 = matchtable.select().\
-            where(matchtable.c.title.match('nutshells | rubies')).\
-            order_by(matchtable.c.id).execute().fetchall()
-        eq_([3, 5], [r.id for r in results2])
-
-    @testing.requires.english_locale_on_postgresql
-    def test_and_match(self):
-        results1 = matchtable.select().\
-            where(and_(matchtable.c.title.match('python'),
-                       matchtable.c.title.match('nutshells'))).\
-            execute().fetchall()
-        eq_([5], [r.id for r in results1])
-        results2 = matchtable.select().\
-            where(matchtable.c.title.match('python & nutshells')).\
-            execute().fetchall()
-        eq_([5], [r.id for r in results2])
-
-    @testing.requires.english_locale_on_postgresql
-    def test_match_across_joins(self):
-        results = matchtable.select().\
-            where(and_(cattable.c.id == matchtable.c.category_id,
-                       or_(cattable.c.description.match('Ruby'),
-                           matchtable.c.title.match('nutshells')))).\
-            order_by(matchtable.c.id).execute().fetchall()
-        eq_([1, 3, 5], [r.id for r in results])
-
-
-class TupleTest(fixtures.TestBase):
-    __only_on__ = 'postgresql'
-
-    def test_tuple_containment(self):
-
-        for test, exp in [
-            ([('a', 'b')], True),
-            ([('a', 'c')], False),
-            ([('f', 'q'), ('a', 'b')], True),
-            ([('f', 'q'), ('a', 'c')], False)
-        ]:
-            eq_(
-                testing.db.execute(
-                    select([
-                            tuple_(
-                                literal_column("'a'"),
-                                literal_column("'b'")
-                            ).\
-                                in_([
-                                    tuple_(*[
-                                            literal_column("'%s'" % letter)
-                                            for letter in elem
-                                        ]) for elem in test
-                                ])
-                            ])
-                ).scalar(),
-                exp
-            )
-
-
-class HStoreTest(fixtures.TestBase):
-    def _assert_sql(self, construct, expected):
-        dialect = postgresql.dialect()
-        compiled = str(construct.compile(dialect=dialect))
-        compiled = re.sub(r'\s+', ' ', compiled)
-        expected = re.sub(r'\s+', ' ', expected)
-        eq_(compiled, expected)
-
-    def setup(self):
-        metadata = MetaData()
-        self.test_table = Table('test_table', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('hash', HSTORE)
-        )
-        self.hashcol = self.test_table.c.hash
-
-    def _test_where(self, whereclause, expected):
-        stmt = select([self.test_table]).where(whereclause)
-        self._assert_sql(
-            stmt,
-            "SELECT test_table.id, test_table.hash FROM test_table "
-            "WHERE %s" % expected
-        )
-
-    def _test_cols(self, colclause, expected, from_=True):
-        stmt = select([colclause])
-        self._assert_sql(
-            stmt,
-            (
-                "SELECT %s" +
-                (" FROM test_table" if from_ else "")
-            ) % expected
-        )
-
-    def test_bind_serialize_default(self):
-        from sqlalchemy.engine import default
-
-        dialect = default.DefaultDialect()
-        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
-        eq_(
-            proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])),
-            '"key1"=>"value1", "key2"=>"value2"'
-        )
-
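-    # In the hstore text format, backslashes are doubled and embedded
-    # double quotes are backslash-escaped; the slashes-and-quotes tests
-    # below exercise this in both directions.
-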
-    def test_bind_serialize_with_slashes_and_quotes(self):
-        from sqlalchemy.engine import default
-
-        dialect = default.DefaultDialect()
-        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
-        eq_(
-            proc({'\\"a': '\\"1'}),
-            '"\\\\\\"a"=>"\\\\\\"1"'
-        )
-
-    def test_parse_error(self):
-        from sqlalchemy.engine import default
-
-        dialect = default.DefaultDialect()
-        proc = self.test_table.c.hash.type._cached_result_processor(
-                    dialect, None)
-        assert_raises_message(
-            ValueError,
-            r'''After u?'\[\.\.\.\], "key1"=>"value1", ', could not parse '''
-            '''residual at position 36: u?'crapcrapcrap, "key3"\[\.\.\.\]''',
-            proc,
-            '"key2"=>"value2", "key1"=>"value1", '
-                        'crapcrapcrap, "key3"=>"value3"'
-        )
-
-    def test_result_deserialize_default(self):
-        from sqlalchemy.engine import default
-
-        dialect = default.DefaultDialect()
-        proc = self.test_table.c.hash.type._cached_result_processor(
-                    dialect, None)
-        eq_(
-            proc('"key2"=>"value2", "key1"=>"value1"'),
-            {"key1": "value1", "key2": "value2"}
-        )
-
-    def test_result_deserialize_with_slashes_and_quotes(self):
-        from sqlalchemy.engine import default
-
-        dialect = default.DefaultDialect()
-        proc = self.test_table.c.hash.type._cached_result_processor(
-                    dialect, None)
-        eq_(
-            proc('"\\\\\\"a"=>"\\\\\\"1"'),
-            {'\\"a': '\\"1'}
-        )
-
-    def test_bind_serialize_psycopg2(self):
-        from sqlalchemy.dialects.postgresql import psycopg2
-
-        dialect = psycopg2.PGDialect_psycopg2()
-        dialect._has_native_hstore = True
-        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
-        is_(proc, None)
-
-        dialect = psycopg2.PGDialect_psycopg2()
-        dialect._has_native_hstore = False
-        proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
-        eq_(
-            proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])),
-            '"key1"=>"value1", "key2"=>"value2"'
-        )
-
-    def test_result_deserialize_psycopg2(self):
-        from sqlalchemy.dialects.postgresql import psycopg2
-
-        dialect = psycopg2.PGDialect_psycopg2()
-        dialect._has_native_hstore = True
-        proc = self.test_table.c.hash.type._cached_result_processor(
-                    dialect, None)
-        is_(proc, None)
-
-        dialect = psycopg2.PGDialect_psycopg2()
-        dialect._has_native_hstore = False
-        proc = self.test_table.c.hash.type._cached_result_processor(
-                    dialect, None)
-        eq_(
-            proc('"key2"=>"value2", "key1"=>"value1"'),
-            {"key1": "value1", "key2": "value2"}
-        )
-
-    def test_where_has_key(self):
-        self._test_where(
-            # hide from 2to3
-            getattr(self.hashcol, 'has_key')('foo'),
-            "test_table.hash ? %(hash_1)s"
-        )
-
-    def test_where_has_all(self):
-        self._test_where(
-            self.hashcol.has_all(postgresql.array(['1', '2'])),
-            "test_table.hash ?& ARRAY[%(param_1)s, %(param_2)s]"
-        )
-
-    def test_where_has_any(self):
-        self._test_where(
-            self.hashcol.has_any(postgresql.array(['1', '2'])),
-            "test_table.hash ?| ARRAY[%(param_1)s, %(param_2)s]"
-        )
-
-    def test_where_defined(self):
-        self._test_where(
-            self.hashcol.defined('foo'),
-            "defined(test_table.hash, %(param_1)s)"
-        )
-
-    def test_where_contains(self):
-        self._test_where(
-            self.hashcol.contains({'foo': '1'}),
-            "test_table.hash @> %(hash_1)s"
-        )
-
-    def test_where_contained_by(self):
-        self._test_where(
-            self.hashcol.contained_by({'foo': '1', 'bar': None}),
-            "test_table.hash <@ %(hash_1)s"
-        )
-
-    def test_where_getitem(self):
-        self._test_where(
-            self.hashcol['bar'] == None,
-            "(test_table.hash -> %(hash_1)s) IS NULL"
-        )
-
-    def test_cols_get(self):
-        self._test_cols(
-            self.hashcol['foo'],
-            "test_table.hash -> %(hash_1)s AS anon_1",
-            True
-        )
-
-    def test_cols_delete_single_key(self):
-        self._test_cols(
-            self.hashcol.delete('foo'),
-            "delete(test_table.hash, %(param_1)s) AS delete_1",
-            True
-        )
-
-    def test_cols_delete_array_of_keys(self):
-        self._test_cols(
-            self.hashcol.delete(postgresql.array(['foo', 'bar'])),
-            ("delete(test_table.hash, ARRAY[%(param_1)s, %(param_2)s]) "
-             "AS delete_1"),
-            True
-        )
-
-    def test_cols_delete_matching_pairs(self):
-        self._test_cols(
-            self.hashcol.delete(hstore('1', '2')),
-            ("delete(test_table.hash, hstore(%(param_1)s, %(param_2)s)) "
-             "AS delete_1"),
-            True
-        )
-
-    def test_cols_slice(self):
-        self._test_cols(
-            self.hashcol.slice(postgresql.array(['1', '2'])),
-            ("slice(test_table.hash, ARRAY[%(param_1)s, %(param_2)s]) "
-             "AS slice_1"),
-            True
-        )
-
-    def test_cols_hstore_pair_text(self):
-        self._test_cols(
-            hstore('foo', '3')['foo'],
-            "hstore(%(param_1)s, %(param_2)s) -> %(hstore_1)s AS anon_1",
-            False
-        )
-
-    def test_cols_hstore_pair_array(self):
-        self._test_cols(
-            hstore(postgresql.array(['1', '2']),
-                   postgresql.array(['3', None]))['1'],
-            ("hstore(ARRAY[%(param_1)s, %(param_2)s], "
-             "ARRAY[%(param_3)s, NULL]) -> %(hstore_1)s AS anon_1"),
-            False
-        )
-
-    def test_cols_hstore_single_array(self):
-        self._test_cols(
-            hstore(postgresql.array(['1', '2', '3', None]))['3'],
-            ("hstore(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s, NULL]) "
-             "-> %(hstore_1)s AS anon_1"),
-            False
-        )
-
-    def test_cols_concat(self):
-        self._test_cols(
-            self.hashcol.concat(hstore(cast(self.test_table.c.id, Text), '3')),
-            ("test_table.hash || hstore(CAST(test_table.id AS TEXT), "
-             "%(param_1)s) AS anon_1"),
-            True
-        )
-
-    def test_cols_concat_op(self):
-        self._test_cols(
-            hstore('foo', 'bar') + self.hashcol,
-            "hstore(%(param_1)s, %(param_2)s) || test_table.hash AS anon_1",
-            True
-        )
-
-    def test_cols_concat_get(self):
-        self._test_cols(
-            (self.hashcol + self.hashcol)['foo'],
-            "test_table.hash || test_table.hash -> %(param_1)s AS anon_1"
-        )
-
-    def test_cols_keys(self):
-        self._test_cols(
-            # hide from 2to3
-            getattr(self.hashcol, 'keys')(),
-            "akeys(test_table.hash) AS akeys_1",
-            True
-        )
-
-    def test_cols_vals(self):
-        self._test_cols(
-            self.hashcol.vals(),
-            "avals(test_table.hash) AS avals_1",
-            True
-        )
-
-    def test_cols_array(self):
-        self._test_cols(
-            self.hashcol.array(),
-            "hstore_to_array(test_table.hash) AS hstore_to_array_1",
-            True
-        )
-
-    def test_cols_matrix(self):
-        self._test_cols(
-            self.hashcol.matrix(),
-            "hstore_to_matrix(test_table.hash) AS hstore_to_matrix_1",
-            True
-        )
-
-
-class HStoreRoundTripTest(fixtures.TablesTest):
-    __requires__ = 'hstore',
-    __dialect__ = 'postgresql'
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table('data_table', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('name', String(30), nullable=False),
-            Column('data', HSTORE)
-        )
-
-    def _fixture_data(self, engine):
-        data_table = self.tables.data_table
-        engine.execute(
-                data_table.insert(),
-                {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}},
-                {'name': 'r2', 'data': {"k1": "r2v1", "k2": "r2v2"}},
-                {'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
-                {'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
-                {'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2"}},
-        )
-
-    def _assert_data(self, compare):
-        data = testing.db.execute(
-            select([self.tables.data_table.c.data]).
-                order_by(self.tables.data_table.c.name)
-        ).fetchall()
-        eq_([d for d, in data], compare)
-
-    def _test_insert(self, engine):
-        engine.execute(
-            self.tables.data_table.insert(),
-            {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}}
-        )
-        self._assert_data([{"k1": "r1v1", "k2": "r1v2"}])
-
-    def _non_native_engine(self):
-        if testing.against("postgresql+psycopg2"):
-            engine = engines.testing_engine(options=dict(use_native_hstore=False))
-        else:
-            engine = testing.db
-        engine.connect()
-        return engine
-
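_non_native_engine switches off psycopg2's native hstore handling so that the
HSTORE type's Python-side parser and serializer get exercised. Outside the
test harness, the equivalent would be roughly the following (the URL is a
placeholder):

    from sqlalchemy import create_engine

    # use_native_hstore=False keeps the psycopg2 dialect from registering
    # psycopg2's own hstore adapter; SQLAlchemy then parses and serializes
    # hstore values itself
    engine = create_engine(
        "postgresql+psycopg2://user:pass@localhost/test",
        use_native_hstore=False,
    )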
-    def test_reflect(self):
-        from sqlalchemy import inspect
-        insp = inspect(testing.db)
-        cols = insp.get_columns('data_table')
-        assert isinstance(cols[2]['type'], HSTORE)
-
-    @testing.only_on("postgresql+psycopg2")
-    def test_insert_native(self):
-        engine = testing.db
-        self._test_insert(engine)
-
-    def test_insert_python(self):
-        engine = self._non_native_engine()
-        self._test_insert(engine)
-
-    @testing.only_on("postgresql+psycopg2")
-    def test_criterion_native(self):
-        engine = testing.db
-        self._fixture_data(engine)
-        self._test_criterion(engine)
-
-    def test_criterion_python(self):
-        engine = self._non_native_engine()
-        self._fixture_data(engine)
-        self._test_criterion(engine)
-
-    def _test_criterion(self, engine):
-        data_table = self.tables.data_table
-        result = engine.execute(
-            select([data_table.c.data]).where(data_table.c.data['k1'] == 'r3v1')
-        ).first()
-        eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
-
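The criterion in _test_criterion combines hstore indexing with an equality
comparison. Compiled standalone it renders roughly as shown below; a sketch,
restating the fixture table inline:

    from sqlalchemy import Table, Column, MetaData, Integer, String
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import HSTORE

    data_table = Table('data_table', MetaData(),
        Column('id', Integer, primary_key=True),
        Column('name', String(30)),
        Column('data', HSTORE))

    crit = data_table.c.data['k1'] == 'r3v1'
    print(crit.compile(dialect=postgresql.dialect()))
    # (data_table.data -> %(data_1)s) = %(param_1)s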
-    def _test_fixed_round_trip(self, engine):
-        s = select([
-                hstore(
-                    array(['key1', 'key2', 'key3']),
-                    array(['value1', 'value2', 'value3'])
-                )
-            ])
-        eq_(
-            engine.scalar(s),
-            {"key1": "value1", "key2": "value2", "key3": "value3"}
-        )
-
-    def test_fixed_round_trip_python(self):
-        engine = self._non_native_engine()
-        self._test_fixed_round_trip(engine)
-
-    @testing.only_on("postgresql+psycopg2")
-    def test_fixed_round_trip_native(self):
-        engine = testing.db
-        self._test_fixed_round_trip(engine)
-
-    def _test_unicode_round_trip(self, engine):
-        s = select([
-                hstore(
-                    array([u'réveillé', u'drôle', u'S’il']),
-                    array([u'réveillé', u'drôle', u'S’il'])
-                )
-            ])
-        eq_(
-            engine.scalar(s),
-            {
-                u'réveillé': u'réveillé',
-                u'drôle': u'drôle',
-                u'S’il': u'S’il'
-            }
-        )
-
-    def test_unicode_round_trip_python(self):
-        engine = self._non_native_engine()
-        self._test_unicode_round_trip(engine)
-
-    @testing.only_on("postgresql+psycopg2")
-    def test_unicode_round_trip_native(self):
-        engine = testing.db
-        self._test_unicode_round_trip(engine)
-
-    def test_escaped_quotes_round_trip_python(self):
-        engine = self._non_native_engine()
-        self._test_escaped_quotes_round_trip(engine)
-
-    @testing.only_on("postgresql+psycopg2")
-    def test_escaped_quotes_round_trip_native(self):
-        engine = testing.db
-        self._test_escaped_quotes_round_trip(engine)
-
-    def _test_escaped_quotes_round_trip(self, engine):
-        engine.execute(
-            self.tables.data_table.insert(),
-            {'name': 'r1', 'data': {r'key \"foo\"': r'value \"bar"\ xyz'}}
-        )
-        self._assert_data([{r'key \"foo\"': r'value \"bar"\ xyz'}])
-
-class _RangeTypeMixin(object):
-    __requires__ = 'range_types',
-    __dialect__ = 'postgresql+psycopg2'
-
-    @property
-    def extras(self):
-        # done this way so we don't get ImportErrors with
-        # older psycopg2 versions.
-        from psycopg2 import extras
-        return extras
-
-    @classmethod
-    def define_tables(cls, metadata):
-        # no reason ranges shouldn't be primary keys,
-        # so let's just use them as such
-        table = Table('data_table', metadata,
-            Column('range', cls._col_type, primary_key=True),
-        )
-        cls.col = table.c.range
-
-    def test_actual_type(self):
-        eq_(str(self._col_type()), self._col_str)
-
-    def test_reflect(self):
-        from sqlalchemy import inspect
-        insp = inspect(testing.db)
-        cols = insp.get_columns('data_table')
-        assert isinstance(cols[0]['type'], self._col_type)
-
-    def _assert_data(self):
-        data = testing.db.execute(
-            select([self.tables.data_table.c.range])
-        ).fetchall()
-        eq_(data, [(self._data_obj(), )])
-
-    def test_insert_obj(self):
-        testing.db.engine.execute(
-            self.tables.data_table.insert(),
-            {'range': self._data_obj()}
-        )
-        self._assert_data()
-
-    def test_insert_text(self):
-        testing.db.engine.execute(
-            self.tables.data_table.insert(),
-            {'range': self._data_str}
-        )
-        self._assert_data()
-
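test_insert_obj and test_insert_text make the same point from two directions:
a range column accepts either psycopg2's range objects or PostgreSQL's
literal text form. A sketch, where conn and data_table are hypothetical
stand-ins for the fixtures:

    from psycopg2.extras import NumericRange

    # equivalent inserts for an INT4RANGE column
    conn.execute(data_table.insert(), {'range': NumericRange(1, 2)})
    conn.execute(data_table.insert(), {'range': '[1,2)'})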
-    # operator tests
-
-    def _test_clause(self, colclause, expected):
-        dialect = postgresql.dialect()
-        compiled = str(colclause.compile(dialect=dialect))
-        eq_(compiled, expected)
-
-    def test_where_equal(self):
-        self._test_clause(
-            self.col == self._data_str,
-            "data_table.range = %(range_1)s"
-        )
-
-    def test_where_not_equal(self):
-        self._test_clause(
-            self.col != self._data_str,
-            "data_table.range <> %(range_1)s"
-        )
-
-    def test_where_less_than(self):
-        self._test_clause(
-            self.col < self._data_str,
-            "data_table.range < %(range_1)s"
-        )
-
-    def test_where_greater_than(self):
-        self._test_clause(
-            self.col > self._data_str,
-            "data_table.range > %(range_1)s"
-        )
-
-    def test_where_less_than_or_equal(self):
-        self._test_clause(
-            self.col <= self._data_str,
-            "data_table.range <= %(range_1)s"
-        )
-
-    def test_where_greater_than_or_equal(self):
-        self._test_clause(
-            self.col >= self._data_str,
-            "data_table.range >= %(range_1)s"
-        )
-
-    def test_contains(self):
-        self._test_clause(
-            self.col.contains(self._data_str),
-            "data_table.range @> %(range_1)s"
-        )
-
-    def test_contained_by(self):
-        self._test_clause(
-            self.col.contained_by(self._data_str),
-            "data_table.range <@ %(range_1)s"
-        )
-
-    def test_overlaps(self):
-        self._test_clause(
-            self.col.overlaps(self._data_str),
-            "data_table.range && %(range_1)s"
-        )
-
-    def test_strictly_left_of(self):
-        self._test_clause(
-            self.col << self._data_str,
-            "data_table.range << %(range_1)s"
-        )
-        self._test_clause(
-            self.col.strictly_left_of(self._data_str),
-            "data_table.range << %(range_1)s"
-        )
-
-    def test_strictly_right_of(self):
-        self._test_clause(
-            self.col >> self._data_str,
-            "data_table.range >> %(range_1)s"
-        )
-        self._test_clause(
-            self.col.strictly_right_of(self._data_str),
-            "data_table.range >> %(range_1)s"
-        )
-
-    def test_not_extend_right_of(self):
-        self._test_clause(
-            self.col.not_extend_right_of(self._data_str),
-            "data_table.range &< %(range_1)s"
-        )
-
-    def test_not_extend_left_of(self):
-        self._test_clause(
-            self.col.not_extend_left_of(self._data_str),
-            "data_table.range &> %(range_1)s"
-        )
-
-    def test_adjacent_to(self):
-        self._test_clause(
-            self.col.adjacent_to(self._data_str),
-            "data_table.range -|- %(range_1)s"
-        )
-
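The clause tests above only assert how each operator compiles. For reference,
the same operators render identically inside a full statement; a sketch with
an illustrative INT4RANGE column:

    from sqlalchemy import select, Table, Column, MetaData
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import INT4RANGE

    t = Table('t', MetaData(), Column('period', INT4RANGE))

    stmt = select([t]).where(t.c.period.contains('[1,3)'))
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT t.period FROM t WHERE t.period @> %(period_1)s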
-    def test_union(self):
-        self._test_clause(
-            self.col + self.col,
-            "data_table.range + data_table.range"
-        )
-
-    def test_union_result(self):
-        # insert
-        testing.db.engine.execute(
-            self.tables.data_table.insert(),
-            {'range': self._data_str}
-        )
-        # select
-        range = self.tables.data_table.c.range
-        data = testing.db.execute(
-            select([range + range])
-            ).fetchall()
-        eq_(data, [(self._data_obj(), )])
-
-
-    def test_intersection(self):
-        self._test_clause(
-            self.col * self.col,
-            "data_table.range * data_table.range"
-        )
-
-    def test_intersection_result(self):
-        # insert
-        testing.db.engine.execute(
-            self.tables.data_table.insert(),
-            {'range': self._data_str}
-        )
-        # select
-        range = self.tables.data_table.c.range
-        data = testing.db.execute(
-            select([range * range])
-            ).fetchall()
-        eq_(data, [(self._data_obj(), )])
-
-    def test_difference(self):
-        self._test_clause(
-            self.col - self.col,
-            "data_table.range - data_table.range"
-        )
-
-    def test_difference_result(self):
-        # insert
-        testing.db.engine.execute(
-            self.tables.data_table.insert(),
-            {'range': self._data_str}
-        )
-        # select
-        range = self.tables.data_table.c.range
-        data = testing.db.execute(
-            select([range - range])
-            ).fetchall()
-        eq_(data, [(self._data_obj().__class__(empty=True), )])
-
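test_difference_result relies on the fact that subtracting a range from
itself yields the empty range, which psycopg2 models as a Range constructed
with empty=True. For illustration:

    from psycopg2.extras import NumericRange

    empty = NumericRange(empty=True)
    assert empty.isempty
    # this is the value the test's eq_ compares against:
    # self._data_obj().__class__(empty=True)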
-class Int4RangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = INT4RANGE
-    _col_str = 'INT4RANGE'
-    _data_str = '[1,2)'
-    def _data_obj(self):
-        return self.extras.NumericRange(1, 2)
-
-class Int8RangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = INT8RANGE
-    _col_str = 'INT8RANGE'
-    _data_str = '[9223372036854775806,9223372036854775807)'
-    def _data_obj(self):
-        return self.extras.NumericRange(
-            9223372036854775806, 9223372036854775807
-            )
-
-class NumRangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = NUMRANGE
-    _col_str = 'NUMRANGE'
-    _data_str = '[1.0,2.0)'
-    def _data_obj(self):
-        return self.extras.NumericRange(
-            decimal.Decimal('1.0'), decimal.Decimal('2.0')
-            )
-
-class DateRangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = DATERANGE
-    _col_str = 'DATERANGE'
-    _data_str = '[2013-03-23,2013-03-24)'
-    def _data_obj(self):
-        return self.extras.DateRange(
-            datetime.date(2013, 3, 23), datetime.date(2013, 3, 24)
-            )
-
-class DateTimeRangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = TSRANGE
-    _col_str = 'TSRANGE'
-    _data_str = '[2013-03-23 14:30,2013-03-23 23:30)'
-    def _data_obj(self):
-        return self.extras.DateTimeRange(
-            datetime.datetime(2013, 3, 23, 14, 30),
-            datetime.datetime(2013, 3, 23, 23, 30)
-            )
-
-class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest):
-
-    _col_type = TSTZRANGE
-    _col_str = 'TSTZRANGE'
-
-    # use a single, stable timestamp-with-timezone pair
-    # across all parts of these tests
-    _tstzs = None
-    def tstzs(self):
-        if self._tstzs is None:
-            lower = testing.db.connect().scalar(
-                func.current_timestamp().select()
-                )
-            upper = lower + datetime.timedelta(days=1)
-            self._tstzs = (lower, upper)
-        return self._tstzs
-
-    @property
-    def _data_str(self):
-        return '[%s,%s)' % self.tstzs()
-
-    def _data_obj(self):
-        return self.extras.DateTimeTZRange(*self.tstzs())