]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- *Major* cleanup / modernization of the Informix
authorMike Bayer <mike_mp@zzzcomputing.com>
Sat, 2 Oct 2010 01:26:05 +0000 (21:26 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Sat, 2 Oct 2010 01:26:05 +0000 (21:26 -0400)
dialect for 0.6, courtesy Florian Apolloner.
[ticket:1906]

16 files changed:
.hgignore
CHANGES
doc/build/core/engines.rst
doc/build/dialects/informix.rst
lib/sqlalchemy/dialects/informix/base.py
lib/sqlalchemy/dialects/informix/informixdb.py
lib/sqlalchemy/test/requires.py
test/dialect/test_informix.py
test/engine/test_bind.py
test/engine/test_execute.py
test/engine/test_pool.py
test/engine/test_reconnect.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/sql/test_query.py
test/sql/test_types.py

index 0a1571405426a545006c3b160307d22dc4704d2e..ef7855faec89fc531b01159c085128e62aa4c695 100755 (executable)
--- a/.hgignore
+++ b/.hgignore
@@ -3,4 +3,5 @@ syntax:regexp
 ^doc/build/output
 .pyc$
 .orig$
-.egg-info
\ No newline at end of file
+.egg-info
+test.cfg
diff --git a/CHANGES b/CHANGES
index fdf0ce75e4523512b818c22388c80ad9ebc7e18e..ab5e240bfc2551799e8bf119c7aa01990a9e0d4a 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -172,6 +172,11 @@ CHANGES
      a connection is first used is now "BEGIN (implicit)" 
      to emphasize that DBAPI has no explicit begin().
 
+- informix
+   - *Major* cleanup / modernization of the Informix 
+     dialect for 0.6, courtesy Florian Apolloner. 
+     [ticket:1906]
+     
 - misc
    - CircularDependencyError now has .cycles and .edges
      members, which are the set of elements involved in
index de81a6d730568bc2c24519b343ac4bce68724dcb..576bace368fbad3d55900ad24b250c099355641e 100644 (file)
@@ -63,7 +63,7 @@ ibm-db_                    thirdparty                   thirdparty   thirdparty
 **Firebird**
 kinterbasdb_               ``firebird+kinterbasdb``\*   yes          development   no           yes                yes
 **Informix**
-informixdb_                ``informix+informixdb``\*    development  development   no           unknown            unknown
+informixdb_                ``informix+informixdb``\*    yes          development   no           unknown            unknown
 **MaxDB**
 sapdb_                     ``maxdb+sapdb``\*            development  development   no           yes                unknown
 **Microsoft Access**
index 7cf271d0b7903a6212126161dfbf60dabe020ae4..617b8cd9dd120cfb1ea6c20f2caa36d93610c2cf 100644 (file)
@@ -2,3 +2,8 @@ Informix
 ========
 
 .. automodule:: sqlalchemy.dialects.informix.base
+
+informixdb Notes
+--------------------
+
+.. automodule:: sqlalchemy.dialects.informix.informixdb
\ No newline at end of file
index 242b8a3289a25d7a5e315012b44dd63594b5ca8a..9aa23173b93e519e480277ea0dadce93b77076d8 100644 (file)
@@ -7,7 +7,7 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 """Support for the Informix database.
 
-This dialect is *not* tested on SQLAlchemy 0.6.
+This dialect is mostly functional as of SQLAlchemy 0.6.5.
 
 
 """
@@ -16,7 +16,7 @@ This dialect is *not* tested on SQLAlchemy 0.6.
 import datetime
 
 from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler
+from sqlalchemy.sql import compiler, text
 from sqlalchemy.engine import default, reflection
 from sqlalchemy import types as sqltypes
 
@@ -47,9 +47,9 @@ class InfoTime(sqltypes.Time):
                 return value
         return process
 
-
 colspecs = {
     sqltypes.DateTime : InfoDateTime,
+    sqltypes.TIMESTAMP: InfoDateTime,
     sqltypes.Time: InfoTime,
 }
 
@@ -85,6 +85,9 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
     def visit_TIME(self, type_):
         return "DATETIME HOUR TO SECOND"
 
+    def visit_TIMESTAMP(self, type_):
+        return "DATETIME YEAR TO SECOND"
+
     def visit_large_binary(self, type_):
         return "BYTE"
 
@@ -92,17 +95,16 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
         return "SMALLINT"
 
 class InfoSQLCompiler(compiler.SQLCompiler):
-
     def default_from(self):
         return " from systables where tabname = 'systables' "
 
     def get_select_precolumns(self, select):
-        s = select._distinct and "DISTINCT " or ""
-        # only has limit
+        s = ""
+        if select._offset:
+            s += "SKIP %s " % select._offset
         if select._limit:
-            s += " FIRST %s " % select._limit
-        else:
-            s += ""
+            s += "FIRST %s " % select._limit
+        s += select._distinct and "DISTINCT " or ""
         return s
 
     def visit_select(self, select, asfrom=False, parens=True, **kw):
@@ -114,8 +116,6 @@ class InfoSQLCompiler(compiler.SQLCompiler):
             return text
 
     def limit_clause(self, select):
-        if select._offset is not None and select._offset > 0:
-            raise NotImplementedError("Informix does not support OFFSET")
         return ""
 
     def visit_function(self, func, **kw):
@@ -128,14 +128,32 @@ class InfoSQLCompiler(compiler.SQLCompiler):
         else:
             return compiler.SQLCompiler.visit_function(self, func, **kw)
 
+    def visit_mod(self, binary, **kw):
+        return "MOD(%s, %s)" % (self.process(binary.left), self.process(binary.right))
+
 
 class InfoDDLCompiler(compiler.DDLCompiler):
-    def get_column_specification(self, column, first_pk=False):
+
+    def visit_add_constraint(self, create):
+        preparer = self.preparer
+        return "ALTER TABLE %s ADD CONSTRAINT %s" % (
+            self.preparer.format_table(create.element.table),
+            self.process(create.element)
+        )
+
+    def get_column_specification(self, column, **kw):
         colspec = self.preparer.format_column(column)
-        if column.primary_key and \
-                    len(column.foreign_keys)==0 and \
-                    column.autoincrement and \
-           isinstance(column.type, sqltypes.Integer) and first_pk:
+        first = None
+        if column.primary_key and column.autoincrement:
+            try:
+                first = [c for c in column.table.primary_key.columns
+                         if (c.autoincrement and
+                             isinstance(c.type, sqltypes.Integer) and
+                             not c.foreign_keys)].pop(0)
+            except IndexError:
+                pass
+
+        if column is first:
             colspec += " SERIAL"
         else:
             colspec += " " + self.dialect.type_compiler.process(column.type)
@@ -148,18 +166,53 @@ class InfoDDLCompiler(compiler.DDLCompiler):
 
         return colspec
 
+    def get_column_default_string(self, column):
+        if (isinstance(column.server_default, schema.DefaultClause) and
+            isinstance(column.server_default.arg, basestring)):
+                if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
+                    return self.sql_compiler.process(text(column.server_default.arg))
+
+        return super(InfoDDLCompiler, self).get_column_default_string(column)
+
+    ### Informix wants the constraint name at the end, hence this is c&p from sql/compiler.py
+    def visit_primary_key_constraint(self, constraint):
+        if len(constraint) == 0:
+            return ''
+        text = "PRIMARY KEY "
+        text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
+                                       for c in constraint)
+        text += self.define_constraint_deferrability(constraint)
+
+        if constraint.name is not None:
+            text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
+        return text
+
+    def visit_foreign_key_constraint(self, constraint):
+        preparer = self.dialect.identifier_preparer
+        remote_table = list(constraint._elements.values())[0].column.table
+        text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
+            ', '.join(preparer.quote(f.parent.name, f.parent.quote)
+                      for f in constraint._elements.values()),
+            preparer.format_table(remote_table),
+            ', '.join(preparer.quote(f.column.name, f.column.quote)
+                      for f in constraint._elements.values())
+        )
+        text += self.define_constraint_cascades(constraint)
+        text += self.define_constraint_deferrability(constraint)
+
+        if constraint.name is not None:
+            text += " CONSTRAINT %s " % \
+                        preparer.format_constraint(constraint)
+        return text
+
+    def visit_unique_constraint(self, constraint):
+        text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
+        text += self.define_constraint_deferrability(constraint)
+
+        if constraint.name is not None:
+            text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
+        return text
 
-class InfoIdentifierPreparer(compiler.IdentifierPreparer):
-    def __init__(self, dialect):
-        super(InfoIdentifierPreparer, self).\
-                        __init__(dialect, initial_quote="'")
-
-    def format_constraint(self, constraint):
-        # informix doesnt support names for constraints
-        return ''
-
-    def _requires_quotes(self, value):
-        return False
 
 class InformixDialect(default.DefaultDialect):
     name = 'informix'
@@ -169,9 +222,13 @@ class InformixDialect(default.DefaultDialect):
     type_compiler = InfoTypeCompiler
     statement_compiler = InfoSQLCompiler
     ddl_compiler = InfoDDLCompiler
-    preparer = InfoIdentifierPreparer
     colspecs = colspecs
     ischema_names = ischema_names
+    default_paramstyle = 'qmark'
+
+    def __init__(self, has_transactions=True, *args, **kwargs):
+        self.has_transactions = has_transactions
+        default.DefaultDialect.__init__(self, *args, **kwargs)
 
     def initialize(self, connection):
         super(InformixDialect, self).initialize(connection)
@@ -182,43 +239,78 @@ class InformixDialect(default.DefaultDialect):
         else:
             self.max_identifier_length = 128
         
-    def do_begin(self, connect):
-        cu = connect.cursor()
+    def do_begin(self, connection):
+        cu = connection.cursor()
         cu.execute('SET LOCK MODE TO WAIT')
-        #cu.execute('SET ISOLATION TO REPEATABLE READ')
+        if self.has_transactions:
+            cu.execute('SET ISOLATION TO REPEATABLE READ')
+
+    def do_commit(self, connection):
+        if self.has_transactions:
+            connection.commit()
+
+    def do_rollback(self, connection):
+        if self.has_transactions:
+            connection.rollback()
+
+    def _get_table_names(self, connection, schema, type, **kw):
+        schema = schema or self.default_schema_name
+        s = "select tabname, owner from systables where owner=? and tabtype=?"
+        return [row[0] for row in connection.execute(s, schema, type)]
 
     @reflection.cache
     def get_table_names(self, connection, schema=None, **kw):
-        s = "select tabname from systables"
+        return self._get_table_names(connection, schema, 'T', **kw)
+
+    @reflection.cache
+    def get_view_names(self, connection, schema=None, **kw):
+        return self._get_table_names(connection, schema, 'V', **kw)
+
+    @reflection.cache
+    def get_schema_names(self, connection, **kw):
+        s = "select owner from systables"
         return [row[0] for row in connection.execute(s)]
 
     def has_table(self, connection, table_name, schema=None):
+        schema = schema or self.default_schema_name
         cursor = connection.execute(
-                """select tabname from systables where tabname=?""",
-                table_name.lower())
+                """select tabname from systables where tabname=? and owner=?""",
+                table_name, schema)
         return cursor.first() is not None
 
     @reflection.cache
     def get_columns(self, connection, table_name, schema=None, **kw):
+        schema = schema or self.default_schema_name
         c = connection.execute(
             """select colname, coltype, collength, t3.default, t1.colno from
                 syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
-                where t1.tabid = t2.tabid and t2.tabname=? 
+                where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
                   and t3.tabid = t2.tabid and t3.colno = t1.colno
-                order by t1.colno""", table.name.lower())
+                order by t1.colno""", table_name, schema)
+
+        primary_cols = self.get_primary_keys(connection, table_name, schema, **kw)
+
         columns = []
+        rows = c.fetchall()
         for name, colattr, collength, default, colno in rows:
             name = name.lower()
-            if include_columns and name not in include_columns:
-                continue
+
+            autoincrement = False
+            primary_key = False
+
+            if name in primary_cols:
+                primary_key = True
 
             # in 7.31, coltype = 0x000
             #                       ^^-- column type
             #                      ^-- 1 not null, 0 null
-            nullable, coltype = divmod(colattr, 256)
+            not_nullable, coltype = divmod(colattr, 256)
             if coltype not in (0, 13) and default:
                 default = default.split()[-1]
 
+            if coltype == 6: # Serial, mark as autoincrement
+                autoincrement = True
+
             if coltype == 0 or coltype == 13: # char, varchar
                 coltype = ischema_names[coltype](collength)
                 if default:
@@ -236,32 +328,34 @@ class InformixDialect(default.DefaultDialect):
                               (coltype, name))
                     coltype = sqltypes.NULLTYPE
             
-            # TODO: nullability ??
-            nullable = True
-            
-            column_info = dict(name=name, type=coltype, nullable=nullable,
-                               default=default)
+            column_info = dict(name=name, type=coltype, nullable=not not_nullable,
+                               default=default, autoincrement=autoincrement,
+                               primary_key=primary_key)
             columns.append(column_info)
         return columns
 
     @reflection.cache
     def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        # FK
+        schema_sel = schema or self.default_schema_name
         c = connection.execute(
-        """select t1.constrname as cons_name , t1.constrtype as cons_type ,
-                 t4.colname as local_column , t7.tabname as remote_table ,
-                 t6.colname as remote_column
+        """select t1.constrname as cons_name,
+                 t4.colname as local_column, t7.tabname as remote_table,
+                 t6.colname as remote_column, t7.owner as remote_owner 
             from sysconstraints as t1 , systables as t2 ,
                  sysindexes as t3 , syscolumns as t4 ,
                  sysreferences as t5 , syscolumns as t6 , systables as t7 ,
                  sysconstraints as t8 , sysindexes as t9
-           where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'R'
+           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
              and t3.tabid = t2.tabid and t3.idxname = t1.idxname
-             and t4.tabid = t2.tabid and t4.colno = t3.part1
+             and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
+             t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
+             t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) 
              and t5.constrid = t1.constrid and t8.constrid = t5.primary
-             and t6.tabid = t5.ptabid and t6.colno = t9.part1 and t9.idxname =
+             and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
+             t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
+             t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
              t8.idxname
-             and t7.tabid = t5.ptabid""", table.name.lower())
+             and t7.tabid = t5.ptabid""", table_name, schema_sel)
 
 
         def fkey_rec():
@@ -275,8 +369,9 @@ class InformixDialect(default.DefaultDialect):
 
         fkeys = util.defaultdict(fkey_rec)
 
-        for cons_name, cons_type, local_column, \
-                    remote_table, remote_column in rows:
+        rows = c.fetchall()
+        for cons_name, local_column, \
+                    remote_table, remote_column, remote_owner in rows:
 
             rec = fkeys[cons_name]
             rec['name'] = cons_name
@@ -285,25 +380,91 @@ class InformixDialect(default.DefaultDialect):
 
             if not rec['referred_table']:
                 rec['referred_table'] = remote_table
+                if schema is not None:
+                    rec['referred_schema'] = remote_owner
 
-            local_cols.append(local_column)
-            remote_cols.append(remote_column)
+            if local_column not in local_cols:
+                local_cols.append(local_column)
+            if remote_column not in remote_cols:
+                remote_cols.append(remote_column)
 
         return fkeys.values()
 
     @reflection.cache
     def get_primary_keys(self, connection, table_name, schema=None, **kw):
+        schema = schema or self.default_schema_name
+
+        # Select the column positions from sysindexes for sysconstraints
+        data = connection.execute(
+            """select t2.* 
+            from systables as t1, sysindexes as t2, sysconstraints as t3
+            where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
+            and t2.idxname=t3.idxname and t3.constrtype='P'""",
+            table_name, schema
+        ).fetchall()
+
+        colpositions = set()
+
+        for row in data:
+            colpos = set([getattr(row, 'part%d' % x) for x in range(1,16)])
+            colpositions |= colpos
+
+        if not len(colpositions):
+            return []
+
+        # Select the column names using the columnpositions
+        # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
+        place_holder = ','.join('?'*len(colpositions))
         c = connection.execute(
-            """select t4.colname as local_column
-            from sysconstraints as t1 , systables as t2 ,
-                 sysindexes as t3 , syscolumns as t4
-           where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'P'
-             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
-             and t4.tabid = t2.tabid and t4.colno = t3.part1""",
-             table.name.lower())
-        return [r[0] for r in c.fetchall()]
+            """select t1.colname
+            from syscolumns as t1, systables as t2
+            where t2.tabname=? and t1.tabid = t2.tabid and 
+            t1.colno in (%s)""" % place_holder,
+            table_name, *colpositions
+        ).fetchall()
+
+        return reduce(lambda x,y: list(x)+list(y), c, [])
 
     @reflection.cache
     def get_indexes(self, connection, table_name, schema, **kw):
-        # TODO
-        return []
+        # TODO: schema...
+        c = connection.execute(
+            """select t1.*
+            from sysindexes as t1 , systables as t2
+           where t1.tabid = t2.tabid and t2.tabname=?""",
+             table_name)
+
+        indexes = []
+        for row in c.fetchall():
+            colnames = [getattr(row, 'part%d' % x) for x in range(1,16)]
+            colnames = [x for x in colnames if x]
+            place_holder = ','.join('?'*len(colnames))
+            c = connection.execute(
+                """select t1.colname
+                from syscolumns as t1, systables as t2
+                where t2.tabname=? and t1.tabid = t2.tabid and 
+                t1.colno in (%s)""" % place_holder,
+                table_name, *colnames
+            ).fetchall()
+            c = reduce(lambda x,y: list(x)+list(y), c, [])
+            indexes.append({
+                'name': row.idxname,
+                'unique': row.idxtype.lower() == 'u',
+                'column_names': c
+            })
+        return indexes
+
+    @reflection.cache
+    def get_view_definition(self, connection, view_name, schema=None, **kw):
+        schema = schema or self.default_schema_name
+        c = connection.execute(
+            """select t1.viewtext
+            from sysviews as t1 , systables as t2
+            where t1.tabid=t2.tabid and t2.tabname=?
+            and t2.owner=? order by seqno""",
+             view_name, schema).fetchall()
+
+        return ''.join([row[0] for row in c])
+
+    def _get_default_schema_name(self, connection):
+        return connection.execute('select CURRENT_ROLE from systables').scalar()
index 8edcc953b57f0b003b4c4c58308438f57e8c4209..f11c57bb601ab27ac37780ee573dce9fad616be8 100644 (file)
@@ -1,16 +1,38 @@
+"""
+Support for the informixdb DBAPI.
+
+informixdb is available at:
+
+    http://informixdb.sourceforge.net/
+    
+Connecting
+^^^^^^^^^^
+
+Sample informix connection::
+
+    engine = create_engine('informix+informixdb://user:password@host/dbname')
+
+"""
+
+import re
+
 from sqlalchemy.dialects.informix.base import InformixDialect
 from sqlalchemy.engine import default
 
+VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
+
 class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
     def post_exec(self):
         if self.isinsert:
-            self._lastrowid = [self.cursor.sqlerrd[1]]
+            self._lastrowid = self.cursor.sqlerrd[1]
+
+    def get_lastrowid(self):
+        return self._lastrowid
 
 
 class InformixDialect_informixdb(InformixDialect):
     driver = 'informixdb'
-    default_paramstyle = 'qmark'
-    execution_context_cls = InformixExecutionContext_informixdb
+    execution_ctx_cls = InformixExecutionContext_informixdb
 
     @classmethod
     def dbapi(cls):
@@ -31,13 +53,8 @@ class InformixDialect_informixdb(InformixDialect):
 
     def _get_server_version_info(self, connection):
         # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
-        version = []
-        for n in connection.connection.dbms_version.split('.'):
-          try:
-            version.append(int(n))
-          except ValueError:
-            version.append(n)
-        return tuple(version)
+        v = VERSION_RE.split(connection.connection.dbms_version)
+        return (int(v[1]), int(v[2]), v[3])
 
     def is_disconnect(self, e):
         if isinstance(e, self.dbapi.OperationalError):
index a9b84e85d87f405e81456782cbe79807f0a6c8ce..14c548f122aeb1aed63923f809fae6ee62833c75 100644 (file)
@@ -52,6 +52,7 @@ def boolean_col_expressions(fn):
         no_support('mssql', 'not supported by database'),
         no_support('sybase', 'not supported by database'),
         no_support('maxdb', 'FIXME: verify not supported by database'),
+        no_support('informix', 'not supported by database'),
     )
     
 def identity(fn):
@@ -120,6 +121,7 @@ def savepoints(fn):
         no_support('sqlite', 'not supported by database'),
         no_support('sybase', 'FIXME: guessing, needs confirmation'),
         exclude('mysql', '<', (5, 0, 3), 'not supported by database'),
+        exclude('informix', '<', (11, 55, 'xC3'), 'not supported by database'),
         )
 
 def denormalized_names(fn):
@@ -148,6 +150,7 @@ def sequences(fn):
         no_support('mysql', 'no SEQUENCE support'),
         no_support('sqlite', 'no SEQUENCE support'),
         no_support('sybase', 'no SEQUENCE support'),
+        no_support('informix', 'no SEQUENCE support'),
         )
 
 def update_nowait(fn):
@@ -176,6 +179,7 @@ def intersect(fn):
         fails_on('firebird', 'no support for INTERSECT'),
         fails_on('mysql', 'no support for INTERSECT'),
         fails_on('sybase', 'no support for INTERSECT'),
+        fails_on('informix', 'no support for INTERSECT'),
     )
 
 def except_(fn):
@@ -185,6 +189,7 @@ def except_(fn):
         fails_on('firebird', 'no support for EXCEPT'),
         fails_on('mysql', 'no support for EXCEPT'),
         fails_on('sybase', 'no support for EXCEPT'),
+        fails_on('informix', 'no support for EXCEPT'),
     )
 
 def offset(fn):
index 78dc54eda90e725fac9e6a3ab5f24f14ce17d77f..ceec587d93d218b15d6465c6679c37e213d9e337 100644 (file)
@@ -5,7 +5,6 @@ from sqlalchemy.test import *
 
 class CompileTest(TestBase, AssertsCompiledSQL):
 
-    __only_on__ = 'informix'
     __dialect__ = informix.InformixDialect()
 
     def test_statements(self):
@@ -23,3 +22,4 @@ class CompileTest(TestBase, AssertsCompiledSQL):
                             't1.col1 = t2.col3')
         self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1
                             + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
+
index 547afc64cf95658197e68cebfe84a1fc7b8f251d..dfcc5e172a061a1215fe888f0e7f7be4b78c45e1 100644 (file)
@@ -147,7 +147,7 @@ class BindTest(testing.TestBase):
             table.insert().execute(foo=7)
             trans.rollback()
             metadata.bind = None
-            assert conn.execute('select count(1) from test_table'
+            assert conn.execute('select count(*) from test_table'
                                 ).scalar() == 0
         finally:
             metadata.drop_all(bind=conn)
index 47879ece9e078bc710083e5e2ffefd9a9393059d..ee5953c7494b940f36e6310909b2c168019e39f8 100644 (file)
@@ -34,9 +34,10 @@ class ExecuteTest(TestBase):
     def teardown_class(cls):
         metadata.drop_all()
 
-    @testing.fails_on_everything_except('firebird', 'maxdb', 
-                                        'sqlite', '+pyodbc', 
-                                        '+mxodbc', '+zxjdbc', 'mysql+oursql')
+    @testing.fails_on_everything_except('firebird', 'maxdb',
+                                        'sqlite', '+pyodbc',
+                                        '+mxodbc', '+zxjdbc', 'mysql+oursql',
+                                        'informix+informixdb')
     def test_raw_qmark(self):
         for conn in testing.db, testing.db.connect():
             conn.execute('insert into users (user_id, user_name) '
@@ -103,7 +104,7 @@ class ExecuteTest(TestBase):
                     'horse'), (4, 'sally')]
             conn.execute('delete from users')
 
-    @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
+    @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb')
     def test_raw_named(self):
         for conn in testing.db, testing.db.connect():
             conn.execute('insert into users (user_id, user_name) '
@@ -183,7 +184,7 @@ class CompiledCacheTest(TestBase):
         cached_conn.execute(ins, {'user_name':'u2'})
         cached_conn.execute(ins, {'user_name':'u3'})
         assert len(cache) == 1
-        eq_(conn.execute("select count(1) from users").scalar(), 3)
+        eq_(conn.execute("select count(*) from users").scalar(), 3)
     
 class LogTest(TestBase):
     def _test_logger(self, eng, eng_name, pool_name):
index 94d75a9a811167635353009a9a4d7a2e9afe64f7..9db65d2ab83874d09f873ec17279a1621c529daa 100644 (file)
@@ -668,7 +668,7 @@ class QueuePoolTest(PoolTestBase):
         c1 = None
         c1 = p.connect()
         assert c1.connection.id != c_id
-        
+
     def test_recreate(self):
         dbapi = MockDBAPI()
         p = pool.QueuePool(creator=lambda : dbapi.connect('foo.db'),
index 8ef8513589fcd106cd2300f3e221f970360e0d25..e26413957961b54cd6fc9a97b872c01fd822fa35 100644 (file)
@@ -221,6 +221,8 @@ class RealReconnectTest(TestBase):
     def teardown(self):
         engine.dispose()
 
+    @testing.fails_on('+informixdb',
+                      "Wrong error thrown, fix in informixdb?")
     def test_reconnect(self):
         conn = engine.connect()
 
@@ -256,7 +258,7 @@ class RealReconnectTest(TestBase):
         assert not conn.invalidated
 
         conn.close()
-    
+
     def test_invalidate_twice(self):
         conn = engine.connect()
         conn.invalidate()
@@ -288,7 +290,9 @@ class RealReconnectTest(TestBase):
         
         # pool was recreated
         assert engine.pool is not p1
-        
+
+    @testing.fails_on('+informixdb',
+                      "Wrong error thrown, fix in informixdb?")
     def test_null_pool(self):
         engine = \
             engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
@@ -307,6 +311,8 @@ class RealReconnectTest(TestBase):
         eq_(conn.execute(select([1])).scalar(), 1)
         assert not conn.invalidated
         
+    @testing.fails_on('+informixdb',
+                      "Wrong error thrown, fix in informixdb?")
     def test_close(self):
         conn = engine.connect()
         eq_(conn.execute(select([1])).scalar(), 1)
@@ -325,6 +331,8 @@ class RealReconnectTest(TestBase):
         conn = engine.connect()
         eq_(conn.execute(select([1])).scalar(), 1)
 
+    @testing.fails_on('+informixdb',
+                      "Wrong error thrown, fix in informixdb?")
     def test_with_transaction(self):
         conn = engine.connect()
         trans = conn.begin()
@@ -401,6 +409,8 @@ class InvalidateDuringResultTest(TestBase):
     @testing.fails_on('+pg8000',
                       "Buffers the result set and doesn't check for "
                       "connection close")
+    @testing.fails_on('+informixdb',
+                      "Wrong error thrown, fix in informixdb?")
     def test_invalidate_on_results(self):
         conn = engine.connect()
         result = conn.execute('select * from sometable')
index a82f1ec5282a13fff1db4f437465aaa5a6353f2c..6a8e7cf8acef404adf1f3a9793d9e7e26c2fa360 100644 (file)
@@ -281,7 +281,7 @@ class ReflectionTest(TestBase, ComparesTables):
         meta.create_all()
         try:
             meta2 = MetaData(testing.db)
-            a2 = Table('addresses', meta2, 
+            a2 = Table('addresses', meta2,
                     Column('user_id',sa.Integer, sa.ForeignKey('users.id')),
                    autoload=True)
             u2 = Table('users', meta2, autoload=True)
@@ -597,7 +597,7 @@ class ReflectionTest(TestBase, ComparesTables):
             Column('data', sa.String(50)),
             sa.ForeignKeyConstraint(['foo', 'bar', 'lala'],
              ['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho'
-             ]), 
+             ]),
             test_needs_fk=True,
         )
         meta.create_all()
@@ -620,6 +620,7 @@ class ReflectionTest(TestBase, ComparesTables):
 
 
     @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
+    @testing.fails_on('+informixdb', 'FIXME: should be supported via the DELIMITED env var but that breaks everything else for now')
     def test_reserved(self):
 
         # check a table that uses an SQL reserved name doesn't cause an
@@ -742,7 +743,6 @@ class ReflectionTest(TestBase, ComparesTables):
             m2 = MetaData(testing.db)
             t2 = Table('party', m2, autoload=True)
 
-            print len(t2.indexes), t2.indexes
             assert len(t2.indexes) == 3
             # Make sure indexes are in the order we expect them in
             tmp = [(idx.name, idx) for idx in t2.indexes]
@@ -1056,7 +1056,7 @@ def createTables(meta, schema=None):
         Column('test3', sa.Text),
         Column('test4', sa.Numeric(10, 2), nullable = False),
         Column('test5', sa.Date),
-        Column('test5-1', sa.TIMESTAMP),
+        Column('test5_1', sa.TIMESTAMP),
         parent_user_id,
         Column('test6', sa.Date, nullable=False),
         Column('test7', sa.Text),
index e7e2fe1b8fcac0980472cbe9a4f24e5fb9733a8e..f09c67164125a647b5f1987b3ca2a9599823d317 100644 (file)
@@ -135,7 +135,7 @@ class TransactionTest(TestBase):
             conn2 = connection.execution_options(dummy=True)
             conn2.execute(users.insert(), user_id=2, user_name='user2')
             transaction.rollback()
-            eq_(connection.scalar("select count(1) from query_users"), 0)
+            eq_(connection.scalar("select count(*) from query_users"), 0)
         finally:
             connection.close()
         
@@ -150,7 +150,7 @@ class TransactionTest(TestBase):
         connection.execute(users.insert(), user_id=5, user_name='user5')
         trans2.commit()
         transaction.rollback()
-        self.assert_(connection.scalar('select count(1) from '
+        self.assert_(connection.scalar('select count(*) from '
                      'query_users') == 0)
         result = connection.execute('select * from query_users')
         assert len(result.fetchall()) == 0
@@ -170,7 +170,7 @@ class TransactionTest(TestBase):
         assert connection.in_transaction()
         transaction.commit()
         assert not connection.in_transaction()
-        self.assert_(connection.scalar('select count(1) from '
+        self.assert_(connection.scalar('select count(*) from '
                      'query_users') == 5)
         result = connection.execute('select * from query_users')
         assert len(result.fetchall()) == 5
@@ -190,7 +190,7 @@ class TransactionTest(TestBase):
         assert connection.in_transaction()
         transaction.close()
         assert not connection.in_transaction()
-        self.assert_(connection.scalar('select count(1) from '
+        self.assert_(connection.scalar('select count(*) from '
                      'query_users') == 0)
         result = connection.execute('select * from query_users')
         assert len(result.fetchall()) == 0
@@ -604,7 +604,7 @@ class TLTransactionTest(TestBase):
 
     def test_commits(self):
         connection = tlengine.connect()
-        assert connection.execute('select count(1) from query_users'
+        assert connection.execute('select count(*) from query_users'
                                   ).scalar() == 0
         connection.close()
         connection = tlengine.contextual_connect()
@@ -697,7 +697,7 @@ class TLTransactionTest(TestBase):
         tlengine.rollback()
         try:
             self.assert_(external_connection.scalar(
-                        'select count(1) from query_users'
+                        'select count(*) from query_users'
                          ) == 0)
         finally:
             external_connection.close()
@@ -719,7 +719,7 @@ class TLTransactionTest(TestBase):
         tlengine.commit()
         try:
             self.assert_(external_connection.scalar(
-                        'select count(1) from query_users'
+                        'select count(*) from query_users'
                          ) == 5)
         finally:
             external_connection.close()
@@ -751,7 +751,7 @@ class TLTransactionTest(TestBase):
         conn.close()
         try:
             self.assert_(external_connection.scalar(
-                        'select count(1) from query_users'
+                        'select count(*) from query_users'
                          ) == 0)
         finally:
             external_connection.close()
@@ -778,7 +778,7 @@ class TLTransactionTest(TestBase):
         connection.close()
         try:
             self.assert_(external_connection.scalar(
-                        'select count(1) from query_users'
+                        'select count(*) from query_users'
                          ) == 0)
         finally:
             external_connection.close()
index a87931bb355d66be0e0cd2f1d8993098f1295c56..410ff73a6549f90af17a71f1e75501cf2fe974da 100644 (file)
@@ -259,6 +259,7 @@ class QueryTest(TestBase):
         )
         
         concat = ("test: " + users.c.user_name).label('thedata')
+        print select([concat]).order_by("thedata")
         eq_(
             select([concat]).order_by("thedata").execute().fetchall(),
             [("test: ed",), ("test: fred",), ("test: jack",)]
@@ -397,6 +398,7 @@ class QueryTest(TestBase):
     
     @testing.fails_on("firebird", "see dialect.test_firebird:MiscTest.test_percents_in_text")
     @testing.fails_on("oracle", "neither % nor %% are accepted")
+    @testing.fails_on("informix", "neither % nor %% are accepted")
     @testing.fails_on("+pg8000", "can't interpret result column from '%%'")
     @testing.emits_warning('.*now automatically escapes.*')
     def test_percents_in_text(self):
@@ -661,7 +663,7 @@ class QueryTest(TestBase):
             "This result object is closed.",
             result.fetchone
         )
-        
+
     def test_result_case_sensitivity(self):
         """test name normalization for result sets."""
         
@@ -1179,6 +1181,7 @@ class CompoundTest(TestBase):
     @testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery")
     @testing.fails_on('mysql', 'FIXME: unknown')
     @testing.fails_on('sqlite', 'FIXME: unknown')
+    @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allowed)")
     def test_union_all(self):
         e = union_all(
             select([t1.c.col3]),
index d7caae6003c5a76e1de9069a1fefcee5c5bc77d0..2a21ce03472fd6bf9f787d2f74a54053ceb8c9cb 100644 (file)
@@ -1118,7 +1118,7 @@ class NumericTest(TestBase):
     def teardown(self):
         metadata.drop_all()
         
-    @testing.emits_warning(r".*does \*not\* support Decimal objects natively")    
+    @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
     def _do_test(self, type_, input_, output, filter_ = None):
         t = Table('t', metadata, Column('x', type_))
         t.create()