]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- remove informix dialect, moved out to https://bitbucket.org/zzzeek/sqlalchemy_infor...
authorMike Bayer <mike_mp@zzzcomputing.com>
Sun, 17 Nov 2013 18:45:23 +0000 (13:45 -0500)
committerMike Bayer <mike_mp@zzzcomputing.com>
Sun, 17 Nov 2013 18:45:23 +0000 (13:45 -0500)
- remove informix, maxdb, access symbols from tests etc.

30 files changed:
doc/build/changelog/changelog_09.rst
doc/build/dialects/index.rst
lib/sqlalchemy/databases/__init__.py
lib/sqlalchemy/dialects/__init__.py
lib/sqlalchemy/dialects/informix/__init__.py [deleted file]
lib/sqlalchemy/dialects/informix/base.py [deleted file]
lib/sqlalchemy/dialects/informix/informixdb.py [deleted file]
lib/sqlalchemy/engine/reflection.py
test/dialect/test_informix.py [deleted file]
test/engine/test_execute.py
test/engine/test_reconnect.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_manytomany.py
test/orm/inheritance/test_poly_linked_list.py
test/orm/test_assorted_eager.py
test/orm/test_cascade.py
test/orm/test_eager_relations.py
test/orm/test_expire.py
test/orm/test_generative.py
test/orm/test_lazy_relations.py
test/orm/test_mapper.py
test/orm/test_query.py
test/orm/test_subquery_relations.py
test/requirements.py
test/sql/test_case_statement.py
test/sql/test_defaults.py
test/sql/test_functions.py
test/sql/test_query.py

index b0dc07d9702db89fc441159de8692bfc8dab0509..5eaf78b9d186af533d0fc85f37330a26fd015072 100644 (file)
 .. changelog::
     :version: 0.9.0b2
 
+    .. change::
+        :tags: removed
+
+        The "informix" and "informixdb" dialects have been removed; the code
+        is now available as a separate repository on Bitbucket.   The IBM-DB
+        project has provided production-level Informix support since the
+        informixdb dialect was first added.
+
     .. change::
         :tags: bug, orm
 
index 7835a5e21bd69d77b638265fb535d1a2409b29a5..d22f11ee28725cb67154a5856e0e0b92644336d5 100644 (file)
@@ -18,7 +18,6 @@ Included Dialects
 
     drizzle
     firebird
-    informix
     mssql
     mysql
     oracle
@@ -45,17 +44,31 @@ Current external dialect projects for SQLAlchemy include:
 Production Ready
 ^^^^^^^^^^^^^^^^
 
-* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2, developed jointly by IBM and SQLAlchemy developers.
+* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2 and Informix, developed jointly by IBM and SQLAlchemy developers.
 * `sqlalchemy-sqlany <https://code.google.com/p/sqlalchemy-sqlany/>`_ - driver for SAP Sybase SQL Anywhere, developed by SAP.
 * `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
 
 Experimental / Incomplete
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+Dialects that were started but are not in a completed state.  Code contributions
+are welcome here.
+
 * `CALCHIPAN <https://bitbucket.org/zzzeek/calchipan/>`_ - Adapts `Pandas <http://pandas.pydata.org/>`_ dataframes to SQLAlchemy.
 * `sqlalchemy-akiban <https://github.com/zzzeek/sqlalchemy_akiban>`_ - driver and ORM extensions for the `Akiban <http://www.akiban.com>`_ database.
 * `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
+
+Attic
+^^^^^
+
+Dialects in the "attic" are those that were contributed for SQLAlchemy long ago
+but have received little attention or demand since then, and are now moved out to
+their own repositories in at best a semi-working state.
+Community members interested in these dialects should feel free to pick up on
+their current codebase and fork off into working libraries.
+
+* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+* `sqlalchemy-informixdb <https://bitbucket.org/zzzeek/sqlalchemy_informixdb>`_ - driver for the informixdb DBAPI.
+* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
 
 
index e6821b009492d332530de183716175976aeeb09a..0774ffc0195d129a939e6ea4ad6110b0c85f4b0b 100644 (file)
@@ -15,7 +15,6 @@ from ..dialects.mysql import base as mysql
 from ..dialects.drizzle import base as drizzle
 from ..dialects.oracle import base as oracle
 from ..dialects.firebird import base as firebird
-from ..dialects.informix import base as informix
 from ..dialects.mssql import base as mssql
 from ..dialects.sybase import base as sybase
 
@@ -23,7 +22,6 @@ from ..dialects.sybase import base as sybase
 __all__ = (
     'drizzle',
     'firebird',
-    'informix',
     'mssql',
     'mysql',
     'postgresql',
index 7f5d34707771377fbd112c9eb3e6e7b7d390e314..8b276a7ca293d3e1937ae4e120790b34cbdd8bc3 100644 (file)
@@ -7,7 +7,6 @@
 __all__ = (
     'drizzle',
     'firebird',
-#    'informix',
     'mssql',
     'mysql',
     'oracle',
diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py
deleted file mode 100644 (file)
index a55277c..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-# informix/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.informix import base, informixdb
-
-base.dialect = informixdb.dialect
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
deleted file mode 100644 (file)
index e13ea88..0000000
+++ /dev/null
@@ -1,590 +0,0 @@
-# informix/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-# coding: gbk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-.. dialect:: informix
-    :name: Informix
-
-.. note::
-
-    The Informix dialect functions on current SQLAlchemy versions
-    but is not regularly tested, and may have many issues and
-    caveats not currently handled.
-
-"""
-
-
-import datetime
-
-from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler, text
-from sqlalchemy.engine import default, reflection
-from sqlalchemy import types as sqltypes
-from functools import reduce
-
-RESERVED_WORDS = set(
-    ["abs", "absolute", "access", "access_method", "acos", "active", "add",
-    "address", "add_months", "admin", "after", "aggregate", "alignment",
-    "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append",
-    "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
-    "attributes", "audit", "authentication", "authid", "authorization",
-    "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
-    "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
-    "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
-    "avoid_star_join", "avoid_subqf", "based", "before", "begin",
-    "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
-    "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
-    "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
-    "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
-    "character", "character_length", "char_length", "check", "class",
-    "class_origin", "client", "clob", "clobdir", "close", "cluster",
-    "clustersize", "cobol", "codeset", "collation", "collection",
-    "column", "columns", "commit", "committed", "commutator", "component",
-    "components", "concat", "concurrent", "connect", "connection",
-    "connection_name", "connect_by_iscycle", "connect_by_isleaf",
-    "connect_by_rootconst", "constraint", "constraints", "constructor",
-    "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
-    "create", "cross", "current", "current_role", "currval", "cursor",
-    "cycle", "database", "datafiles", "dataskip", "date", "datetime",
-    "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
-    "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
-    "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
-    "dec_t", "default", "default_role", "deferred", "deferred_prepare",
-    "define", "delay", "delete", "deleting", "delimited", "delimiter",
-    "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
-    "directives", "dirty", "disable", "disabled", "disconnect", "disk",
-    "distinct", "distributebinary", "distributesreferences",
-    "distributions", "document", "domain", "donotdistribute", "dormant",
-    "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
-    "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
-    "environment", "error", "escape", "exception", "exclusive", "exec",
-    "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
-    "explain", "explicit", "express", "expression", "extdirectives",
-    "extend", "extent", "external", "fact", "false", "far", "fetch",
-    "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
-    "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
-    "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
-    "format_units", "fortran", "found", "fraction", "fragment",
-    "fragments", "free", "from", "full", "function", "general", "get",
-    "gethint", "global", "go", "goto", "grant", "greaterthan",
-    "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
-    "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
-    "idslbacreadset", "idslbacreadtree", "idslbacrules",
-    "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
-    "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
-    "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
-    "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
-    "indexes", "index_all", "index_sj", "indicator", "informix", "init",
-    "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
-    "int", "int8", "integ", "integer", "internal", "internallength",
-    "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
-    "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
-    "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
-    "labeltostring", "language", "last", "last_day", "leading", "left",
-    "length", "lessthan", "lessthanorequal", "let", "level", "like",
-    "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
-    "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
-    "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
-    "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
-    "memory", "memory_resident", "merge", "message_length", "message_text",
-    "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
-    "modify", "module", "money", "month", "months_between", "mounting",
-    "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
-    "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
-    "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
-    "normal", "not", "notemplatearg", "notequal", "null", "nullif",
-    "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
-    "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
-    "opaque", "opclass", "open", "optcompind", "optical", "optimization",
-    "option", "or", "order", "ordered", "out", "outer", "output",
-    "override", "page", "parallelizable", "parameter", "partition",
-    "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
-    "pipe", "pli", "pload", "policy", "pow", "power", "precision",
-    "prepare", "previous", "primary", "prior", "private", "privileges",
-    "procedure", "properties", "public", "put", "raise", "range", "raw",
-    "read", "real", "recordend", "references", "referencing", "register",
-    "rejectfile", "relative", "release", "remainder", "rename",
-    "reoptimization", "repeatable", "replace", "replication", "reserve",
-    "resolution", "resource", "restart", "restrict", "resume", "retain",
-    "retainupdatelocks", "return", "returned_sqlstate", "returning",
-    "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
-    "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
-    "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
-    "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
-    "seclabel_by_name", "seclabel_to_char", "second", "secondary",
-    "section", "secured", "security", "selconst", "select", "selecting",
-    "selfunc", "selfuncargs", "sequence", "serial", "serial8",
-    "serializable", "serveruuid", "server_name", "session", "set",
-    "setsessionauth", "share", "short", "siblings", "signed", "sin",
-    "sitename", "size", "skall", "skinhibit", "skip", "skshow",
-    "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
-    "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
-    "stability", "stack", "standard", "start", "star_join", "statchange",
-    "statement", "static", "statistics", "statlevel", "status", "stdev",
-    "step", "stop", "storage", "store", "strategies", "string",
-    "stringtolabel", "struct", "style", "subclass_origin", "substr",
-    "substring", "sum", "support", "sync", "synonym", "sysdate",
-    "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
-    "tables", "tan", "task", "temp", "template", "test", "text", "then",
-    "time", "timeout", "to", "today", "to_char", "to_date",
-    "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
-    "transaction", "transition", "tree", "trigger", "triggers", "trim",
-    "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
-    "typename", "typeof", "uid", "uncommitted", "under", "union",
-    "unique", "units", "unknown", "unload", "unlock", "unsigned",
-    "update", "updating", "upon", "upper", "usage", "use",
-    "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
-    "using", "value", "values", "var", "varchar", "variable", "variance",
-    "variant", "varying", "vercols", "view", "violations", "void",
-    "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
-    "while", "with", "without", "work", "write", "writedown", "writeup",
-    "xadatasource", "xid", "xload", "xunload", "year"
-    ])
-
-
-class InfoDateTime(sqltypes.DateTime):
-
-    def bind_processor(self, dialect):
-        def process(value):
-            if value is not None:
-                if value.microsecond:
-                    value = value.replace(microsecond=0)
-            return value
-        return process
-
-
-class InfoTime(sqltypes.Time):
-
-    def bind_processor(self, dialect):
-        def process(value):
-            if value is not None:
-                if value.microsecond:
-                    value = value.replace(microsecond=0)
-            return value
-        return process
-
-    def result_processor(self, dialect, coltype):
-        def process(value):
-            if isinstance(value, datetime.datetime):
-                return value.time()
-            else:
-                return value
-        return process
-
-colspecs = {
-    sqltypes.DateTime: InfoDateTime,
-    sqltypes.TIMESTAMP: InfoDateTime,
-    sqltypes.Time: InfoTime,
-}
-
-
-ischema_names = {
-    0: sqltypes.CHAR,           # CHAR
-    1: sqltypes.SMALLINT,       # SMALLINT
-    2: sqltypes.INTEGER,        # INT
-    3: sqltypes.FLOAT,          # Float
-    3: sqltypes.Float,          # SmallFloat
-    5: sqltypes.DECIMAL,        # DECIMAL
-    6: sqltypes.Integer,        # Serial
-    7: sqltypes.DATE,           # DATE
-    8: sqltypes.Numeric,        # MONEY
-    10: sqltypes.DATETIME,      # DATETIME
-    11: sqltypes.LargeBinary,   # BYTE
-    12: sqltypes.TEXT,          # TEXT
-    13: sqltypes.VARCHAR,       # VARCHAR
-    15: sqltypes.NCHAR,         # NCHAR
-    16: sqltypes.NVARCHAR,      # NVARCHAR
-    17: sqltypes.Integer,       # INT8
-    18: sqltypes.Integer,       # Serial8
-    43: sqltypes.String,        # LVARCHAR
-    -1: sqltypes.BLOB,          # BLOB
-    -1: sqltypes.CLOB,          # CLOB
-}
-
-
-class InfoTypeCompiler(compiler.GenericTypeCompiler):
-    def visit_DATETIME(self, type_):
-        return "DATETIME YEAR TO SECOND"
-
-    def visit_TIME(self, type_):
-        return "DATETIME HOUR TO SECOND"
-
-    def visit_TIMESTAMP(self, type_):
-        return "DATETIME YEAR TO SECOND"
-
-    def visit_large_binary(self, type_):
-        return "BYTE"
-
-    def visit_boolean(self, type_):
-        return "SMALLINT"
-
-
-class InfoSQLCompiler(compiler.SQLCompiler):
-
-    def default_from(self):
-        return " from systables where tabname = 'systables' "
-
-    def get_select_precolumns(self, select):
-        s = ""
-        if select._offset:
-            s += "SKIP %s " % select._offset
-        if select._limit:
-            s += "FIRST %s " % select._limit
-        s += select._distinct and "DISTINCT " or ""
-        return s
-
-    def visit_select(self, select, asfrom=False, parens=True, **kw):
-        text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
-        if asfrom and parens and self.dialect.server_version_info < (11,):
-            #assuming that 11 version doesn't need this, not tested
-            return "table(multiset" + text + ")"
-        else:
-            return text
-
-    def limit_clause(self, select):
-        return ""
-
-    def visit_function(self, func, **kw):
-        if func.name.lower() == 'current_date':
-            return "today"
-        elif func.name.lower() == 'current_time':
-            return "CURRENT HOUR TO SECOND"
-        elif func.name.lower() in ('current_timestamp', 'now'):
-            return "CURRENT YEAR TO SECOND"
-        else:
-            return compiler.SQLCompiler.visit_function(self, func, **kw)
-
-    def visit_mod_binary(self, binary, operator, **kw):
-        return "MOD(%s, %s)" % (self.process(binary.left, **kw),
-                                self.process(binary.right, **kw))
-
-
-class InfoDDLCompiler(compiler.DDLCompiler):
-
-    def visit_add_constraint(self, create):
-        preparer = self.preparer
-        return "ALTER TABLE %s ADD CONSTRAINT %s" % (
-            self.preparer.format_table(create.element.table),
-            self.process(create.element)
-        )
-
-    def get_column_specification(self, column, **kw):
-        colspec = self.preparer.format_column(column)
-        first = None
-        if column.primary_key and column.autoincrement:
-            try:
-                first = [c for c in column.table.primary_key.columns
-                         if (c.autoincrement and
-                             isinstance(c.type, sqltypes.Integer) and
-                             not c.foreign_keys)].pop(0)
-            except IndexError:
-                pass
-
-        if column is first:
-            colspec += " SERIAL"
-        else:
-            colspec += " " + self.dialect.type_compiler.process(column.type)
-            default = self.get_column_default_string(column)
-            if default is not None:
-                colspec += " DEFAULT " + default
-
-        if not column.nullable:
-            colspec += " NOT NULL"
-
-        return colspec
-
-    def get_column_default_string(self, column):
-        if (isinstance(column.server_default, schema.DefaultClause) and
-            isinstance(column.server_default.arg, util.string_types)):
-                if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
-                    return self.sql_compiler.process(text(column.server_default.arg))
-
-        return super(InfoDDLCompiler, self).get_column_default_string(column)
-
-    ### Informix wants the constraint name at the end, hence this ist c&p from sql/compiler.py
-    def visit_primary_key_constraint(self, constraint):
-        if len(constraint) == 0:
-            return ''
-        text = "PRIMARY KEY "
-        text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
-                                       for c in constraint)
-        text += self.define_constraint_deferrability(constraint)
-
-        if constraint.name is not None:
-            text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
-        return text
-
-    def visit_foreign_key_constraint(self, constraint):
-        preparer = self.dialect.identifier_preparer
-        remote_table = list(constraint._elements.values())[0].column.table
-        text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
-            ', '.join(preparer.quote(f.parent.name, f.parent.quote)
-                      for f in constraint._elements.values()),
-            preparer.format_table(remote_table),
-            ', '.join(preparer.quote(f.column.name, f.column.quote)
-                      for f in constraint._elements.values())
-        )
-        text += self.define_constraint_cascades(constraint)
-        text += self.define_constraint_deferrability(constraint)
-
-        if constraint.name is not None:
-            text += " CONSTRAINT %s " % \
-                        preparer.format_constraint(constraint)
-        return text
-
-    def visit_unique_constraint(self, constraint):
-        text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
-        text += self.define_constraint_deferrability(constraint)
-
-        if constraint.name is not None:
-            text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
-        return text
-
-
-class InformixIdentifierPreparer(compiler.IdentifierPreparer):
-
-    reserved_words = RESERVED_WORDS
-
-
-class InformixDialect(default.DefaultDialect):
-    name = 'informix'
-
-    max_identifier_length = 128  # adjusts at runtime based on server version
-
-    type_compiler = InfoTypeCompiler
-    statement_compiler = InfoSQLCompiler
-    ddl_compiler = InfoDDLCompiler
-    colspecs = colspecs
-    ischema_names = ischema_names
-    preparer = InformixIdentifierPreparer
-    default_paramstyle = 'qmark'
-
-    def initialize(self, connection):
-        super(InformixDialect, self).initialize(connection)
-
-        # http://www.querix.com/support/knowledge-base/error_number_message/error_200
-        if self.server_version_info < (9, 2):
-            self.max_identifier_length = 18
-        else:
-            self.max_identifier_length = 128
-
-    def _get_table_names(self, connection, schema, type, **kw):
-        schema = schema or self.default_schema_name
-        s = "select tabname, owner from systables where owner=? and tabtype=?"
-        return [row[0] for row in connection.execute(s, schema, type)]
-
-    @reflection.cache
-    def get_table_names(self, connection, schema=None, **kw):
-        return self._get_table_names(connection, schema, 'T', **kw)
-
-    @reflection.cache
-    def get_view_names(self, connection, schema=None, **kw):
-        return self._get_table_names(connection, schema, 'V', **kw)
-
-    @reflection.cache
-    def get_schema_names(self, connection, **kw):
-        s = "select owner from systables"
-        return [row[0] for row in connection.execute(s)]
-
-    def has_table(self, connection, table_name, schema=None):
-        schema = schema or self.default_schema_name
-        cursor = connection.execute(
-                """select tabname from systables where tabname=? and owner=?""",
-                table_name, schema)
-        return cursor.first() is not None
-
-    @reflection.cache
-    def get_columns(self, connection, table_name, schema=None, **kw):
-        schema = schema or self.default_schema_name
-        c = connection.execute(
-            """select colname, coltype, collength, t3.default, t1.colno from
-                syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
-                where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
-                  and t3.tabid = t2.tabid and t3.colno = t1.colno
-                order by t1.colno""", table_name, schema)
-
-        pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw)
-        primary_cols = pk_constraint['constrained_columns']
-
-        columns = []
-        rows = c.fetchall()
-        for name, colattr, collength, default, colno in rows:
-            name = name.lower()
-
-            autoincrement = False
-            primary_key = False
-
-            if name in primary_cols:
-                primary_key = True
-
-            # in 7.31, coltype = 0x000
-            #                       ^^-- column type
-            #                      ^-- 1 not null, 0 null
-            not_nullable, coltype = divmod(colattr, 256)
-            if coltype not in (0, 13) and default:
-                default = default.split()[-1]
-
-            if coltype == 6:  # Serial, mark as autoincrement
-                autoincrement = True
-
-            if coltype == 0 or coltype == 13:  # char, varchar
-                coltype = ischema_names[coltype](collength)
-                if default:
-                    default = "'%s'" % default
-            elif coltype == 5:  # decimal
-                precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
-                if scale == 255:
-                    scale = 0
-                coltype = sqltypes.Numeric(precision, scale)
-            else:
-                try:
-                    coltype = ischema_names[coltype]
-                except KeyError:
-                    util.warn("Did not recognize type '%s' of column '%s'" %
-                              (coltype, name))
-                    coltype = sqltypes.NULLTYPE
-
-            column_info = dict(name=name, type=coltype, nullable=not not_nullable,
-                               default=default, autoincrement=autoincrement,
-                               primary_key=primary_key)
-            columns.append(column_info)
-        return columns
-
-    @reflection.cache
-    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        schema_sel = schema or self.default_schema_name
-        c = connection.execute(
-        """select t1.constrname as cons_name,
-                 t4.colname as local_column, t7.tabname as remote_table,
-                 t6.colname as remote_column, t7.owner as remote_owner
-            from sysconstraints as t1 , systables as t2 ,
-                 sysindexes as t3 , syscolumns as t4 ,
-                 sysreferences as t5 , syscolumns as t6 , systables as t7 ,
-                 sysconstraints as t8 , sysindexes as t9
-           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
-             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
-             and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
-             t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
-             t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
-             and t5.constrid = t1.constrid and t8.constrid = t5.primary
-             and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
-             t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
-             t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
-             t8.idxname
-             and t7.tabid = t5.ptabid""", table_name, schema_sel)
-
-        def fkey_rec():
-            return {
-                 'name': None,
-                 'constrained_columns': [],
-                 'referred_schema': None,
-                 'referred_table': None,
-                 'referred_columns': []
-             }
-
-        fkeys = util.defaultdict(fkey_rec)
-
-        rows = c.fetchall()
-        for cons_name, local_column, \
-                    remote_table, remote_column, remote_owner in rows:
-
-            rec = fkeys[cons_name]
-            rec['name'] = cons_name
-            local_cols, remote_cols = \
-                        rec['constrained_columns'], rec['referred_columns']
-
-            if not rec['referred_table']:
-                rec['referred_table'] = remote_table
-                if schema is not None:
-                    rec['referred_schema'] = remote_owner
-
-            if local_column not in local_cols:
-                local_cols.append(local_column)
-            if remote_column not in remote_cols:
-                remote_cols.append(remote_column)
-
-        return list(fkeys.values())
-
-    @reflection.cache
-    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
-        schema = schema or self.default_schema_name
-
-        # Select the column positions from sysindexes for sysconstraints
-        data = connection.execute(
-            """select t2.*
-            from systables as t1, sysindexes as t2, sysconstraints as t3
-            where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
-            and t2.idxname=t3.idxname and t3.constrtype='P'""",
-            table_name, schema
-        ).fetchall()
-
-        colpositions = set()
-
-        for row in data:
-            colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
-            colpositions |= colpos
-
-        if not len(colpositions):
-            return {'constrained_columns': [], 'name': None}
-
-        # Select the column names using the columnpositions
-        # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
-        place_holder = ','.join('?' * len(colpositions))
-        c = connection.execute(
-            """select t1.colname
-            from syscolumns as t1, systables as t2
-            where t2.tabname=? and t1.tabid = t2.tabid and
-            t1.colno in (%s)""" % place_holder,
-            table_name, *colpositions
-        ).fetchall()
-
-        cols = reduce(lambda x, y: list(x) + list(y), c, [])
-        return {'constrained_columns': cols, 'name': None}
-
-    @reflection.cache
-    def get_indexes(self, connection, table_name, schema, **kw):
-        # TODO: schema...
-        c = connection.execute(
-            """select t1.*
-            from sysindexes as t1 , systables as t2
-           where t1.tabid = t2.tabid and t2.tabname=?""",
-             table_name)
-
-        indexes = []
-        for row in c.fetchall():
-            colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
-            colnames = [x for x in colnames if x]
-            place_holder = ','.join('?' * len(colnames))
-            c = connection.execute(
-                """select t1.colname
-                from syscolumns as t1, systables as t2
-                where t2.tabname=? and t1.tabid = t2.tabid and
-                t1.colno in (%s)""" % place_holder,
-                table_name, *colnames
-            ).fetchall()
-            c = reduce(lambda x, y: list(x) + list(y), c, [])
-            indexes.append({
-                'name': row.idxname,
-                'unique': row.idxtype.lower() == 'u',
-                'column_names': c
-            })
-        return indexes
-
-    @reflection.cache
-    def get_view_definition(self, connection, view_name, schema=None, **kw):
-        schema = schema or self.default_schema_name
-        c = connection.execute(
-            """select t1.viewtext
-            from sysviews as t1 , systables as t2
-            where t1.tabid=t2.tabid and t2.tabname=?
-            and t2.owner=? order by seqno""",
-             view_name, schema).fetchall()
-
-        return ''.join([row[0] for row in c])
-
-    def _get_default_schema_name(self, connection):
-        return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
deleted file mode 100644 (file)
index f2f0d3e..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-# informix/informixdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-
-.. dialect:: informix+informixdb
-    :name: informixdb
-    :dbapi: informixdb
-    :connectstring: informix+informixdb://user:password@host/dbname
-    :url: http://informixdb.sourceforge.net/
-
-"""
-
-import re
-
-from sqlalchemy.dialects.informix.base import InformixDialect
-from sqlalchemy.engine import default
-
-VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
-
-
-class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
-
-    def post_exec(self):
-        if self.isinsert:
-            self._lastrowid = self.cursor.sqlerrd[1]
-
-    def get_lastrowid(self):
-        return self._lastrowid
-
-
-class InformixDialect_informixdb(InformixDialect):
-    driver = 'informixdb'
-    execution_ctx_cls = InformixExecutionContext_informixdb
-
-    @classmethod
-    def dbapi(cls):
-        return __import__('informixdb')
-
-    def create_connect_args(self, url):
-        if url.host:
-            dsn = '%s@%s' % (url.database, url.host)
-        else:
-            dsn = url.database
-
-        if url.username:
-            opt = {'user': url.username, 'password': url.password}
-        else:
-            opt = {}
-
-        return ([dsn], opt)
-
-    def _get_server_version_info(self, connection):
-        # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
-        v = VERSION_RE.split(connection.connection.dbms_version)
-        return (int(v[1]), int(v[2]), v[3])
-
-    def is_disconnect(self, e, connection, cursor):
-        if isinstance(e, self.dbapi.OperationalError):
-            return 'closed the connection' in str(e) \
-                    or 'connection not open' in str(e)
-        else:
-            return False
-
-
-dialect = InformixDialect_informixdb
index 1f219e30c1efdea9cfe60f97984a856998568363..4a884453bb1698ab47e1b7949ee7fd2e9d184bf4 100644 (file)
@@ -493,7 +493,7 @@ class Inspector(object):
                 )
 
             if 'sequence' in col_d:
-                # TODO: mssql, maxdb and sybase are using this.
+                # TODO: mssql and sybase are using this.
                 seq = col_d['sequence']
                 sequence = sa_schema.Sequence(seq['name'], 1, 1)
                 if 'start' in seq:
diff --git a/test/dialect/test_informix.py b/test/dialect/test_informix.py
deleted file mode 100644 (file)
index 332edd2..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-from sqlalchemy import *
-from sqlalchemy.databases import informix
-from sqlalchemy.testing import *
-
-
-class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-
-    __dialect__ = informix.InformixDialect()
-
-    def test_statements(self):
-        meta = MetaData()
-        t1 = Table('t1', meta, Column('col1', Integer,
-                   primary_key=True), Column('col2', String(50)))
-        t2 = Table('t2', meta, Column('col1', Integer,
-                   primary_key=True), Column('col2', String(50)),
-                   Column('col3', Integer, ForeignKey('t1.col1')))
-        self.assert_compile(t1.select(),
-                            'SELECT t1.col1, t1.col2 FROM t1')
-        self.assert_compile(select([t1, t2]).select_from(t1.join(t2)),
-                            'SELECT t1.col1, t1.col2, t2.col1, '
-                            't2.col2, t2.col3 FROM t1 JOIN t2 ON '
-                            't1.col1 = t2.col3')
-        self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1
-                            + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
-
index b116e4d6b75627dfd7f39a1c2b45f1c029bfbe7e..dbefc9f42a4510a423fac7fec3ba4210d99e2413 100644 (file)
@@ -59,10 +59,9 @@ class ExecuteTest(fixtures.TestBase):
                 scalar(stmt)
         eq_(result, '%')
 
-    @testing.fails_on_everything_except('firebird', 'maxdb',
+    @testing.fails_on_everything_except('firebird',
                                         'sqlite', '+pyodbc',
-                                        '+mxodbc', '+zxjdbc', 'mysql+oursql',
-                                        'informix+informixdb')
+                                        '+mxodbc', '+zxjdbc', 'mysql+oursql')
     def test_raw_qmark(self):
         def go(conn):
             conn.execute('insert into users (user_id, user_name) '
@@ -182,7 +181,7 @@ class ExecuteTest(fixtures.TestBase):
         finally:
             conn.close()
 
-    @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb')
+    @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
     def test_raw_named(self):
         def go(conn):
             conn.execute('insert into users (user_id, user_name) '
index 0a964cf63501c37c1a27afc8f3450b7d54708b5a..ba336a1bfac6359e37fb9859fdca3dca18f28210 100644 (file)
@@ -413,8 +413,6 @@ class RealReconnectTest(fixtures.TestBase):
     def teardown(self):
         self.engine.dispose()
 
-    @testing.fails_on('+informixdb',
-                      "Wrong error thrown, fix in informixdb?")
     def test_reconnect(self):
         conn = self.engine.connect()
 
@@ -539,8 +537,6 @@ class RealReconnectTest(fixtures.TestBase):
         # pool was recreated
         assert engine.pool is not p1
 
-    @testing.fails_on('+informixdb',
-                      "Wrong error thrown, fix in informixdb?")
     def test_null_pool(self):
         engine = \
             engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
@@ -554,8 +550,6 @@ class RealReconnectTest(fixtures.TestBase):
         eq_(conn.execute(select([1])).scalar(), 1)
         assert not conn.invalidated
 
-    @testing.fails_on('+informixdb',
-                      "Wrong error thrown, fix in informixdb?")
     def test_close(self):
         conn = self.engine.connect()
         eq_(conn.execute(select([1])).scalar(), 1)
@@ -569,8 +563,6 @@ class RealReconnectTest(fixtures.TestBase):
         conn = self.engine.connect()
         eq_(conn.execute(select([1])).scalar(), 1)
 
-    @testing.fails_on('+informixdb',
-                      "Wrong error thrown, fix in informixdb?")
     def test_with_transaction(self):
         conn = self.engine.connect()
         trans = conn.begin()
@@ -651,8 +643,6 @@ class InvalidateDuringResultTest(fixtures.TestBase):
                     '+cymysql', '+pymysql', '+pg8000'
                     ], "Buffers the result set and doesn't check for "
                         "connection close")
-    @testing.fails_on('+informixdb',
-                      "Wrong error thrown, fix in informixdb?")
     def test_invalidate_on_results(self):
         conn = self.engine.connect()
         result = conn.execute('select * from sometable')
index e7baa9d568354fb0d05cb854ba549e7266743237..a3b0ecdb521e14aa767616fbd336d7944f8c9acf 100644 (file)
@@ -754,10 +754,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
 
     @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
-    @testing.fails_on('+informixdb',
-                        "FIXME: should be supported via the "
-                        "DELIMITED env var but that breaks "
-                        "everything else for now")
     @testing.provide_metadata
     def test_reserved(self):
 
@@ -774,7 +770,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         # There's currently no way to calculate identifier case
         # normalization in isolation, so...
 
-        if testing.against('firebird', 'oracle', 'maxdb'):
+        if testing.against('firebird', 'oracle'):
             check_col = 'TRUE'
         else:
             check_col = 'true'
index 50d38e25794177c5cd9e2bf70c626b581f0c199b..7738dfb342918c1b5fa185bb3e20e61eb1d8bcf3 100644 (file)
@@ -1035,7 +1035,6 @@ class ForUpdateTest(fixtures.TestBase):
     @testing.crashes('mssql', 'FIXME: unknown')
     @testing.crashes('firebird', 'FIXME: unknown')
     @testing.crashes('sybase', 'FIXME: unknown')
-    @testing.crashes('access', 'FIXME: unknown')
     @testing.requires.independent_connections
     def test_queued_update(self):
         """Test SELECT FOR UPDATE with concurrent modifications.
@@ -1100,7 +1099,6 @@ class ForUpdateTest(fixtures.TestBase):
     @testing.crashes('mssql', 'FIXME: unknown')
     @testing.crashes('firebird', 'FIXME: unknown')
     @testing.crashes('sybase', 'FIXME: unknown')
-    @testing.crashes('access', 'FIXME: unknown')
     @testing.requires.independent_connections
     def test_queued_select(self):
         """Simple SELECT FOR UPDATE conflict test"""
@@ -1112,7 +1110,6 @@ class ForUpdateTest(fixtures.TestBase):
     @testing.fails_on('mysql', 'No support for NOWAIT')
     @testing.crashes('firebird', 'FIXME: unknown')
     @testing.crashes('sybase', 'FIXME: unknown')
-    @testing.crashes('access', 'FIXME: unknown')
     @testing.requires.independent_connections
     def test_nowait_select(self):
         """Simple SELECT FOR UPDATE NOWAIT conflict test"""
index 41a167e724f8b35043986a23aec614085c26d0e5..1737d1ccb0f1277efbe2d97d24e65402fdf3742a 100644 (file)
@@ -964,7 +964,6 @@ class EagerLazyTest(fixtures.MappedTest):
                         Column('foo_id', Integer, ForeignKey('foo.id'))
         )
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_basic(self):
         class Foo(object): pass
         class Bar(Foo): pass
index 31c4ba40adb2d70912408ed9258f07a9622d1506..51b797940801aea10143279a9202a9df15e2a65c 100644 (file)
@@ -201,7 +201,6 @@ class InheritTest3(fixtures.MappedTest):
         found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos])
         eq_(found, compare)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testadvanced(self):
         class Foo(object):
             def __init__(self, data=None):
index 1915007def90f9fd3cce0f49ab55b6a0448889c9..ec263b3b04b862a66580ca86a937d84bd0117dc3 100644 (file)
@@ -115,19 +115,15 @@ class PolymorphicCircularTest(fixtures.MappedTest):
         configure_mappers()
         assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testone(self):
         self._testlist([Table1, Table2, Table1, Table2])
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testtwo(self):
         self._testlist([Table3])
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testthree(self):
         self._testlist([Table2, Table1, Table1B, Table3, Table3, Table1B, Table1B, Table2, Table1])
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testfour(self):
         self._testlist([
                 Table2('t2', [Data('data1'), Data('data2')]),
index 83fccbf7a07ab5eb550b7fbc2cbdbb495a5979da..16747bd671b82cbab62fcb987058d7aef787d8bc 100644 (file)
@@ -286,7 +286,6 @@ class EagerTest2(fixtures.MappedTest):
                            lazy='joined',
                            backref=backref('middle', lazy='joined')))),
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_eager_terminate(self):
         """Eager query generation does not include the same mapper's table twice.
 
@@ -339,7 +338,6 @@ class EagerTest3(fixtures.MappedTest):
         class Stat(cls.Basic):
             pass
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_nesting_with_functions(self):
         Stat, Foo, stats, foo, Data, datas = (self.classes.Stat,
                                 self.classes.Foo,
@@ -423,7 +421,6 @@ class EagerTest4(fixtures.MappedTest):
         class Employee(cls.Basic):
             pass
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_basic(self):
         Department, Employee, employees, departments = (self.classes.Department,
                                 self.classes.Employee,
@@ -774,7 +771,6 @@ class EagerTest8(fixtures.MappedTest):
         class Joined(cls.Comparable):
             pass
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_nested_joins(self):
         task, Task_Type, Joined, prj, task_type, msg = (self.tables.task,
                                 self.classes.Task_Type,
@@ -867,7 +863,6 @@ class EagerTest9(fixtures.MappedTest):
                                  backref=backref('entries', lazy='joined',
                                                  order_by=entries.c.entry_id))))
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_joinedload_on_path(self):
         Entry, Account, Transaction = (self.classes.Entry,
                                 self.classes.Account,
index d0318b0799b9c05c3039cda1c5fa70d6e9b354b5..615ae815d10dc968ec03c29e505c40c6d6eb3b11 100644 (file)
@@ -1217,7 +1217,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest):
         sess.flush()
         sess.close()
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_orphan(self):
         prefs, User, extra = (self.tables.prefs,
                                 self.classes.User,
@@ -1282,7 +1281,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest):
         assert p2 in sess
         sess.commit()
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_orphan_on_update(self):
         prefs, User, extra = (self.tables.prefs,
                                 self.classes.User,
index e70525be48408ed44185687f23b801bd89199582..f2ba3cc27d9066f6870679693a5bc2315cccf824 100644 (file)
@@ -600,7 +600,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         assert 'orders' not in noeagers[0].__dict__
         assert 'addresses' not in noeagers[0].__dict__
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_limit(self):
         """Limit operations combined with lazy-load relationships."""
 
@@ -655,7 +654,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
             eq_(self.static.user_address_result, l)
         self.assert_sql_count(testing.db, go, 1)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_limit_2(self):
         keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                 self.tables.items,
@@ -677,7 +675,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         eq_(self.static.item_keyword_result[1:3], l)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_limit_3(self):
         """test that the ORDER BY is propagated from the inner
         select to the outer select, when using the
@@ -709,7 +706,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         q = sess.query(User)
 
-        if not testing.against('maxdb', 'mssql'):
+        if not testing.against('mssql'):
             l = q.join('orders').order_by(Order.user_id.desc()).limit(2).offset(1)
             eq_([
                 User(id=9,
@@ -944,7 +941,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
             eq_([User(id=7, address=Address(id=1))], l)
         self.assert_sql_count(testing.db, go, 1)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_many_to_one(self):
         users, Address, addresses, User = (self.tables.users,
                                 self.classes.Address,
@@ -1922,7 +1918,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest):
             Column('parent_id', Integer, ForeignKey('nodes.id')),
             Column('data', String(30)))
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_basic(self):
         nodes = self.tables.nodes
 
@@ -2108,7 +2103,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest):
             )
         )
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_no_depth(self):
         nodes = self.tables.nodes
 
index ea823f79c1c4ac337756e957b46f040409726da5..292546e99807bce02a803bc56b5e9653d2b8648e 100644 (file)
@@ -1436,7 +1436,6 @@ class RefreshTest(_fixtures.FixtureTest):
         s.expire(u)
         assert len(u.addresses) == 3
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_refresh2(self):
         """test a hang condition that was occurring on expire/refresh"""
 
index 52858cc2651f26e7782cab71be15def878d52985..cbe559db96d9b1d7583c7d7583c58c08e2556399 100644 (file)
@@ -43,7 +43,6 @@ class GenerativeQueryTest(fixtures.MappedTest):
         assert res.order_by(Foo.bar)[0].bar == 5
         assert res.order_by(sa.desc(Foo.bar))[0].bar == 95
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_slice(self):
         Foo = self.classes.Foo
 
index 66b1eb5e482cf690fd426ecd4d57178d76452434..37d290b58b2848f2b6a3bb5788ea035eb2cb72b9 100644 (file)
@@ -178,7 +178,7 @@ class LazyTest(_fixtures.FixtureTest):
         sess = create_session()
         q = sess.query(User)
 
-        if testing.against('maxdb', 'mssql'):
+        if testing.against('mssql'):
             l = q.limit(2).all()
             assert self.static.user_all_result[:2] == l
         else:
index 5255e4fe22292101321349a10627f4767aa7d88b..b1c9d3fb691f9a7d3a29df7386abf7a0893cef2e 100644 (file)
@@ -1749,7 +1749,6 @@ class OptionsTest(_fixtures.FixtureTest):
             eq_(l, self.static.user_address_result)
         self.sql_count_(0, go)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_eager_options_with_limit(self):
         Address, addresses, users, User = (self.classes.Address,
                                 self.tables.addresses,
@@ -1775,7 +1774,6 @@ class OptionsTest(_fixtures.FixtureTest):
         eq_(u.id, 8)
         eq_(len(u.addresses), 3)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_lazy_options_with_limit(self):
         Address, addresses, users, User = (self.classes.Address,
                                 self.tables.addresses,
index 4d276896cf1facae301926dd8093b6a5bb25cc50..619836ae4f6ce9a3351acb5434bef1c8c93070c7 100644 (file)
@@ -1336,7 +1336,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
 
         assert [User(id=10)] == sess.query(User).outerjoin("addresses", aliased=True).filter(~User.addresses.any()).all()
 
-    @testing.crashes('maxdb', 'can dump core')
     def test_has(self):
         Dingaling, User, Address = (self.classes.Dingaling,
                                 self.classes.User,
index ef6649c3be8dfe116a028adb4a7d8fa8b54e5e9f..3181e090942590ec66505abbdb078e55d4d6f7fb 100644 (file)
@@ -632,7 +632,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
             ], q.all())
         self.assert_sql_count(testing.db, go, 6)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_limit(self):
         """Limit operations combined with lazy-load relationships."""
 
@@ -706,7 +705,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
             eq_([User(id=7, address=Address(id=1))], l)
         self.assert_sql_count(testing.db, go, 2)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_many_to_one(self):
         users, Address, addresses, User = (self.tables.users,
                                 self.classes.Address,
@@ -1144,7 +1142,6 @@ class SelfReferentialTest(fixtures.MappedTest):
             Column('parent_id', Integer, ForeignKey('nodes.id')),
             Column('data', String(30)))
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_basic(self):
         nodes = self.tables.nodes
 
@@ -1309,7 +1306,6 @@ class SelfReferentialTest(fixtures.MappedTest):
             ]), d)
         self.assert_sql_count(testing.db, go, 3)
 
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def test_no_depth(self):
         """no join depth is set, so no eager loading occurs."""
 
index e7728d6e0abee62132ffff13af2e4aceb7ca45c8..4ed0a92897dbe26754eddb56786e5969e15f2601 100644 (file)
@@ -85,8 +85,6 @@ class DefaultRequirements(SuiteRequirements):
             no_support('oracle', 'not supported by database'),
             no_support('mssql', 'not supported by database'),
             no_support('sybase', 'not supported by database'),
-            no_support('maxdb', 'FIXME: verify not supported by database'),
-            no_support('informix', 'not supported by database'),
         ])
 
     @property
@@ -226,7 +224,6 @@ class DefaultRequirements(SuiteRequirements):
                     "sqlite",
                     "sybase",
                     ("mysql", "<", (5, 0, 3)),
-                    ("informix", "<", (11, 55, "xC3"))
                     ], "savepoints not supported")
 
 
@@ -283,14 +280,14 @@ class DefaultRequirements(SuiteRequirements):
         """Target database must support INTERSECT or equivalent."""
 
         return fails_if([
-                "firebird", "mysql", "sybase", "informix"
+                "firebird", "mysql", "sybase",
             ], 'no support for INTERSECT')
 
     @property
     def except_(self):
         """Target database must support EXCEPT or equivalent (i.e. MINUS)."""
         return fails_if([
-                "firebird", "mysql", "sybase", "informix"
+                "firebird", "mysql", "sybase",
             ], 'no support for EXCEPT')
 
     @property
@@ -313,7 +310,6 @@ class DefaultRequirements(SuiteRequirements):
 
         return skip_if([
             no_support('firebird', 'no SA implementation'),
-            no_support('maxdb', 'two-phase xact not supported by database'),
             no_support('mssql', 'two-phase xact not supported by drivers'),
             no_support('oracle', 'two-phase xact not implemented in SQLA/oracle'),
             no_support('drizzle', 'two-phase xact not supported by database'),
@@ -366,7 +362,6 @@ class DefaultRequirements(SuiteRequirements):
         """Target driver must support some degree of non-ascii symbol names."""
         # TODO: expand to exclude MySQLdb versions w/ broken unicode
         return skip_if([
-            no_support('maxdb', 'database support flakey'),
             no_support('oracle', 'FIXME: no support in database?'),
             no_support('sybase', 'FIXME: guessing, needs confirmation'),
             no_support('mssql+pymssql', 'no FreeTDS support'),
index 944a15384bf8b276abbb3b488b8c520be0cb0238..998a55cd8479f589ef3c162fd42054c764c5e254 100644 (file)
@@ -32,7 +32,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
         info_table.drop()
 
     @testing.fails_on('firebird', 'FIXME: unknown')
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     @testing.requires.subqueries
     def test_case(self):
         inner = select([case([
@@ -130,7 +129,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
 
 
     @testing.fails_on('firebird', 'FIXME: unknown')
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     def testcase_with_dict(self):
         query = select([case({
                     info_table.c.pk < 3: 'lessthan3',
index 56b7971b2616dc26a8ad15655f15919472007cf1..4a17c1cda7b7cecd31fa5985b2943986e487db02 100644 (file)
@@ -45,7 +45,7 @@ class DefaultTest(fixtures.TestBase):
                 # since its a "branched" connection
                 conn.close()
 
-        use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb')
+        use_function_defaults = testing.against('postgresql', 'mssql')
         is_oracle = testing.against('oracle')
 
         class MyClass(object):
@@ -73,9 +73,7 @@ class DefaultTest(fixtures.TestBase):
             f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
             def1 = currenttime
             deftype = sa.Date
-            if testing.against('maxdb'):
-                def2 = sa.text("curdate")
-            elif testing.against('mssql'):
+            if testing.against('mssql'):
                 def2 = sa.text("getdate()")
             else:
                 def2 = sa.text("current_date")
index ee503dbb7966ad73adfc2412cfbc1eebdd9c03f3..ee1d61f85e7d008c723d29c1d4fb669b2978fd6e 100644 (file)
@@ -21,13 +21,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         functions._registry.clear()
 
     def test_compile(self):
-        for dialect in all_dialects(exclude=('sybase', 'access',
-                                                'informix', 'maxdb')):
+        for dialect in all_dialects(exclude=('sybase', )):
             bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
             self.assert_compile(func.current_timestamp(),
                                         "CURRENT_TIMESTAMP", dialect=dialect)
             self.assert_compile(func.localtime(), "LOCALTIME", dialect=dialect)
-            if dialect.name in ('firebird', 'maxdb'):
+            if dialect.name in ('firebird',):
                 self.assert_compile(func.nosuchfunction(),
                                             "nosuchfunction", dialect=dialect)
             else:
index 39c8962666a10554951c72608fd455b10d3bcd3c..8e619fe7456d2156750d57aa2e589868b9b952f5 100644 (file)
@@ -1110,7 +1110,6 @@ class QueryTest(fixtures.TestBase):
 
     @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
     @testing.crashes('firebird', 'An identifier must begin with a letter')
-    @testing.crashes('maxdb', 'FIXME: unknown, verify not fails_on()')
     def test_column_accessor_shadow(self):
         meta = MetaData(testing.db)
         shadowed = Table('test_shadowed', meta,
@@ -1900,7 +1899,6 @@ class CompoundTest(fixtures.TestBase):
         eq_(u.execute().fetchall(), wanted)
 
     @testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs")
-    @testing.fails_on('maxdb', 'FIXME: unknown')
     @testing.requires.subqueries
     def test_union_ordered_alias(self):
         (s1, s2) = (
@@ -1919,7 +1917,6 @@ class CompoundTest(fixtures.TestBase):
     @testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery")
     @testing.fails_on('mysql', 'FIXME: unknown')
     @testing.fails_on('sqlite', 'FIXME: unknown')
-    @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allows)")
     def test_union_all(self):
         e = union_all(
             select([t1.c.col3]),