author     Mike Bayer <mike_mp@zzzcomputing.com>   2013-11-17 13:45:23 -0500
committer  Mike Bayer <mike_mp@zzzcomputing.com>   2013-11-17 13:45:23 -0500
commit     59ca4633acd42d90dc01aef9a40373ee98080481 (patch)
tree       41c46078729933e14d9b7ff4767b4a5a813f673a
parent     d6545f7db78d8bd930685019678f6e7df056ed22 (diff)
download   sqlalchemy-59ca4633acd42d90dc01aef9a40373ee98080481.tar.gz
- remove informix dialect, moved out to https://bitbucket.org/zzzeek/sqlalchemy_informixdb
- remove informix, maxdb, access symbols from tests etc.
30 files changed, 37 insertions, 771 deletions
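Users of the removed dialect can plug it back in at runtime from its new home. The sketch below is illustrative only: it assumes the Bitbucket repository installs as a Python package named sqlalchemy_informixdb whose informixdb module still exposes the InformixDialect_informixdb class seen in the deleted files below; the external package's actual layout may differ.

    # Hypothetical re-registration of the externalized Informix dialect.
    # Assumes: package "sqlalchemy_informixdb", module "informixdb",
    # class "InformixDialect_informixdb" (mirroring the deleted in-tree code).
    from sqlalchemy.dialects import registry
    from sqlalchemy import create_engine

    registry.register(
        "informix.informixdb",               # "<dialect>.<driver>" URL scheme
        "sqlalchemy_informixdb.informixdb",  # module to import (assumed layout)
        "InformixDialect_informixdb",        # dialect class within that module
    )

    # Once registered, the old connect string format keeps working:
    engine = create_engine("informix+informixdb://user:password@host/dbname")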
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index b0dc07d97..5eaf78b9d 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -15,6 +15,14 @@
     :version: 0.9.0b2
 
     .. change::
+        :tags: removed
+
+        The "informix" and "informixdb" dialects have been removed; the code
+        is now available as a separate repository on Bitbucket. The IBM-DB
+        project has provided production-level Informix support since the
+        informixdb dialect was first added.
+
+    .. change::
         :tags: bug, orm
 
         Fixed bug where usage of new :class:`.Bundle` object would cause
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index 7835a5e21..d22f11ee2 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -18,7 +18,6 @@ Included Dialects
 
     drizzle
     firebird
-    informix
     mssql
     mysql
     oracle
@@ -45,17 +44,31 @@ Current external dialect projects for SQLAlchemy include:
 Production Ready
 ^^^^^^^^^^^^^^^^
 
-* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2, developed jointly by IBM and SQLAlchemy developers.
+* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2 and Informix, developed jointly by IBM and SQLAlchemy developers.
 * `sqlalchemy-sqlany <https://code.google.com/p/sqlalchemy-sqlany/>`_ - driver for SAP Sybase SQL Anywhere, developed by SAP.
 * `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
 
 Experimental / Incomplete
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+Dialects that were started but are not in a completed state. Code contributions
+welcome here.
+
 * `CALCHIPAN <https://bitbucket.org/zzzeek/calchipan/>`_ - Adapts `Pandas <http://pandas.pydata.org/>`_ dataframes to SQLAlchemy.
 * `sqlalchemy-akiban <https://github.com/zzzeek/sqlalchemy_akiban>`_ - driver and ORM extensions for the `Akiban <http://www.akiban.com>`_ database.
 * `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
+
+Attic
+^^^^^
+
+Dialects in the "attic" are those that were contributed for SQLAlchemy long ago
+but have received little attention or demand since then, and are now moved out to
+their own repositories in at best a semi-working state.
+Community members interested in these dialects should feel free to pick up on
+their current codebase and fork off into working libraries.
+
+* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+* `sqlalchemy-informixdb <https://bitbucket.org/zzzeek/sqlalchemy_informixdb>`_ - driver for the informixdb DBAPI.
* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py index e6821b009..0774ffc01 100644 --- a/lib/sqlalchemy/databases/__init__.py +++ b/lib/sqlalchemy/databases/__init__.py @@ -15,7 +15,6 @@ from ..dialects.mysql import base as mysql from ..dialects.drizzle import base as drizzle from ..dialects.oracle import base as oracle from ..dialects.firebird import base as firebird -from ..dialects.informix import base as informix from ..dialects.mssql import base as mssql from ..dialects.sybase import base as sybase @@ -23,7 +22,6 @@ from ..dialects.sybase import base as sybase __all__ = ( 'drizzle', 'firebird', - 'informix', 'mssql', 'mysql', 'postgresql', diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 7f5d34707..8b276a7ca 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -7,7 +7,6 @@ __all__ = ( 'drizzle', 'firebird', -# 'informix', 'mssql', 'mysql', 'oracle', diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py deleted file mode 100644 index a55277c9f..000000000 --- a/lib/sqlalchemy/dialects/informix/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# informix/__init__.py -# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file> -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -from sqlalchemy.dialects.informix import base, informixdb - -base.dialect = informixdb.dialect diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py deleted file mode 100644 index e13ea8819..000000000 --- a/lib/sqlalchemy/dialects/informix/base.py +++ /dev/null @@ -1,590 +0,0 @@ -# informix/base.py -# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file> -# coding: gbk -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -""" -.. dialect:: informix - :name: Informix - -.. note:: - - The Informix dialect functions on current SQLAlchemy versions - but is not regularly tested, and may have many issues and - caveats not currently handled. 
- -""" - - -import datetime - -from sqlalchemy import sql, schema, exc, pool, util -from sqlalchemy.sql import compiler, text -from sqlalchemy.engine import default, reflection -from sqlalchemy import types as sqltypes -from functools import reduce - -RESERVED_WORDS = set( - ["abs", "absolute", "access", "access_method", "acos", "active", "add", - "address", "add_months", "admin", "after", "aggregate", "alignment", - "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append", - "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach", - "attributes", "audit", "authentication", "authid", "authorization", - "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode", - "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash", - "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl", - "avoid_star_join", "avoid_subqf", "based", "before", "begin", - "between", "bigint", "bigserial", "binary", "bitand", "bitandnot", - "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both", - "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call", - "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char", - "character", "character_length", "char_length", "check", "class", - "class_origin", "client", "clob", "clobdir", "close", "cluster", - "clustersize", "cobol", "codeset", "collation", "collection", - "column", "columns", "commit", "committed", "commutator", "component", - "components", "concat", "concurrent", "connect", "connection", - "connection_name", "connect_by_iscycle", "connect_by_isleaf", - "connect_by_rootconst", "constraint", "constraints", "constructor", - "context", "continue", "copy", "cos", "costfunc", "count", "crcols", - "create", "cross", "current", "current_role", "currval", "cursor", - "cycle", "database", "datafiles", "dataskip", "date", "datetime", - "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm", - "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec", - "decimal", "declare", "decode", "decrypt_binary", "decrypt_char", - "dec_t", "default", "default_role", "deferred", "deferred_prepare", - "define", "delay", "delete", "deleting", "delimited", "delimiter", - "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics", - "directives", "dirty", "disable", "disabled", "disconnect", "disk", - "distinct", "distributebinary", "distributesreferences", - "distributions", "document", "domain", "donotdistribute", "dormant", - "double", "drop", "dtime_t", "each", "elif", "else", "enabled", - "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum", - "environment", "error", "escape", "exception", "exclusive", "exec", - "execute", "executeanywhere", "exemption", "exists", "exit", "exp", - "explain", "explicit", "express", "expression", "extdirectives", - "extend", "extent", "external", "fact", "false", "far", "fetch", - "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first", - "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for", - "force", "forced", "force_ddl_exec", "foreach", "foreign", "format", - "format_units", "fortran", "found", "fraction", "fragment", - "fragments", "free", "from", "full", "function", "general", "get", - "gethint", "global", "go", "goto", "grant", "greaterthan", - "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr", - "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray", - "idslbacreadset", "idslbacreadtree", "idslbacrules", - "idslbacwritearray", "idslbacwriteset", 
"idslbacwritetree", - "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table", - "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate", - "implicit", "implicit_pdq", "in", "inactive", "increment", "index", - "indexes", "index_all", "index_sj", "indicator", "informix", "init", - "initcap", "inline", "inner", "inout", "insert", "inserting", "instead", - "int", "int8", "integ", "integer", "internal", "internallength", - "interval", "into", "intrvl_t", "is", "iscanonical", "isolation", - "item", "iterator", "java", "join", "keep", "key", "label", "labeleq", - "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub", - "labeltostring", "language", "last", "last_day", "leading", "left", - "length", "lessthan", "lessthanorequal", "let", "level", "like", - "limit", "list", "listing", "load", "local", "locator", "lock", "locks", - "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile", - "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches", - "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium", - "memory", "memory_resident", "merge", "message_length", "message_text", - "middle", "min", "minute", "minvalue", "mod", "mode", "moderate", - "modify", "module", "money", "month", "months_between", "mounting", - "multiset", "multi_index", "name", "nchar", "negator", "new", "next", - "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue", - "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder", - "normal", "not", "notemplatearg", "notequal", "null", "nullif", - "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar", - "nvl", "octet_length", "of", "off", "old", "on", "online", "only", - "opaque", "opclass", "open", "optcompind", "optical", "optimization", - "option", "or", "order", "ordered", "out", "outer", "output", - "override", "page", "parallelizable", "parameter", "partition", - "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos", - "pipe", "pli", "pload", "policy", "pow", "power", "precision", - "prepare", "previous", "primary", "prior", "private", "privileges", - "procedure", "properties", "public", "put", "raise", "range", "raw", - "read", "real", "recordend", "references", "referencing", "register", - "rejectfile", "relative", "release", "remainder", "rename", - "reoptimization", "repeatable", "replace", "replication", "reserve", - "resolution", "resource", "restart", "restrict", "resume", "retain", - "retainupdatelocks", "return", "returned_sqlstate", "returning", - "returns", "reuse", "revoke", "right", "robin", "role", "rollback", - "rollforward", "root", "round", "routine", "row", "rowid", "rowids", - "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples", - "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp", - "seclabel_by_name", "seclabel_to_char", "second", "secondary", - "section", "secured", "security", "selconst", "select", "selecting", - "selfunc", "selfuncargs", "sequence", "serial", "serial8", - "serializable", "serveruuid", "server_name", "session", "set", - "setsessionauth", "share", "short", "siblings", "signed", "sin", - "sitename", "size", "skall", "skinhibit", "skip", "skshow", - "smallfloat", "smallint", "some", "specific", "sql", "sqlcode", - "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt", - "stability", "stack", "standard", "start", "star_join", "statchange", - "statement", "static", "statistics", "statlevel", "status", "stdev", - "step", "stop", "storage", "store", "strategies", "string", - 
"stringtolabel", "struct", "style", "subclass_origin", "substr", - "substring", "sum", "support", "sync", "synonym", "sysdate", - "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table", - "tables", "tan", "task", "temp", "template", "test", "text", "then", - "time", "timeout", "to", "today", "to_char", "to_date", - "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing", - "transaction", "transition", "tree", "trigger", "triggers", "trim", - "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid", - "typename", "typeof", "uid", "uncommitted", "under", "union", - "unique", "units", "unknown", "unload", "unlock", "unsigned", - "update", "updating", "upon", "upper", "usage", "use", - "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf", - "using", "value", "values", "var", "varchar", "variable", "variance", - "variant", "varying", "vercols", "view", "violations", "void", - "volatile", "wait", "warning", "weekday", "when", "whenever", "where", - "while", "with", "without", "work", "write", "writedown", "writeup", - "xadatasource", "xid", "xload", "xunload", "year" - ]) - - -class InfoDateTime(sqltypes.DateTime): - - def bind_processor(self, dialect): - def process(value): - if value is not None: - if value.microsecond: - value = value.replace(microsecond=0) - return value - return process - - -class InfoTime(sqltypes.Time): - - def bind_processor(self, dialect): - def process(value): - if value is not None: - if value.microsecond: - value = value.replace(microsecond=0) - return value - return process - - def result_processor(self, dialect, coltype): - def process(value): - if isinstance(value, datetime.datetime): - return value.time() - else: - return value - return process - -colspecs = { - sqltypes.DateTime: InfoDateTime, - sqltypes.TIMESTAMP: InfoDateTime, - sqltypes.Time: InfoTime, -} - - -ischema_names = { - 0: sqltypes.CHAR, # CHAR - 1: sqltypes.SMALLINT, # SMALLINT - 2: sqltypes.INTEGER, # INT - 3: sqltypes.FLOAT, # Float - 3: sqltypes.Float, # SmallFloat - 5: sqltypes.DECIMAL, # DECIMAL - 6: sqltypes.Integer, # Serial - 7: sqltypes.DATE, # DATE - 8: sqltypes.Numeric, # MONEY - 10: sqltypes.DATETIME, # DATETIME - 11: sqltypes.LargeBinary, # BYTE - 12: sqltypes.TEXT, # TEXT - 13: sqltypes.VARCHAR, # VARCHAR - 15: sqltypes.NCHAR, # NCHAR - 16: sqltypes.NVARCHAR, # NVARCHAR - 17: sqltypes.Integer, # INT8 - 18: sqltypes.Integer, # Serial8 - 43: sqltypes.String, # LVARCHAR - -1: sqltypes.BLOB, # BLOB - -1: sqltypes.CLOB, # CLOB -} - - -class InfoTypeCompiler(compiler.GenericTypeCompiler): - def visit_DATETIME(self, type_): - return "DATETIME YEAR TO SECOND" - - def visit_TIME(self, type_): - return "DATETIME HOUR TO SECOND" - - def visit_TIMESTAMP(self, type_): - return "DATETIME YEAR TO SECOND" - - def visit_large_binary(self, type_): - return "BYTE" - - def visit_boolean(self, type_): - return "SMALLINT" - - -class InfoSQLCompiler(compiler.SQLCompiler): - - def default_from(self): - return " from systables where tabname = 'systables' " - - def get_select_precolumns(self, select): - s = "" - if select._offset: - s += "SKIP %s " % select._offset - if select._limit: - s += "FIRST %s " % select._limit - s += select._distinct and "DISTINCT " or "" - return s - - def visit_select(self, select, asfrom=False, parens=True, **kw): - text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw) - if asfrom and parens and self.dialect.server_version_info < (11,): - #assuming that 11 version doesn't need this, not tested - return 
"table(multiset" + text + ")" - else: - return text - - def limit_clause(self, select): - return "" - - def visit_function(self, func, **kw): - if func.name.lower() == 'current_date': - return "today" - elif func.name.lower() == 'current_time': - return "CURRENT HOUR TO SECOND" - elif func.name.lower() in ('current_timestamp', 'now'): - return "CURRENT YEAR TO SECOND" - else: - return compiler.SQLCompiler.visit_function(self, func, **kw) - - def visit_mod_binary(self, binary, operator, **kw): - return "MOD(%s, %s)" % (self.process(binary.left, **kw), - self.process(binary.right, **kw)) - - -class InfoDDLCompiler(compiler.DDLCompiler): - - def visit_add_constraint(self, create): - preparer = self.preparer - return "ALTER TABLE %s ADD CONSTRAINT %s" % ( - self.preparer.format_table(create.element.table), - self.process(create.element) - ) - - def get_column_specification(self, column, **kw): - colspec = self.preparer.format_column(column) - first = None - if column.primary_key and column.autoincrement: - try: - first = [c for c in column.table.primary_key.columns - if (c.autoincrement and - isinstance(c.type, sqltypes.Integer) and - not c.foreign_keys)].pop(0) - except IndexError: - pass - - if column is first: - colspec += " SERIAL" - else: - colspec += " " + self.dialect.type_compiler.process(column.type) - default = self.get_column_default_string(column) - if default is not None: - colspec += " DEFAULT " + default - - if not column.nullable: - colspec += " NOT NULL" - - return colspec - - def get_column_default_string(self, column): - if (isinstance(column.server_default, schema.DefaultClause) and - isinstance(column.server_default.arg, util.string_types)): - if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)): - return self.sql_compiler.process(text(column.server_default.arg)) - - return super(InfoDDLCompiler, self).get_column_default_string(column) - - ### Informix wants the constraint name at the end, hence this ist c&p from sql/compiler.py - def visit_primary_key_constraint(self, constraint): - if len(constraint) == 0: - return '' - text = "PRIMARY KEY " - text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote) - for c in constraint) - text += self.define_constraint_deferrability(constraint) - - if constraint.name is not None: - text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint) - return text - - def visit_foreign_key_constraint(self, constraint): - preparer = self.dialect.identifier_preparer - remote_table = list(constraint._elements.values())[0].column.table - text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % ( - ', '.join(preparer.quote(f.parent.name, f.parent.quote) - for f in constraint._elements.values()), - preparer.format_table(remote_table), - ', '.join(preparer.quote(f.column.name, f.column.quote) - for f in constraint._elements.values()) - ) - text += self.define_constraint_cascades(constraint) - text += self.define_constraint_deferrability(constraint) - - if constraint.name is not None: - text += " CONSTRAINT %s " % \ - preparer.format_constraint(constraint) - return text - - def visit_unique_constraint(self, constraint): - text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint)) - text += self.define_constraint_deferrability(constraint) - - if constraint.name is not None: - text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint) - return text - - -class InformixIdentifierPreparer(compiler.IdentifierPreparer): - - reserved_words = RESERVED_WORDS - - -class 
InformixDialect(default.DefaultDialect): - name = 'informix' - - max_identifier_length = 128 # adjusts at runtime based on server version - - type_compiler = InfoTypeCompiler - statement_compiler = InfoSQLCompiler - ddl_compiler = InfoDDLCompiler - colspecs = colspecs - ischema_names = ischema_names - preparer = InformixIdentifierPreparer - default_paramstyle = 'qmark' - - def initialize(self, connection): - super(InformixDialect, self).initialize(connection) - - # http://www.querix.com/support/knowledge-base/error_number_message/error_200 - if self.server_version_info < (9, 2): - self.max_identifier_length = 18 - else: - self.max_identifier_length = 128 - - def _get_table_names(self, connection, schema, type, **kw): - schema = schema or self.default_schema_name - s = "select tabname, owner from systables where owner=? and tabtype=?" - return [row[0] for row in connection.execute(s, schema, type)] - - @reflection.cache - def get_table_names(self, connection, schema=None, **kw): - return self._get_table_names(connection, schema, 'T', **kw) - - @reflection.cache - def get_view_names(self, connection, schema=None, **kw): - return self._get_table_names(connection, schema, 'V', **kw) - - @reflection.cache - def get_schema_names(self, connection, **kw): - s = "select owner from systables" - return [row[0] for row in connection.execute(s)] - - def has_table(self, connection, table_name, schema=None): - schema = schema or self.default_schema_name - cursor = connection.execute( - """select tabname from systables where tabname=? and owner=?""", - table_name, schema) - return cursor.first() is not None - - @reflection.cache - def get_columns(self, connection, table_name, schema=None, **kw): - schema = schema or self.default_schema_name - c = connection.execute( - """select colname, coltype, collength, t3.default, t1.colno from - syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3 - where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? 
- and t3.tabid = t2.tabid and t3.colno = t1.colno - order by t1.colno""", table_name, schema) - - pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw) - primary_cols = pk_constraint['constrained_columns'] - - columns = [] - rows = c.fetchall() - for name, colattr, collength, default, colno in rows: - name = name.lower() - - autoincrement = False - primary_key = False - - if name in primary_cols: - primary_key = True - - # in 7.31, coltype = 0x000 - # ^^-- column type - # ^-- 1 not null, 0 null - not_nullable, coltype = divmod(colattr, 256) - if coltype not in (0, 13) and default: - default = default.split()[-1] - - if coltype == 6: # Serial, mark as autoincrement - autoincrement = True - - if coltype == 0 or coltype == 13: # char, varchar - coltype = ischema_names[coltype](collength) - if default: - default = "'%s'" % default - elif coltype == 5: # decimal - precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF - if scale == 255: - scale = 0 - coltype = sqltypes.Numeric(precision, scale) - else: - try: - coltype = ischema_names[coltype] - except KeyError: - util.warn("Did not recognize type '%s' of column '%s'" % - (coltype, name)) - coltype = sqltypes.NULLTYPE - - column_info = dict(name=name, type=coltype, nullable=not not_nullable, - default=default, autoincrement=autoincrement, - primary_key=primary_key) - columns.append(column_info) - return columns - - @reflection.cache - def get_foreign_keys(self, connection, table_name, schema=None, **kw): - schema_sel = schema or self.default_schema_name - c = connection.execute( - """select t1.constrname as cons_name, - t4.colname as local_column, t7.tabname as remote_table, - t6.colname as remote_column, t7.owner as remote_owner - from sysconstraints as t1 , systables as t2 , - sysindexes as t3 , syscolumns as t4 , - sysreferences as t5 , syscolumns as t6 , systables as t7 , - sysconstraints as t8 , sysindexes as t9 - where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? 
and t1.constrtype = 'R' - and t3.tabid = t2.tabid and t3.idxname = t1.idxname - and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3, - t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10, - t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) - and t5.constrid = t1.constrid and t8.constrid = t5.primary - and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3, - t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10, - t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname = - t8.idxname - and t7.tabid = t5.ptabid""", table_name, schema_sel) - - def fkey_rec(): - return { - 'name': None, - 'constrained_columns': [], - 'referred_schema': None, - 'referred_table': None, - 'referred_columns': [] - } - - fkeys = util.defaultdict(fkey_rec) - - rows = c.fetchall() - for cons_name, local_column, \ - remote_table, remote_column, remote_owner in rows: - - rec = fkeys[cons_name] - rec['name'] = cons_name - local_cols, remote_cols = \ - rec['constrained_columns'], rec['referred_columns'] - - if not rec['referred_table']: - rec['referred_table'] = remote_table - if schema is not None: - rec['referred_schema'] = remote_owner - - if local_column not in local_cols: - local_cols.append(local_column) - if remote_column not in remote_cols: - remote_cols.append(remote_column) - - return list(fkeys.values()) - - @reflection.cache - def get_pk_constraint(self, connection, table_name, schema=None, **kw): - schema = schema or self.default_schema_name - - # Select the column positions from sysindexes for sysconstraints - data = connection.execute( - """select t2.* - from systables as t1, sysindexes as t2, sysconstraints as t3 - where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=? - and t2.idxname=t3.idxname and t3.constrtype='P'""", - table_name, schema - ).fetchall() - - colpositions = set() - - for row in data: - colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)]) - colpositions |= colpos - - if not len(colpositions): - return {'constrained_columns': [], 'name': None} - - # Select the column names using the columnpositions - # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table) - place_holder = ','.join('?' * len(colpositions)) - c = connection.execute( - """select t1.colname - from syscolumns as t1, systables as t2 - where t2.tabname=? and t1.tabid = t2.tabid and - t1.colno in (%s)""" % place_holder, - table_name, *colpositions - ).fetchall() - - cols = reduce(lambda x, y: list(x) + list(y), c, []) - return {'constrained_columns': cols, 'name': None} - - @reflection.cache - def get_indexes(self, connection, table_name, schema, **kw): - # TODO: schema... - c = connection.execute( - """select t1.* - from sysindexes as t1 , systables as t2 - where t1.tabid = t2.tabid and t2.tabname=?""", - table_name) - - indexes = [] - for row in c.fetchall(): - colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)] - colnames = [x for x in colnames if x] - place_holder = ','.join('?' * len(colnames)) - c = connection.execute( - """select t1.colname - from syscolumns as t1, systables as t2 - where t2.tabname=? 
and t1.tabid = t2.tabid and - t1.colno in (%s)""" % place_holder, - table_name, *colnames - ).fetchall() - c = reduce(lambda x, y: list(x) + list(y), c, []) - indexes.append({ - 'name': row.idxname, - 'unique': row.idxtype.lower() == 'u', - 'column_names': c - }) - return indexes - - @reflection.cache - def get_view_definition(self, connection, view_name, schema=None, **kw): - schema = schema or self.default_schema_name - c = connection.execute( - """select t1.viewtext - from sysviews as t1 , systables as t2 - where t1.tabid=t2.tabid and t2.tabname=? - and t2.owner=? order by seqno""", - view_name, schema).fetchall() - - return ''.join([row[0] for row in c]) - - def _get_default_schema_name(self, connection): - return connection.execute('select CURRENT_ROLE from systables').scalar() diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py deleted file mode 100644 index f2f0d3e80..000000000 --- a/lib/sqlalchemy/dialects/informix/informixdb.py +++ /dev/null @@ -1,69 +0,0 @@ -# informix/informixdb.py -# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file> -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -""" - -.. dialect:: informix+informixdb - :name: informixdb - :dbapi: informixdb - :connectstring: informix+informixdb://user:password@host/dbname - :url: http://informixdb.sourceforge.net/ - -""" - -import re - -from sqlalchemy.dialects.informix.base import InformixDialect -from sqlalchemy.engine import default - -VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)') - - -class InformixExecutionContext_informixdb(default.DefaultExecutionContext): - - def post_exec(self): - if self.isinsert: - self._lastrowid = self.cursor.sqlerrd[1] - - def get_lastrowid(self): - return self._lastrowid - - -class InformixDialect_informixdb(InformixDialect): - driver = 'informixdb' - execution_ctx_cls = InformixExecutionContext_informixdb - - @classmethod - def dbapi(cls): - return __import__('informixdb') - - def create_connect_args(self, url): - if url.host: - dsn = '%s@%s' % (url.database, url.host) - else: - dsn = url.database - - if url.username: - opt = {'user': url.username, 'password': url.password} - else: - opt = {} - - return ([dsn], opt) - - def _get_server_version_info(self, connection): - # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers - v = VERSION_RE.split(connection.connection.dbms_version) - return (int(v[1]), int(v[2]), v[3]) - - def is_disconnect(self, e, connection, cursor): - if isinstance(e, self.dbapi.OperationalError): - return 'closed the connection' in str(e) \ - or 'connection not open' in str(e) - else: - return False - - -dialect = InformixDialect_informixdb diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 1f219e30c..4a884453b 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -493,7 +493,7 @@ class Inspector(object): ) if 'sequence' in col_d: - # TODO: mssql, maxdb and sybase are using this. + # TODO: mssql and sybase are using this. 
seq = col_d['sequence'] sequence = sa_schema.Sequence(seq['name'], 1, 1) if 'start' in seq: diff --git a/test/dialect/test_informix.py b/test/dialect/test_informix.py deleted file mode 100644 index 332edd24e..000000000 --- a/test/dialect/test_informix.py +++ /dev/null @@ -1,25 +0,0 @@ -from sqlalchemy import * -from sqlalchemy.databases import informix -from sqlalchemy.testing import * - - -class CompileTest(fixtures.TestBase, AssertsCompiledSQL): - - __dialect__ = informix.InformixDialect() - - def test_statements(self): - meta = MetaData() - t1 = Table('t1', meta, Column('col1', Integer, - primary_key=True), Column('col2', String(50))) - t2 = Table('t2', meta, Column('col1', Integer, - primary_key=True), Column('col2', String(50)), - Column('col3', Integer, ForeignKey('t1.col1'))) - self.assert_compile(t1.select(), - 'SELECT t1.col1, t1.col2 FROM t1') - self.assert_compile(select([t1, t2]).select_from(t1.join(t2)), - 'SELECT t1.col1, t1.col2, t2.col1, ' - 't2.col2, t2.col3 FROM t1 JOIN t2 ON ' - 't1.col1 = t2.col3') - self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1 - + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)') - diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index b116e4d6b..dbefc9f42 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -59,10 +59,9 @@ class ExecuteTest(fixtures.TestBase): scalar(stmt) eq_(result, '%') - @testing.fails_on_everything_except('firebird', 'maxdb', + @testing.fails_on_everything_except('firebird', 'sqlite', '+pyodbc', - '+mxodbc', '+zxjdbc', 'mysql+oursql', - 'informix+informixdb') + '+mxodbc', '+zxjdbc', 'mysql+oursql') def test_raw_qmark(self): def go(conn): conn.execute('insert into users (user_id, user_name) ' @@ -182,7 +181,7 @@ class ExecuteTest(fixtures.TestBase): finally: conn.close() - @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb') + @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle') def test_raw_named(self): def go(conn): conn.execute('insert into users (user_id, user_name) ' diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index 0a964cf63..ba336a1bf 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -413,8 +413,6 @@ class RealReconnectTest(fixtures.TestBase): def teardown(self): self.engine.dispose() - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_reconnect(self): conn = self.engine.connect() @@ -539,8 +537,6 @@ class RealReconnectTest(fixtures.TestBase): # pool was recreated assert engine.pool is not p1 - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_null_pool(self): engine = \ engines.reconnecting_engine(options=dict(poolclass=pool.NullPool)) @@ -554,8 +550,6 @@ class RealReconnectTest(fixtures.TestBase): eq_(conn.execute(select([1])).scalar(), 1) assert not conn.invalidated - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_close(self): conn = self.engine.connect() eq_(conn.execute(select([1])).scalar(), 1) @@ -569,8 +563,6 @@ class RealReconnectTest(fixtures.TestBase): conn = self.engine.connect() eq_(conn.execute(select([1])).scalar(), 1) - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_with_transaction(self): conn = self.engine.connect() trans = conn.begin() @@ -651,8 +643,6 @@ class InvalidateDuringResultTest(fixtures.TestBase): '+cymysql', '+pymysql', '+pg8000' ], "Buffers the result set and doesn't check for " 
"connection close") - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_invalidate_on_results(self): conn = self.engine.connect() result = conn.execute('select * from sometable') diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index e7baa9d56..a3b0ecdb5 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -754,10 +754,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on') - @testing.fails_on('+informixdb', - "FIXME: should be supported via the " - "DELIMITED env var but that breaks " - "everything else for now") @testing.provide_metadata def test_reserved(self): @@ -774,7 +770,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): # There's currently no way to calculate identifier case # normalization in isolation, so... - if testing.against('firebird', 'oracle', 'maxdb'): + if testing.against('firebird', 'oracle'): check_col = 'TRUE' else: check_col = 'true' diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 50d38e257..7738dfb34 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -1035,7 +1035,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.crashes('mssql', 'FIXME: unknown') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_queued_update(self): """Test SELECT FOR UPDATE with concurrent modifications. @@ -1100,7 +1099,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.crashes('mssql', 'FIXME: unknown') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_queued_select(self): """Simple SELECT FOR UPDATE conflict test""" @@ -1112,7 +1110,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.fails_on('mysql', 'No support for NOWAIT') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_nowait_select(self): """Simple SELECT FOR UPDATE NOWAIT conflict test""" diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index 41a167e72..1737d1ccb 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -964,7 +964,6 @@ class EagerLazyTest(fixtures.MappedTest): Column('foo_id', Integer, ForeignKey('foo.id')) ) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): class Foo(object): pass class Bar(Foo): pass diff --git a/test/orm/inheritance/test_manytomany.py b/test/orm/inheritance/test_manytomany.py index 31c4ba40a..51b797940 100644 --- a/test/orm/inheritance/test_manytomany.py +++ b/test/orm/inheritance/test_manytomany.py @@ -201,7 +201,6 @@ class InheritTest3(fixtures.MappedTest): found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos]) eq_(found, compare) - @testing.fails_on('maxdb', 'FIXME: unknown') def testadvanced(self): class Foo(object): def __init__(self, data=None): diff --git a/test/orm/inheritance/test_poly_linked_list.py b/test/orm/inheritance/test_poly_linked_list.py index 1915007de..ec263b3b0 100644 --- a/test/orm/inheritance/test_poly_linked_list.py +++ b/test/orm/inheritance/test_poly_linked_list.py @@ -115,19 +115,15 @@ class 
PolymorphicCircularTest(fixtures.MappedTest): configure_mappers() assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key - @testing.fails_on('maxdb', 'FIXME: unknown') def testone(self): self._testlist([Table1, Table2, Table1, Table2]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testtwo(self): self._testlist([Table3]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testthree(self): self._testlist([Table2, Table1, Table1B, Table3, Table3, Table1B, Table1B, Table2, Table1]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testfour(self): self._testlist([ Table2('t2', [Data('data1'), Data('data2')]), diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py index 83fccbf7a..16747bd67 100644 --- a/test/orm/test_assorted_eager.py +++ b/test/orm/test_assorted_eager.py @@ -286,7 +286,6 @@ class EagerTest2(fixtures.MappedTest): lazy='joined', backref=backref('middle', lazy='joined')))), - @testing.fails_on('maxdb', 'FIXME: unknown') def test_eager_terminate(self): """Eager query generation does not include the same mapper's table twice. @@ -339,7 +338,6 @@ class EagerTest3(fixtures.MappedTest): class Stat(cls.Basic): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_nesting_with_functions(self): Stat, Foo, stats, foo, Data, datas = (self.classes.Stat, self.classes.Foo, @@ -423,7 +421,6 @@ class EagerTest4(fixtures.MappedTest): class Employee(cls.Basic): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): Department, Employee, employees, departments = (self.classes.Department, self.classes.Employee, @@ -774,7 +771,6 @@ class EagerTest8(fixtures.MappedTest): class Joined(cls.Comparable): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_nested_joins(self): task, Task_Type, Joined, prj, task_type, msg = (self.tables.task, self.classes.Task_Type, @@ -867,7 +863,6 @@ class EagerTest9(fixtures.MappedTest): backref=backref('entries', lazy='joined', order_by=entries.c.entry_id)))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_joinedload_on_path(self): Entry, Account, Transaction = (self.classes.Entry, self.classes.Account, diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py index d0318b079..615ae815d 100644 --- a/test/orm/test_cascade.py +++ b/test/orm/test_cascade.py @@ -1217,7 +1217,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest): sess.flush() sess.close() - @testing.fails_on('maxdb', 'FIXME: unknown') def test_orphan(self): prefs, User, extra = (self.tables.prefs, self.classes.User, @@ -1282,7 +1281,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest): assert p2 in sess sess.commit() - @testing.fails_on('maxdb', 'FIXME: unknown') def test_orphan_on_update(self): prefs, User, extra = (self.tables.prefs, self.classes.User, diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index e70525be4..f2ba3cc27 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -600,7 +600,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): assert 'orders' not in noeagers[0].__dict__ assert 'addresses' not in noeagers[0].__dict__ - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit(self): """Limit operations combined with lazy-load relationships.""" @@ -655,7 +654,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_(self.static.user_address_result, l) self.assert_sql_count(testing.db, go, 1) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit_2(self): keywords, 
items, item_keywords, Keyword, Item = (self.tables.keywords, self.tables.items, @@ -677,7 +675,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_(self.static.item_keyword_result[1:3], l) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit_3(self): """test that the ORDER BY is propagated from the inner select to the outer select, when using the @@ -709,7 +706,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): q = sess.query(User) - if not testing.against('maxdb', 'mssql'): + if not testing.against('mssql'): l = q.join('orders').order_by(Order.user_id.desc()).limit(2).offset(1) eq_([ User(id=9, @@ -944,7 +941,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_([User(id=7, address=Address(id=1))], l) self.assert_sql_count(testing.db, go, 1) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_many_to_one(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -1922,7 +1918,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest): Column('parent_id', Integer, ForeignKey('nodes.id')), Column('data', String(30))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): nodes = self.tables.nodes @@ -2108,7 +2103,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest): ) ) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_no_depth(self): nodes = self.tables.nodes diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py index ea823f79c..292546e99 100644 --- a/test/orm/test_expire.py +++ b/test/orm/test_expire.py @@ -1436,7 +1436,6 @@ class RefreshTest(_fixtures.FixtureTest): s.expire(u) assert len(u.addresses) == 3 - @testing.fails_on('maxdb', 'FIXME: unknown') def test_refresh2(self): """test a hang condition that was occurring on expire/refresh""" diff --git a/test/orm/test_generative.py b/test/orm/test_generative.py index 52858cc26..cbe559db9 100644 --- a/test/orm/test_generative.py +++ b/test/orm/test_generative.py @@ -43,7 +43,6 @@ class GenerativeQueryTest(fixtures.MappedTest): assert res.order_by(Foo.bar)[0].bar == 5 assert res.order_by(sa.desc(Foo.bar))[0].bar == 95 - @testing.fails_on('maxdb', 'FIXME: unknown') def test_slice(self): Foo = self.classes.Foo diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index 66b1eb5e4..37d290b58 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -178,7 +178,7 @@ class LazyTest(_fixtures.FixtureTest): sess = create_session() q = sess.query(User) - if testing.against('maxdb', 'mssql'): + if testing.against('mssql'): l = q.limit(2).all() assert self.static.user_all_result[:2] == l else: diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index 5255e4fe2..b1c9d3fb6 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -1749,7 +1749,6 @@ class OptionsTest(_fixtures.FixtureTest): eq_(l, self.static.user_address_result) self.sql_count_(0, go) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_eager_options_with_limit(self): Address, addresses, users, User = (self.classes.Address, self.tables.addresses, @@ -1775,7 +1774,6 @@ class OptionsTest(_fixtures.FixtureTest): eq_(u.id, 8) eq_(len(u.addresses), 3) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_lazy_options_with_limit(self): Address, addresses, users, User = (self.classes.Address, self.tables.addresses, diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 4d276896c..619836ae4 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -1336,7 
+1336,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL): assert [User(id=10)] == sess.query(User).outerjoin("addresses", aliased=True).filter(~User.addresses.any()).all() - @testing.crashes('maxdb', 'can dump core') def test_has(self): Dingaling, User, Address = (self.classes.Dingaling, self.classes.User, diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py index ef6649c3b..3181e0909 100644 --- a/test/orm/test_subquery_relations.py +++ b/test/orm/test_subquery_relations.py @@ -632,7 +632,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): ], q.all()) self.assert_sql_count(testing.db, go, 6) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit(self): """Limit operations combined with lazy-load relationships.""" @@ -706,7 +705,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_([User(id=7, address=Address(id=1))], l) self.assert_sql_count(testing.db, go, 2) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_many_to_one(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -1144,7 +1142,6 @@ class SelfReferentialTest(fixtures.MappedTest): Column('parent_id', Integer, ForeignKey('nodes.id')), Column('data', String(30))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): nodes = self.tables.nodes @@ -1309,7 +1306,6 @@ class SelfReferentialTest(fixtures.MappedTest): ]), d) self.assert_sql_count(testing.db, go, 3) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_no_depth(self): """no join depth is set, so no eager loading occurs.""" diff --git a/test/requirements.py b/test/requirements.py index e7728d6e0..4ed0a9289 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -85,8 +85,6 @@ class DefaultRequirements(SuiteRequirements): no_support('oracle', 'not supported by database'), no_support('mssql', 'not supported by database'), no_support('sybase', 'not supported by database'), - no_support('maxdb', 'FIXME: verify not supported by database'), - no_support('informix', 'not supported by database'), ]) @property @@ -226,7 +224,6 @@ class DefaultRequirements(SuiteRequirements): "sqlite", "sybase", ("mysql", "<", (5, 0, 3)), - ("informix", "<", (11, 55, "xC3")) ], "savepoints not supported") @@ -283,14 +280,14 @@ class DefaultRequirements(SuiteRequirements): """Target database must support INTERSECT or equivalent.""" return fails_if([ - "firebird", "mysql", "sybase", "informix" + "firebird", "mysql", "sybase", ], 'no support for INTERSECT') @property def except_(self): """Target database must support EXCEPT or equivalent (i.e. 
MINUS).""" return fails_if([ - "firebird", "mysql", "sybase", "informix" + "firebird", "mysql", "sybase", ], 'no support for EXCEPT') @property @@ -313,7 +310,6 @@ class DefaultRequirements(SuiteRequirements): return skip_if([ no_support('firebird', 'no SA implementation'), - no_support('maxdb', 'two-phase xact not supported by database'), no_support('mssql', 'two-phase xact not supported by drivers'), no_support('oracle', 'two-phase xact not implemented in SQLA/oracle'), no_support('drizzle', 'two-phase xact not supported by database'), @@ -366,7 +362,6 @@ class DefaultRequirements(SuiteRequirements): """Target driver must support some degree of non-ascii symbol names.""" # TODO: expand to exclude MySQLdb versions w/ broken unicode return skip_if([ - no_support('maxdb', 'database support flakey'), no_support('oracle', 'FIXME: no support in database?'), no_support('sybase', 'FIXME: guessing, needs confirmation'), no_support('mssql+pymssql', 'no FreeTDS support'), diff --git a/test/sql/test_case_statement.py b/test/sql/test_case_statement.py index 944a15384..998a55cd8 100644 --- a/test/sql/test_case_statement.py +++ b/test/sql/test_case_statement.py @@ -32,7 +32,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL): info_table.drop() @testing.fails_on('firebird', 'FIXME: unknown') - @testing.fails_on('maxdb', 'FIXME: unknown') @testing.requires.subqueries def test_case(self): inner = select([case([ @@ -130,7 +129,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL): @testing.fails_on('firebird', 'FIXME: unknown') - @testing.fails_on('maxdb', 'FIXME: unknown') def testcase_with_dict(self): query = select([case({ info_table.c.pk < 3: 'lessthan3', diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index 56b7971b2..4a17c1cda 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -45,7 +45,7 @@ class DefaultTest(fixtures.TestBase): # since its a "branched" connection conn.close() - use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb') + use_function_defaults = testing.against('postgresql', 'mssql') is_oracle = testing.against('oracle') class MyClass(object): @@ -73,9 +73,7 @@ class DefaultTest(fixtures.TestBase): f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar() def1 = currenttime deftype = sa.Date - if testing.against('maxdb'): - def2 = sa.text("curdate") - elif testing.against('mssql'): + if testing.against('mssql'): def2 = sa.text("getdate()") else: def2 = sa.text("current_date") diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index ee503dbb7..ee1d61f85 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -21,13 +21,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): functions._registry.clear() def test_compile(self): - for dialect in all_dialects(exclude=('sybase', 'access', - 'informix', 'maxdb')): + for dialect in all_dialects(exclude=('sybase', )): bindtemplate = BIND_TEMPLATES[dialect.paramstyle] self.assert_compile(func.current_timestamp(), "CURRENT_TIMESTAMP", dialect=dialect) self.assert_compile(func.localtime(), "LOCALTIME", dialect=dialect) - if dialect.name in ('firebird', 'maxdb'): + if dialect.name in ('firebird',): self.assert_compile(func.nosuchfunction(), "nosuchfunction", dialect=dialect) else: diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 39c896266..8e619fe74 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -1110,7 +1110,6 @@ class QueryTest(fixtures.TestBase): @testing.crashes('oracle', 'FIXME: unknown, 
varify not fails_on()') @testing.crashes('firebird', 'An identifier must begin with a letter') - @testing.crashes('maxdb', 'FIXME: unknown, verify not fails_on()') def test_column_accessor_shadow(self): meta = MetaData(testing.db) shadowed = Table('test_shadowed', meta, @@ -1900,7 +1899,6 @@ class CompoundTest(fixtures.TestBase): eq_(u.execute().fetchall(), wanted) @testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs") - @testing.fails_on('maxdb', 'FIXME: unknown') @testing.requires.subqueries def test_union_ordered_alias(self): (s1, s2) = ( @@ -1919,7 +1917,6 @@ class CompoundTest(fixtures.TestBase): @testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery") @testing.fails_on('mysql', 'FIXME: unknown') @testing.fails_on('sqlite', 'FIXME: unknown') - @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allows)") def test_union_all(self): e = union_all( select([t1.c.col3]), |