summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/dialects
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2010-11-14 18:25:13 -0500
committerMike Bayer <mike_mp@zzzcomputing.com>2010-11-14 18:25:13 -0500
commit6fb06409c622e0355e0a36817940035c33e17ce3 (patch)
tree872218fb4c1d65b1f7476497c540bf504999d200 /lib/sqlalchemy/dialects
parent90c8d8e0c9e2d0a9eeace7fa326df26a5f28465a (diff)
parent06bf218ed37ca780bc4de2ceb47769c84de70ba1 (diff)
downloadsqlalchemy-6fb06409c622e0355e0a36817940035c33e17ce3.tar.gz
merge tip
Diffstat (limited to 'lib/sqlalchemy/dialects')
-rw-r--r--lib/sqlalchemy/dialects/informix/base.py293
-rw-r--r--lib/sqlalchemy/dialects/informix/informixdb.py37
-rw-r--r--lib/sqlalchemy/dialects/mssql/base.py79
-rw-r--r--lib/sqlalchemy/dialects/mssql/information_schema.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/pymssql.py6
-rw-r--r--lib/sqlalchemy/dialects/mysql/base.py4
-rw-r--r--lib/sqlalchemy/dialects/mysql/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/base.py8
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py70
-rw-r--r--lib/sqlalchemy/dialects/sqlite/base.py16
-rw-r--r--lib/sqlalchemy/dialects/sqlite/pysqlite.py7
11 files changed, 389 insertions, 135 deletions
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
index 242b8a328..9aa23173b 100644
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ b/lib/sqlalchemy/dialects/informix/base.py
@@ -7,7 +7,7 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the Informix database.
-This dialect is *not* tested on SQLAlchemy 0.6.
+This dialect is mostly functional as of SQLAlchemy 0.6.5.
"""
@@ -16,7 +16,7 @@ This dialect is *not* tested on SQLAlchemy 0.6.
import datetime
from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler
+from sqlalchemy.sql import compiler, text
from sqlalchemy.engine import default, reflection
from sqlalchemy import types as sqltypes
@@ -47,9 +47,9 @@ class InfoTime(sqltypes.Time):
return value
return process
-
colspecs = {
sqltypes.DateTime : InfoDateTime,
+ sqltypes.TIMESTAMP: InfoDateTime,
sqltypes.Time: InfoTime,
}
@@ -85,6 +85,9 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
def visit_TIME(self, type_):
return "DATETIME HOUR TO SECOND"
+ def visit_TIMESTAMP(self, type_):
+ return "DATETIME YEAR TO SECOND"
+
def visit_large_binary(self, type_):
return "BYTE"
@@ -92,17 +95,16 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
return "SMALLINT"
class InfoSQLCompiler(compiler.SQLCompiler):
-
def default_from(self):
return " from systables where tabname = 'systables' "
def get_select_precolumns(self, select):
- s = select._distinct and "DISTINCT " or ""
- # only has limit
+ s = ""
+ if select._offset:
+ s += "SKIP %s " % select._offset
if select._limit:
- s += " FIRST %s " % select._limit
- else:
- s += ""
+ s += "FIRST %s " % select._limit
+ s += select._distinct and "DISTINCT " or ""
return s
def visit_select(self, select, asfrom=False, parens=True, **kw):
@@ -114,8 +116,6 @@ class InfoSQLCompiler(compiler.SQLCompiler):
return text
def limit_clause(self, select):
- if select._offset is not None and select._offset > 0:
- raise NotImplementedError("Informix does not support OFFSET")
return ""
def visit_function(self, func, **kw):
@@ -128,14 +128,32 @@ class InfoSQLCompiler(compiler.SQLCompiler):
else:
return compiler.SQLCompiler.visit_function(self, func, **kw)
+ def visit_mod(self, binary, **kw):
+ return "MOD(%s, %s)" % (self.process(binary.left), self.process(binary.right))
+
class InfoDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, first_pk=False):
+
+ def visit_add_constraint(self, create):
+ preparer = self.preparer
+ return "ALTER TABLE %s ADD CONSTRAINT %s" % (
+ self.preparer.format_table(create.element.table),
+ self.process(create.element)
+ )
+
+ def get_column_specification(self, column, **kw):
colspec = self.preparer.format_column(column)
- if column.primary_key and \
- len(column.foreign_keys)==0 and \
- column.autoincrement and \
- isinstance(column.type, sqltypes.Integer) and first_pk:
+ first = None
+ if column.primary_key and column.autoincrement:
+ try:
+ first = [c for c in column.table.primary_key.columns
+ if (c.autoincrement and
+ isinstance(c.type, sqltypes.Integer) and
+ not c.foreign_keys)].pop(0)
+ except IndexError:
+ pass
+
+ if column is first:
colspec += " SERIAL"
else:
colspec += " " + self.dialect.type_compiler.process(column.type)
@@ -148,18 +166,53 @@ class InfoDDLCompiler(compiler.DDLCompiler):
return colspec
+ def get_column_default_string(self, column):
+ if (isinstance(column.server_default, schema.DefaultClause) and
+ isinstance(column.server_default.arg, basestring)):
+ if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
+ return self.sql_compiler.process(text(column.server_default.arg))
+
+ return super(InfoDDLCompiler, self).get_column_default_string(column)
+
+    ### Informix wants the constraint name at the end, hence this is copied from sql/compiler.py
+ def visit_primary_key_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = "PRIMARY KEY "
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
+ return text
+
+ def visit_foreign_key_constraint(self, constraint):
+ preparer = self.dialect.identifier_preparer
+ remote_table = list(constraint._elements.values())[0].column.table
+ text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
+ ', '.join(preparer.quote(f.parent.name, f.parent.quote)
+ for f in constraint._elements.values()),
+ preparer.format_table(remote_table),
+ ', '.join(preparer.quote(f.column.name, f.column.quote)
+ for f in constraint._elements.values())
+ )
+ text += self.define_constraint_cascades(constraint)
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += " CONSTRAINT %s " % \
+ preparer.format_constraint(constraint)
+ return text
+
+ def visit_unique_constraint(self, constraint):
+ text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
+ return text
-class InfoIdentifierPreparer(compiler.IdentifierPreparer):
- def __init__(self, dialect):
- super(InfoIdentifierPreparer, self).\
- __init__(dialect, initial_quote="'")
-
- def format_constraint(self, constraint):
- # informix doesnt support names for constraints
- return ''
-
- def _requires_quotes(self, value):
- return False
class InformixDialect(default.DefaultDialect):
name = 'informix'
@@ -169,9 +222,13 @@ class InformixDialect(default.DefaultDialect):
type_compiler = InfoTypeCompiler
statement_compiler = InfoSQLCompiler
ddl_compiler = InfoDDLCompiler
- preparer = InfoIdentifierPreparer
colspecs = colspecs
ischema_names = ischema_names
+ default_paramstyle = 'qmark'
+
+ def __init__(self, has_transactions=True, *args, **kwargs):
+ self.has_transactions = has_transactions
+ default.DefaultDialect.__init__(self, *args, **kwargs)
def initialize(self, connection):
super(InformixDialect, self).initialize(connection)
@@ -182,43 +239,78 @@ class InformixDialect(default.DefaultDialect):
else:
self.max_identifier_length = 128
- def do_begin(self, connect):
- cu = connect.cursor()
+ def do_begin(self, connection):
+ cu = connection.cursor()
cu.execute('SET LOCK MODE TO WAIT')
- #cu.execute('SET ISOLATION TO REPEATABLE READ')
+ if self.has_transactions:
+ cu.execute('SET ISOLATION TO REPEATABLE READ')
+
+ def do_commit(self, connection):
+ if self.has_transactions:
+ connection.commit()
+
+ def do_rollback(self, connection):
+ if self.has_transactions:
+ connection.rollback()
+
+ def _get_table_names(self, connection, schema, type, **kw):
+ schema = schema or self.default_schema_name
+ s = "select tabname, owner from systables where owner=? and tabtype=?"
+ return [row[0] for row in connection.execute(s, schema, type)]
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
- s = "select tabname from systables"
+ return self._get_table_names(connection, schema, 'T', **kw)
+
+ @reflection.cache
+ def get_view_names(self, connection, schema=None, **kw):
+ return self._get_table_names(connection, schema, 'V', **kw)
+
+ @reflection.cache
+ def get_schema_names(self, connection, **kw):
+ s = "select owner from systables"
return [row[0] for row in connection.execute(s)]
def has_table(self, connection, table_name, schema=None):
+ schema = schema or self.default_schema_name
cursor = connection.execute(
- """select tabname from systables where tabname=?""",
- table_name.lower())
+ """select tabname from systables where tabname=? and owner=?""",
+ table_name, schema)
return cursor.first() is not None
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
c = connection.execute(
"""select colname, coltype, collength, t3.default, t1.colno from
syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=?
+ where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table.name.lower())
+ order by t1.colno""", table_name, schema)
+
+ primary_cols = self.get_primary_keys(connection, table_name, schema, **kw)
+
columns = []
+ rows = c.fetchall()
for name, colattr, collength, default, colno in rows:
name = name.lower()
- if include_columns and name not in include_columns:
- continue
+
+ autoincrement = False
+ primary_key = False
+
+ if name in primary_cols:
+ primary_key = True
# in 7.31, coltype = 0x000
# ^^-- column type
# ^-- 1 not null, 0 null
- nullable, coltype = divmod(colattr, 256)
+ not_nullable, coltype = divmod(colattr, 256)
if coltype not in (0, 13) and default:
default = default.split()[-1]
+ if coltype == 6: # Serial, mark as autoincrement
+ autoincrement = True
+
if coltype == 0 or coltype == 13: # char, varchar
coltype = ischema_names[coltype](collength)
if default:
@@ -236,32 +328,34 @@ class InformixDialect(default.DefaultDialect):
(coltype, name))
coltype = sqltypes.NULLTYPE
- # TODO: nullability ??
- nullable = True
-
- column_info = dict(name=name, type=coltype, nullable=nullable,
- default=default)
+ column_info = dict(name=name, type=coltype, nullable=not not_nullable,
+ default=default, autoincrement=autoincrement,
+ primary_key=primary_key)
columns.append(column_info)
return columns
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- # FK
+ schema_sel = schema or self.default_schema_name
c = connection.execute(
- """select t1.constrname as cons_name , t1.constrtype as cons_type ,
- t4.colname as local_column , t7.tabname as remote_table ,
- t6.colname as remote_column
+ """select t1.constrname as cons_name,
+ t4.colname as local_column, t7.tabname as remote_table,
+ t6.colname as remote_column, t7.owner as remote_owner
from sysconstraints as t1 , systables as t2 ,
sysindexes as t3 , syscolumns as t4 ,
sysreferences as t5 , syscolumns as t6 , systables as t7 ,
sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'R'
+ where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno = t3.part1
+ and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
+ t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
+ t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno = t9.part1 and t9.idxname =
+ and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
+ t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
+ t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
t8.idxname
- and t7.tabid = t5.ptabid""", table.name.lower())
+ and t7.tabid = t5.ptabid""", table_name, schema_sel)
def fkey_rec():
@@ -275,8 +369,9 @@ class InformixDialect(default.DefaultDialect):
fkeys = util.defaultdict(fkey_rec)
- for cons_name, cons_type, local_column, \
- remote_table, remote_column in rows:
+ rows = c.fetchall()
+ for cons_name, local_column, \
+ remote_table, remote_column, remote_owner in rows:
rec = fkeys[cons_name]
rec['name'] = cons_name
@@ -285,25 +380,91 @@ class InformixDialect(default.DefaultDialect):
if not rec['referred_table']:
rec['referred_table'] = remote_table
+ if schema is not None:
+ rec['referred_schema'] = remote_owner
- local_cols.append(local_column)
- remote_cols.append(remote_column)
+ if local_column not in local_cols:
+ local_cols.append(local_column)
+ if remote_column not in remote_cols:
+ remote_cols.append(remote_column)
return fkeys.values()
@reflection.cache
def get_primary_keys(self, connection, table_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
+
+ # Select the column positions from sysindexes for sysconstraints
+ data = connection.execute(
+ """select t2.*
+ from systables as t1, sysindexes as t2, sysconstraints as t3
+ where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
+ and t2.idxname=t3.idxname and t3.constrtype='P'""",
+ table_name, schema
+ ).fetchall()
+
+ colpositions = set()
+
+ for row in data:
+ colpos = set([getattr(row, 'part%d' % x) for x in range(1,16)])
+ colpositions |= colpos
+
+ if not len(colpositions):
+ return []
+
+ # Select the column names using the columnpositions
+ # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
+ place_holder = ','.join('?'*len(colpositions))
c = connection.execute(
- """select t4.colname as local_column
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4
- where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'P'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno = t3.part1""",
- table.name.lower())
- return [r[0] for r in c.fetchall()]
+ """select t1.colname
+ from syscolumns as t1, systables as t2
+ where t2.tabname=? and t1.tabid = t2.tabid and
+ t1.colno in (%s)""" % place_holder,
+ table_name, *colpositions
+ ).fetchall()
+
+ return reduce(lambda x,y: list(x)+list(y), c, [])
@reflection.cache
def get_indexes(self, connection, table_name, schema, **kw):
- # TODO
- return []
+ # TODO: schema...
+ c = connection.execute(
+ """select t1.*
+ from sysindexes as t1 , systables as t2
+ where t1.tabid = t2.tabid and t2.tabname=?""",
+ table_name)
+
+ indexes = []
+ for row in c.fetchall():
+ colnames = [getattr(row, 'part%d' % x) for x in range(1,16)]
+ colnames = [x for x in colnames if x]
+ place_holder = ','.join('?'*len(colnames))
+ c = connection.execute(
+ """select t1.colname
+ from syscolumns as t1, systables as t2
+ where t2.tabname=? and t1.tabid = t2.tabid and
+ t1.colno in (%s)""" % place_holder,
+ table_name, *colnames
+ ).fetchall()
+ c = reduce(lambda x,y: list(x)+list(y), c, [])
+ indexes.append({
+ 'name': row.idxname,
+ 'unique': row.idxtype.lower() == 'u',
+ 'column_names': c
+ })
+ return indexes
+
+ @reflection.cache
+ def get_view_definition(self, connection, view_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
+ c = connection.execute(
+ """select t1.viewtext
+ from sysviews as t1 , systables as t2
+ where t1.tabid=t2.tabid and t2.tabname=?
+ and t2.owner=? order by seqno""",
+ view_name, schema).fetchall()
+
+ return ''.join([row[0] for row in c])
+
+ def _get_default_schema_name(self, connection):
+ return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
index 8edcc953b..f11c57bb6 100644
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ b/lib/sqlalchemy/dialects/informix/informixdb.py
@@ -1,16 +1,38 @@
+"""
+Support for the informixdb DBAPI.
+
+informixdb is available at:
+
+ http://informixdb.sourceforge.net/
+
+Connecting
+^^^^^^^^^^
+
+Sample informix connection::
+
+ engine = create_engine('informix+informixdb://user:password@host/dbname')
+
+"""
+
+import re
+
from sqlalchemy.dialects.informix.base import InformixDialect
from sqlalchemy.engine import default
+VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
+
class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
def post_exec(self):
if self.isinsert:
- self._lastrowid = [self.cursor.sqlerrd[1]]
+ self._lastrowid = self.cursor.sqlerrd[1]
+
+ def get_lastrowid(self):
+ return self._lastrowid
class InformixDialect_informixdb(InformixDialect):
driver = 'informixdb'
- default_paramstyle = 'qmark'
- execution_context_cls = InformixExecutionContext_informixdb
+ execution_ctx_cls = InformixExecutionContext_informixdb
@classmethod
def dbapi(cls):
@@ -31,13 +53,8 @@ class InformixDialect_informixdb(InformixDialect):
def _get_server_version_info(self, connection):
# http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- version = []
- for n in connection.connection.dbms_version.split('.'):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
+ v = VERSION_RE.split(connection.connection.dbms_version)
+ return (int(v[1]), int(v[2]), v[3])
def is_disconnect(self, e):
if isinstance(e, self.dbapi.OperationalError):
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index a17b2484d..fc374c595 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -114,6 +114,8 @@ Known Issues
------------
* No support for more than one ``IDENTITY`` column per table
+* reflection of indexes does not work with versions older than
+ SQL Server 2005
"""
import datetime, decimal, inspect, operator, sys, re
@@ -758,20 +760,20 @@ class MSSQLCompiler(compiler.SQLCompiler):
return None
def visit_table(self, table, mssql_aliased=False, **kwargs):
- if mssql_aliased:
+ if mssql_aliased is table:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
# alias schema-qualified tables
alias = self._schema_aliased_table(table)
if alias is not None:
- return self.process(alias, mssql_aliased=True, **kwargs)
+ return self.process(alias, mssql_aliased=table, **kwargs)
else:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
def visit_alias(self, alias, **kwargs):
# translate for schema-qualified table aliases
self.tablealiases[alias.original] = alias
- kwargs['mssql_aliased'] = True
+ kwargs['mssql_aliased'] = alias.original
return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
def visit_extract(self, extract, **kw):
@@ -1127,26 +1129,55 @@ class MSDialect(default.DefaultDialect):
view_names = [r[0] for r in connection.execute(s)]
return view_names
- # The cursor reports it is closed after executing the sp.
@reflection.cache
def get_indexes(self, connection, tablename, schema=None, **kw):
+ # using system catalogs, don't support index reflection
+ # below MS 2005
+ if self.server_version_info < MS_2005_VERSION:
+ return []
+
current_schema = schema or self.default_schema_name
- col_finder = re.compile("(\w+)")
full_tname = "%s.%s" % (current_schema, tablename)
- indexes = []
- s = sql.text("exec sp_helpindex '%s'" % full_tname)
- rp = connection.execute(s)
- if rp.closed:
- # did not work for this setup.
- return []
+
+ rp = connection.execute(
+ sql.text("select ind.index_id, ind.is_unique, ind.name "
+ "from sys.indexes as ind join sys.tables as tab on "
+ "ind.object_id=tab.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name = :tabname "
+ "and sch.name=:schname "
+ "and ind.is_primary_key=0",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]
+ )
+ )
+ indexes = {}
for row in rp:
- if 'primary key' not in row['index_description']:
- indexes.append({
- 'name' : row['index_name'],
- 'column_names' : col_finder.findall(row['index_keys']),
- 'unique': 'unique' in row['index_description']
- })
- return indexes
+ indexes[row['index_id']] = {
+ 'name':row['name'],
+ 'unique':row['is_unique'] == 1,
+ 'column_names':[]
+ }
+ rp = connection.execute(
+ sql.text("select ind_col.index_id, col.name from sys.columns as col "
+ "join sys.index_columns as ind_col on "
+ "ind_col.column_id=col.column_id "
+ "join sys.tables as tab on tab.object_id=col.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name=:tabname "
+ "and sch.name=:schname",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]),
+ )
+ for row in rp:
+ if row['index_id'] in indexes:
+ indexes[row['index_id']]['column_names'].append(row['name'])
+
+ return indexes.values()
@reflection.cache
def get_view_definition(self, connection, viewname, schema=None, **kw):
@@ -1210,13 +1241,13 @@ class MSDialect(default.DefaultDialect):
"Did not recognize type '%s' of column '%s'" %
(type, name))
coltype = sqltypes.NULLTYPE
+ else:
+ if issubclass(coltype, sqltypes.Numeric) and \
+ coltype is not MSReal:
+ kwargs['scale'] = numericscale
+ kwargs['precision'] = numericprec
- if issubclass(coltype, sqltypes.Numeric) and \
- coltype is not MSReal:
- kwargs['scale'] = numericscale
- kwargs['precision'] = numericprec
-
- coltype = coltype(**kwargs)
+ coltype = coltype(**kwargs)
cdict = {
'name' : name,
'type' : coltype,
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index cd1606dbf..4dd6436cd 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,3 +1,5 @@
+# TODO: should be using the sys. catalog with SQL Server, not information schema
+
from sqlalchemy import Table, MetaData, Column, ForeignKey
from sqlalchemy.types import String, Unicode, Integer, TypeDecorator
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b6728c6b0..c5f471942 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -85,7 +85,9 @@ class MSDialect_pymssql(MSDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
- opts.pop('port', None)
+ port = opts.pop('port', None)
+ if port and 'host' in opts:
+ opts['host'] = "%s:%s" % (opts['host'], port)
return [[], opts]
def is_disconnect(self, e):
@@ -99,4 +101,4 @@ class MSDialect_pymssql(MSDialect):
else:
return False
-dialect = MSDialect_pymssql \ No newline at end of file
+dialect = MSDialect_pymssql
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index d526d74e8..a6e8f8c21 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -2373,8 +2373,8 @@ class MySQLTableDefinitionParser(object):
r'(?: +COLLATE +(?P<collate>[\w_]+))?'
r'(?: +(?P<notnull>NOT NULL))?'
r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+)'
- r'(?:ON UPDATE \w+)?'
+ r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
+ r'(?: +ON UPDATE \w+)?)'
r'))?'
r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
r'(?: +COMMENT +(P<comment>(?:\x27\x27|[^\x27])+))?'
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 06d3e6616..0c0c39b67 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -92,7 +92,7 @@ class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
def _extract_error_code(self, exception):
# e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
# [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
- m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.orig.args))
+ m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
c = m.group(1)
if c:
return int(c)
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 0aa348953..256972696 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -640,9 +640,11 @@ class OracleDialect(default.DefaultDialect):
def initialize(self, connection):
super(OracleDialect, self).initialize(connection)
- self.implicit_returning = self.server_version_info > (10, ) and \
- self.__dict__.get('implicit_returning', True)
-
+ self.implicit_returning = self.__dict__.get(
+ 'implicit_returning',
+ self.server_version_info > (10, )
+ )
+
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 768fbcb4d..1420e8987 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -171,7 +171,7 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""
__visit_name__ = 'ARRAY'
- def __init__(self, item_type, mutable=True):
+ def __init__(self, item_type, mutable=True, as_tuple=False):
"""Construct an ARRAY.
E.g.::
@@ -186,9 +186,14 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
``ARRAY(ARRAY(Integer))`` or such. The type mapping figures out on
the fly
- :param mutable: Defaults to True: specify whether lists passed to this
+ :param mutable=True: Specify whether lists passed to this
class should be considered mutable. If so, generic copy operations
(typically used by the ORM) will shallow-copy values.
+
+ :param as_tuple=False: Specify whether return results should be converted
+ to tuples from lists. DBAPIs such as psycopg2 return lists by default.
+ When tuples are returned, the results are hashable. This flag can only
+ be set to ``True`` when ``mutable`` is set to ``False``. (new in 0.6.5)
"""
if isinstance(item_type, ARRAY):
@@ -198,7 +203,12 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
item_type = item_type()
self.item_type = item_type
self.mutable = mutable
-
+ if mutable and as_tuple:
+ raise exc.ArgumentError(
+ "mutable must be set to False if as_tuple is True."
+ )
+ self.as_tuple = as_tuple
+
def copy_value(self, value):
if value is None:
return None
@@ -224,7 +234,8 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
def adapt(self, impltype):
return impltype(
self.item_type,
- mutable=self.mutable
+ mutable=self.mutable,
+ as_tuple=self.as_tuple
)
def bind_processor(self, dialect):
@@ -252,19 +263,28 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
if item_proc:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item_proc(item)
else:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item
def process(value):
if value is None:
return value
- return [convert_item(item) for item in value]
+ r = [convert_item(item) for item in value]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
return process
PGArray = ARRAY
@@ -1033,28 +1053,32 @@ class PGDialect(default.DefaultDialect):
else:
args = ()
- if attype in self.ischema_names:
- coltype = self.ischema_names[attype]
- elif attype in enums:
- enum = enums[attype]
- coltype = ENUM
- if "." in attype:
- kwargs['schema'], kwargs['name'] = attype.split('.')
- else:
- kwargs['name'] = attype
- args = tuple(enum['labels'])
- elif attype in domains:
- domain = domains[attype]
- if domain['attype'] in self.ischema_names:
+ while True:
+ if attype in self.ischema_names:
+ coltype = self.ischema_names[attype]
+ break
+ elif attype in enums:
+ enum = enums[attype]
+ coltype = ENUM
+ if "." in attype:
+ kwargs['schema'], kwargs['name'] = attype.split('.')
+ else:
+ kwargs['name'] = attype
+ args = tuple(enum['labels'])
+ break
+ elif attype in domains:
+ domain = domains[attype]
+ attype = domain['attype']
# A table can't override whether the domain is nullable.
nullable = domain['nullable']
if domain['default'] and not default:
# It can, however, override the default
# value, but can't set it to null.
default = domain['default']
- coltype = self.ischema_names[domain['attype']]
- else:
- coltype = None
+ continue
+ else:
+ coltype = None
+ break
if coltype:
coltype = coltype(*args, **kwargs)
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 7bd6d51f3..261793a33 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -270,7 +270,21 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
return super(SQLiteDDLCompiler, self).\
visit_primary_key_constraint(constraint)
-
+
+ def visit_foreign_key_constraint(self, constraint):
+
+ local_table = constraint._elements.values()[0].parent.table
+ remote_table = list(constraint._elements.values())[0].column.table
+
+ if local_table.schema != remote_table.schema:
+ return None
+ else:
+ return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint(constraint)
+
+ def define_constraint_remote_table(self, constraint, table, preparer):
+ """Format the remote table clause of a CREATE CONSTRAINT clause."""
+
+ return preparer.format_table(table, use_schema=False)
def visit_create_index(self, create):
index = create.element
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index 575cb37f2..b2295f49b 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -68,12 +68,13 @@ pysqlite's driver does not. Additionally, SQLAlchemy does not at
this time automatically render the "cast" syntax required for the
freestanding functions "current_timestamp" and "current_date" to return
datetime/date types natively. Unfortunately, pysqlite
-does not provide the standard DBAPI types in `cursor.description`,
+does not provide the standard DBAPI types in ``cursor.description``,
leaving SQLAlchemy with no way to detect these types on the fly
without expensive per-row type checks.
-Usage of PARSE_DECLTYPES can be forced if one configures
-"native_datetime=True" on create_engine()::
+Keeping in mind that pysqlite's parsing option is not recommended,
+nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
+can be forced if one configures "native_datetime=True" on create_engine()::
engine = create_engine('sqlite://',
connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},