Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--   lib/sqlalchemy/engine/__init__.py     24
-rw-r--r--   lib/sqlalchemy/engine/base.py        139
-rw-r--r--   lib/sqlalchemy/engine/ddl.py         193
-rw-r--r--   lib/sqlalchemy/engine/default.py     165
-rw-r--r--   lib/sqlalchemy/engine/interfaces.py  140
-rw-r--r--   lib/sqlalchemy/engine/reflection.py  164
-rw-r--r--   lib/sqlalchemy/engine/result.py       33
-rw-r--r--   lib/sqlalchemy/engine/strategies.py   29
-rw-r--r--   lib/sqlalchemy/engine/threadlocal.py   2
-rw-r--r--   lib/sqlalchemy/engine/url.py          48
-rw-r--r--   lib/sqlalchemy/engine/util.py         24
11 files changed, 455 insertions, 506 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 0a5a96784..890c76645 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -50,14 +50,13 @@ url.py
within a URL.
"""
-# not sure what this was used for
-#import sqlalchemy.databases
-
from .interfaces import (
- Compiled,
Connectable,
Dialect,
ExecutionContext,
+
+ # backwards compat
+ Compiled,
TypeCompiler
)
@@ -83,8 +82,12 @@ from .util import (
connection_memoize
)
+
from . import util, strategies
+# backwards compat
+from ..sql import ddl
+
default_strategy = 'plain'
@@ -345,10 +348,13 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
arguments.
"""
- opts = util._coerce_config(configuration, prefix)
- opts.update(kwargs)
- url = opts.pop('url')
- return create_engine(url, **opts)
+ options = dict((key[len(prefix):], configuration[key])
+ for key in configuration
+ if key.startswith(prefix))
+ options['_coerce_config'] = True
+ options.update(kwargs)
+ url = options.pop('url')
+ return create_engine(url, **options)
__all__ = (
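
The rewritten engine_from_config() above now does the prefix filtering inline and hands type coercion to the engine strategy via the private '_coerce_config' flag. A minimal usage sketch (the config values and database URL are illustrative), showing string values from an .ini-style dict being coerced per the dialect's engine_config_types map:

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite:///example.db",    # illustrative database
        "sqlalchemy.echo": "true",                   # string -> bool via bool_or_str
        "sqlalchemy.pool_recycle": "3600",           # string -> int
    }

    # keys under the prefix are stripped and handed to create_engine();
    # the engine strategy coerces the string values using engine_config_types
    engine = engine_from_config(config, prefix="sqlalchemy.")
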
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f69bd3d4b..1f2b7a3e5 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,8 +11,8 @@ from __future__ import with_statement
import sys
-from .. import exc, schema, util, log, interfaces
-from ..sql import expression, util as sql_util
+from .. import exc, util, log, interfaces
+from ..sql import expression, util as sql_util, schema, ddl
from .interfaces import Connectable, Compiled
from .util import _distill_params
import contextlib
@@ -303,20 +303,40 @@ class Connection(Connectable):
def invalidate(self, exception=None):
"""Invalidate the underlying DBAPI connection associated with
- this Connection.
+ this :class:`.Connection`.
- The underlying DB-API connection is literally closed (if
+ The underlying DBAPI connection is literally closed (if
possible), and is discarded. Its source connection pool will
typically lazily create a new connection to replace it.
- Upon the next usage, this Connection will attempt to reconnect
- to the pool with a new connection.
+ Upon the next use (where "use" typically means using the
+ :meth:`.Connection.execute` method or similar),
+ this :class:`.Connection` will attempt to
+ procure a new DBAPI connection using the services of the
+ :class:`.Pool` as a source of connectivity (e.g. a "reconnection").
+
+ If a transaction was in progress (e.g. the
+ :meth:`.Connection.begin` method has been called) when
+ :meth:`.Connection.invalidate` method is called, at the DBAPI
+ level all state associated with this transaction is lost, as
+ the DBAPI connection is closed. The :class:`.Connection`
+ will not allow a reconnection to proceed until the :class:`.Transaction`
+ object is ended, by calling the :meth:`.Transaction.rollback`
+ method; until that point, any attempt at continuing to use the
+ :class:`.Connection` will raise an
+ :class:`~sqlalchemy.exc.InvalidRequestError`.
+ This is to prevent applications from accidentally
+ continuing ongoing transactional operations despite the
+ fact that the transaction has been lost due to an
+ invalidation.
+
+ The :meth:`.Connection.invalidate` method, just like auto-invalidation,
+ will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
+ event.
- Transactions in progress remain in an "opened" state (even though the
- actual transaction is gone); these must be explicitly rolled back
- before a reconnect on this Connection can proceed. This is to prevent
- applications from accidentally continuing their transactional
- operations in a non-transactional state.
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
"""
if self.invalidated:
@@ -403,7 +423,6 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""
-
if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@@ -450,7 +469,7 @@ class Connection(Connectable):
return self.__transaction is not None
- def _begin_impl(self):
+ def _begin_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN (implicit)")
@@ -459,6 +478,8 @@ class Connection(Connectable):
try:
self.engine.dialect.do_begin(self.connection)
+ if self.connection._reset_agent is None:
+ self.connection._reset_agent = transaction
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
@@ -471,9 +492,12 @@ class Connection(Connectable):
self.engine.logger.info("ROLLBACK")
try:
self.engine.dialect.do_rollback(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
else:
self.__transaction = None
@@ -485,9 +509,12 @@ class Connection(Connectable):
self.engine.logger.info("COMMIT")
try:
self.engine.dialect.do_commit(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
def _savepoint_impl(self, name=None):
if self._has_events:
@@ -516,14 +543,17 @@ class Connection(Connectable):
self.engine.dialect.do_release_savepoint(self, name)
self.__transaction = context
- def _begin_twophase_impl(self, xid):
+ def _begin_twophase_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events:
- self.dispatch.begin_twophase(self, xid)
+ self.dispatch.begin_twophase(self, transaction.xid)
if self._still_open_and_connection_is_valid:
- self.engine.dialect.do_begin_twophase(self, xid)
+ self.engine.dialect.do_begin_twophase(self, transaction.xid)
+
+ if self.connection._reset_agent is None:
+ self.connection._reset_agent = transaction
def _prepare_twophase_impl(self, xid):
if self._has_events:
@@ -539,8 +569,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
if self._has_events:
@@ -548,8 +584,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _autorollback(self):
if not self.in_transaction():
@@ -581,6 +623,8 @@ class Connection(Connectable):
else:
if not self.__branch:
conn.close()
+ if conn._reset_agent is self.__transaction:
+ conn._reset_agent = None
del self.__connection
self.__can_reconnect = False
self.__transaction = None
@@ -652,17 +696,16 @@ class Connection(Connectable):
DBAPI-agnostic way, use the :func:`~.expression.text` construct.
"""
- for c in type(object).__mro__:
- if c in Connection.executors:
- return Connection.executors[c](
- self,
- object,
- multiparams,
- params)
- else:
+ if isinstance(object, util.string_types[0]):
+ return self._execute_text(object, multiparams, params)
+ try:
+ meth = object._execute_on_connection
+ except AttributeError:
raise exc.InvalidRequestError(
"Unexecutable object type: %s" %
type(object))
+ else:
+ return meth(self, multiparams, params)
def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
@@ -825,7 +868,7 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
self._handle_dbapi_exception(e,
- str(statement), parameters,
+ util.text_type(statement), parameters,
None, None)
if context.compiled:
@@ -898,6 +941,11 @@ class Connection(Connectable):
elif not context._is_explicit_returning:
result.close(_autoclose_connection=False)
result._metadata = None
+ elif context.isupdate and context._is_implicit_returning:
+ context._fetch_implicit_update_returning(result)
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+
elif result._metadata is None:
# no results, get rowcount
# (which requires open cursor on some drivers
@@ -1033,16 +1081,6 @@ class Connection(Connectable):
if self.should_close_with_result:
self.close()
- # poor man's multimethod/generic function thingy
- executors = {
- expression.FunctionElement: _execute_function,
- expression.ClauseElement: _execute_clauseelement,
- Compiled: _execute_compiled,
- schema.SchemaItem: _execute_default,
- schema.DDLElement: _execute_ddl,
- util.string_types[0]: _execute_text
- }
-
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1210,7 +1248,7 @@ class Transaction(object):
class RootTransaction(Transaction):
def __init__(self, connection):
super(RootTransaction, self).__init__(connection, None)
- self.connection._begin_impl()
+ self.connection._begin_impl(self)
def _do_rollback(self):
if self.is_active:
@@ -1259,7 +1297,7 @@ class TwoPhaseTransaction(Transaction):
super(TwoPhaseTransaction, self).__init__(connection, None)
self._is_prepared = False
self.xid = xid
- self.connection._begin_twophase_impl(self.xid)
+ self.connection._begin_twophase_impl(self)
def prepare(self):
"""Prepare this :class:`.TwoPhaseTransaction`.
@@ -1423,7 +1461,7 @@ class Engine(Connectable, log.Identified):
echo = log.echo_property()
def __repr__(self):
- return 'Engine(%s)' % str(self.url)
+ return 'Engine(%r)' % self.url
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.
@@ -1667,6 +1705,17 @@ class Engine(Connectable, log.Identified):
return self.dialect.get_table_names(conn, schema)
def has_table(self, table_name, schema=None):
+ """Return True if the given backend has a table of the given name.
+
+ .. seealso::
+
+ :ref:`metadata_reflection_inspector` - detailed schema inspection using
+ the :class:`.Inspector` interface.
+
+ :class:`.quoted_name` - used to pass quoting information along
+ with a schema identifier.
+
+ """
return self.run_callable(self.dialect.has_table, table_name, schema)
def raw_connection(self):
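
The expanded invalidate() docstring above describes a contract that is easy to show in miniature; a hedged sketch, with an in-memory SQLite backend chosen only for illustration:

    from sqlalchemy import create_engine, exc

    engine = create_engine("sqlite:///:memory:")
    conn = engine.connect()
    trans = conn.begin()

    conn.invalidate()               # DBAPI connection is discarded

    try:
        conn.execute("SELECT 1")    # blocked: the transaction was lost
    except exc.InvalidRequestError as err:
        print("must roll back first:", err)

    trans.rollback()                # ends the dead Transaction
    conn.execute("SELECT 1")        # transparently procures a new DBAPI connection
    conn.close()
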
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
deleted file mode 100644
index 6daa9be6b..000000000
--- a/lib/sqlalchemy/engine/ddl.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# engine/ddl.py
-# Copyright (C) 2009-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Routines to handle CREATE/DROP workflow."""
-
-from .. import schema
-from ..sql import util as sql_util
-
-
-class DDLBase(schema.SchemaVisitor):
- def __init__(self, connection):
- self.connection = connection
-
-
-class SchemaGenerator(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaGenerator, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def _can_create_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_create_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- (
- (not self.dialect.sequences_optional or
- not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
- )
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
- collection = [t for t in sql_util.sort_tables(tables)
- if self._can_create_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_create_sequence(s)]
-
- metadata.dispatch.before_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for seq in seq_coll:
- self.traverse_single(seq, create_ok=True)
-
- for table in collection:
- self.traverse_single(table, create_ok=True)
-
- metadata.dispatch.after_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_table(self, table, create_ok=False):
- if not create_ok and not self._can_create_table(table):
- return
-
- table.dispatch.before_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.CreateTable(table))
-
- if hasattr(table, 'indexes'):
- for index in table.indexes:
- self.traverse_single(index)
-
- table.dispatch.after_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, create_ok=False):
- if not create_ok and not self._can_create_sequence(sequence):
- return
- self.connection.execute(schema.CreateSequence(sequence))
-
- def visit_index(self, index):
- self.connection.execute(schema.CreateIndex(index))
-
-
-class SchemaDropper(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaDropper, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
-
- collection = [
- t
- for t in reversed(sql_util.sort_tables(tables))
- if self._can_drop_table(t)
- ]
-
- seq_coll = [
- s
- for s in metadata._sequences.values()
- if s.column is None and self._can_drop_sequence(s)
- ]
-
- metadata.dispatch.before_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- for table in collection:
- self.traverse_single(table, drop_ok=True)
-
- for seq in seq_coll:
- self.traverse_single(seq, drop_ok=True)
-
- metadata.dispatch.after_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- def _can_drop_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_drop_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema))
- )
-
- def visit_index(self, index):
- self.connection.execute(schema.DropIndex(index))
-
- def visit_table(self, table, drop_ok=False):
- if not drop_ok and not self._can_drop_table(table):
- return
-
- table.dispatch.before_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.DropTable(table))
-
- table.dispatch.after_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, drop_ok=False):
- if not drop_ok and not self._can_drop_sequence(sequence):
- return
- self.connection.execute(schema.DropSequence(sequence))
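
The CREATE/DROP visitors deleted here were relocated to sqlalchemy.sql.ddl (the backwards-compat import in engine/__init__.py above keeps "from sqlalchemy.engine import ddl" resolving to the new module). They are normally driven indirectly; a small sketch of the usual entry points, with an illustrative SQLite backend:

    from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine

    engine = create_engine("sqlite:///:memory:")
    metadata = MetaData()
    users = Table(
        "users", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    # create_all()/drop_all() invoke SchemaGenerator / SchemaDropper
    # (now in sqlalchemy.sql.ddl), honoring checkfirst and the tables list
    metadata.create_all(engine, checkfirst=True)
    metadata.drop_all(engine, tables=[users])
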
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 3e8e96a42..ed975b8cf 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,7 +16,8 @@ import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression
-from .. import exc, types as sqltypes, util, pool, processors
+from .. import types as sqltypes
+from .. import exc, util, pool, processors
import codecs
import weakref
from .. import event
@@ -26,6 +27,7 @@ AUTOCOMMIT_REGEXP = re.compile(
re.I | re.UNICODE)
+
class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
@@ -57,6 +59,18 @@ class DefaultDialect(interfaces.Dialect):
supports_simple_order_by_label = True
+ engine_config_types = util.immutabledict([
+ ('convert_unicode', util.bool_or_str('force')),
+ ('pool_timeout', int),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
+ ('pool_recycle', int),
+ ('pool_size', int),
+ ('max_overflow', int),
+ ('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
+ ])
+
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
@@ -97,6 +111,33 @@ class DefaultDialect(interfaces.Dialect):
server_version_info = None
+ construct_arguments = None
+ """Optional set of argument specifiers for various SQLAlchemy
+ constructs, typically schema items.
+
+ To
+ implement, establish as a series of tuples, as in::
+
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": None
+ })
+ ]
+
+ If the above construct is established on the Postgresql dialect,
+ the ``Index`` construct will now accept additional keyword arguments
+ such as ``postgresql_using``, ``postgresql_where``, etc. Any kind of
+ ``postgresql_XYZ`` argument not corresponding to the above template will
+ be rejected with an ``ArgumentError``, for all those SQLAlchemy constructs
+ which implement the :class:`.DialectKWArgs` class.
+
+ The default is ``None``; older dialects which don't implement the argument
+ will have the old behavior of un-validated kwargs to schema/SQL constructs.
+
+ """
+
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
@@ -111,6 +152,7 @@ class DefaultDialect(interfaces.Dialect):
implicit_returning=None,
supports_right_nested_joins=None,
case_sensitive=True,
+ supports_native_boolean=None,
label_length=None, **kwargs):
if not getattr(self, 'ported_sqla_06', True):
@@ -136,7 +178,8 @@ class DefaultDialect(interfaces.Dialect):
self.type_compiler = self.type_compiler(self)
if supports_right_nested_joins is not None:
self.supports_right_nested_joins = supports_right_nested_joins
-
+ if supports_native_boolean is not None:
+ self.supports_native_boolean = supports_native_boolean
self.case_sensitive = case_sensitive
if label_length and label_length > self.max_identifier_length:
@@ -159,6 +202,8 @@ class DefaultDialect(interfaces.Dialect):
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
+
+
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@@ -191,6 +236,10 @@ class DefaultDialect(interfaces.Dialect):
self.returns_unicode_strings = self._check_unicode_returns(connection)
+ if self.description_encoding is not None and \
+ self._check_unicode_description(connection):
+ self._description_decoder = self.description_encoding = None
+
self.do_rollback(connection.connection)
def on_connect(self):
@@ -207,46 +256,78 @@ class DefaultDialect(interfaces.Dialect):
"""
return None
- def _check_unicode_returns(self, connection):
+ def _check_unicode_returns(self, connection, additional_tests=None):
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type
- def check_unicode(formatstr, type_):
- cursor = connection.connection.cursor()
+ if self.positional:
+ parameters = self.execute_sequence_format()
+ else:
+ parameters = {}
+
+ def check_unicode(test):
+ statement = cast_to(expression.select([test]).compile(dialect=self))
try:
- try:
- cursor.execute(
- cast_to(
- expression.select(
- [expression.cast(
- expression.literal_column(
- "'test %s returns'" % formatstr),
- type_)
- ]).compile(dialect=self)
- )
- )
- row = cursor.fetchone()
-
- return isinstance(row[0], util.text_type)
- except self.dbapi.Error as de:
- util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
- return False
- finally:
+ cursor = connection.connection.cursor()
+ connection._cursor_execute(cursor, statement, parameters)
+ row = cursor.fetchone()
cursor.close()
+ except exc.DBAPIError as de:
+ # note that _cursor_execute() will have closed the cursor
+ # if an exception is thrown.
+ util.warn("Exception attempting to "
+ "detect unicode returns: %r" % de)
+ return False
+ else:
+ return isinstance(row[0], util.text_type)
+
+ tests = [
+ # detect plain VARCHAR
+ expression.cast(
+ expression.literal_column("'test plain returns'"),
+ sqltypes.VARCHAR(60)
+ ),
+ # detect if there's an NVARCHAR type with different behavior available
+ expression.cast(
+ expression.literal_column("'test unicode returns'"),
+ sqltypes.Unicode(60)
+ ),
+ ]
+
+ if additional_tests:
+ tests += additional_tests
+
+ results = set([check_unicode(test) for test in tests])
+
+ if results.issuperset([True, False]):
+ return "conditional"
+ else:
+ return results == set([True])
- # detect plain VARCHAR
- unicode_for_varchar = check_unicode("plain", sqltypes.VARCHAR(60))
-
- # detect if there's an NVARCHAR type with different behavior available
- unicode_for_unicode = check_unicode("unicode", sqltypes.Unicode(60))
+ def _check_unicode_description(self, connection):
+ # all DBAPIs on Py2K return cursor.description as encoded,
+ # until pypy2.1beta2 with sqlite, so let's just check it -
+ # it's likely others will start doing this too in Py2k.
- if unicode_for_unicode and not unicode_for_varchar:
- return "conditional"
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
else:
- return unicode_for_varchar
+ cast_to = util.text_type
+
+ cursor = connection.connection.cursor()
+ try:
+ cursor.execute(
+ cast_to(
+ expression.select([
+ expression.literal_column("'x'").label("some_label")
+ ]).compile(dialect=self)
+ )
+ )
+ return isinstance(cursor.description[0][0], util.text_type)
+ finally:
+ cursor.close()
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
@@ -259,8 +340,7 @@ class DefaultDialect(interfaces.Dialect):
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
- def reflecttable(self, connection, table, include_columns,
- exclude_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
@@ -368,6 +448,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
statement = None
postfetch_cols = None
prefetch_cols = None
+ returning_cols = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -464,6 +545,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if self.isinsert or self.isupdate:
self.postfetch_cols = self.compiled.postfetch
self.prefetch_cols = self.compiled.prefetch
+ self.returning_cols = self.compiled.returning
self.__process_defaults()
processors = compiled._bind_processors
@@ -722,6 +804,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
ipk.append(row[c])
self.inserted_primary_key = ipk
+ self.returned_defaults = row
+
+ def _fetch_implicit_update_returning(self, resultproxy):
+ row = resultproxy.fetchone()
+ self.returned_defaults = row
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
@@ -808,6 +895,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
and generate inserted_primary_key collection.
"""
+ key_getter = self.compiled._key_getters_for_crud_column[2]
+
if self.executemany:
if len(self.compiled.prefetch):
scalar_defaults = {}
@@ -831,7 +920,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
else:
val = self.get_update_default(c)
if val is not None:
- param[c.key] = val
+ param[key_getter(c)] = val
del self.current_parameters
else:
self.current_parameters = compiled_parameters = \
@@ -844,12 +933,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
val = self.get_update_default(c)
if val is not None:
- compiled_parameters[c.key] = val
+ compiled_parameters[key_getter(c)] = val
del self.current_parameters
if self.isinsert:
self.inserted_primary_key = [
- self.compiled_parameters[0].get(c.key, None)
+ self.compiled_parameters[0].get(key_getter(c), None)
for c in self.compiled.\
statement.table.primary_key
]
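
The construct_arguments docstring above can be paired with a short sketch. It assumes the postgresql dialect declares an Index template containing "using" (the same hypothetical the docstring uses); a matching <dialectname>_<argname> keyword is then accepted, while an undeclared one raises ArgumentError:

    from sqlalchemy import MetaData, Table, Column, Integer, Index

    metadata = MetaData()
    docs = Table("docs", metadata, Column("body", Integer))

    # accepted: "using" is part of the dialect's declared template
    Index("ix_docs_body", docs.c.body, postgresql_using="btree")

    # rejected with ArgumentError on dialects that define construct_arguments:
    # Index("ix_docs_body2", docs.c.body, postgresql_nonexistent="x")
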
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 750aa2fcd..5c44933e8 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1,13 +1,15 @@
# engine/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define core interfaces used by the engine system."""
-from .. import util, event, events
+from .. import util, event
+# backwards compat
+from ..sql.compiler import Compiled, TypeCompiler
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
@@ -191,19 +193,21 @@ class Dialect(object):
pass
- def reflecttable(self, connection, table, include_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
- properties from the database. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
+ properties from the database.
- The default implementation uses the
- :class:`~sqlalchemy.engine.reflection.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
+ The implementation of this method is provided by
+ :meth:`.DefaultDialect.reflecttable`, which makes use of
+ :class:`.Inspector` to retrieve column information.
+
+ Dialects should **not** seek to implement this method, and should
+ instead implement individual schema inspection operations such as
+ :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
+ etc.
"""
@@ -246,7 +250,7 @@ class Dialect(object):
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
- instead implement this method directly.
+ instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
"""
@@ -338,7 +342,7 @@ class Dialect(object):
raise NotImplementedError()
- def get_unique_constraints(self, table_name, schema=None, **kw):
+ def get_unique_constraints(self, connection, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
@@ -769,110 +773,6 @@ class ExecutionContext(object):
raise NotImplementedError()
-class Compiled(object):
- """Represent a compiled SQL or DDL expression.
-
- The ``__str__`` method of the ``Compiled`` object should produce
- the actual text of the statement. ``Compiled`` objects are
- specific to their underlying database dialect, and also may
- or may not be specific to the columns referenced within a
- particular set of bind parameters. In no case should the
- ``Compiled`` object be dependent on the actual values of those
- bind parameters, even though it may reference those values as
- defaults.
- """
-
- def __init__(self, dialect, statement, bind=None,
- compile_kwargs=util.immutabledict()):
- """Construct a new ``Compiled`` object.
-
- :param dialect: ``Dialect`` to compile against.
-
- :param statement: ``ClauseElement`` to be compiled.
-
- :param bind: Optional Engine or Connection to compile this
- statement against.
-
- :param compile_kwargs: additional kwargs that will be
- passed to the initial call to :meth:`.Compiled.process`.
-
- .. versionadded:: 0.8
-
- """
-
- self.dialect = dialect
- self.bind = bind
- if statement is not None:
- self.statement = statement
- self.can_execute = statement.supports_execution
- self.string = self.process(self.statement, **compile_kwargs)
-
- @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
- def compile(self):
- """Produce the internal string representation of this element."""
- pass
-
- @property
- def sql_compiler(self):
- """Return a Compiled that is capable of processing SQL expressions.
-
- If this compiler is one, it would likely just return 'self'.
-
- """
-
- raise NotImplementedError()
-
- def process(self, obj, **kwargs):
- return obj._compiler_dispatch(self, **kwargs)
-
- def __str__(self):
- """Return the string text of the generated SQL or DDL."""
-
- return self.string or ''
-
- def construct_params(self, params=None):
- """Return the bind params for this compiled object.
-
- :param params: a dict of string/object pairs whose values will
- override bind values compiled in to the
- statement.
- """
-
- raise NotImplementedError()
-
- @property
- def params(self):
- """Return the bind params for this compiled object."""
- return self.construct_params()
-
- def execute(self, *multiparams, **params):
- """Execute this compiled object."""
-
- e = self.bind
- if e is None:
- raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
- return e._execute_compiled(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Execute this compiled object and return the result's
- scalar value."""
-
- return self.execute(*multiparams, **params).scalar()
-
-
-class TypeCompiler(object):
- """Produces DDL specification for TypeEngine objects."""
-
- def __init__(self, dialect):
- self.dialect = dialect
-
- def process(self, type_):
- return type_._compiler_dispatch(self)
-
-
class Connectable(object):
"""Interface for an object which supports execution of SQL constructs.
@@ -884,8 +784,6 @@ class Connectable(object):
"""
- dispatch = event.dispatcher(events.ConnectionEvents)
-
def connect(self, **kwargs):
"""Return a :class:`.Connection` object.
@@ -914,7 +812,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.create`, "
":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
- """Emit CREATE statements for the given schema entity."""
+ """Emit CREATE statements for the given schema entity.
+ """
raise NotImplementedError()
@@ -923,7 +822,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.drop`, "
":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
- """Emit DROP statements for the given schema entity."""
+ """Emit DROP statements for the given schema entity.
+ """
raise NotImplementedError()
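
Compiled and TypeCompiler now live in sqlalchemy.sql.compiler and are re-imported here only for backwards compatibility. A brief sketch of how a Compiled object is usually obtained and inspected (the output in the comments is approximate):

    from sqlalchemy.sql import table, column, select
    from sqlalchemy.dialects import sqlite

    users = table("users", column("id"), column("name"))
    stmt = select([users.c.name]).where(users.c.id == 5)

    compiled = stmt.compile(dialect=sqlite.dialect())
    print(compiled.string)   # SELECT users.name FROM users WHERE users.id = ?
    print(compiled.params)   # {'id_1': 5}
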
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 50b3f774c..45f100518 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
# engine/reflection.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,9 +25,9 @@ methods such as get_table_names, get_columns, etc.
"""
from .. import exc, sql
-from .. import schema as sa_schema
+from ..sql import schema as sa_schema
from .. import util
-from ..types import TypeEngine
+from ..sql.type_api import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
@@ -161,7 +161,7 @@ class Inspector(object):
"""Return all table names in referred to within a particular schema.
The names are expected to be real tables only, not views.
- Views are instead returned using the :meth:`.get_view_names`
+ Views are instead returned using the :meth:`.Inspector.get_view_names`
method.
@@ -169,7 +169,7 @@ class Inspector(object):
database's default schema is
used, else the named schema is searched. If the database does not
support named schemas, behavior is undefined if ``schema`` is not
- passed as ``None``.
+ passed as ``None``. For special quoting, use :class:`.quoted_name`.
:param order_by: Optional, may be the string "foreign_key" to sort
the result on foreign key dependencies.
@@ -206,6 +206,13 @@ class Inspector(object):
This currently includes some options that apply to MySQL tables.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(
@@ -217,6 +224,8 @@ class Inspector(object):
"""Return all view names in `schema`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_names(self.bind, schema,
@@ -226,6 +235,8 @@ class Inspector(object):
"""Return definition for `view_name`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_definition(
@@ -251,6 +262,14 @@ class Inspector(object):
attrs
dict containing optional column attributes
+
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
col_defs = self.dialect.get_columns(self.bind, table_name, schema,
@@ -288,6 +307,13 @@ class Inspector(object):
name
optional name of the primary key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
info_cache=self.info_cache,
@@ -315,6 +341,13 @@ class Inspector(object):
name
optional name of the foreign key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_foreign_keys(self.bind, table_name, schema,
@@ -336,6 +369,13 @@ class Inspector(object):
unique
boolean
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_indexes(self.bind, table_name,
@@ -354,7 +394,14 @@ class Inspector(object):
column_names
list of column names in order
- .. versionadded:: 0.9.0
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ .. versionadded:: 0.8.4
"""
@@ -384,24 +431,25 @@ class Inspector(object):
"""
dialect = self.bind.dialect
- # table attributes we might need.
- reflection_options = dict(
- (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)
-
schema = table.schema
table_name = table.name
- # apply table options
- tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
- if tbl_opts:
- table.kwargs.update(tbl_opts)
+ # get table-level arguments that are specifically
+ # intended for reflection, e.g. oracle_resolve_synonyms.
+ # these are unconditionally passed to related Table
+ # objects
+ reflection_options = dict(
+ (k, table.dialect_kwargs.get(k))
+ for k in dialect.reflection_options
+ if k in table.dialect_kwargs
+ )
- # table.kwargs will need to be passed to each reflection method. Make
- # sure keywords are strings.
- tblkw = table.kwargs.copy()
- for (k, v) in list(tblkw.items()):
- del tblkw[k]
- tblkw[str(k)] = v
+ # reflect table options, like mysql_engine
+ tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+ if tbl_opts:
+ # add additional kwargs to the Table if the dialect
+ # returned them
+ table._validate_dialect_kwargs(tbl_opts)
if util.py2k:
if isinstance(schema, str):
@@ -409,10 +457,13 @@ class Inspector(object):
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
- # columns
found_table = False
- for col_d in self.get_columns(table_name, schema, **tblkw):
+ cols_by_orig_name = {}
+
+ for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
found_table = True
+ orig_name = col_d['name']
+
table.dispatch.column_reflect(self, table, col_d)
name = col_d['name']
@@ -422,12 +473,12 @@ class Inspector(object):
continue
coltype = col_d['type']
- col_kw = {
- 'nullable': col_d['nullable'],
- }
- for k in ('autoincrement', 'quote', 'info', 'key'):
- if k in col_d:
- col_kw[k] = col_d[k]
+
+ col_kw = dict(
+ (k, col_d[k])
+ for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
+ if k in col_d
+ )
colargs = []
if col_d.get('default') is not None:
@@ -441,7 +492,7 @@ class Inspector(object):
)
if 'sequence' in col_d:
- # TODO: mssql, maxdb and sybase are using this.
+ # TODO: mssql and sybase are using this.
seq = col_d['sequence']
sequence = sa_schema.Sequence(seq['name'], 1, 1)
if 'start' in seq:
@@ -450,37 +501,41 @@ class Inspector(object):
sequence.increment = seq['increment']
colargs.append(sequence)
- col = sa_schema.Column(name, coltype, *colargs, **col_kw)
+ cols_by_orig_name[orig_name] = col = \
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
+
+ if col.key in table.primary_key:
+ col.primary_key = True
table.append_column(col)
if not found_table:
raise exc.NoSuchTableError(table.name)
- # Primary keys
- pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
+ pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
if pk_cons:
pk_cols = [
- table.c[pk]
+ cols_by_orig_name[pk]
for pk in pk_cons['constrained_columns']
- if pk in table.c and pk not in exclude_columns
- ]
- pk_cols += [
- pk
- for pk in table.primary_key
- if pk.key in exclude_columns
+ if pk in cols_by_orig_name and pk not in exclude_columns
]
- primary_key_constraint = sa_schema.PrimaryKeyConstraint(
- name=pk_cons.get('name'),
- *pk_cols
- )
- table.append_constraint(primary_key_constraint)
+ # update pk constraint name
+ table.primary_key.name = pk_cons.get('name')
+
+ # tell the PKConstraint to re-initialize
+ # its column collection
+ table.primary_key._reload(pk_cols)
- # Foreign keys
- fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
+ fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
conname = fkey_d['name']
- constrained_columns = fkey_d['constrained_columns']
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_columns = [
+ cols_by_orig_name[c].key
+ if c in cols_by_orig_name else c
+ for c in fkey_d['constrained_columns']
+ ]
if exclude_columns and set(constrained_columns).intersection(
exclude_columns):
continue
@@ -504,9 +559,14 @@ class Inspector(object):
)
for column in referred_columns:
refspec.append(".".join([referred_table, column]))
+ if 'options' in fkey_d:
+ options = fkey_d['options']
+ else:
+ options = {}
table.append_constraint(
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
- conname, link_to_name=True))
+ conname, link_to_name=True,
+ **options))
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
@@ -520,5 +580,11 @@ class Inspector(object):
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
- sa_schema.Index(name, *[table.columns[c] for c in columns],
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ sa_schema.Index(name, *[
+ cols_by_orig_name[c] if c in cols_by_orig_name
+ else table.c[c]
+ for c in columns
+ ],
**dict(unique=unique))
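
The per-construct methods that reflecttable() builds on above are also available directly through the Inspector; a brief usage sketch (the database file name is illustrative):

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite:///example.db")
    insp = inspect(engine)          # returns an Inspector for this dialect

    for name in insp.get_table_names():
        cols = [col["name"] for col in insp.get_columns(name)]
        print(name, cols)
        print("  pk:", insp.get_pk_constraint(name)["constrained_columns"])
        print("  fks:", insp.get_foreign_keys(name))
        print("  idx:", insp.get_indexes(name))
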
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 65ce3b742..f9e0ca0d2 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,8 +9,8 @@ and :class:`.RowProxy."""
-from .. import exc, types, util
-from ..sql import expression
+from .. import exc, util
+from ..sql import expression, sqltypes
import collections
# This reconstructor is necessary so that pickles with the C extension or
@@ -125,8 +125,11 @@ class RowProxy(BaseRowProxy):
__hash__ = None
+ def __lt__(self, other):
+ return tuple(self) < tuple(other)
+
def __eq__(self, other):
- return other is self or other == tuple(self)
+ return other is self or tuple(other) == tuple(self)
def __ne__(self, other):
return not self.__eq__(other)
@@ -205,10 +208,10 @@ class ResultMetaData(object):
else colname.lower()]
except KeyError:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
else:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
processor = context.get_result_processor(type_, colname, coltype)
@@ -621,6 +624,24 @@ class ResultProxy(object):
else:
return self.context.compiled_parameters[0]
+ @property
+ def returned_defaults(self):
+ """Return the values of default columns that were fetched using
+ the :meth:`.ValuesBase.return_defaults` feature.
+
+ The value is an instance of :class:`.RowProxy`, or ``None``
+ if :meth:`.ValuesBase.return_defaults` was not used or if the
+ backend does not support RETURNING.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults`
+
+ """
+ return self.context.returned_defaults
+
def lastrow_has_defaults(self):
"""Return ``lastrow_has_defaults()`` from the underlying
:class:`.ExecutionContext`.
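
The returned_defaults accessor documented above pairs with ValuesBase.return_defaults(); a hedged sketch assuming a RETURNING-capable backend (the URL and table are illustrative), noting that on other backends the attribute is simply None:

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, String, DateTime, func)

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    metadata = MetaData()
    widgets = Table(
        "widgets", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
        Column("created", DateTime, server_default=func.now()),
    )
    metadata.create_all(engine)

    result = engine.execute(
        widgets.insert().values(name="spanner").return_defaults()
    )
    row = result.returned_defaults          # RowProxy of server-generated values
    print(row["created"])
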
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index ab9d370a3..f6c064033 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,5 +1,5 @@
# engine/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -49,18 +49,27 @@ class DefaultEngineStrategy(EngineStrategy):
dialect_cls = u.get_dialect()
+ if kwargs.pop('_coerce_config', False):
+ def pop_kwarg(key, default=None):
+ value = kwargs.pop(key, default)
+ if key in dialect_cls.engine_config_types:
+ value = dialect_cls.engine_config_types[key](value)
+ return value
+ else:
+ pop_kwarg = kwargs.pop
+
dialect_args = {}
# consume dialect arguments from kwargs
for k in util.get_cls_kwargs(dialect_cls):
if k in kwargs:
- dialect_args[k] = kwargs.pop(k)
+ dialect_args[k] = pop_kwarg(k)
dbapi = kwargs.pop('module', None)
if dbapi is None:
dbapi_args = {}
for k in util.get_func_kwargs(dialect_cls.dbapi):
if k in kwargs:
- dbapi_args[k] = kwargs.pop(k)
+ dbapi_args[k] = pop_kwarg(k)
dbapi = dialect_cls.dbapi(**dbapi_args)
dialect_args['dbapi'] = dbapi
@@ -70,15 +79,15 @@ class DefaultEngineStrategy(EngineStrategy):
# assemble connection arguments
(cargs, cparams) = dialect.create_connect_args(u)
- cparams.update(kwargs.pop('connect_args', {}))
+ cparams.update(pop_kwarg('connect_args', {}))
# look for existing pool or create
- pool = kwargs.pop('pool', None)
+ pool = pop_kwarg('pool', None)
if pool is None:
def connect():
try:
return dialect.connect(*cargs, **cparams)
- except Exception as e:
+ except dialect.dbapi.Error as e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
exc.DBAPIError.instance(None, None,
@@ -87,9 +96,9 @@ class DefaultEngineStrategy(EngineStrategy):
)
)
- creator = kwargs.pop('creator', connect)
+ creator = pop_kwarg('creator', connect)
- poolclass = kwargs.pop('poolclass', None)
+ poolclass = pop_kwarg('poolclass', None)
if poolclass is None:
poolclass = dialect_cls.get_pool_class(u)
pool_args = {}
@@ -106,7 +115,7 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(poolclass):
tk = translate.get(k, k)
if tk in kwargs:
- pool_args[k] = kwargs.pop(tk)
+ pool_args[k] = pop_kwarg(tk)
pool = poolclass(creator, **pool_args)
else:
if isinstance(pool, poollib._DBProxy):
@@ -119,7 +128,7 @@ class DefaultEngineStrategy(EngineStrategy):
engine_args = {}
for k in util.get_cls_kwargs(engineclass):
if k in kwargs:
- engine_args[k] = kwargs.pop(k)
+ engine_args[k] = pop_kwarg(k)
_initialize = kwargs.pop('_initialize', True)
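
The keyword routing above (dialect, DBAPI, pool, and Engine arguments all drawn from one kwargs dict, with connect_args merged into the DBAPI connect parameters) looks like this from the caller's side; the URL and the psycopg2-style connect_timeout are illustrative:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import QueuePool

    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        echo=True,                            # Engine argument
        poolclass=QueuePool,                  # explicit Pool class
        pool_size=10, max_overflow=20,        # Pool arguments
        connect_args={"connect_timeout": 5},  # merged into cparams for connect()
    )
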
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index d4aeafd6f..ae647a78e 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -1,5 +1,5 @@
# engine/threadlocal.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index ed5729eea..78ac06187 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -1,5 +1,5 @@
# engine/url.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,6 +16,7 @@ be used directly and is also accepted directly by ``create_engine()``.
import re
from .. import exc, util
from . import Dialect
+from ..dialects import registry
class URL(object):
@@ -23,8 +24,8 @@ class URL(object):
Represent the components of a URL used to connect to a database.
This object is suitable to be passed directly to a
- ``create_engine()`` call. The fields of the URL are parsed from a
- string by the ``module-level make_url()`` function. the string
+ :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a
+ string by the :func:`.make_url` function. The string
format of the URL is an RFC-1738-style string.
All initialization parameters are available as public attributes.
@@ -61,15 +62,19 @@ class URL(object):
self.database = database
self.query = query or {}
- def __str__(self):
+ def __to_string__(self, hide_password=True):
s = self.drivername + "://"
if self.username is not None:
- s += self.username
+ s += _rfc_1738_quote(self.username)
if self.password is not None:
- s += ':' + util.quote_plus(self.password)
+ s += ':' + ('***' if hide_password
+ else _rfc_1738_quote(self.password))
s += "@"
if self.host is not None:
- s += self.host
+ if ':' in self.host:
+ s += "[%s]" % self.host
+ else:
+ s += self.host
if self.port is not None:
s += ':' + str(self.port)
if self.database is not None:
@@ -80,6 +85,12 @@ class URL(object):
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
return s
+ def __str__(self):
+ return self.__to_string__(hide_password=False)
+
+ def __repr__(self):
+ return self.__to_string__()
+
def __hash__(self):
return hash(str(self))
@@ -102,7 +113,6 @@ class URL(object):
name = self.drivername
else:
name = self.drivername.replace('+', '.')
- from sqlalchemy.dialects import registry
cls = registry.load(name)
# check for legacy dialects that
# would return a module with 'dialect' as the
@@ -160,10 +170,13 @@ def _parse_rfc1738_args(name):
(?P<name>[\w\+]+)://
(?:
(?P<username>[^:/]*)
- (?::(?P<password>[^/]*))?
+ (?::(?P<password>.*))?
@)?
(?:
- (?P<host>[^/:]*)
+ (?:
+ \[(?P<ipv6host>[^/]+)\] |
+ (?P<ipv4host>[^/:]+)
+ )?
(?::(?P<port>[^/]*))?
)?
(?:/(?P<database>.*))?
@@ -182,10 +195,15 @@ def _parse_rfc1738_args(name):
query = None
components['query'] = query
+ if components['username'] is not None:
+ components['username'] = _rfc_1738_unquote(components['username'])
+
if components['password'] is not None:
- components['password'] = \
- util.unquote_plus(components['password'])
+ components['password'] = _rfc_1738_unquote(components['password'])
+ ipv4host = components.pop('ipv4host')
+ ipv6host = components.pop('ipv6host')
+ components['host'] = ipv4host or ipv6host
name = components.pop('name')
return URL(name, **components)
else:
@@ -193,6 +211,12 @@ def _parse_rfc1738_args(name):
"Could not parse rfc1738 URL from string '%s'" % name)
+def _rfc_1738_quote(text):
+ return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
+
+def _rfc_1738_unquote(text):
+ return util.unquote(text)
+
def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
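
The quoting and host-parsing changes above in a nutshell; a small sketch using make_url() (the URL itself is illustrative):

    from sqlalchemy.engine.url import make_url

    u = make_url("postgresql://scott:tig%40er@[2001:db8::1]:5432/test")
    print(u.password)   # 'tig@er'  -- percent-unquoted via _rfc_1738_unquote
    print(u.host)       # '2001:db8::1'  -- brackets stripped from the IPv6 host
    print(repr(u))      # password rendered as '***'
    print(str(u))       # password re-quoted back into the URL string
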
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index e56452751..6c0644be4 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -1,33 +1,11 @@
# engine/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .. import util
-
-def _coerce_config(configuration, prefix):
- """Convert configuration values to expected types."""
-
- options = dict((key[len(prefix):], configuration[key])
- for key in configuration
- if key.startswith(prefix))
- for option, type_ in (
- ('convert_unicode', util.bool_or_str('force')),
- ('pool_timeout', int),
- ('echo', util.bool_or_str('debug')),
- ('echo_pool', util.bool_or_str('debug')),
- ('pool_recycle', int),
- ('pool_size', int),
- ('max_overflow', int),
- ('pool_threadlocal', bool),
- ('use_native_unicode', bool),
- ):
- util.coerce_kw_type(options, option, type_)
- return options
-
-
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.