Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--   lib/sqlalchemy/engine/__init__.py     13
-rw-r--r--   lib/sqlalchemy/engine/base.py        297
-rw-r--r--   lib/sqlalchemy/engine/default.py     136
-rw-r--r--   lib/sqlalchemy/engine/interfaces.py   89
-rw-r--r--   lib/sqlalchemy/engine/reflection.py   82
-rw-r--r--   lib/sqlalchemy/engine/strategies.py   11
-rw-r--r--   lib/sqlalchemy/engine/threadlocal.py   5
7 files changed, 488 insertions, 145 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index cf75871bf..f512e260a 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -72,6 +72,7 @@ from .base import (
)
from .result import (
+ BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
@@ -256,9 +257,19 @@ def create_engine(*args, **kwargs):
Behavior here varies per backend, and
individual dialects should be consulted directly.
+ Note that the isolation level can also be set on a
+ per-:class:`.Connection` basis, using the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature.
+
.. seealso::
- :ref:`SQLite Concurrency <sqlite_concurrency>`
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
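
As a usage sketch of the isolation-level hooks documented above, the engine-wide and per-:class:`.Connection` settings can be combined; the Postgresql URL and the level names here are illustrative placeholders, and valid values vary per backend::

    from sqlalchemy import create_engine

    # engine-wide default isolation level, applied to each pooled connection
    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="REPEATABLE READ")

    # per-Connection override; the DBAPI connection is reset to its
    # original level when this Connection is released
    conn = engine.connect()
    conn = conn.execution_options(isolation_level="SERIALIZABLE")
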
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index dd82be1d1..8d816b7fd 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -201,14 +201,19 @@ class Connection(Connectable):
used by the ORM internally supersedes a cache dictionary
specified here.
- :param isolation_level: Available on: Connection.
+ :param isolation_level: Available on: :class:`.Connection`.
Set the transaction isolation level for
- the lifespan of this connection. Valid values include
- those string values accepted by the ``isolation_level``
- parameter passed to :func:`.create_engine`, and are
- database specific, including those for :ref:`sqlite_toplevel`,
- :ref:`postgresql_toplevel` - see those dialect's documentation
- for further info.
+ the lifespan of this :class:`.Connection` object (*not* the
+ underlying DBAPI connection, for which the level is reset
+ to its original setting upon termination of this
+ :class:`.Connection` object).
+
+ Valid values include
+ those string values accepted by the
+ :paramref:`.create_engine.isolation_level`
+ parameter passed to :func:`.create_engine`. These levels are
+ semi-database specific; see individual dialect documentation for
+ valid levels.
Note that this option necessarily affects the underlying
DBAPI connection for the lifespan of the originating
@@ -217,6 +222,20 @@ class Connection(Connectable):
is returned to the connection pool, i.e.
the :meth:`.Connection.close` method is called.
+ .. seealso::
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+ :ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+ :ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+
:param no_parameters: When ``True``, if the final parameter
list or dictionary is totally empty, will invoke the
statement on the cursor as ``cursor.execute(statement)``,
@@ -260,23 +279,97 @@ class Connection(Connectable):
@property
def connection(self):
- "The underlying DB-API connection managed by this Connection."
+ """The underlying DB-API connection managed by this Connection.
+
+ .. seealso::
+
+ :ref:`dbapi_connections`
+
+ """
try:
return self.__connection
except AttributeError:
- return self._revalidate_connection()
+ try:
+ return self._revalidate_connection()
+ except Exception as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def get_isolation_level(self):
+ """Return the current isolation level assigned to this
+ :class:`.Connection`.
+
+ This will typically be the default isolation level as determined
+ by the dialect, unless the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature has been used to alter the isolation level on a
+ per-:class:`.Connection` basis.
+
+ This method will typically perform a live SQL operation in order
+ to procure the current isolation level, so the value returned is the
+ actual level on the underlying DBAPI connection regardless of how
+ this state was set. Compare to the
+ :attr:`.Connection.default_isolation_level` accessor
+ which returns the dialect-level setting without performing a SQL
+ query.
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ """
+ try:
+ return self.dialect.get_isolation_level(self.connection)
+ except Exception as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ @property
+ def default_isolation_level(self):
+ """The default isolation level assigned to this :class:`.Connection`.
+
+ This is the isolation level setting that the :class:`.Connection`
+ has when first procured via the :meth:`.Engine.connect` method.
+ This level stays in place until the
+ :paramref:`.Connection.execution_options.isolation_level` is used
+ to change the setting on a per-:class:`.Connection` basis.
+
+ Unlike :meth:`.Connection.get_isolation_level`, this attribute is set
+ ahead of time from the first connection procured by the dialect,
+ so no SQL query is invoked when this accessor is called.
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ """
+ return self.dialect.default_isolation_level
def _revalidate_connection(self):
if self.__branch_from:
return self.__branch_from._revalidate_connection()
-
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Can't reconnect until invalid "
"transaction is rolled back")
- self.__connection = self.engine.raw_connection()
+ self.__connection = self.engine.raw_connection(_connection=self)
self.__invalid = False
return self.__connection
raise exc.ResourceClosedError("This Connection is closed")
@@ -741,7 +834,7 @@ class Connection(Connectable):
a subclass of :class:`.Executable`, such as a
:func:`~.expression.select` construct
* a :class:`.FunctionElement`, such as that generated
- by :attr:`.func`, will be automatically wrapped in
+ by :data:`.func`, will be automatically wrapped in
a SELECT statement, which is then executed.
* a :class:`.DDLElement` object
* a :class:`.DefaultGenerator` object
@@ -959,9 +1052,10 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
- self._handle_dbapi_exception(e,
- util.text_type(statement), parameters,
- None, None)
+ self._handle_dbapi_exception(
+ e,
+ util.text_type(statement), parameters,
+ None, None)
if context.compiled:
context.pre_exec()
@@ -985,36 +1079,39 @@ class Connection(Connectable):
"%r",
sql_util._repr_params(parameters, batches=10)
)
+
+ evt_handled = False
try:
if context.executemany:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_executemany:
- if fn(cursor, statement, parameters, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_executemany:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_executemany(
cursor,
statement,
parameters,
context)
-
elif not parameters and context.no_parameters:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute_no_params:
- if fn(cursor, statement, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute_no_params:
+ if fn(cursor, statement, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_execute_no_params(
cursor,
statement,
context)
-
else:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
- if fn(cursor, statement, parameters, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_execute(
cursor,
statement,
@@ -1038,31 +1135,12 @@ class Connection(Connectable):
if context.compiled:
context.post_exec()
- if context.isinsert and not context.executemany:
- context.post_insert()
-
- # create a resultproxy, get rowcount/implicit RETURNING
- # rows, close cursor if no further results pending
- result = context.get_result_proxy()
- if context.isinsert:
- if context._is_implicit_returning:
- context._fetch_implicit_returning(result)
- result.close(_autoclose_connection=False)
- result._metadata = None
- elif not context._is_explicit_returning:
+ if context.is_crud:
+ result = context._setup_crud_result_proxy()
+ else:
+ result = context.get_result_proxy()
+ if result._metadata is None:
result.close(_autoclose_connection=False)
- result._metadata = None
- elif context.isupdate and context._is_implicit_returning:
- context._fetch_implicit_update_returning(result)
- result.close(_autoclose_connection=False)
- result._metadata = None
-
- elif result._metadata is None:
- # no results, get rowcount
- # (which requires open cursor on some drivers
- # such as kintersbasdb, mxodbc),
- result.rowcount
- result.close(_autoclose_connection=False)
if context.should_autocommit and self._root.__transaction is None:
self._root._commit_impl(autocommit=True)
@@ -1149,7 +1227,10 @@ class Connection(Connectable):
self._is_disconnect = \
isinstance(e, self.dialect.dbapi.Error) and \
not self.closed and \
- self.dialect.is_disconnect(e, self.__connection, cursor)
+ self.dialect.is_disconnect(
+ e,
+ self.__connection if not self.invalidated else None,
+ cursor)
if context:
context.is_disconnect = self._is_disconnect
@@ -1194,7 +1275,8 @@ class Connection(Connectable):
# new handle_error event
ctx = ExceptionContextImpl(
- e, sqlalchemy_exception, self, cursor, statement,
+ e, sqlalchemy_exception, self.engine,
+ self, cursor, statement,
parameters, context, self._is_disconnect)
for fn in self.dispatch.handle_error:
@@ -1236,12 +1318,65 @@ class Connection(Connectable):
del self._reentrant_error
if self._is_disconnect:
del self._is_disconnect
- dbapi_conn_wrapper = self.connection
- self.engine.pool._invalidate(dbapi_conn_wrapper, e)
- self.invalidate(e)
+ if not self.invalidated:
+ dbapi_conn_wrapper = self.__connection
+ self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+ self.invalidate(e)
if self.should_close_with_result:
self.close()
+ @classmethod
+ def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
+
+ exc_info = sys.exc_info()
+
+ is_disconnect = dialect.is_disconnect(e, None, None)
+
+ should_wrap = isinstance(e, dialect.dbapi.Error)
+
+ if should_wrap:
+ sqlalchemy_exception = exc.DBAPIError.instance(
+ None,
+ None,
+ e,
+ dialect.dbapi.Error,
+ connection_invalidated=is_disconnect)
+ else:
+ sqlalchemy_exception = None
+
+ newraise = None
+
+ if engine._has_events:
+ ctx = ExceptionContextImpl(
+ e, sqlalchemy_exception, engine, None, None, None,
+ None, None, is_disconnect)
+ for fn in engine.dispatch.handle_error:
+ try:
+ # handler returns an exception;
+ # call next handler in a chain
+ per_fn = fn(ctx)
+ if per_fn is not None:
+ ctx.chained_exception = newraise = per_fn
+ except Exception as _raised:
+ # handler raises an exception - stop processing
+ newraise = _raised
+ break
+
+ if sqlalchemy_exception and \
+ is_disconnect != ctx.is_disconnect:
+ sqlalchemy_exception.connection_invalidated = \
+ is_disconnect = ctx.is_disconnect
+
+ if newraise:
+ util.raise_from_cause(newraise, exc_info)
+ elif should_wrap:
+ util.raise_from_cause(
+ sqlalchemy_exception,
+ exc_info
+ )
+ else:
+ util.reraise(*exc_info)
+
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1320,8 +1455,9 @@ class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
+ engine, connection, cursor, statement, parameters,
context, is_disconnect):
+ self.engine = engine
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
@@ -1865,10 +2001,11 @@ class Engine(Connectable, log.Identified):
"""
- return self._connection_cls(self,
- self.pool.connect(),
- close_with_result=close_with_result,
- **kwargs)
+ return self._connection_cls(
+ self,
+ self._wrap_pool_connect(self.pool.connect, None),
+ close_with_result=close_with_result,
+ **kwargs)
def table_names(self, schema=None, connection=None):
"""Return a list of all table names available in the database.
@@ -1898,7 +2035,18 @@ class Engine(Connectable, log.Identified):
"""
return self.run_callable(self.dialect.has_table, table_name, schema)
- def raw_connection(self):
+ def _wrap_pool_connect(self, fn, connection):
+ dialect = self.dialect
+ try:
+ return fn()
+ except dialect.dbapi.Error as e:
+ if connection is None:
+ Connection._handle_dbapi_exception_noconnection(
+ e, dialect, self)
+ else:
+ util.reraise(*sys.exc_info())
+
+ def raw_connection(self, _connection=None):
"""Return a "raw" DBAPI connection from the connection pool.
The returned object is a proxied version of the DBAPI
@@ -1909,13 +2057,18 @@ class Engine(Connectable, log.Identified):
for real.
This method provides direct DBAPI connection access for
- special situations. In most situations, the :class:`.Connection`
- object should be used, which is procured using the
- :meth:`.Engine.connect` method.
+ special situations when the API provided by :class:`.Connection`
+ is not needed. When a :class:`.Connection` object is already
+ present, the DBAPI connection is available using
+ the :attr:`.Connection.connection` accessor.
- """
+ .. seealso::
- return self.pool.unique_connection()
+ :ref:`dbapi_connections`
+
+ """
+ return self._wrap_pool_connect(
+ self.pool.unique_connection, _connection)
class OptionEngine(Engine):
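
A short sketch of the :class:`.Connection` isolation-level accessors added above, together with the documented route to a raw DBAPI connection; an already-configured ``engine`` is assumed::

    conn = engine.connect()

    # dialect-level default captured when the first connection was
    # procured; no SQL is emitted
    print(conn.default_isolation_level)

    # live value on the underlying DBAPI connection; typically emits a query
    print(conn.get_isolation_level())

    # "raw" pool-managed DBAPI connection for special situations
    dbapi_conn = engine.raw_connection()
    try:
        cursor = dbapi_conn.cursor()
        cursor.execute("SELECT 1")
    finally:
        dbapi_conn.close()
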
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index a5af6ff19..f6c2263b3 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -452,14 +452,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
+ is_crud = False
isddl = False
executemany = False
result_map = None
compiled = None
statement = None
- postfetch_cols = None
- prefetch_cols = None
- returning_cols = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -515,10 +513,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if not compiled.can_execute:
raise exc.ArgumentError("Not an executable clause")
- self.execution_options = compiled.statement._execution_options
- if connection._execution_options:
- self.execution_options = dict(self.execution_options)
- self.execution_options.update(connection._execution_options)
+ self.execution_options = compiled.statement._execution_options.union(
+ connection._execution_options)
# compiled clauseelement. process bind params, process table defaults,
# track collections used by ResultProxy to target and process results
@@ -548,6 +544,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.cursor = self.create_cursor()
if self.isinsert or self.isupdate or self.isdelete:
+ self.is_crud = True
self._is_explicit_returning = bool(compiled.statement._returning)
self._is_implicit_returning = bool(
compiled.returning and not compiled.statement._returning)
@@ -681,10 +678,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
return self.execution_options.get("no_parameters", False)
@util.memoized_property
- def is_crud(self):
- return self.isinsert or self.isupdate or self.isdelete
-
- @util.memoized_property
def should_autocommit(self):
autocommit = self.execution_options.get('autocommit',
not self.compiled and
@@ -799,52 +792,84 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount
- def post_insert(self):
-
+ def _setup_crud_result_proxy(self):
+ if self.isinsert and \
+ not self.executemany:
+ if not self._is_implicit_returning and \
+ not self.compiled.inline and \
+ self.dialect.postfetch_lastrowid:
+
+ self._setup_ins_pk_from_lastrowid()
+
+ elif not self._is_implicit_returning:
+ self._setup_ins_pk_from_empty()
+
+ result = self.get_result_proxy()
+
+ if self.isinsert:
+ if self._is_implicit_returning:
+ row = result.fetchone()
+ self.returned_defaults = row
+ self._setup_ins_pk_from_implicit_returning(row)
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+ elif not self._is_explicit_returning:
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+ elif self.isupdate and self._is_implicit_returning:
+ row = result.fetchone()
+ self.returned_defaults = row
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+
+ elif result._metadata is None:
+ # no results, get rowcount
+ # (which requires open cursor on some drivers
+ # such as kintersbasdb, mxodbc)
+ result.rowcount
+ result.close(_autoclose_connection=False)
+ return result
+
+ def _setup_ins_pk_from_lastrowid(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
+ compiled_params = self.compiled_parameters[0]
+
+ lastrowid = self.get_lastrowid()
+ autoinc_col = table._autoincrement_column
+ if autoinc_col is not None:
+ # apply type post processors to the lastrowid
+ proc = autoinc_col.type._cached_result_processor(
+ self.dialect, None)
+ if proc is not None:
+ lastrowid = proc(lastrowid)
+ self.inserted_primary_key = [
+ lastrowid if c is autoinc_col else
+ compiled_params.get(key_getter(c), None)
+ for c in table.primary_key
+ ]
- if not self._is_implicit_returning and \
- not self._is_explicit_returning and \
- not self.compiled.inline and \
- self.dialect.postfetch_lastrowid:
-
- lastrowid = self.get_lastrowid()
- autoinc_col = table._autoincrement_column
- if autoinc_col is not None:
- # apply type post processors to the lastrowid
- proc = autoinc_col.type._cached_result_processor(
- self.dialect, None)
- if proc is not None:
- lastrowid = proc(lastrowid)
- self.inserted_primary_key = [
- lastrowid if c is autoinc_col else
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in table.primary_key
- ]
- else:
- self.inserted_primary_key = [
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in table.primary_key
- ]
-
- def _fetch_implicit_returning(self, resultproxy):
+ def _setup_ins_pk_from_empty(self):
+ key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
- row = resultproxy.fetchone()
-
- ipk = []
- for c, v in zip(table.primary_key, self.inserted_primary_key):
- if v is not None:
- ipk.append(v)
- else:
- ipk.append(row[c])
+ compiled_params = self.compiled_parameters[0]
+ self.inserted_primary_key = [
+ compiled_params.get(key_getter(c), None)
+ for c in table.primary_key
+ ]
- self.inserted_primary_key = ipk
- self.returned_defaults = row
+ def _setup_ins_pk_from_implicit_returning(self, row):
+ key_getter = self.compiled._key_getters_for_crud_column[2]
+ table = self.compiled.statement.table
+ compiled_params = self.compiled_parameters[0]
- def _fetch_implicit_update_returning(self, resultproxy):
- row = resultproxy.fetchone()
- self.returned_defaults = row
+ self.inserted_primary_key = [
+ row[col] if value is None else value
+ for col, value in [
+ (col, compiled_params.get(key_getter(col), None))
+ for col in table.primary_key
+ ]
+ ]
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
@@ -956,14 +981,17 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def _process_executesingle_defaults(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
-
prefetch = self.compiled.prefetch
self.current_parameters = compiled_parameters = \
self.compiled_parameters[0]
for c in prefetch:
if self.isinsert:
- val = self.get_insert_default(c)
+ if c.default and \
+ not c.default.is_sequence and c.default.is_scalar:
+ val = c.default.arg
+ else:
+ val = self.get_insert_default(c)
else:
val = self.get_update_default(c)
@@ -972,6 +1000,4 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
del self.current_parameters
-
-
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
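
The refactored ``_setup_crud_result_proxy()`` above is what assembles values such as ``ResultProxy.inserted_primary_key``; calling code is unaffected. A minimal sketch, where ``my_table`` is a hypothetical :class:`.Table` with an autoincrementing primary key::

    with engine.begin() as conn:
        result = conn.execute(my_table.insert().values(data="some value"))
        # built from lastrowid, implicit RETURNING, or the compiled
        # parameters, depending on dialect capabilities
        print(result.inserted_primary_key)
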
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 0ad2efae0..5f0d74328 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -654,17 +654,82 @@ class Dialect(object):
return None
def reset_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, revert its isolation to the default."""
+ """Given a DBAPI connection, revert its isolation to the default.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+ """
raise NotImplementedError()
def set_isolation_level(self, dbapi_conn, level):
- """Given a DBAPI connection, set its isolation level."""
+ """Given a DBAPI connection, set its isolation level.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+ """
raise NotImplementedError()
def get_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, return its isolation level."""
+ """Given a DBAPI connection, return its isolation level.
+
+ When working with a :class:`.Connection` object, the corresponding
+ DBAPI connection may be procured using the
+ :attr:`.Connection.connection` accessor.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine` isolation level facilities;
+ these APIs should be preferred for most typical use cases.
+
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+
+ """
raise NotImplementedError()
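
These dialect-level methods take the raw DBAPI connection; when starting from a :class:`.Connection`, that object is available via the :attr:`.Connection.connection` accessor as noted above. A sketch, assuming an ``engine`` whose dialect implements the hooks::

    conn = engine.connect()
    dialect = engine.dialect

    # inspect and adjust the isolation level on the DBAPI connection
    level = dialect.get_isolation_level(conn.connection)
    dialect.set_isolation_level(conn.connection, "SERIALIZABLE")
    dialect.reset_isolation_level(conn.connection)
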
@@ -917,7 +982,23 @@ class ExceptionContext(object):
connection = None
"""The :class:`.Connection` in use during the exception.
- This member is always present.
+ This member is present, except in the case of a failure when
+ first connecting.
+
+ .. seealso::
+
+ :attr:`.ExceptionContext.engine`
+
+
+ """
+
+ engine = None
+ """The :class:`.Engine` in use during the exception.
+
+ This member should always be present, even in the case of a failure
+ when first connecting.
+
+ .. versionadded:: 1.0.0
"""
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 2a1def86a..6e102aad6 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -173,7 +173,14 @@ class Inspector(object):
passed as ``None``. For special quoting, use :class:`.quoted_name`.
:param order_by: Optional, may be the string "foreign_key" to sort
- the result on foreign key dependencies.
+ the result on foreign key dependencies. Does not automatically
+ resolve cycles, and will raise :class:`.CircularDependencyError`
+ if cycles exist.
+
+ .. deprecated:: 1.0.0 - see
+ :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
+ of this which resolves foreign key cycles between tables
+ automatically.
.. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
in order of dependee to dependent; that is, in creation
@@ -183,6 +190,8 @@ class Inspector(object):
.. seealso::
+ :meth:`.Inspector.get_sorted_table_and_fkc_names`
+
:attr:`.MetaData.sorted_tables`
"""
@@ -201,6 +210,64 @@ class Inspector(object):
tnames = list(topological.sort(tuples, tnames))
return tnames
+ def get_sorted_table_and_fkc_names(self, schema=None):
+ """Return dependency-sorted table and foreign key constraint names in
+ referred to within a particular schema.
+
+ This will yield 2-tuples of
+ ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+ consisting of table names in CREATE order grouped with the foreign key
+ constraint names that are not detected as belonging to a cycle.
+ The final element
+ will be ``(None, [(tname, fkname), (tname, fkname), ..])``
+ which will consist of remaining
+ foreign key constraint names that would require a separate CREATE
+ step after-the-fact, based on dependencies between tables.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.Inspector.get_table_names`
+
+ :func:`.sort_tables_and_constraints` - similar method which works
+ with an already-given :class:`.MetaData`.
+
+ """
+ if hasattr(self.dialect, 'get_table_names'):
+ tnames = self.dialect.get_table_names(
+ self.bind, schema, info_cache=self.info_cache)
+ else:
+ tnames = self.engine.table_names(schema)
+
+ tuples = set()
+ remaining_fkcs = set()
+
+ fknames_for_table = {}
+ for tname in tnames:
+ fkeys = self.get_foreign_keys(tname, schema)
+ fknames_for_table[tname] = set(
+ [fk['name'] for fk in fkeys]
+ )
+ for fkey in fkeys:
+ if tname != fkey['referred_table']:
+ tuples.add((fkey['referred_table'], tname))
+ try:
+ candidate_sort = list(topological.sort(tuples, tnames))
+ except exc.CircularDependencyError as err:
+ for edge in err.edges:
+ tuples.remove(edge)
+ remaining_fkcs.update(
+ (edge[1], fkc)
+ for fkc in fknames_for_table[edge[1]]
+ )
+
+ candidate_sort = list(topological.sort(tuples, tnames))
+ return [
+ (tname, fknames_for_table[tname].difference(remaining_fkcs))
+ for tname in candidate_sort
+ ] + [(None, list(remaining_fkcs))]
+
def get_temp_table_names(self):
"""return a list of temporary table names for the current bind.
@@ -394,6 +461,12 @@ class Inspector(object):
unique
boolean
+ dialect_options
+ dict of dialect-specific index options. May not be present
+ for all dialects.
+
+ .. versionadded:: 1.0.0
+
:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.
@@ -642,6 +715,8 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ dialect_options = index_d.get('dialect_options', {})
+
duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
@@ -667,7 +742,10 @@ class Inspector(object):
else:
idx_cols.append(idx_col)
- sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+ sa_schema.Index(
+ name, *idx_cols,
+ **dict(list(dialect_options.items()) + [('unique', unique)])
+ )
def _reflect_unique_constraints(
self, table_name, schema, table, cols_by_orig_name,
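
A sketch of the new :meth:`.Inspector.get_sorted_table_and_fkc_names` method in use, for example to drive a dependency-ordered CREATE sequence; an already-configured ``engine`` is assumed::

    from sqlalchemy import inspect

    insp = inspect(engine)

    for tname, fkcs in insp.get_sorted_table_and_fkc_names():
        if tname is not None:
            print("CREATE order:", tname, "with fk constraints:", fkcs)
        else:
            # constraints that participate in cycles; these would need a
            # separate ALTER TABLE ... ADD CONSTRAINT step
            print("remaining fk constraints:", fkcs)
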
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 398ef8df6..fd665ad03 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -86,16 +86,7 @@ class DefaultEngineStrategy(EngineStrategy):
pool = pop_kwarg('pool', None)
if pool is None:
def connect():
- try:
- return dialect.connect(*cargs, **cparams)
- except dialect.dbapi.Error as e:
- invalidated = dialect.is_disconnect(e, None, None)
- util.raise_from_cause(
- exc.DBAPIError.instance(
- None, None, e, dialect.dbapi.Error,
- connection_invalidated=invalidated
- )
- )
+ return dialect.connect(*cargs, **cparams)
creator = pop_kwarg('creator', connect)
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index 637523a0e..e64ab09f4 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -59,7 +59,10 @@ class TLEngine(base.Engine):
# guards against pool-level reapers, if desired.
# or not connection.connection.is_valid:
connection = self._tl_connection_cls(
- self, self.pool.connect(), **kw)
+ self,
+ self._wrap_pool_connect(
+ self.pool.connect, connection),
+ **kw)
self._connections.conn = weakref.ref(connection)
return connection._increment_connect()