summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/engine
diff options
context:
space:
mode:
author    Mike Bayer <mike_mp@zzzcomputing.com>  2019-06-04 17:29:20 -0400
committer Mike Bayer <mike_mp@zzzcomputing.com>  2020-02-21 17:53:33 -0500
commitf559f378c47811b5528ad1769cb86925e85fd1e5 (patch)
treefd8325501a96cf1e4280c15f267f63b2af7b5f97 /lib/sqlalchemy/engine
parent93b7767d00267ebe149cabcae7246b6796352eb8 (diff)
downloadsqlalchemy-f559f378c47811b5528ad1769cb86925e85fd1e5.tar.gz
Result initial introduction
This builds on cc718cccc0bf8a01abdf4068c7ea4f3 which moved RowProxy to Row,
allowing Row to be more like a named tuple.

- KeyedTuple in ORM is replaced with Row

- ResultSetMetaData broken out into "simple" and "cursor" versions for ORM
  and Core, as well as LegacyCursor version.

- Row now has _mapping attribute that supplies full mapping behavior.
  Row and SimpleRow both have named tuple behavior otherwise.  LegacyRow
  has some mapping features on the tuple which emit deprecation warnings
  (e.g. keys(), values(), etc).  the biggest change for mapping->tuple is
  the behavior of __contains__ which moves from testing of "key in row"
  to "value in row".

- ResultProxy breaks into ResultProxy and FutureResult (interim), the
  latter has the newer APIs.  Made available to dialects using execution
  options.

- internal reflection methods and most tests move off of implicit Row
  mapping behavior and move to row._mapping, result.mappings() method
  using future result

- a new strategy system for cursor handling replaces the various
  subclasses of RowProxy

- some execution context adjustments.  We will leave EC in but refined
  things like get_result_proxy() and out parameter handling.  Dialects
  for 1.4 will need to adjust from get_result_proxy() to
  get_result_cursor_strategy(), if they are using this method

- out parameter handling now accommodated by get_out_parameter_values()
  EC method.  Oracle changes for this.  external dialect for DB2 for
  example will also need to adjust for this.

- deprecate case_insensitive flag for engine / result, this feature is
  not used

- mapping-methods on Row are deprecated, and replaced with
  Row._mapping.<meth>, including:

      row.keys() -> use row._mapping.keys()
      row.items() -> use row._mapping.items()
      row.values() -> use row._mapping.values()
      key in row -> use key in row._mapping
      int in row -> use int < len(row)

Fixes: #4710
Fixes: #4878
Change-Id: Ieb9085e9bcff564359095b754da9ae0af55679f0
Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--lib/sqlalchemy/engine/__init__.py13
-rw-r--r--lib/sqlalchemy/engine/base.py9
-rw-r--r--lib/sqlalchemy/engine/create.py7
-rw-r--r--lib/sqlalchemy/engine/default.py114
-rw-r--r--lib/sqlalchemy/engine/interfaces.py101
-rw-r--r--lib/sqlalchemy/engine/result.py1402
-rw-r--r--lib/sqlalchemy/engine/row.py501
7 files changed, 1450 insertions, 697 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index a129069a4..7f0270b42 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -38,10 +38,23 @@ from .result import BufferedColumnResultProxy # noqa
from .result import BufferedColumnRow # noqa
from .result import BufferedRowResultProxy # noqa
from .result import FullyBufferedResultProxy # noqa
+from .result import LegacyRow # noqa
+from .result import result_tuple # noqa
from .result import ResultProxy # noqa
from .result import Row # noqa
+from .result import RowMapping # noqa
from .util import connection_memoize # noqa
from ..sql import ddl # noqa
__all__ = ("create_engine", "engine_from_config", "create_mock_engine")
+
+
+def __go(lcls):
+ from .. import future
+ from . import result
+
+ result._future_Result = future.Result
+
+
+__go(locals())
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 29df67dcb..ce6c2e9c6 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1297,12 +1297,7 @@ class Connection(Connectable):
if context.compiled:
context.post_exec()
- if context.is_crud or context.is_text:
- result = context._setup_crud_result_proxy()
- else:
- result = context.get_result_proxy()
- if result._metadata is None:
- result._soft_close()
+ result = context._setup_result_proxy()
if context.should_autocommit and self._root.__transaction is None:
self._root._commit_impl(autocommit=True)
@@ -1310,6 +1305,8 @@ class Connection(Connectable):
# for "connectionless" execution, we have to close this
# Connection after the statement is complete.
if self.should_close_with_result:
+ assert not context._is_future_result
+
# ResultProxy already exhausted rows / has no rows.
# close us now
if result._soft_closed:
diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py
index 5198c8cd6..3d50b0828 100644
--- a/lib/sqlalchemy/engine/create.py
+++ b/lib/sqlalchemy/engine/create.py
@@ -34,6 +34,13 @@ from ..sql import compiler
'expressions, or an "empty set" SELECT, at statement execution'
"time.",
),
+ case_sensitive=(
+ "1.4",
+ "The :paramref:`.create_engine.case_sensitive` parameter "
+ "is deprecated and will be removed in a future release. "
+ "Applications should work with result column names in a case "
+ "sensitive fashion.",
+ ),
)
def create_engine(url, **kwargs):
"""Create a new :class:`.Engine` instance.
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 7d36345fd..7efc4bda2 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -19,7 +19,7 @@ import re
import weakref
from . import interfaces
-from . import result
+from . import result as _result
from .. import event
from .. import exc
from .. import pool
@@ -201,6 +201,13 @@ class DefaultDialect(interfaces.Dialect):
'expressions, or an "empty set" SELECT, at statement execution'
"time.",
),
+ case_sensitive=(
+ "1.4",
+ "The :paramref:`.create_engine.case_sensitive` parameter "
+ "is deprecated and will be removed in a future release. "
+ "Applications should work with result column names in a case "
+ "sensitive fashion.",
+ ),
)
def __init__(
self,
@@ -667,6 +674,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
returned_defaults = None
_is_implicit_returning = False
_is_explicit_returning = False
+ _is_future_result = False
+ _is_server_side = False
# a hook for SQLite's translation of
# result column names
@@ -725,6 +734,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# we get here
assert compiled.can_execute
+ self._is_future_result = connection._execution_options.get(
+ "future_result", False
+ )
self.execution_options = compiled.execution_options.union(
connection._execution_options
)
@@ -860,6 +872,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.dialect = connection.dialect
self.is_text = True
+ self._is_future_result = connection._execution_options.get(
+ "future_result", False
+ )
+
# plain text statement
self.execution_options = connection._execution_options
@@ -1035,6 +1051,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def pre_exec(self):
pass
+ def get_out_parameter_values(self, names):
+ raise NotImplementedError(
+ "This dialect does not support OUT parameters"
+ )
+
def post_exec(self):
pass
@@ -1051,27 +1072,18 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def get_lastrowid(self):
"""return self.cursor.lastrowid, or equivalent, after an INSERT.
- This may involve calling special cursor functions,
- issuing a new SELECT on the cursor (or a new one),
- or returning a stored value that was
+ This may involve calling special cursor functions, issuing a new SELECT
+ on the cursor (or a new one), or returning a stored value that was
calculated within post_exec().
- This function will only be called for dialects
- which support "implicit" primary key generation,
- keep preexecute_autoincrement_sequences set to False,
- and when no explicit id value was bound to the
- statement.
+ This function will only be called for dialects which support "implicit"
+ primary key generation, keep preexecute_autoincrement_sequences set to
+ False, and when no explicit id value was bound to the statement.
- The function is called once, directly after
- post_exec() and before the transaction is committed
- or ResultProxy is generated. If the post_exec()
- method assigns a value to `self._lastrowid`, the
- value is used in place of calling get_lastrowid().
-
- Note that this method is *not* equivalent to the
- ``lastrowid`` method on ``ResultProxy``, which is a
- direct proxy to the DBAPI ``lastrowid`` accessor
- in all cases.
+ The function is called once for an INSERT statement that would need to
+ return the last inserted primary key for those dialects that make use
+ of the lastrowid concept. In these cases, it is called directly after
+ :meth:`.ExecutionContext.post_exec`.
"""
return self.cursor.lastrowid
@@ -1079,11 +1091,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def handle_dbapi_exception(self, e):
pass
- def get_result_proxy(self):
+ def get_result_cursor_strategy(self, result):
if self._is_server_side:
- return result.BufferedRowResultProxy(self)
+ strat_cls = _result.BufferedRowCursorFetchStrategy
else:
- return result.ResultProxy(self)
+ strat_cls = _result.DefaultCursorFetchStrategy
+
+ return strat_cls.create(result)
@property
def rowcount(self):
@@ -1095,6 +1109,49 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount
+ def _setup_result_proxy(self):
+ if self.is_crud or self.is_text:
+ result = self._setup_crud_result_proxy()
+ else:
+ result = _result.ResultProxy._create_for_context(self)
+
+ if (
+ self.compiled
+ and not self.isddl
+ and self.compiled.has_out_parameters
+ ):
+ self._setup_out_parameters(result)
+
+ return result
+
+ def _setup_out_parameters(self, result):
+
+ out_bindparams = [
+ (param, name)
+ for param, name in self.compiled.bind_names.items()
+ if param.isoutparam
+ ]
+ out_parameters = {}
+
+ for bindparam, raw_value in zip(
+ [param for param, name in out_bindparams],
+ self.get_out_parameter_values(
+ [name for param, name in out_bindparams]
+ ),
+ ):
+
+ type_ = bindparam.type
+ impl_type = type_.dialect_impl(self.dialect)
+ dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
+ result_processor = impl_type.result_processor(
+ self.dialect, dbapi_type
+ )
+ if result_processor is not None:
+ raw_value = result_processor(raw_value)
+ out_parameters[bindparam.key] = raw_value
+
+ result.out_parameters = out_parameters
+
def _setup_crud_result_proxy(self):
if self.isinsert and not self.executemany:
if (
@@ -1108,11 +1165,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
elif not self._is_implicit_returning:
self._setup_ins_pk_from_empty()
- result = self.get_result_proxy()
+ result = _result.ResultProxy._create_for_context(self)
if self.isinsert:
if self._is_implicit_returning:
- row = result.fetchone()
+ row = result._onerow()
self.returned_defaults = row
self._setup_ins_pk_from_implicit_returning(row)
result._soft_close()
@@ -1121,7 +1178,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
result._soft_close()
result._metadata = None
elif self.isupdate and self._is_implicit_returning:
- row = result.fetchone()
+ row = result._onerow()
self.returned_defaults = row
result._soft_close()
result._metadata = None
@@ -1179,8 +1236,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
+
+ # TODO: why are we using keyed index here? can't we get the ints?
+ # can compiler build up the structure here as far as what was
+ # explicit and what comes back in returning?
+ row_mapping = row._mapping
self.inserted_primary_key = [
- row[col] if value is None else value
+ row_mapping[col] if value is None else value
for col, value in [
(col, compiled_params.get(key_getter(col), None))
for col in table.primary_key
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 237eb0f2f..3d4308df2 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1044,6 +1044,44 @@ class ExecutionContext(object):
raise NotImplementedError()
+ def get_out_parameter_values(self, out_param_names):
+ """Return a sequence of OUT parameter values from a cursor.
+
+ For dialects that support OUT parameters, this method will be called
+ when there is a :class:`.SQLCompiler` object which has the
+ :attr:`.SQLCompiler.has_out_parameters` flag set. This flag in turn
+ will be set to True if the statement itself has :class:`.BindParameter`
+ objects that have the ``.isoutparam`` flag set which are consumed by
+ the :meth:`.SQLCompiler.visit_bindparam` method. If the dialect
+ compiler produces :class:`.BindParameter` objects with ``.isoutparam``
+ set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it
+ should set this flag explicitly.
+
+ The list of names that were rendered for each bound parameter
+ is passed to the method. The method should then return a sequence of
+ values corresponding to the list of parameter objects. Unlike in
+ previous SQLAlchemy versions, the values can be the **raw values** from
+ the DBAPI; the execution context will apply the appropriate type
+ handler based on what's present in self.compiled.binds and update the
+ values. The processed dictionary will then be made available via the
+ ``.out_parameters`` collection on the result object. Note that
+ SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0
+ transition.
+
+ .. versionadded:: 1.4 - added
+ :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked
+ automatically by the :class:`.DefaultExecutionContext` when there
+ are :class:`.BindParameter` objects with the ``.isoutparam`` flag
+ set. This replaces the practice of setting out parameters within
+ the now-removed ``get_result_proxy()`` method.
+
+ .. seealso::
+
+ :meth:`.ExecutionContext.get_result_cursor_strategy`
+
+ """
+ raise NotImplementedError()
+
def post_exec(self):
"""Called after the execution of a compiled statement.
@@ -1054,12 +1092,67 @@ class ExecutionContext(object):
raise NotImplementedError()
- def result(self):
- """Return a result object corresponding to this ExecutionContext.
+ def get_result_cursor_strategy(self, result):
+ """Return a result cursor strategy for a given result object.
- Returns a ResultProxy.
- """
+ This method is implemented by the :class:`.DefaultDialect` and is
+ only needed by implementing dialects in the case where some special
+ steps regarding the cursor must be taken, such as manufacturing
+ fake results from some other element of the cursor, or pre-buffering
+ the cursor's results.
+
+ A simplified version of the default implementation is::
+
+ from sqlalchemy.engine.result import DefaultCursorFetchStrategy
+
+ class MyExecutionContext(DefaultExecutionContext):
+ def get_result_cursor_strategy(self, result):
+ return DefaultCursorFetchStrategy.create(result)
+
+ Above, the :class:`.DefaultCursorFetchStrategy` will be applied
+ to the result object. For results that are pre-buffered from a
+ cursor that might be closed, an implementation might be::
+
+ from sqlalchemy.engine.result import (
+ FullyBufferedCursorFetchStrategy
+ )
+
+ class MyExecutionContext(DefaultExecutionContext):
+ _pre_buffered_result = None
+
+ def pre_exec(self):
+ if self.special_condition_prebuffer_cursor():
+ self._pre_buffered_result = (
+ self.cursor.description,
+ self.cursor.fetchall()
+ )
+
+ def get_result_cursor_strategy(self, result):
+ if self._pre_buffered_result:
+ description, cursor_buffer = self._pre_buffered_result
+ return (
+ FullyBufferedCursorFetchStrategy.
+ create_from_buffer(
+ result, description, cursor_buffer
+ )
+ )
+ else:
+ return DefaultCursorFetchStrategy.create(result)
+
+ This method replaces the previous not-quite-documented
+ ``get_result_proxy()`` method.
+
+ .. versionadded:: 1.4 - result objects now interpret cursor results
+ based on a pluggable "strategy" object, which is delivered
+ by the :class:`.ExecutionContext` via the
+ :meth:`.ExecutionContext.get_result_cursor_strategy` method.
+
+ .. seealso::
+
+ :meth:`.ExecutionContext.get_out_parameter_values`
+
+ """
raise NotImplementedError()
def handle_dbapi_exception(self, e):
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 13738cb46..1a63c307b 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -5,13 +5,20 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Define result set constructs including :class:`.ResultProxy`
-and :class:`.Row`."""
+"""Define result set constructs including :class:`.Result`"""
import collections
+import functools
import operator
+from .row import _baserow_usecext
+from .row import BaseRow # noqa
+from .row import LegacyRow # noqa
+from .row import Row # noqa
+from .row import RowMapping # noqa
+from .row import RowProxy # noqa
+from .row import rowproxy_reconstructor # noqa
from .. import exc
from .. import util
from ..sql import expression
@@ -21,264 +28,81 @@ from ..sql.compiler import RM_NAME
from ..sql.compiler import RM_OBJECTS
from ..sql.compiler import RM_RENDERED_NAME
from ..sql.compiler import RM_TYPE
-from ..util.compat import collections_abc
+if _baserow_usecext:
+ from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter
_UNPICKLED = util.symbol("unpickled")
-# This reconstructor is necessary so that pickles with the C extension or
-# without use the same Binary format.
-try:
- # We need a different reconstructor on the C extension so that we can
- # add extra checks that fields have correctly been initialized by
- # __setstate__.
- from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
-
- # The extra function embedding is needed so that the
- # reconstructor function has the same signature whether or not
- # the extension is present.
- def rowproxy_reconstructor(cls, state):
- return safe_rowproxy_reconstructor(cls, state)
+# cyclical import for sqlalchemy.future
+_future_Result = None
+# metadata entry tuple indexes.
+# using raw tuple is faster than namedtuple.
+MD_INDEX = 0 # integer index in cursor.description
+MD_OBJECTS = 1 # other string keys and ColumnElement obj that can match
+MD_LOOKUP_KEY = 2 # string key we usually expect for key-based lookup
+MD_RENDERED_NAME = 3 # name that is usually in cursor.description
+MD_PROCESSOR = 4 # callable to process a result value into a row
+MD_UNTRANSLATED = 5 # raw name from cursor.description
-except ImportError:
-
- def rowproxy_reconstructor(cls, state):
- obj = cls.__new__(cls)
- obj.__setstate__(state)
- return obj
+class ResultMetaData(object):
+ __slots__ = ()
-try:
- from sqlalchemy.cresultproxy import BaseRow
- from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter
+ def _has_key(self, key):
+ return key in self._keymap
- _baserow_usecext = True
-except ImportError:
- _baserow_usecext = False
+ def _key_fallback(self, key):
+ if isinstance(key, int):
+ raise IndexError(key)
+ else:
+ raise KeyError(key)
- class BaseRow(object):
- __slots__ = ("_parent", "_data", "_keymap")
- def __init__(self, parent, processors, keymap, data):
- """Row objects are constructed by ResultProxy objects."""
+class SimpleResultMetaData(ResultMetaData):
+ __slots__ = "keys", "_keymap", "_processors"
- self._parent = parent
+ def __init__(self, keys, extra=None):
+ self.keys = list(keys)
- self._data = tuple(
- [
- proc(value) if proc else value
- for proc, value in zip(processors, data)
- ]
- )
- self._keymap = keymap
+ len_keys = len(keys)
- def __reduce__(self):
- return (
- rowproxy_reconstructor,
- (self.__class__, self.__getstate__()),
+ self._keymap = {
+ name: (index, name) for index, name in enumerate(self.keys)
+ }
+ if not _baserow_usecext:
+ self._keymap.update(
+ {
+ index: (index, None, self.keys[index])
+ for index in range(len_keys)
+ }
)
-
- def _values_impl(self):
- return list(self)
-
- def __iter__(self):
- return iter(self._data)
-
- def __len__(self):
- return len(self._data)
-
- def __hash__(self):
- return hash(self._data)
-
- def _get_by_key_impl(self, key):
- try:
+ if extra:
+ for key, ex in zip(keys, extra):
rec = self._keymap[key]
- except KeyError:
- rec = self._parent._key_fallback(key)
- except TypeError:
- # the non-C version detects a slice using TypeError.
- # this is pretty inefficient for the slice use case
- # but is more efficient for the integer use case since we
- # don't have to check it up front.
- if isinstance(key, slice):
- return tuple(self._data[key])
- else:
- raise
- if rec[MD_INDEX] is None:
- raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in "
- "result set column descriptions" % rec[MD_LOOKUP_KEY]
- )
-
- return self._data[rec[MD_INDEX]]
-
- def _get_by_key_impl_mapping(self, key):
- # the C code has two different methods so that we can distinguish
- # between tuple-like keys (integers, slices) and mapping-like keys
- # (strings, objects)
- return self._get_by_key_impl(key)
-
- def __getattr__(self, name):
- try:
- return self._get_by_key_impl_mapping(name)
- except KeyError as e:
- raise AttributeError(e.args[0])
-
-
-class Row(BaseRow, collections_abc.Sequence):
- """Represent a single result row.
-
- The :class:`.Row` object is retrieved from a database result, from the
- :class:`.ResultProxy` object using methods like
- :meth:`.ResultProxy.fetchall`.
-
- The :class:`.Row` object seeks to act mostly like a Python named
- tuple, but also provides some Python dictionary behaviors at the same time.
-
- .. seealso::
-
- :ref:`coretutorial_selecting` - includes examples of selecting
- rows from SELECT statements.
-
- .. versionchanged 1.4::
-
- Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
- "proxy" object in that it contains the final form of data within it.
-
- """
-
- __slots__ = ()
-
- def __contains__(self, key):
- return self._parent._has_key(key)
+ self._keymap.update({e: rec for e in ex})
+ self._processors = [None] * len(keys)
def __getstate__(self):
- return {"_parent": self._parent, "_data": self._data}
+ return {"keys": self.keys}
def __setstate__(self, state):
- self._parent = parent = state["_parent"]
- self._data = state["_data"]
- self._keymap = parent._keymap
-
- def _op(self, other, op):
- return (
- op(tuple(self), tuple(other))
- if isinstance(other, Row)
- else op(tuple(self), other)
- )
-
- __hash__ = BaseRow.__hash__
-
- def __lt__(self, other):
- return self._op(other, operator.lt)
-
- def __le__(self, other):
- return self._op(other, operator.le)
+ self.__init__(state["keys"])
- def __ge__(self, other):
- return self._op(other, operator.ge)
-
- def __gt__(self, other):
- return self._op(other, operator.gt)
-
- def __eq__(self, other):
- return self._op(other, operator.eq)
-
- def __ne__(self, other):
- return self._op(other, operator.ne)
-
- def __repr__(self):
- return repr(sql_util._repr_row(self))
-
- def has_key(self, key):
- """Return True if this :class:`.Row` contains the given key.
-
- Through the SQLAlchemy 1.x series, the ``__contains__()`` method
- of :class:`.Row` also links to :meth:`.Row.has_key`, in that
- an expression such as ::
-
- "some_col" in row
-
- Will return True if the row contains a column named ``"some_col"``,
- in the way that a Python mapping works.
-
- However, it is planned that the 2.0 series of SQLAlchemy will reverse
- this behavior so that ``__contains__()`` will refer to a value being
- present in the row, in the way that a Python tuple works.
-
- """
-
- return self._parent._has_key(key)
-
- def __getitem__(self, key):
- return self._get_by_key_impl(key)
-
- def items(self):
- """Return a list of tuples, each tuple containing a key/value pair.
-
- This method is analogous to the Python dictionary ``.items()`` method,
- except that it returns a list, not an iterator.
-
- """
-
- return [(key, self[key]) for key in self.keys()]
-
- def keys(self):
- """Return the list of keys as strings represented by this
- :class:`.Row`.
-
- This method is analogous to the Python dictionary ``.keys()`` method,
- except that it returns a list, not an iterator.
-
- """
-
- return [k for k in self._parent.keys if k is not None]
-
- def iterkeys(self):
- """Return a an iterator against the :meth:`.Row.keys` method.
-
- This method is analogous to the Python-2-only dictionary
- ``.iterkeys()`` method.
-
- """
- return iter(self._parent.keys)
-
- def itervalues(self):
- """Return a an iterator against the :meth:`.Row.values` method.
-
- This method is analogous to the Python-2-only dictionary
- ``.itervalues()`` method.
-
- """
- return iter(self)
-
- def values(self):
- """Return the values represented by this :class:`.Row` as a list.
-
- This method is analogous to the Python dictionary ``.values()`` method,
- except that it returns a list, not an iterator.
-
- """
+ def _has_key(self, key):
+ return key in self._keymap
- return self._values_impl()
+ def _contains(self, value, row):
+ return value in row._data
-BaseRowProxy = BaseRow
-RowProxy = Row
+def result_tuple(fields, extra=None):
+ parent = SimpleResultMetaData(fields, extra)
+ return functools.partial(Row, parent, parent._processors, parent._keymap)
-# metadata entry tuple indexes.
-# using raw tuple is faster than namedtuple.
-MD_INDEX = 0 # integer index in cursor.description
-MD_OBJECTS = 1 # other string keys and ColumnElement obj that can match
-MD_LOOKUP_KEY = 2 # string key we usually expect for key-based lookup
-MD_RENDERED_NAME = 3 # name that is usually in cursor.description
-MD_PROCESSOR = 4 # callable to process a result value into a row
-MD_UNTRANSLATED = 5 # raw name from cursor.description
-
-
-class ResultMetaData(object):
+class CursorResultMetaData(ResultMetaData):
"""Handle cursor.description, applying additional info from an execution
context."""
@@ -654,6 +478,7 @@ class ResultMetaData(object):
match_map = self._create_description_match_map(
result_columns, case_sensitive, loose_column_name_matching
)
+
self.matched_on_name = True
for (
idx,
@@ -671,15 +496,6 @@ class ResultMetaData(object):
mapped_type = ctx_rec[2]
yield idx, colname, mapped_type, coltype, obj, untranslated
- def _merge_cols_by_none(self, context, cursor_description):
- for (
- idx,
- colname,
- untranslated,
- coltype,
- ) in self._colnames_from_description(context, cursor_description):
- yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated
-
@classmethod
def _create_description_match_map(
cls,
@@ -695,6 +511,7 @@ class ResultMetaData(object):
d = {}
for elem in result_columns:
key = elem[RM_RENDERED_NAME]
+
if not case_sensitive:
key = key.lower()
if key in d:
@@ -717,13 +534,134 @@ class ResultMetaData(object):
d.setdefault(
r_key, (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE])
)
+
return d
+ def _merge_cols_by_none(self, context, cursor_description):
+ for (
+ idx,
+ colname,
+ untranslated,
+ coltype,
+ ) in self._colnames_from_description(context, cursor_description):
+ yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated
+
+ def _key_fallback(self, key, raiseerr=True):
+ if raiseerr:
+ raise exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'"
+ % util.string_or_unprintable(key)
+ )
+ else:
+ return None
+
+ def _raise_for_ambiguous_column_name(self, rec):
+ raise exc.InvalidRequestError(
+ "Ambiguous column name '%s' in "
+ "result set column descriptions" % rec[MD_LOOKUP_KEY]
+ )
+
+ def _warn_for_nonint(self, key):
+ raise TypeError(
+ "TypeError: tuple indices must be integers or slices, not %s"
+ % type(key).__name__
+ )
+
+ def _getter(self, key, raiseerr=True):
+ try:
+ rec = self._keymap[key]
+ except KeyError:
+ rec = self._key_fallback(key, raiseerr)
+ if rec is None:
+ return None
+
+ index, obj = rec[0:2]
+
+ if index is None:
+ self._raise_for_ambiguous_column_name(rec)
+
+ return operator.methodcaller("_get_by_key_impl_mapping", index)
+
+ def _tuple_getter(self, keys, raiseerr=True):
+ """Given a list of keys, return a callable that will deliver a tuple.
+
+ This is strictly used by the ORM and the keys are Column objects.
+ However, this might be some nice-ish feature if we could find a very
+ clean way of presenting it.
+
+ note that in the new world of "row._mapping", this is a mapping-getter.
+ maybe the name should indicate that somehow.
+
+
+ """
+ indexes = []
+ for key in keys:
+ try:
+ rec = self._keymap[key]
+ except KeyError:
+ rec = self._key_fallback(key, raiseerr)
+ if rec is None:
+ return None
+
+ index, obj = rec[0:2]
+
+ if index is None:
+ self._raise_for_ambiguous_column_name(obj)
+ indexes.append(index)
+
+ if _baserow_usecext:
+ return _tuplegetter(*indexes)
+ else:
+ return self._pure_py_tuplegetter(*indexes)
+
+ def _pure_py_tuplegetter(self, *indexes):
+ getters = [
+ operator.methodcaller("_get_by_key_impl_mapping", index)
+ for index in indexes
+ ]
+ return lambda rec: tuple(getter(rec) for getter in getters)
+
+ def __getstate__(self):
+ return {
+ "_keymap": {
+ key: (rec[MD_INDEX], _UNPICKLED, key)
+ for key, rec in self._keymap.items()
+ if isinstance(key, util.string_types + util.int_types)
+ },
+ "keys": self.keys,
+ "case_sensitive": self.case_sensitive,
+ "matched_on_name": self.matched_on_name,
+ }
+
+ def __setstate__(self, state):
+ self._processors = [None for _ in range(len(state["keys"]))]
+ self._keymap = state["_keymap"]
+
+ self.keys = state["keys"]
+ self.case_sensitive = state["case_sensitive"]
+ self.matched_on_name = state["matched_on_name"]
+
+
+class LegacyCursorResultMetaData(CursorResultMetaData):
+ def _contains(self, value, row):
+ key = value
+ if key in self._keymap:
+ util.warn_deprecated(
+ "Using the 'in' operator to test for string or column "
+ "keys, or integer indexes, in a :class:`.Row` object is "
+ "deprecated and will "
+ "be removed in a future release. "
+ "Use the `Row._fields` or `Row._mapping` attribute, i.e. "
+ "'key in row._fields'"
+ )
+ return True
+ else:
+ return self._key_fallback(key, False) is not None
+
def _key_fallback(self, key, raiseerr=True):
map_ = self._keymap
result = None
- # lowercase col support will be deprecated, at the
- # create_engine() / dialect level
+
if isinstance(key, util.string_types):
result = map_.get(key if self.case_sensitive else key.lower())
elif isinstance(key, expression.ColumnElement):
@@ -786,170 +724,392 @@ class ResultMetaData(object):
map_[key] = result
return result
+ def _warn_for_nonint(self, key):
+ util.warn_deprecated_20(
+ "Using non-integer/slice indices on Row is deprecated and will "
+ "be removed in version 2.0; please use row._mapping[<key>], or "
+ "the mappings() accessor on the sqlalchemy.future result object.",
+ stacklevel=4,
+ )
+
def _has_key(self, key):
if key in self._keymap:
return True
else:
return self._key_fallback(key, False) is not None
- def _getter(self, key, raiseerr=True):
- try:
- rec = self._keymap[key]
- except KeyError:
- rec = self._key_fallback(key, raiseerr)
- if rec is None:
- return None
- index, obj = rec[0:2]
+class CursorFetchStrategy(object):
+ """Define a cursor strategy for a result object.
- if index is None:
- raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in "
- "result set column descriptions" % rec[MD_LOOKUP_KEY]
- )
+ Subclasses define different ways of fetching rows, typically but
+ not necessarily using a DBAPI cursor object.
- return operator.methodcaller("_get_by_key_impl", index)
+ .. versionadded:: 1.4
- def _tuple_getter(self, keys, raiseerr=True):
- """Given a list of keys, return a callable that will deliver a tuple.
+ """
- This is strictly used by the ORM and the keys are Column objects.
- However, this might be some nice-ish feature if we could find a very
- clean way of presenting it.
+ __slots__ = ("dbapi_cursor", "cursor_description")
- note that in the new world of "row._mapping", this is a mapping-getter.
- maybe the name should indicate that somehow.
+ def __init__(self, dbapi_cursor, cursor_description):
+ self.dbapi_cursor = dbapi_cursor
+ self.cursor_description = cursor_description
+ @classmethod
+ def create(cls, result):
+ raise NotImplementedError()
- """
- indexes = []
- for key in keys:
- try:
- rec = self._keymap[key]
- except KeyError:
- rec = self._key_fallback(key, raiseerr)
- if rec is None:
- return None
+ def soft_close(self, result):
+ raise NotImplementedError()
- index, obj = rec[0:2]
+ def hard_close(self, result):
+ raise NotImplementedError()
- if index is None:
- raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in "
- "result set column descriptions" % obj
- )
- indexes.append(index)
+ def fetchone(self):
+ raise NotImplementedError()
- if _baserow_usecext:
- return _tuplegetter(*indexes)
+ def fetchmany(self, size=None):
+ raise NotImplementedError()
+
+ def fetchall(self):
+ raise NotImplementedError()
+
+
+class NoCursorDQLFetchStrategy(CursorFetchStrategy):
+ """Cursor strategy for a DQL result that has no open cursor.
+
+ This is a result set that can return rows, i.e. for a SELECT, or for an
+ INSERT, UPDATE, DELETE that includes RETURNING. However it is in the state
+ where the cursor is closed and no rows remain available. The owning result
+ object may or may not be "hard closed", which determines if the fetch
+ methods send empty results or raise for closed result.
+
+ """
+
+ __slots__ = ("closed",)
+
+ def __init__(self, closed):
+ self.closed = closed
+ self.cursor_description = None
+
+ def soft_close(self, result):
+ pass
+
+ def hard_close(self, result):
+ self.closed = True
+
+ def fetchone(self):
+ return self._non_result(None)
+
+ def fetchmany(self, size=None):
+ return self._non_result([])
+
+ def fetchall(self):
+ return self._non_result([])
+
+ def _non_result(self, default):
+ if self.closed:
+ raise exc.ResourceClosedError("This result object is closed.")
else:
- return self._pure_py_tuplegetter(*indexes)
+ return default
- def _pure_py_tuplegetter(self, *indexes):
- getters = [
- operator.methodcaller("_get_by_key_impl", index)
- for index in indexes
- ]
- return lambda rec: tuple(getter(rec) for getter in getters)
- def __getstate__(self):
- return {
- "_keymap": {
- key: (rec[MD_INDEX], _UNPICKLED, key)
- for key, rec in self._keymap.items()
- if isinstance(key, util.string_types + util.int_types)
- },
- "keys": self.keys,
- "case_sensitive": self.case_sensitive,
- "matched_on_name": self.matched_on_name,
- }
+class NoCursorDMLFetchStrategy(CursorFetchStrategy):
+ """Cursor strategy for a DML result that has no open cursor.
- def __setstate__(self, state):
- self._processors = [None for _ in range(len(state["keys"]))]
- self._keymap = state["_keymap"]
+ This is a result set that does not return rows, i.e. for an INSERT,
+ UPDATE, DELETE that does not include RETURNING.
- self.keys = state["keys"]
- self.case_sensitive = state["case_sensitive"]
- self.matched_on_name = state["matched_on_name"]
+ """
+ __slots__ = ("closed",)
-class ResultProxy(object):
- """A facade around a DBAPI cursor object.
+ def __init__(self, closed):
+ self.closed = closed
+ self.cursor_description = None
- Returns database rows via the :class:`.Row` class, which provides
- additional API features and behaviors on top of the raw data returned
- by the DBAPI.
+ def soft_close(self, result):
+ pass
+
+ def hard_close(self, result):
+ self.closed = True
+
+ def fetchone(self):
+ return self._non_result(None)
+
+ def fetchmany(self, size=None):
+ return self._non_result([])
+
+ def fetchall(self):
+ return self._non_result([])
+
+ def _non_result(self, default):
+ raise exc.ResourceClosedError(
+ "This result object does not return rows. "
+ "It has been closed automatically."
+ )
+
+
+class DefaultCursorFetchStrategy(CursorFetchStrategy):
+ """Call fetch methods from a DBAPI cursor.
+
+ Alternate versions of this class may instead buffer the rows from
+ cursors or not use cursors at all.
+
+ """
+
+ @classmethod
+ def create(cls, result):
+ dbapi_cursor = result.cursor
+ description = dbapi_cursor.description
+
+ if description is None:
+ return NoCursorDMLFetchStrategy(False)
+ else:
+ return cls(dbapi_cursor, description)
+
+ def soft_close(self, result):
+ result.cursor_strategy = NoCursorDQLFetchStrategy(False)
+
+ def hard_close(self, result):
+ result.cursor_strategy = NoCursorDQLFetchStrategy(True)
+
+ def fetchone(self):
+ return self.dbapi_cursor.fetchone()
+
+ def fetchmany(self, size=None):
+ if size is None:
+ return self.dbapi_cursor.fetchmany()
+ else:
+ return self.dbapi_cursor.fetchmany(size)
+
+ def fetchall(self):
+ return self.dbapi_cursor.fetchall()
+
+
+class BufferedRowCursorFetchStrategy(DefaultCursorFetchStrategy):
+ """A cursor fetch strategy with row buffering behavior.
+
+ This strategy buffers the contents of a selection of rows
+ before ``fetchone()`` is called. This is to allow the results of
+ ``cursor.description`` to be available immediately, when
+ interfacing with a DB-API that requires rows to be consumed before
+ this information is available (currently psycopg2, when used with
+ server-side cursors).
+
+ The pre-fetching behavior fetches only one row initially, and then
+ grows its buffer size by a fixed amount with each successive need
+ for additional rows up to the ``max_row_buffer`` size, which defaults
+ to 1000::
+
+ with psycopg2_engine.connect() as conn:
+
+ result = conn.execution_options(
+ stream_results=True, max_row_buffer=50
+ ).execute("select * from table")
+
+ .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
.. seealso::
- :ref:`coretutorial_selecting` - introductory material for accessing
- :class:`.ResultProxy` and :class:`.Row` objects.
+ :ref:`psycopg2_execution_options`
+ """
+
+ __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize")
+
+ def __init__(
+ self, max_row_buffer, dbapi_cursor, description, initial_buffer
+ ):
+ super(BufferedRowCursorFetchStrategy, self).__init__(
+ dbapi_cursor, description
+ )
+
+ self._max_row_buffer = max_row_buffer
+ self._growth_factor = 5
+ self._rowbuffer = initial_buffer
+
+ self._bufsize = min(self._max_row_buffer, self._growth_factor)
+
+ @classmethod
+ def create(cls, result):
+ """Buffered row strategy has to buffer the first rows *before*
+ cursor.description is fetched so that it works with named cursors
+ correctly
+
+ """
+
+ dbapi_cursor = result.cursor
+
+ initial_buffer = collections.deque(dbapi_cursor.fetchmany(1))
+
+ description = dbapi_cursor.description
+
+ if description is None:
+ return NoCursorDMLFetchStrategy(False)
+ else:
+ max_row_buffer = result.context.execution_options.get(
+ "max_row_buffer", 1000
+ )
+ return cls(
+ max_row_buffer, dbapi_cursor, description, initial_buffer
+ )
+
+ def __buffer_rows(self):
+ size = self._bufsize
+ self._rowbuffer = collections.deque(self.dbapi_cursor.fetchmany(size))
+ if size < self._max_row_buffer:
+ self._bufsize = min(
+ self._max_row_buffer, size * self._growth_factor
+ )
+
+ def soft_close(self, result):
+ self._rowbuffer.clear()
+ super(BufferedRowCursorFetchStrategy, self).soft_close(result)
+
+ def hard_close(self, result):
+ self._rowbuffer.clear()
+ super(BufferedRowCursorFetchStrategy, self).hard_close(result)
+
+ def fetchone(self):
+ if not self._rowbuffer:
+ self.__buffer_rows()
+ if not self._rowbuffer:
+ return None
+ return self._rowbuffer.popleft()
+
+ def fetchmany(self, size=None):
+ if size is None:
+ return self.fetchall()
+ result = []
+ for x in range(0, size):
+ row = self.fetchone()
+ if row is None:
+ break
+ result.append(row)
+ return result
+
+ def fetchall(self):
+ self._rowbuffer.extend(self.dbapi_cursor.fetchall())
+ ret = self._rowbuffer
+ self._rowbuffer = collections.deque()
+ return ret
+
+
+class FullyBufferedCursorFetchStrategy(DefaultCursorFetchStrategy):
+ """A cursor strategy that buffers rows fully upon creation.
+
+ Used for operations where a result is to be delivered
+ after the database conversation cannot be continued,
+ such as MSSQL INSERT...OUTPUT after an autocommit.
+
+ """
+
+ __slots__ = ("_rowbuffer",)
+
+ def __init__(self, dbapi_cursor, description, initial_buffer=None):
+ super(FullyBufferedCursorFetchStrategy, self).__init__(
+ dbapi_cursor, description
+ )
+ if initial_buffer is not None:
+ self._rowbuffer = collections.deque(initial_buffer)
+ else:
+ self._rowbuffer = self._buffer_rows()
+
+ @classmethod
+ def create_from_buffer(cls, dbapi_cursor, description, buffer):
+ return cls(dbapi_cursor, description, buffer)
+
+ def _buffer_rows(self):
+ return collections.deque(self.dbapi_cursor.fetchall())
+
+ def soft_close(self, result):
+ self._rowbuffer.clear()
+ super(FullyBufferedCursorFetchStrategy, self).soft_close(result)
+
+ def hard_close(self, result):
+ self._rowbuffer.clear()
+ super(FullyBufferedCursorFetchStrategy, self).hard_close(result)
+
+ def fetchone(self):
+ if self._rowbuffer:
+ return self._rowbuffer.popleft()
+ else:
+ return None
+
+ def fetchmany(self, size=None):
+ if size is None:
+ return self.fetchall()
+ result = []
+ for x in range(0, size):
+ row = self.fetchone()
+ if row is None:
+ break
+ result.append(row)
+ return result
+
+ def fetchall(self):
+ ret = self._rowbuffer
+ self._rowbuffer = collections.deque()
+ return ret
+
+
+class BaseResult(object):
+ """Base class for database result objects.
+
+
+ :class:`.BaseResult` is the base class for the 1.x style
+ :class:`.ResultProxy` class as well as the 2.x style
+ :class:`.future.Result` class.
"""
- _process_row = Row
out_parameters = None
- _autoclose_connection = False
_metadata = None
_soft_closed = False
closed = False
+ @classmethod
+ def _create_for_context(cls, context):
+ if context._is_future_result:
+ obj = object.__new__(_future_Result)
+ else:
+ obj = object.__new__(ResultProxy)
+ obj.__init__(context)
+ return obj
+
def __init__(self, context):
self.context = context
self.dialect = context.dialect
- self.cursor = self._saved_cursor = context.cursor
+ self.cursor = context.cursor
self.connection = context.root_connection
self._echo = (
self.connection._echo and context.engine._should_log_debug()
)
self._init_metadata()
- def _getter(self, key, raiseerr=True):
- try:
- getter = self._metadata._getter
- except AttributeError:
- return self._non_result(None)
- else:
- return getter(key, raiseerr)
-
- def _tuple_getter(self, key, raiseerr=True):
- try:
- getter = self._metadata._tuple_getter
- except AttributeError:
- return self._non_result(None)
- else:
- return getter(key, raiseerr)
-
- def _has_key(self, key):
- try:
- has_key = self._metadata._has_key
- except AttributeError:
- return self._non_result(None)
- else:
- return has_key(key)
-
def _init_metadata(self):
- cursor_description = self._cursor_description()
- if cursor_description is not None:
- if (
- self.context.compiled
- and "compiled_cache" in self.context.execution_options
- ):
+ self.cursor_strategy = strat = self.context.get_result_cursor_strategy(
+ self
+ )
+
+ if strat.cursor_description is not None:
+ if self.context.compiled:
if self.context.compiled._cached_metadata:
self._metadata = self.context.compiled._cached_metadata
else:
- # TODO: what we hope to do here is have "Legacy" be
- # the default in 1.4 but a flag (somewhere?) will have it
- # use non-legacy. ORM should be able to use non-legacy
self._metadata = (
self.context.compiled._cached_metadata
- ) = ResultMetaData(self, cursor_description)
+ ) = self._cursor_metadata(self, strat.cursor_description)
else:
- self._metadata = ResultMetaData(self, cursor_description)
+ self._metadata = self._cursor_metadata(
+ self, strat.cursor_description
+ )
if self._echo:
self.context.engine.logger.debug(
- "Col %r", tuple(x[0] for x in cursor_description)
+ "Col %r", tuple(x[0] for x in strat.cursor_description)
)
+ # leave cursor open so that execution context can continue
+ # setting up things like rowcount
def keys(self):
"""Return the list of string keys that would represented by each
@@ -960,109 +1120,31 @@ class ResultProxy(object):
else:
return []
- @util.memoized_property
- def rowcount(self):
- """Return the 'rowcount' for this result.
-
- The 'rowcount' reports the number of rows *matched*
- by the WHERE criterion of an UPDATE or DELETE statement.
-
- .. note::
-
- Notes regarding :attr:`.ResultProxy.rowcount`:
-
-
- * This attribute returns the number of rows *matched*,
- which is not necessarily the same as the number of rows
- that were actually *modified* - an UPDATE statement, for example,
- may have no net change on a given row if the SET values
- given are the same as those present in the row already.
- Such a row would be matched but not modified.
- On backends that feature both styles, such as MySQL,
- rowcount is configured by default to return the match
- count in all cases.
-
- * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
- with an UPDATE or DELETE statement. Contrary to what the Python
- DBAPI says, it does *not* return the
- number of rows available from the results of a SELECT statement
- as DBAPIs cannot support this functionality when rows are
- unbuffered.
-
- * :attr:`.ResultProxy.rowcount` may not be fully implemented by
- all dialects. In particular, most DBAPIs do not support an
- aggregate rowcount result from an executemany call.
- The :meth:`.ResultProxy.supports_sane_rowcount` and
- :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
- will report from the dialect if each usage is known to be
- supported.
-
- * Statements that use RETURNING may not return a correct
- rowcount.
-
- """
+ def _getter(self, key, raiseerr=True):
try:
- return self.context.rowcount
- except BaseException as e:
- self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context
- )
-
- @property
- def lastrowid(self):
- """return the 'lastrowid' accessor on the DBAPI cursor.
-
- This is a DBAPI specific method and is only functional
- for those backends which support it, for statements
- where it is appropriate. It's behavior is not
- consistent across backends.
-
- Usage of this method is normally unnecessary when
- using insert() expression constructs; the
- :attr:`~ResultProxy.inserted_primary_key` attribute provides a
- tuple of primary key values for a newly inserted row,
- regardless of database backend.
+ getter = self._metadata._getter
+ except AttributeError:
+ return self.cursor_strategy._non_result(None)
+ else:
+ return getter(key, raiseerr)
- """
+ def _tuple_getter(self, key, raiseerr=True):
try:
- return self._saved_cursor.lastrowid
- except BaseException as e:
- self.connection._handle_dbapi_exception(
- e, None, None, self._saved_cursor, self.context
- )
-
- @property
- def returns_rows(self):
- """True if this :class:`.ResultProxy` returns rows.
-
- I.e. if it is legal to call the methods
- :meth:`~.ResultProxy.fetchone`,
- :meth:`~.ResultProxy.fetchmany`
- :meth:`~.ResultProxy.fetchall`.
-
- """
- return self._metadata is not None
-
- @property
- def is_insert(self):
- """True if this :class:`.ResultProxy` is the result
- of a executing an expression language compiled
- :func:`.expression.insert` construct.
-
- When True, this implies that the
- :attr:`inserted_primary_key` attribute is accessible,
- assuming the statement did not include
- a user defined "returning" construct.
-
- """
- return self.context.isinsert
-
- def _cursor_description(self):
- """May be overridden by subclasses."""
+ getter = self._metadata._tuple_getter
+ except AttributeError:
+ return self.cursor_strategy._non_result(None)
+ else:
+ return getter(key, raiseerr)
- return self._saved_cursor.description
+ def _has_key(self, key):
+ try:
+ has_key = self._metadata._has_key
+ except AttributeError:
+ return self.cursor_strategy._non_result(None)
+ else:
+ return has_key(key)
- def _soft_close(self):
+ def _soft_close(self, hard=False):
"""Soft close this :class:`.ResultProxy`.
This releases all DBAPI cursor resources, but leaves the
@@ -1085,80 +1167,21 @@ class ResultProxy(object):
"""
- if self._soft_closed:
- return
- self._soft_closed = True
- cursor = self.cursor
- self.connection._safe_close_cursor(cursor)
- if self._autoclose_connection:
- self.connection.close()
- self.cursor = None
-
- def close(self):
- """Close this ResultProxy.
-
- This closes out the underlying DBAPI cursor corresponding
- to the statement execution, if one is still present. Note that the
- DBAPI cursor is automatically released when the :class:`.ResultProxy`
- exhausts all available rows. :meth:`.ResultProxy.close` is generally
- an optional method except in the case when discarding a
- :class:`.ResultProxy` that still has additional rows pending for fetch.
-
- In the case of a result that is the product of
- :ref:`connectionless execution <dbengine_implicit>`,
- the underlying :class:`.Connection` object is also closed, which
- :term:`releases` DBAPI connection resources.
-
- After this method is called, it is no longer valid to call upon
- the fetch methods, which will raise a :class:`.ResourceClosedError`
- on subsequent use.
- .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
- has been separated out from the process that releases the underlying
- DBAPI cursor resource. The "auto close" feature of the
- :class:`.Connection` now performs a so-called "soft close", which
- releases the underlying DBAPI cursor, but allows the
- :class:`.ResultProxy` to still behave as an open-but-exhausted
- result set; the actual :meth:`.ResultProxy.close` method is never
- called. It is still safe to discard a :class:`.ResultProxy`
- that has been fully exhausted without calling this method.
-
- .. seealso::
-
- :ref:`connections_toplevel`
-
- """
+ if (not hard and self._soft_closed) or (hard and self.closed):
+ return
- if not self.closed:
- self._soft_close()
+ if hard:
self.closed = True
-
- def __iter__(self):
- """Implement iteration protocol."""
-
- while True:
- row = self.fetchone()
- if row is None:
- return
- else:
- yield row
-
- def __next__(self):
- """Implement the Python next() protocol.
-
- This method, mirrored as both ``.next()`` and ``.__next__()``, is part
- of Python's API for producing iterator-like behavior.
-
- .. versionadded:: 1.2
-
- """
- row = self.fetchone()
- if row is None:
- raise StopIteration()
+ self.cursor_strategy.hard_close(self)
else:
- return row
+ self.cursor_strategy.soft_close(self)
- next = __next__
+ if not self._soft_closed:
+ cursor = self.cursor
+ self.cursor = None
+ self.connection._safe_close_cursor(cursor)
+ self._soft_closed = True
@util.memoized_property
def inserted_primary_key(self):
@@ -1340,37 +1363,196 @@ class ResultProxy(object):
return self.dialect.supports_sane_multi_rowcount
- def _fetchone_impl(self):
+ @util.memoized_property
+ def rowcount(self):
+ """Return the 'rowcount' for this result.
+
+ The 'rowcount' reports the number of rows *matched*
+ by the WHERE criterion of an UPDATE or DELETE statement.
+
+ .. note::
+
+ Notes regarding :attr:`.ResultProxy.rowcount`:
+
+
+ * This attribute returns the number of rows *matched*,
+ which is not necessarily the same as the number of rows
+ that were actually *modified* - an UPDATE statement, for example,
+ may have no net change on a given row if the SET values
+ given are the same as those present in the row already.
+ Such a row would be matched but not modified.
+ On backends that feature both styles, such as MySQL,
+ rowcount is configured by default to return the match
+ count in all cases.
+
+ * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
+ with an UPDATE or DELETE statement. Contrary to what the Python
+ DBAPI says, it does *not* return the
+ number of rows available from the results of a SELECT statement
+ as DBAPIs cannot support this functionality when rows are
+ unbuffered.
+
+ * :attr:`.ResultProxy.rowcount` may not be fully implemented by
+ all dialects. In particular, most DBAPIs do not support an
+ aggregate rowcount result from an executemany call.
+ The :meth:`.ResultProxy.supports_sane_rowcount` and
+ :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
+ will report from the dialect if each usage is known to be
+ supported.
+
+ * Statements that use RETURNING may not return a correct
+ rowcount.
+
+ """
try:
- return self.cursor.fetchone()
- except AttributeError:
- return self._non_result(None)
+ return self.context.rowcount
+ except BaseException as e:
+ self.connection._handle_dbapi_exception(
+ e, None, None, self.cursor, self.context
+ )
+
+ @property
+ def lastrowid(self):
+ """return the 'lastrowid' accessor on the DBAPI cursor.
+
+ This is a DBAPI specific method and is only functional
+ for those backends which support it, for statements
+ where it is appropriate. Its behavior is not
+ consistent across backends.
+
+ Usage of this method is normally unnecessary when
+ using insert() expression constructs; the
+ :attr:`~ResultProxy.inserted_primary_key` attribute provides a
+ tuple of primary key values for a newly inserted row,
+ regardless of database backend.
- def _fetchmany_impl(self, size=None):
+ """
try:
- if size is None:
- return self.cursor.fetchmany()
+ return self.context.get_lastrowid()
+ except BaseException as e:
+ self.connection._handle_dbapi_exception(
+ e, None, None, self.cursor, self.context
+ )
+
+ @property
+ def returns_rows(self):
+ """True if this :class:`.ResultProxy` returns rows.
+
+ I.e. if it is legal to call the methods
+ :meth:`~.ResultProxy.fetchone`,
+ :meth:`~.ResultProxy.fetchmany`
+ :meth:`~.ResultProxy.fetchall`.
+
+ """
+ return self._metadata is not None
+
+ @property
+ def is_insert(self):
+ """True if this :class:`.ResultProxy` is the result
+ of executing an expression language compiled
+ :func:`.expression.insert` construct.
+
+ When True, this implies that the
+ :attr:`inserted_primary_key` attribute is accessible,
+ assuming the statement did not include
+ a user defined "returning" construct.
+
+ """
+ return self.context.isinsert
+
+
+class ResultProxy(BaseResult):
+ """A facade around a DBAPI cursor object.
+
+ Returns database rows via the :class:`.Row` class, which provides
+ additional API features and behaviors on top of the raw data returned
+ by the DBAPI.
+
+ Within the scope of the 1.x series of SQLAlchemy, the :class:`.ResultProxy`
+ will in fact return instances of the :class:`.LegacyRow` class, which
+ maintains Python mapping (i.e. dictionary) like behaviors upon the object
+ itself. Going forward, the :attr:`.Row._mapping` attribute should be used
+ for dictionary behaviors.
+
+ .. seealso::
+
+ :ref:`coretutorial_selecting` - introductory material for accessing
+ :class:`.ResultProxy` and :class:`.Row` objects.
+
+ """
+
+ _autoclose_connection = False
+ _process_row = LegacyRow
+ _cursor_metadata = LegacyCursorResultMetaData
+ _cursor_strategy_cls = DefaultCursorFetchStrategy
+
+ def __iter__(self):
+ """Implement iteration protocol."""
+
+ while True:
+ row = self.fetchone()
+ if row is None:
+ return
else:
- return self.cursor.fetchmany(size)
- except AttributeError:
- return self._non_result([])
+ yield row
- def _fetchall_impl(self):
- try:
- return self.cursor.fetchall()
- except AttributeError:
- return self._non_result([])
+ def close(self):
+ """Close this ResultProxy.
- def _non_result(self, default):
- if self._metadata is None:
- raise exc.ResourceClosedError(
- "This result object does not return rows. "
- "It has been closed automatically."
- )
- elif self.closed:
- raise exc.ResourceClosedError("This result object is closed.")
+ This closes out the underlying DBAPI cursor corresponding
+ to the statement execution, if one is still present. Note that the
+ DBAPI cursor is automatically released when the :class:`.ResultProxy`
+ exhausts all available rows. :meth:`.ResultProxy.close` is generally
+ an optional method except in the case when discarding a
+ :class:`.ResultProxy` that still has additional rows pending for fetch.
+
+ In the case of a result that is the product of
+ :ref:`connectionless execution <dbengine_implicit>`,
+ the underlying :class:`.Connection` object is also closed, which
+ :term:`releases` DBAPI connection resources.
+
+ .. deprecated:: 2.0 "connectionless" execution is deprecated and will
+ be removed in version 2.0. Version 2.0 will feature the
+ :class:`.Result` object that will no longer affect the status
+ of the originating connection in any case.
+
+ After this method is called, it is no longer valid to call upon
+ the fetch methods, which will raise a :class:`.ResourceClosedError`
+ on subsequent use.
+
+ .. seealso::
+
+ :ref:`connections_toplevel`
+
+ """
+ self._soft_close(hard=True)
+
+ def _soft_close(self, hard=False):
+ soft_closed = self._soft_closed
+ super(ResultProxy, self)._soft_close(hard=hard)
+ if (
+ not soft_closed
+ and self._soft_closed
+ and self._autoclose_connection
+ ):
+ self.connection.close()
+
+ def __next__(self):
+ """Implement the Python next() protocol.
+
+ This method, mirrored as both ``.next()`` and ``.__next__()``, is part
+ of Python's API for producing iterator-like behavior.
+
+ .. versionadded:: 1.2
+
+ """
+ row = self.fetchone()
+ if row is None:
+ raise StopIteration()
else:
- return default
+ return row
+
+ next = __next__
def process_rows(self, rows):
process_row = self._process_row
@@ -1406,7 +1588,7 @@ class ResultProxy(object):
"""
try:
- l = self.process_rows(self._fetchall_impl())
+ l = self.process_rows(self.cursor_strategy.fetchall())
self._soft_close()
return l
except BaseException as e:
@@ -1432,7 +1614,7 @@ class ResultProxy(object):
"""
try:
- l = self.process_rows(self._fetchmany_impl(size))
+ l = self.process_rows(self.cursor_strategy.fetchmany(size))
if len(l) == 0:
self._soft_close()
return l
@@ -1441,6 +1623,9 @@ class ResultProxy(object):
e, None, None, self.cursor, self.context
)
+ def _onerow(self):
+ return self.fetchone()
+
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
@@ -1457,7 +1642,7 @@ class ResultProxy(object):
"""
try:
- row = self._fetchone_impl()
+ row = self.cursor_strategy.fetchone()
if row is not None:
return self.process_rows([row])[0]
else:
@@ -1477,11 +1662,8 @@ class ResultProxy(object):
:return: a :class:`.Row` object, or None if no rows remain
"""
- if self._metadata is None:
- return self._non_result(None)
-
try:
- row = self._fetchone_impl()
+ row = self.cursor_strategy.fetchone()
except BaseException as e:
self.connection._handle_dbapi_exception(
e, None, None, self.cursor, self.context
@@ -1514,128 +1696,26 @@ class ResultProxy(object):
class BufferedRowResultProxy(ResultProxy):
"""A ResultProxy with row buffering behavior.
- ``ResultProxy`` that buffers the contents of a selection of rows
- before ``fetchone()`` is called. This is to allow the results of
- ``cursor.description`` to be available immediately, when
- interfacing with a DB-API that requires rows to be consumed before
- this information is available (currently psycopg2, when used with
- server-side cursors).
+ .. deprecated:: 1.4 this class is now supplied using a strategy object.
+ See :class:`.BufferedRowCursorFetchStrategy`.
- The pre-fetching behavior fetches only one row initially, and then
- grows its buffer size by a fixed amount with each successive need
- for additional rows up the ``max_row_buffer`` size, which defaults
- to 1000::
-
- with psycopg2_engine.connect() as conn:
-
- result = conn.execution_options(
- stream_results=True, max_row_buffer=50
- ).execute("select * from table")
-
- .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
-
- .. seealso::
-
- :ref:`psycopg2_execution_options`
"""
- def _init_metadata(self):
- self._max_row_buffer = self.context.execution_options.get(
- "max_row_buffer", 1000
- )
- self._growth_factor = 5
- self.__buffer_rows()
- super(BufferedRowResultProxy, self)._init_metadata()
-
- def __buffer_rows(self):
- if self.cursor is None:
- return
- size = getattr(self, "_bufsize", 1)
- self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
- if size < self._max_row_buffer:
- self._bufsize = min(
- self._max_row_buffer, size * self._growth_factor
- )
-
- def _soft_close(self, **kw):
- self.__rowbuffer.clear()
- super(BufferedRowResultProxy, self)._soft_close(**kw)
-
- def _fetchone_impl(self):
- if self.cursor is None:
- return self._non_result(None)
- if not self.__rowbuffer:
- self.__buffer_rows()
- if not self.__rowbuffer:
- return None
- return self.__rowbuffer.popleft()
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- if self.cursor is None:
- return self._non_result([])
- self.__rowbuffer.extend(self.cursor.fetchall())
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
+ _cursor_strategy_cls = BufferedRowCursorFetchStrategy
class FullyBufferedResultProxy(ResultProxy):
"""A result proxy that buffers rows fully upon creation.
- Used for operations where a result is to be delivered
- after the database conversation can not be continued,
- such as MSSQL INSERT...OUTPUT after an autocommit.
+ .. deprecated:: 1.4 this class is now supplied using a strategy object.
+ See :class:`.FullyBufferedCursorFetchStrategy`.
"""
- def _init_metadata(self):
- super(FullyBufferedResultProxy, self)._init_metadata()
- self.__rowbuffer = self._buffer_rows()
-
- def _buffer_rows(self):
- return collections.deque(self.cursor.fetchall())
-
- def _soft_close(self, **kw):
- self.__rowbuffer.clear()
- super(FullyBufferedResultProxy, self)._soft_close(**kw)
-
- def _fetchone_impl(self):
- if self.__rowbuffer:
- return self.__rowbuffer.popleft()
- else:
- return self._non_result(None)
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- if not self.cursor:
- return self._non_result([])
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
+ _cursor_strategy_cls = FullyBufferedCursorFetchStrategy
-class BufferedColumnRow(Row):
+class BufferedColumnRow(LegacyRow):
"""Row is now BufferedColumn in all cases"""
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
new file mode 100644
index 000000000..b4347a598
--- /dev/null
+++ b/lib/sqlalchemy/engine/row.py
@@ -0,0 +1,501 @@
+# engine/row.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define row constructs including :class:`.Row`."""
+
+
+import operator
+
+from .. import util
+from ..sql import util as sql_util
+from ..util.compat import collections_abc
+
+
+MD_INDEX = 0 # integer index in cursor.description
+
+# This reconstructor is necessary so that pickles with the C extension or
+# without use the same Binary format.
+try:
+ # We need a different reconstructor on the C extension so that we can
+ # add extra checks that fields have correctly been initialized by
+ # __setstate__.
+ from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
+
+ # The extra function embedding is needed so that the
+ # reconstructor function has the same signature whether or not
+ # the extension is present.
+ def rowproxy_reconstructor(cls, state):
+ return safe_rowproxy_reconstructor(cls, state)
+
+
+except ImportError:
+
+ def rowproxy_reconstructor(cls, state):
+ obj = cls.__new__(cls)
+ obj.__setstate__(state)
+ return obj
+
+
+try:
+ from sqlalchemy.cresultproxy import BaseRow
+
+ _baserow_usecext = True
+except ImportError:
+ _baserow_usecext = False
+
+ class BaseRow(object):
+ __slots__ = ("_parent", "_data", "_keymap")
+
+ def __init__(self, parent, processors, keymap, data):
+ """Row objects are constructed by ResultProxy objects."""
+
+ self._parent = parent
+
+ self._data = tuple(
+ [
+ proc(value) if proc else value
+ for proc, value in zip(processors, data)
+ ]
+ )
+ self._keymap = keymap
+
+ def __reduce__(self):
+ return (
+ rowproxy_reconstructor,
+ (self.__class__, self.__getstate__()),
+ )
+
+ def _values_impl(self):
+ return list(self)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __len__(self):
+ return len(self._data)
+
+ def __hash__(self):
+ return hash(self._data)
+
+ def _subscript_impl(self, key, ismapping):
+ try:
+ rec = self._keymap[key]
+ except KeyError:
+ rec = self._parent._key_fallback(key)
+ except TypeError:
+ # the non-C version detects a slice using TypeError.
+ # this is pretty inefficient for the slice use case
+ # but is more efficient for the integer use case since we
+ # don't have to check it up front.
+ if isinstance(key, slice):
+ return tuple(self._data[key])
+ else:
+ raise
+
+ mdindex = rec[MD_INDEX]
+ if mdindex is None:
+ self._parent._raise_for_ambiguous_column_name(rec)
+ elif not ismapping and mdindex != key and not isinstance(key, int):
+ self._parent._warn_for_nonint(key)
+
+ # TODO: warn for non-int here, RemovedIn20Warning when available
+
+ return self._data[mdindex]
+
+ def _get_by_key_impl(self, key):
+ return self._subscript_impl(key, False)
+
+ def _get_by_key_impl_mapping(self, key):
+ # the C code has two different methods so that we can distinguish
+ # between tuple-like keys (integers, slices) and mapping-like keys
+ # (strings, objects)
+ return self._subscript_impl(key, True)
+
+ def __getattr__(self, name):
+ try:
+ return self._get_by_key_impl_mapping(name)
+ except KeyError as e:
+ raise AttributeError(e.args[0])
+
+
+class Row(BaseRow, collections_abc.Sequence):
+ """Represent a single result row.
+
+ The :class:`.Row` object represents a row of a database result. It is
+ typically associated in the 1.x series of SQLAlchemy with the
+ :class:`.ResultProxy` object, however is also used by the ORM for
+ tuple-like results as of SQLAlchemy 1.4.
+
+ The :class:`.Row` object seeks to act as much like a Python named
+ tuple as possible. For mapping (i.e. dictionary) behavior on a row,
+ such as testing for containment of keys, refer to the :attr:`.Row._mapping`
+ attribute.
+
+ .. seealso::
+
+ :ref:`coretutorial_selecting` - includes examples of selecting
+ rows from SELECT statements.
+
+ :class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy
+ 1.4.
+
+ .. versionchanged:: 1.4
+
+ Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
+ "proxy" object in that it contains the final form of data within it,
+ and now acts mostly like a named tuple. Mapping-like functionality is
+ moved to the :attr:`.Row._mapping` attribute, but will remain available
+ in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used
+ by :class:`.ResultProxy`. See :ref:`change_4710_core` for background
+ on this change.
+
+ """
+
+ __slots__ = ()
+
+ @property
+ def _mapping(self):
+ """Return a :class:`.RowMapping` for this :class:`.Row`.
+
+ This object provides a consistent Python mapping (i.e. dictionary)
+ interface for the data contained within the row. The :class:`.Row`
+ by itself behaves like a named tuple, however in the 1.4 series of
+ SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which
+ continues to have mapping-like behaviors against the row object
+ itself.
+
+ .. seealso::
+
+ :attr:`.Row._fields`
+
+ .. versionadded:: 1.4
+
+ """
+
+ return RowMapping(self)
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def __getstate__(self):
+ return {"_parent": self._parent, "_data": self._data}
+
+ def __setstate__(self, state):
+ self._parent = parent = state["_parent"]
+ self._data = state["_data"]
+ self._keymap = parent._keymap
+
+ def _op(self, other, op):
+ return (
+ op(tuple(self), tuple(other))
+ if isinstance(other, Row)
+ else op(tuple(self), other)
+ )
+
+ __hash__ = BaseRow.__hash__
+
+ def __lt__(self, other):
+ return self._op(other, operator.lt)
+
+ def __le__(self, other):
+ return self._op(other, operator.le)
+
+ def __ge__(self, other):
+ return self._op(other, operator.ge)
+
+ def __gt__(self, other):
+ return self._op(other, operator.gt)
+
+ def __eq__(self, other):
+ return self._op(other, operator.eq)
+
+ def __ne__(self, other):
+ return self._op(other, operator.ne)
+
+ def __repr__(self):
+ return repr(sql_util._repr_row(self))
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.Row.keys` method is deprecated and will be removed in a "
+ "future release. Use the namedtuple standard accessor "
+ ":attr:`.Row._fields`, or for full mapping behavior use "
+ "row._mapping.keys() ",
+ )
+ def keys(self):
+ """Return the list of keys as strings represented by this
+ :class:`.Row`.
+
+ This method is analogous to the Python dictionary ``.keys()`` method,
+ except that it returns a list, not an iterator.
+
+ .. seealso::
+
+ :attr:`.Row._fields`
+
+ :attr:`.Row._mapping`
+
+ """
+ return [k for k in self._parent.keys if k is not None]
+
+ @property
+ def _fields(self):
+ """Return a tuple of string keys as represented by this
+ :class:`.Row`.
+
+ This attribute is analogous to the Python named tuple ``._fields``
+ attribute.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`.Row._mapping`
+
+ """
+ return tuple([k for k in self._parent.keys if k is not None])
+
+ def _asdict(self):
+ """Return a new dict which maps field names to their corresponding
+ values.
+
+ This method is analogous to the Python named tuple ``._asdict()``
+ method, and works by applying the ``dict()`` constructor to the
+ :attr:`.Row._mapping` attribute.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`.Row._mapping`
+
+ """
+ return dict(self._mapping)
+
+ def _replace(self):
+ raise NotImplementedError()
+
+ @property
+ def _field_defaults(self):
+ raise NotImplementedError()
+
+
+class LegacyRow(Row):
+ """A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors
+ for Core.
+
+ The :class:`.LegacyRow` class is where most of the Python mapping
+ (i.e. dictionary-like)
+ behaviors are implemented for the row object. The mapping behavior
+ of :class:`.Row` going forward is accessible via the :class:`.Row._mapping`
+ attribute.
+
+ .. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most
+ of the deprecated behaviors of :class:`.Row`.
+
+ """
+
+ def __contains__(self, key):
+ return self._parent._contains(key, self)
+
+ def __getitem__(self, key):
+ return self._get_by_key_impl(key)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.has_key` method is deprecated and will be "
+ "removed in a future release. To test for key membership, use "
+        "the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping'.",
+ )
+ def has_key(self, key):
+ """Return True if this :class:`.LegacyRow` contains the given key.
+
+ Through the SQLAlchemy 1.x series, the ``__contains__()`` method of
+ :class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4) also links
+ to :meth:`.Row.has_key`, in that an expression such as ::
+
+ "some_col" in row
+
+ Will return True if the row contains a column named ``"some_col"``,
+ in the way that a Python mapping works.
+
+ However, it is planned that the 2.0 series of SQLAlchemy will reverse
+ this behavior so that ``__contains__()`` will refer to a value being
+ present in the row, in the way that a Python tuple works.
+
+ .. seealso::
+
+ :ref:`change_4710_core`
+
+ """
+
+ return self._parent._has_key(key)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.items` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.items()'.",
+ )
+ def items(self):
+ """Return a list of tuples, each tuple containing a key/value pair.
+
+ This method is analogous to the Python dictionary ``.items()`` method,
+ except that it returns a list, not an iterator.
+
+ """
+
+ return [(key, self[key]) for key in self.keys()]
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.iterkeys` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.keys()'.",
+ )
+ def iterkeys(self):
+        """Return an iterator against the :meth:`.Row.keys` method.
+
+ This method is analogous to the Python-2-only dictionary
+ ``.iterkeys()`` method.
+
+ """
+ return iter(self._parent.keys)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.itervalues` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.values()'.",
+ )
+ def itervalues(self):
+        """Return an iterator against the :meth:`.Row.values` method.
+
+ This method is analogous to the Python-2-only dictionary
+ ``.itervalues()`` method.
+
+ """
+ return iter(self)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.values` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.values()'.",
+ )
+ def values(self):
+ """Return the values represented by this :class:`.Row` as a list.
+
+ This method is analogous to the Python dictionary ``.values()`` method,
+ except that it returns a list, not an iterator.
+
+ """
+
+ return self._values_impl()
+
+
+BaseRowProxy = BaseRow
+RowProxy = Row
+
+
+class ROMappingView(
+ collections_abc.KeysView,
+ collections_abc.ValuesView,
+ collections_abc.ItemsView,
+):
+ __slots__ = (
+ "_mapping",
+ "_items",
+ )
+
+ def __init__(self, mapping, items):
+ self._mapping = mapping
+ self._items = items
+
+ def __len__(self):
+ return len(self._items)
+
+ def __repr__(self):
+ return "{0.__class__.__name__}({0._mapping!r})".format(self)
+
+ def __iter__(self):
+ return iter(self._items)
+
+ def __contains__(self, item):
+ return item in self._items
+
+ def __eq__(self, other):
+ return list(other) == list(self)
+
+ def __ne__(self, other):
+ return list(other) != list(self)
+
+
+class RowMapping(collections_abc.Mapping):
+ """A ``Mapping`` that maps column names and objects to :class:`.Row` values.
+
+ The :class:`.RowMapping` is available from a :class:`.Row` via the
+ :attr:`.Row._mapping` attribute and supplies Python mapping (i.e.
+ dictionary) access to the contents of the row. This includes support
+ for testing of containment of specific keys (string column names or
+ objects), as well as iteration of keys, values, and items::
+
+ for row in result:
+ if 'a' in row._mapping:
+ print("Column 'a': %s" % row._mapping['a'])
+
+ print("Column b: %s" % row._mapping[table.c.b])
+
+
+ .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
+ mapping-like access previously provided by a database result row,
+ which now seeks to behave mostly like a named tuple.
+
+ """
+
+ __slots__ = ("row",)
+
+ def __init__(self, row):
+ self.row = row
+
+ def __getitem__(self, key):
+ return self.row._get_by_key_impl_mapping(key)
+
+ def __iter__(self):
+ return (k for k in self.row._parent.keys if k is not None)
+
+ def __len__(self):
+ return len(self.row)
+
+ def __contains__(self, key):
+ return self.row._parent._has_key(key)
+
+ def items(self):
+ """Return a view of key/value tuples for the elements in the
+ underlying :class:`.Row`.
+
+ """
+ return ROMappingView(self, [(key, self[key]) for key in self.keys()])
+
+ def keys(self):
+ """Return a view of 'keys' for string column names represented
+ by the underlying :class:`.Row`.
+
+ """
+ return ROMappingView(
+ self, [k for k in self.row._parent.keys if k is not None]
+ )
+
+ def values(self):
+ """Return a view of values for the values represented in the
+ underlying :class:`.Row`.
+
+ """
+ return ROMappingView(self, self.row._values_impl())