summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/engine
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2020-05-20 13:41:44 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2020-05-21 14:16:03 -0400
commit4550983e0ce2f35b3585e53894c941c23693e71d (patch)
tree3928e6e333c2b9bb6e23a4de079565a387d309ae /lib/sqlalchemy/engine
parent3d55263c92ee29a0257d823124c353a35246cf31 (diff)
downloadsqlalchemy-4550983e0ce2f35b3585e53894c941c23693e71d.tar.gz
Performance fixes for new result set
A few small mistakes led to huge callcounts. Additionally, the warn-on-get behavior which is attempting to warn for deprecated access in SQLAlchemy 2.0 is very expensive; it's not clear if it's feasible to have this warning or to somehow alter how it works. Fixes: #5340 Change-Id: I73bdd2d7b6f1b25cc0222accabd585cf761a5af4
Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--lib/sqlalchemy/engine/base.py4
-rw-r--r--lib/sqlalchemy/engine/cursor.py86
-rw-r--r--lib/sqlalchemy/engine/default.py131
-rw-r--r--lib/sqlalchemy/engine/result.py103
-rw-r--r--lib/sqlalchemy/engine/row.py103
5 files changed, 261 insertions, 166 deletions
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f169655e0..bbfafe8f1 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1482,8 +1482,10 @@ class Connection(Connectable):
if (
not self._is_future
- and context.should_autocommit
+ # usually we're in a transaction so avoid relatively
+ # expensive / legacy should_autocommit call
and self._transaction is None
+ and context.should_autocommit
):
self._commit_impl(autocommit=True)
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
index a886d2025..a393f8da7 100644
--- a/lib/sqlalchemy/engine/cursor.py
+++ b/lib/sqlalchemy/engine/cursor.py
@@ -671,6 +671,8 @@ class CursorResultMetaData(ResultMetaData):
class LegacyCursorResultMetaData(CursorResultMetaData):
+ __slots__ = ()
+
def _contains(self, value, row):
key = value
if key in self._keymap:
@@ -813,17 +815,15 @@ class NoCursorFetchStrategy(ResultFetchStrategy):
"""
- __slots__ = ("closed",)
+ __slots__ = ()
- def __init__(self, closed):
- self.closed = closed
- self.cursor_description = None
+ cursor_description = None
def soft_close(self, result):
pass
def hard_close(self, result):
- self.closed = True
+ pass
def fetchone(self, result):
return self._non_result(result, None)
@@ -849,8 +849,10 @@ class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
"""
+ __slots__ = ()
+
def _non_result(self, result, default, err=None):
- if self.closed:
+ if result.closed:
util.raise_(
exc.ResourceClosedError("This result object is closed."),
replace_context=err,
@@ -859,6 +861,9 @@ class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
return default
+_NO_CURSOR_DQL = NoCursorDQLFetchStrategy()
+
+
class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
"""Cursor strategy for a DML result that has no open cursor.
@@ -867,12 +872,17 @@ class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
"""
+ __slots__ = ()
+
def _non_result(self, result, default, err=None):
# we only expect to have a _NoResultMetaData() here right now.
assert not result._metadata.returns_rows
result._metadata._we_dont_return_rows(err)
+_NO_CURSOR_DML = NoCursorDMLFetchStrategy()
+
+
class CursorFetchStrategy(ResultFetchStrategy):
"""Call fetch methods from a DBAPI cursor.
@@ -893,15 +903,15 @@ class CursorFetchStrategy(ResultFetchStrategy):
description = dbapi_cursor.description
if description is None:
- return NoCursorDMLFetchStrategy(False)
+ return _NO_CURSOR_DML
else:
return cls(dbapi_cursor, description)
def soft_close(self, result):
- result.cursor_strategy = NoCursorDQLFetchStrategy(False)
+ result.cursor_strategy = _NO_CURSOR_DQL
def hard_close(self, result):
- result.cursor_strategy = NoCursorDQLFetchStrategy(True)
+ result.cursor_strategy = _NO_CURSOR_DQL
def handle_exception(self, result, err):
result.connection._handle_dbapi_exception(
@@ -1016,7 +1026,7 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
description = dbapi_cursor.description
if description is None:
- return NoCursorDMLFetchStrategy(False)
+ return _NO_CURSOR_DML
else:
max_row_buffer = result.context.execution_options.get(
"max_row_buffer", 1000
@@ -1184,7 +1194,7 @@ class _NoResultMetaData(ResultMetaData):
self._we_dont_return_rows()
-_no_result_metadata = _NoResultMetaData()
+_NO_RESULT_METADATA = _NoResultMetaData()
class BaseCursorResult(object):
@@ -1199,11 +1209,12 @@ class BaseCursorResult(object):
@classmethod
def _create_for_context(cls, context):
+
if context._is_future_result:
- obj = object.__new__(CursorResult)
+ obj = CursorResult(context)
else:
- obj = object.__new__(LegacyCursorResult)
- obj.__init__(context)
+ obj = LegacyCursorResult(context)
+
return obj
def __init__(self, context):
@@ -1214,35 +1225,33 @@ class BaseCursorResult(object):
self._echo = (
self.connection._echo and context.engine._should_log_debug()
)
- self._init_metadata()
- def _init_metadata(self):
- self.cursor_strategy = strat = self.context.get_result_cursor_strategy(
- self
- )
+ # this is a hook used by dialects to change the strategy,
+ # so for the moment we have to keep calling this every time
+ # :(
+ self.cursor_strategy = strat = context.get_result_cursor_strategy(self)
if strat.cursor_description is not None:
- if self.context.compiled:
- if self.context.compiled._cached_metadata:
- cached_md = self.context.compiled._cached_metadata
- self._metadata = cached_md._adapt_to_context(self.context)
+ self._init_metadata(context, strat.cursor_description)
+ else:
+ self._metadata = _NO_RESULT_METADATA
+
+ def _init_metadata(self, context, cursor_description):
+ if context.compiled:
+ if context.compiled._cached_metadata:
+ cached_md = context.compiled._cached_metadata
+ self._metadata = cached_md._adapt_to_context(context)
- else:
- self._metadata = (
- self.context.compiled._cached_metadata
- ) = self._cursor_metadata(self, strat.cursor_description)
else:
- self._metadata = self._cursor_metadata(
- self, strat.cursor_description
- )
- if self._echo:
- self.context.engine.logger.debug(
- "Col %r", tuple(x[0] for x in strat.cursor_description)
- )
+ self._metadata = (
+ context.compiled._cached_metadata
+ ) = self._cursor_metadata(self, cursor_description)
else:
- self._metadata = _no_result_metadata
- # leave cursor open so that execution context can continue
- # setting up things like rowcount
+ self._metadata = self._cursor_metadata(self, cursor_description)
+ if self._echo:
+ context.engine.logger.debug(
+ "Col %r", tuple(x[0] for x in cursor_description)
+ )
def _soft_close(self, hard=False):
"""Soft close this :class:`_engine.CursorResult`.
@@ -1638,9 +1647,6 @@ class CursorResult(BaseCursorResult, Result):
def _fetchmany_impl(self, size=None):
return self.cursor_strategy.fetchmany(self, size)
- def _soft_close(self, **kw):
- BaseCursorResult._soft_close(self, **kw)
-
def _raw_row_iterator(self):
return self._fetchiter_impl()
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index d9b4cdda6..094ab3d55 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -372,6 +372,8 @@ class DefaultDialect(interfaces.Dialect):
return None
def _check_unicode_returns(self, connection, additional_tests=None):
+ # this now runs in py2k only and will be removed in 2.0; disabled for
+ # Python 3 in all cases under #5315
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
@@ -752,15 +754,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.compiled = compiled = compiled_ddl
self.isddl = True
- self.execution_options = compiled.execution_options
- if connection._execution_options:
- self.execution_options = self.execution_options.union(
- connection._execution_options
- )
- if execution_options:
- self.execution_options = self.execution_options.union(
- execution_options
- )
+ self.execution_options = compiled.execution_options.merge_with(
+ connection._execution_options, execution_options
+ )
self._is_future_result = (
connection._is_future
@@ -815,15 +811,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# we get here
assert compiled.can_execute
- self.execution_options = compiled.execution_options
- if connection._execution_options:
- self.execution_options = self.execution_options.union(
- connection._execution_options
- )
- if execution_options:
- self.execution_options = self.execution_options.union(
- execution_options
- )
+ self.execution_options = compiled.execution_options.merge_with(
+ connection._execution_options, execution_options
+ )
self._is_future_result = (
connection._is_future
@@ -921,42 +911,32 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# Convert the dictionary of bind parameter values
# into a dict or list to be sent to the DBAPI's
# execute() or executemany() method.
- parameters = []
if compiled.positional:
- for compiled_params in self.compiled_parameters:
- param = [
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key]
- for key in positiontup
- ]
- parameters.append(dialect.execute_sequence_format(param))
+ parameters = [
+ dialect.execute_sequence_format(
+ [
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ for key in positiontup
+ ]
+ )
+ for compiled_params in self.compiled_parameters
+ ]
else:
encode = not dialect.supports_unicode_statements
- for compiled_params in self.compiled_parameters:
- if encode:
- param = dict(
- (
- dialect._encoder(key)[0],
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key],
- )
- for key in compiled_params
- )
- else:
- param = dict(
- (
- key,
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key],
- )
- for key in compiled_params
- )
-
- parameters.append(param)
+ parameters = [
+ {
+ dialect._encoder(key)[0]
+ if encode
+ else key: processors[key](value)
+ if key in processors
+ else value
+ for key, value in compiled_params.items()
+ }
+ for compiled_params in self.compiled_parameters
+ ]
self.parameters = dialect.execute_sequence_format(parameters)
@@ -980,14 +960,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.dialect = connection.dialect
self.is_text = True
- if connection._execution_options:
- self.execution_options = self.execution_options.union(
- connection._execution_options
- )
- if execution_options:
- self.execution_options = self.execution_options.union(
- execution_options
- )
+ self.execution_options = self.execution_options.merge_with(
+ connection._execution_options, execution_options
+ )
self._is_future_result = (
connection._is_future
@@ -1038,14 +1013,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
- if connection._execution_options:
- self.execution_options = self.execution_options.union(
- connection._execution_options
- )
- if execution_options:
- self.execution_options = self.execution_options.union(
- execution_options
- )
+ self.execution_options = self.execution_options.merge_with(
+ connection._execution_options, execution_options
+ )
self._is_future_result = (
connection._is_future
@@ -1173,7 +1143,17 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
return use_server_side
def create_cursor(self):
- if self._use_server_side_cursor():
+ if (
+ # inlining initial preference checks for SS cursors
+ self.dialect.supports_server_side_cursors
+ and (
+ self.execution_options.get("stream_results", False)
+ or (
+ self.dialect.server_side_cursors
+ and self._use_server_side_cursor()
+ )
+ )
+ ):
self._is_server_side = True
return self.create_server_side_cursor()
else:
@@ -1227,6 +1207,17 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
pass
def get_result_cursor_strategy(self, result):
+        """Dialect-overridable hook to return the internal strategy that
+ fetches results.
+
+
+ Some dialects will in some cases return special objects here that
+ have pre-buffered rows from some source or another, such as turning
+        Oracle OUT parameters into rows to accommodate "returning",
+ SQL Server fetching "returning" before it resets "identity insert",
+ etc.
+
+ """
if self._is_server_side:
strat_cls = _cursor.BufferedRowCursorFetchStrategy
else:
@@ -1312,7 +1303,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# the first row will have been fetched and current assumptions
# are that the result has only one row, until executemany()
# support is added here.
- assert result.returns_rows
+ assert result._metadata.returns_rows
result._soft_close()
elif not self._is_explicit_returning:
result._soft_close()
@@ -1330,9 +1321,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# test that it has a cursor metadata that is accurate.
# the rows have all been fetched however.
- assert result.returns_rows
+ assert result._metadata.returns_rows
- elif not result.returns_rows:
+ elif not result._metadata.returns_rows:
# no results, get rowcount
# (which requires open cursor on some drivers
# such as kintersbasdb, mxodbc)
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index fe0abf0bb..109ab41fe 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -86,7 +86,7 @@ class ResultMetaData(object):
index = self._index_for_key(key, raiseerr)
if index is not None:
- return operator.methodcaller("_get_by_key_impl_mapping", index)
+ return operator.itemgetter(index)
else:
return None
@@ -169,10 +169,7 @@ class SimpleResultMetaData(ResultMetaData):
self._keymap = {key: rec for keys, rec in recs_names for key in keys}
- if _processors is None:
- self._processors = [None] * len_keys
- else:
- self._processors = _processors
+ self._processors = _processors
def _for_freeze(self):
unique_filters = self._unique_filters
@@ -256,7 +253,9 @@ class SimpleResultMetaData(ResultMetaData):
def result_tuple(fields, extra=None):
parent = SimpleResultMetaData(fields, extra)
- return functools.partial(Row, parent, parent._processors, parent._keymap)
+ return functools.partial(
+ Row, parent, parent._processors, parent._keymap, Row._default_key_style
+ )
# a symbol that indicates to internal Result methods that
@@ -280,6 +279,8 @@ class Result(InPlaceGenerative):
_row_logging_fn = None
+ _source_supports_scalars = False
+ _generate_rows = True
_column_slice_filter = None
_post_creational_filter = None
_unique_filter_state = None
@@ -388,11 +389,14 @@ class Result(InPlaceGenerative):
uniques, strategy = self._unique_filter_state
if not strategy and self._metadata._unique_filters:
- filters = self._metadata._unique_filters
- if self._metadata._tuplefilter:
- filters = self._metadata._tuplefilter(filters)
+ if self._source_supports_scalars:
+ strategy = self._metadata._unique_filters[0]
+ else:
+ filters = self._metadata._unique_filters
+ if self._metadata._tuplefilter:
+ filters = self._metadata._tuplefilter(filters)
- strategy = operator.methodcaller("_filter_on_values", filters)
+ strategy = operator.methodcaller("_filter_on_values", filters)
return uniques, strategy
def columns(self, *col_expressions):
@@ -489,7 +493,8 @@ class Result(InPlaceGenerative):
"""
result = self._column_slices([index])
- result._post_creational_filter = operator.itemgetter(0)
+ if self._generate_rows:
+ result._post_creational_filter = operator.itemgetter(0)
result._no_scalar_onerow = True
return result
@@ -497,11 +502,20 @@ class Result(InPlaceGenerative):
def _column_slices(self, indexes):
self._metadata = self._metadata._reduce(indexes)
+ if self._source_supports_scalars and len(indexes) == 1:
+ self._generate_rows = False
+ else:
+ self._generate_rows = True
+
def _getter(self, key, raiseerr=True):
"""return a callable that will retrieve the given key from a
:class:`.Row`.
"""
+ if self._source_supports_scalars:
+ raise NotImplementedError(
+ "can't use this function in 'only scalars' mode"
+ )
return self._metadata._getter(key, raiseerr)
def _tuple_getter(self, keys):
@@ -509,6 +523,10 @@ class Result(InPlaceGenerative):
:class:`.Row`.
"""
+ if self._source_supports_scalars:
+ raise NotImplementedError(
+ "can't use this function in 'only scalars' mode"
+ )
return self._metadata._row_as_tuple_getter(keys)
@_generative
@@ -527,9 +545,14 @@ class Result(InPlaceGenerative):
"""
self._post_creational_filter = operator.attrgetter("_mapping")
self._no_scalar_onerow = False
+ self._generate_rows = True
def _row_getter(self):
+ if self._source_supports_scalars and not self._generate_rows:
+ return None
+
process_row = self._process_row
+ key_style = self._process_row._default_key_style
metadata = self._metadata
keymap = metadata._keymap
@@ -537,10 +560,11 @@ class Result(InPlaceGenerative):
tf = metadata._tuplefilter
if tf:
- processors = tf(processors)
+ if processors:
+ processors = tf(processors)
_make_row_orig = functools.partial(
- process_row, metadata, processors, keymap
+ process_row, metadata, processors, keymap, key_style
)
def make_row(row):
@@ -548,7 +572,7 @@ class Result(InPlaceGenerative):
else:
make_row = functools.partial(
- process_row, metadata, processors, keymap
+ process_row, metadata, processors, keymap, key_style
)
fns = ()
@@ -626,7 +650,7 @@ class Result(InPlaceGenerative):
def iterrows(self):
for row in self._fetchiter_impl():
- obj = make_row(row)
+ obj = make_row(row) if make_row else row
hashed = strategy(obj) if strategy else obj
if hashed in uniques:
continue
@@ -639,7 +663,7 @@ class Result(InPlaceGenerative):
def iterrows(self):
for row in self._fetchiter_impl():
- row = make_row(row)
+ row = make_row(row) if make_row else row
if post_creational_filter:
row = post_creational_filter(row)
yield row
@@ -658,6 +682,10 @@ class Result(InPlaceGenerative):
def allrows(self):
rows = self._fetchall_impl()
+ if make_row:
+ made_rows = [make_row(row) for row in rows]
+ else:
+ made_rows = rows
rows = [
made_row
for made_row, sig_row in [
@@ -665,7 +693,7 @@ class Result(InPlaceGenerative):
made_row,
strategy(made_row) if strategy else made_row,
)
- for made_row in [make_row(row) for row in rows]
+ for made_row in made_rows
]
if sig_row not in uniques and not uniques.add(sig_row)
]
@@ -678,11 +706,16 @@ class Result(InPlaceGenerative):
def allrows(self):
rows = self._fetchall_impl()
+
if post_creational_filter:
- rows = [
- post_creational_filter(make_row(row)) for row in rows
- ]
- else:
+ if make_row:
+ rows = [
+ post_creational_filter(make_row(row))
+ for row in rows
+ ]
+ else:
+ rows = [post_creational_filter(row) for row in rows]
+ elif make_row:
rows = [make_row(row) for row in rows]
return rows
@@ -708,7 +741,7 @@ class Result(InPlaceGenerative):
if row is None:
return _NO_ROW
else:
- obj = make_row(row)
+ obj = make_row(row) if make_row else row
hashed = strategy(obj) if strategy else obj
if hashed in uniques:
continue
@@ -725,7 +758,7 @@ class Result(InPlaceGenerative):
if row is None:
return _NO_ROW
else:
- row = make_row(row)
+ row = make_row(row) if make_row else row
if post_creational_filter:
row = post_creational_filter(row)
return row
@@ -1042,6 +1075,8 @@ class FrozenResult(object):
def __init__(self, result):
self.metadata = result._metadata._for_freeze()
self._post_creational_filter = result._post_creational_filter
+ self._source_supports_scalars = result._source_supports_scalars
+ self._generate_rows = result._generate_rows
result._post_creational_filter = None
self.data = result.fetchall()
@@ -1056,6 +1091,8 @@ class FrozenResult(object):
def __call__(self):
result = IteratorResult(self.metadata, iter(self.data))
result._post_creational_filter = self._post_creational_filter
+ result._source_supports_scalars = self._source_supports_scalars
+ result._generate_rows = self._generate_rows
return result
@@ -1112,16 +1149,28 @@ class ChunkedIteratorResult(IteratorResult):
"""
- def __init__(self, cursor_metadata, chunks):
+ def __init__(self, cursor_metadata, chunks, source_supports_scalars=False):
self._metadata = cursor_metadata
self.chunks = chunks
+ self._source_supports_scalars = source_supports_scalars
+
+ self.iterator = itertools.chain.from_iterable(
+ self.chunks(None, self._generate_rows)
+ )
- self.iterator = itertools.chain.from_iterable(self.chunks(None))
+ def _column_slices(self, indexes):
+ result = super(ChunkedIteratorResult, self)._column_slices(indexes)
+ self.iterator = itertools.chain.from_iterable(
+ self.chunks(self._yield_per, self._generate_rows)
+ )
+ return result
@_generative
def yield_per(self, num):
self._yield_per = num
- self.iterator = itertools.chain.from_iterable(self.chunks(num))
+ self.iterator = itertools.chain.from_iterable(
+ self.chunks(num, self._generate_rows)
+ )
class MergedResult(IteratorResult):
@@ -1149,6 +1198,8 @@ class MergedResult(IteratorResult):
self._post_creational_filter = results[0]._post_creational_filter
self._no_scalar_onerow = results[0]._no_scalar_onerow
self._yield_per = results[0]._yield_per
+ self._source_supports_scalars = results[0]._source_supports_scalars
+ self._generate_rows = results[0]._generate_rows
def close(self):
self._soft_close(hard=True)
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
index 6cd020110..d279776ce 100644
--- a/lib/sqlalchemy/engine/row.py
+++ b/lib/sqlalchemy/engine/row.py
@@ -14,7 +14,6 @@ from .. import util
from ..sql import util as sql_util
from ..util.compat import collections_abc
-
MD_INDEX = 0 # integer index in cursor.description
# This reconstructor is necessary so that pickles with the C extension or
@@ -40,6 +39,11 @@ except ImportError:
return obj
+KEY_INTEGER_ONLY = 0
+KEY_OBJECTS_ONLY = 1
+KEY_OBJECTS_BUT_WARN = 2
+KEY_OBJECTS_NO_WARN = 3
+
try:
from sqlalchemy.cresultproxy import BaseRow
@@ -48,21 +52,27 @@ except ImportError:
_baserow_usecext = False
class BaseRow(object):
- __slots__ = ("_parent", "_data", "_keymap")
+ __slots__ = ("_parent", "_data", "_keymap", "_key_style")
- def __init__(self, parent, processors, keymap, data):
+ def __init__(self, parent, processors, keymap, key_style, data):
"""Row objects are constructed by CursorResult objects."""
self._parent = parent
- self._data = tuple(
- [
- proc(value) if proc else value
- for proc, value in zip(processors, data)
- ]
- )
+ if processors:
+ self._data = tuple(
+ [
+ proc(value) if proc else value
+ for proc, value in zip(processors, data)
+ ]
+ )
+ else:
+ self._data = tuple(data)
+
self._keymap = keymap
+ self._key_style = key_style
+
def __reduce__(self):
return (
rowproxy_reconstructor,
@@ -70,7 +80,13 @@ except ImportError:
)
def _filter_on_values(self, filters):
- return Row(self._parent, filters, self._keymap, self._data)
+ return Row(
+ self._parent,
+ filters,
+ self._keymap,
+ self._key_style,
+ self._data,
+ )
def _values_impl(self):
return list(self)
@@ -105,10 +121,14 @@ except ImportError:
mdindex = rec[MD_INDEX]
if mdindex is None:
self._parent._raise_for_ambiguous_column_name(rec)
- elif not ismapping and mdindex != key and not isinstance(key, int):
- self._parent._warn_for_nonint(key)
- # TODO: warn for non-int here, RemovedIn20Warning when available
+ elif (
+ self._key_style == KEY_OBJECTS_BUT_WARN
+ and not ismapping
+ and mdindex != key
+ and not isinstance(key, int)
+ ):
+ self._parent._warn_for_nonint(key)
return self._data[mdindex]
@@ -164,6 +184,8 @@ class Row(BaseRow, collections_abc.Sequence):
__slots__ = ()
+ _default_key_style = KEY_INTEGER_ONLY
+
@property
def _mapping(self):
"""Return a :class:`.RowMapping` for this :class:`.Row`.
@@ -182,19 +204,29 @@ class Row(BaseRow, collections_abc.Sequence):
.. versionadded:: 1.4
"""
-
- return RowMapping(self)
+ return RowMapping(
+ self._parent,
+ None,
+ self._keymap,
+ RowMapping._default_key_style,
+ self._data,
+ )
def __contains__(self, key):
return key in self._data
def __getstate__(self):
- return {"_parent": self._parent, "_data": self._data}
+ return {
+ "_parent": self._parent,
+ "_data": self._data,
+ "_key_style": self._key_style,
+ }
def __setstate__(self, state):
self._parent = parent = state["_parent"]
self._data = state["_data"]
self._keymap = parent._keymap
+ self._key_style = state["_key_style"]
def _op(self, other, op):
return (
@@ -305,11 +337,20 @@ class LegacyRow(Row):
"""
+ __slots__ = ()
+
+ if util.SQLALCHEMY_WARN_20:
+ _default_key_style = KEY_OBJECTS_BUT_WARN
+ else:
+ _default_key_style = KEY_OBJECTS_NO_WARN
+
def __contains__(self, key):
return self._parent._contains(key, self)
- def __getitem__(self, key):
- return self._get_by_key_impl(key)
+ if not _baserow_usecext:
+
+ def __getitem__(self, key):
+ return self._get_by_key_impl(key)
@util.deprecated(
"1.4",
@@ -441,7 +482,7 @@ class ROMappingView(
return list(other) != list(self)
-class RowMapping(collections_abc.Mapping):
+class RowMapping(BaseRow, collections_abc.Mapping):
"""A ``Mapping`` that maps column names and objects to :class:`.Row` values.
The :class:`.RowMapping` is available from a :class:`.Row` via the
@@ -463,22 +504,26 @@ class RowMapping(collections_abc.Mapping):
"""
- __slots__ = ("row",)
+ __slots__ = ()
- def __init__(self, row):
- self.row = row
+ _default_key_style = KEY_OBJECTS_ONLY
- def __getitem__(self, key):
- return self.row._get_by_key_impl_mapping(key)
+ if not _baserow_usecext:
+
+ def __getitem__(self, key):
+ return self._get_by_key_impl(key)
+
+ def _values_impl(self):
+ return list(self._data)
def __iter__(self):
- return (k for k in self.row._parent.keys if k is not None)
+ return (k for k in self._parent.keys if k is not None)
def __len__(self):
- return len(self.row)
+ return len(self._data)
def __contains__(self, key):
- return self.row._parent._has_key(key)
+ return self._parent._has_key(key)
def __repr__(self):
return repr(dict(self))
@@ -496,11 +541,11 @@ class RowMapping(collections_abc.Mapping):
"""
- return self.row._parent.keys
+ return self._parent.keys
def values(self):
"""Return a view of values for the values represented in the
underlying :class:`.Row`.
"""
- return ROMappingView(self, self.row._values_impl())
+ return ROMappingView(self, self._values_impl())