| author | mike bayer <mike_mp@zzzcomputing.com> | 2023-04-27 00:13:38 +0000 |
|---|---|---|
| committer | Gerrit Code Review <gerrit@bbpush.zzzcomputing.com> | 2023-04-27 00:13:38 +0000 |
| commit | 0991dbda73651b3d30ac4ca35c8f9b96051e14f9 (patch) | |
| tree | e60739b0d030f62e9338a8506d599c7eade243ee /lib/sqlalchemy/engine | |
| parent | 11535752b94acb41ff684cf8d9c745038addc447 (diff) | |
| parent | 105f18be353965b064750726597b63334fc0716b (diff) | |
| download | sqlalchemy-0991dbda73651b3d30ac4ca35c8f9b96051e14f9.tar.gz | |
Merge "Performance improvement in Row" into main
Diffstat (limited to 'lib/sqlalchemy/engine')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | lib/sqlalchemy/engine/_py_row.py | 15 |
| -rw-r--r-- | lib/sqlalchemy/engine/cursor.py | 44 |
| -rw-r--r-- | lib/sqlalchemy/engine/default.py | 10 |
| -rw-r--r-- | lib/sqlalchemy/engine/result.py | 29 |
| -rw-r--r-- | lib/sqlalchemy/engine/row.py | 10 |
5 files changed, 59 insertions, 49 deletions
diff --git a/lib/sqlalchemy/engine/_py_row.py b/lib/sqlalchemy/engine/_py_row.py
index 4a9acec9b..3358abd78 100644
--- a/lib/sqlalchemy/engine/_py_row.py
+++ b/lib/sqlalchemy/engine/_py_row.py
@@ -99,6 +99,9 @@ class BaseRow:
             pass
         self._parent._key_not_found(name, True)
 
+    def _to_tuple_instance(self) -> Tuple[Any, ...]:
+        return self._data
+
 
 # This reconstructor is necessary so that pickles with the Cy extension or
 # without use the same Binary format.
@@ -111,9 +114,9 @@ def rowproxy_reconstructor(
 
 
 def tuplegetter(*indexes: int) -> _TupleGetterType:
-    it = operator.itemgetter(*indexes)
-
-    if len(indexes) > 1:
-        return it
-    else:
-        return lambda row: (it(row),)
+    if len(indexes) != 1:
+        for i in range(1, len(indexes)):
+            if indexes[i - 1] != indexes[i] - 1:
+                return operator.itemgetter(*indexes)
+    # slice form is faster but returns a list if input is list
+    return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
index bd46f30ac..7491afc3e 100644
--- a/lib/sqlalchemy/engine/cursor.py
+++ b/lib/sqlalchemy/engine/cursor.py
@@ -14,6 +14,7 @@ from __future__ import annotations
 
 import collections
 import functools
+import operator
 import typing
 from typing import Any
 from typing import cast
@@ -1440,39 +1441,46 @@ class CursorResult(Result[_T]):
             # getter assuming no transformations will be called as this
             # is the most common case
 
-            if echo:
-                log = self.context.connection._log_debug
-
-                def _log_row(row):
-                    log("Row %r", sql_util._repr_row(row))
-                    return row
-
-                self._row_logging_fn = log_row = _log_row
-            else:
-                log_row = None
-
             metadata = self._init_metadata(context, cursor_description)
 
             _make_row = functools.partial(
                 Row,
                 metadata,
-                metadata._processors,
+                metadata._effective_processors,
                 metadata._key_to_index,
             )
-            if log_row:
+
+            if context._num_sentinel_cols:
+                sentinel_filter = operator.itemgetter(
+                    slice(-context._num_sentinel_cols)
+                )
+
+                def _sliced_row(raw_data):
+                    return _make_row(sentinel_filter(raw_data))
+
+                sliced_row = _sliced_row
+            else:
+                sliced_row = _make_row
+
+            if echo:
+                log = self.context.connection._log_debug
+
+                def _log_row(row):
+                    log("Row %r", sql_util._repr_row(row))
+                    return row
+
+                self._row_logging_fn = _log_row
 
                 def _make_row_2(row):
-                    made_row = _make_row(row)
-                    assert log_row is not None
-                    log_row(made_row)
-                    return made_row
+                    return _log_row(sliced_row(row))
 
                 make_row = _make_row_2
             else:
-                make_row = _make_row
+                make_row = sliced_row
             self._set_memoized_attribute("_row_getter", make_row)
 
         else:
+            assert context._num_sentinel_cols == 0
             self._metadata = self._no_result_metadata
 
     def _init_metadata(self, context, cursor_description):
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index d60428287..48f5de725 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1883,11 +1883,11 @@ class DefaultExecutionContext(ExecutionContext):
             strategy = _cursor._NO_CURSOR_DML
         elif self._num_sentinel_cols:
             assert self.execute_style is ExecuteStyle.INSERTMANYVALUES
-            if cursor_description:
-                # strip out the sentinel columns from cursor description
-                cursor_description = cursor_description[
-                    0 : -(self._num_sentinel_cols)
-                ]
+            # strip out the sentinel columns from cursor description
+            # a similar logic is done to the rows only in CursorResult
+            cursor_description = cursor_description[
+                0 : -self._num_sentinel_cols
+            ]
 
         result: _cursor.CursorResult[Any] = _cursor.CursorResult(
             self, strategy, cursor_description
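A note on the `tuplegetter()` rewrite in `_py_row.py` above: when the requested column indexes are consecutive, a single slice-based `operator.itemgetter` replaces the multi-index form. The sketch below restates that logic as a standalone function to show the behavior, including the single-index case; the name `contiguous_getter` is illustrative and not part of SQLAlchemy.

```python
import operator
from typing import Any, Callable, Sequence


def contiguous_getter(*indexes: int) -> Callable[[Sequence[Any]], Sequence[Any]]:
    # Same logic as the new tuplegetter(): fall back to a multi-index
    # itemgetter as soon as a gap between indexes is found.
    if len(indexes) != 1:
        for i in range(1, len(indexes)):
            if indexes[i - 1] != indexes[i] - 1:
                return operator.itemgetter(*indexes)
    # Consecutive indexes: one slice lookup instead of N item lookups.
    # Slicing preserves the input type (tuple in, tuple out; list in,
    # list out), hence the Sequence[Any] return annotation.
    return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))


row = ("a", "b", "c", "d")
assert contiguous_getter(1, 2, 3)(row) == ("b", "c", "d")  # slice path
assert contiguous_getter(0, 2)(row) == ("a", "c")          # gap: multi-index path
assert contiguous_getter(2)(row) == ("c",)                 # single index still yields a 1-tuple
```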
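Similarly, the `cursor.py` hunk builds `sentinel_filter = operator.itemgetter(slice(-context._num_sentinel_cols))` to drop trailing sentinel columns from each raw DBAPI row before the `Row` is constructed. A minimal standalone sketch of what that getter does, using an arbitrary sentinel count of 2:

```python
import operator

# An arbitrary sentinel count chosen for illustration.
num_sentinel_cols = 2
sentinel_filter = operator.itemgetter(slice(-num_sentinel_cols))

# itemgetter(slice(-n)) applied to a row is simply row[:-n], so the
# trailing sentinel values never reach the Row constructor.
raw_dbapi_row = (1, "spongebob", "sentinel-a", "sentinel-b")
assert sentinel_filter(raw_dbapi_row) == (1, "spongebob")
```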
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index cc6d26c88..cf34c195a 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -41,6 +41,7 @@ from ..sql.base import _generative
 from ..sql.base import HasMemoized
 from ..sql.base import InPlaceGenerative
 from ..util import HasMemoized_ro_memoized_attribute
+from ..util import NONE_SET
 from ..util._has_cy import HAS_CYEXTENSION
 from ..util.typing import Literal
 from ..util.typing import Self
@@ -84,7 +85,7 @@ across all the result types
 
 _InterimSupportsScalarsRowType = Union[Row, Any]
 
 _ProcessorsType = Sequence[Optional["_ResultProcessorType[Any]"]]
-_TupleGetterType = Callable[[Sequence[Any]], Tuple[Any, ...]]
+_TupleGetterType = Callable[[Sequence[Any]], Sequence[Any]]
 _UniqueFilterType = Callable[[Any], Any]
 _UniqueFilterStateType = Tuple[Set[Any], Optional[_UniqueFilterType]]
@@ -205,6 +206,13 @@ class ResultMetaData:
         else:
             self._key_fallback(key, None)
 
+    @property
+    def _effective_processors(self) -> Optional[_ProcessorsType]:
+        if not self._processors or NONE_SET.issuperset(self._processors):
+            return None
+        else:
+            return self._processors
+
 
 class RMKeyView(typing.KeysView[Any]):
     __slots__ = ("_parent", "_keys")
@@ -390,7 +398,7 @@ def result_tuple(
 ) -> Callable[[Iterable[Any]], Row[Any]]:
     parent = SimpleResultMetaData(fields, extra)
     return functools.partial(
-        Row, parent, parent._processors, parent._key_to_index
+        Row, parent, parent._effective_processors, parent._key_to_index
     )
 
 
@@ -454,7 +462,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
 
             def process_row(  # type: ignore
                 metadata: ResultMetaData,
-                processors: _ProcessorsType,
+                processors: Optional[_ProcessorsType],
                 key_to_index: Mapping[_KeyType, int],
                 scalar_obj: Any,
             ) -> Row[Any]:
@@ -468,7 +476,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
             metadata = self._metadata
 
             key_to_index = metadata._key_to_index
-            processors = metadata._processors
+            processors = metadata._effective_processors
             tf = metadata._tuplefilter
 
             if tf and not real_result._source_supports_scalars:
@@ -489,21 +497,12 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
                     process_row, metadata, processors, key_to_index
                 )
 
-        fns: Tuple[Any, ...] = ()
-
         if real_result._row_logging_fn:
-            fns = (real_result._row_logging_fn,)
-        else:
-            fns = ()
-
-        if fns:
+            _log_row = real_result._row_logging_fn
             _make_row = make_row
 
             def make_row(row: _InterimRowType[Row[Any]]) -> _R:
-                interim_row = _make_row(row)
-                for fn in fns:
-                    interim_row = fn(interim_row)
-                return interim_row  # type: ignore
+                return _log_row(_make_row(row))  # type: ignore
 
         return make_row
 
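The new `ResultMetaData._effective_processors` property above collapses an empty or all-`None` processor list to `None`, so row construction can skip processor handling entirely in the common case. The sketch below assumes `util.NONE_SET` behaves like a frozenset containing only `None` (consistent with the `issuperset` call in the diff) and uses a plain frozenset to stay self-contained; `effective_processors` is an illustrative stand-in, not the SQLAlchemy API.

```python
from typing import Any, Callable, Optional, Sequence

# Assumption: SQLAlchemy's util.NONE_SET behaves like a frozenset that
# holds only None; a plain frozenset keeps this sketch self-contained.
NONE_SET = frozenset([None])

_ProcessorsType = Sequence[Optional[Callable[[Any], Any]]]


def effective_processors(processors: _ProcessorsType) -> Optional[_ProcessorsType]:
    # Collapse an empty or all-None processor list to None so callers can
    # take a no-processing fast path instead of checking each entry per row.
    if not processors or NONE_SET.issuperset(processors):
        return None
    return processors


assert effective_processors([]) is None
assert effective_processors([None, None, None]) is None
assert effective_processors([None, str.upper]) == [None, str.upper]
```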
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
index 4b767da09..da781334a 100644
--- a/lib/sqlalchemy/engine/row.py
+++ b/lib/sqlalchemy/engine/row.py
@@ -40,7 +40,7 @@ else:
 if TYPE_CHECKING:
     from .result import _KeyType
     from .result import RMKeyView
-    from ..sql.type_api import _ResultProcessorType
+    from .result import _ProcessorsType
 
 _T = TypeVar("_T", bound=Any)
 _TP = TypeVar("_TP", bound=Tuple[Any, ...])
@@ -131,9 +131,9 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
         return RowMapping(self._parent, None, self._key_to_index, self._data)
 
     def _filter_on_values(
-        self, filters: Optional[Sequence[Optional[_ResultProcessorType[Any]]]]
+        self, processor: Optional[_ProcessorsType]
     ) -> Row[Any]:
-        return Row(self._parent, filters, self._key_to_index, self._data)
+        return Row(self._parent, processor, self._key_to_index, self._data)
 
     if not TYPE_CHECKING:
 
@@ -163,9 +163,9 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
 
     def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
         return (
-            op(tuple(self), tuple(other))
+            op(self._to_tuple_instance(), other._to_tuple_instance())
             if isinstance(other, Row)
-            else op(tuple(self), other)
+            else op(self._to_tuple_instance(), other)
         )
 
     __hash__ = BaseRow.__hash__
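The `row.py` hunk above switches the comparison operators to the cached `_data` tuple returned by `_to_tuple_instance()` instead of calling `tuple(self)`, which would rebuild a tuple through the sequence protocol on every comparison. A toy sketch of that pattern, assuming a minimal tuple-backed wrapper (the `TupleBackedRow` class is illustrative only, not SQLAlchemy code):

```python
import operator
from typing import Any, Callable, Tuple


class TupleBackedRow:
    # Toy stand-in for a Row-like object: the values already live in a
    # tuple, so comparisons can use that tuple directly instead of
    # rebuilding one via tuple(self) (which would go through __getitem__).
    __slots__ = ("_data",)

    def __init__(self, *values: Any) -> None:
        self._data: Tuple[Any, ...] = values

    def __getitem__(self, index: int) -> Any:
        return self._data[index]

    def __len__(self) -> int:
        return len(self._data)

    def _to_tuple_instance(self) -> Tuple[Any, ...]:
        return self._data

    def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
        return (
            op(self._to_tuple_instance(), other._to_tuple_instance())
            if isinstance(other, TupleBackedRow)
            else op(self._to_tuple_instance(), other)
        )

    def __eq__(self, other: Any) -> bool:
        return self._op(other, operator.eq)

    def __lt__(self, other: Any) -> bool:
        return self._op(other, operator.lt)

    def __hash__(self) -> int:
        return hash(self._data)


assert TupleBackedRow(1, "x") == TupleBackedRow(1, "x")
assert TupleBackedRow(1, "x") == (1, "x")
assert TupleBackedRow(1, "a") < TupleBackedRow(1, "b")
```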
