diff options
| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2019-06-04 17:29:20 -0400 |
|---|---|---|
| committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2020-02-21 17:53:33 -0500 |
| commit | f559f378c47811b5528ad1769cb86925e85fd1e5 (patch) | |
| tree | fd8325501a96cf1e4280c15f267f63b2af7b5f97 /lib/sqlalchemy | |
| parent | 93b7767d00267ebe149cabcae7246b6796352eb8 (diff) | |
| download | sqlalchemy-f559f378c47811b5528ad1769cb86925e85fd1e5.tar.gz | |
Result initial introduction
This builds on cc718cccc0bf8a01abdf4068c7ea4f3 which moved
RowProxy to Row, allowing Row to be more like a named tuple.
- KeyedTuple in ORM is replaced with Row
- ResultSetMetaData broken out into "simple" and "cursor" versions
for ORM and Core, as well as LegacyCursor version.
- Row now has _mapping attribute that supplies full mapping behavior.
Row and SimpleRow both have named tuple behavior otherwise.
LegacyRow has some mapping features on the tuple which emit
deprecation warnings (e.g. keys(), values(), etc). The biggest
change for mapping->tuple is the behavior of __contains__ which
moves from testing of "key in row" to "value in row".
- ResultProxy breaks into ResultProxy and FutureResult (interim),
the latter has the newer APIs. Made available to dialects
using execution options.
- internal reflection methods and most tests move off of implicit
Row mapping behavior and move to row._mapping, result.mappings()
method using future result
- a new strategy system for cursor handling replaces the various
subclasses of RowProxy
- some execution context adjustments. We will leave EC in but
have refined things like get_result_proxy() and out parameter handling.
Dialects for 1.4 will need to adjust from get_result_proxy()
to get_result_cursor_strategy(), if they are using this method
- out parameter handling now accommodated by get_out_parameter_values()
EC method. Oracle changes for this. external dialect for
DB2 for example will also need to adjust for this.
- deprecate case_insensitive flag for engine / result, as this
feature is not used
mapping-methods on Row are deprecated, and replaced with
Row._mapping.<meth>, including:
row.keys() -> use row._mapping.keys()
row.items() -> use row._mapping.items()
row.values() -> use row._mapping.values()
key in row -> use key in row._mapping
int in row -> use int < len(row)
Fixes: #4710
Fixes: #4878
Change-Id: Ieb9085e9bcff564359095b754da9ae0af55679f0
Diffstat (limited to 'lib/sqlalchemy')
28 files changed, 1836 insertions, 1028 deletions
diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c index f6523359d..3c44010b8 100644 --- a/lib/sqlalchemy/cextension/resultproxy.c +++ b/lib/sqlalchemy/cextension/resultproxy.c @@ -277,14 +277,9 @@ BaseRow_getitem(BaseRow *self, Py_ssize_t i) } static PyObject * -BaseRow_getitem_by_object(BaseRow *self, PyObject *key) +BaseRow_getitem_by_object(BaseRow *self, PyObject *key, int asmapping) { PyObject *record, *indexobject; - PyObject *exc_module, *exception, *cstr_obj; -#if PY_MAJOR_VERSION >= 3 - PyObject *bytes; -#endif - char *cstr_key; long index; int key_fallback = 0; @@ -308,49 +303,14 @@ BaseRow_getitem_by_object(BaseRow *self, PyObject *key) } if (indexobject == Py_None) { - exc_module = PyImport_ImportModule("sqlalchemy.exc"); - if (exc_module == NULL) - return NULL; - - exception = PyObject_GetAttrString(exc_module, - "InvalidRequestError"); - Py_DECREF(exc_module); - if (exception == NULL) - return NULL; - - cstr_obj = PyTuple_GetItem(record, 2); - if (cstr_obj == NULL) - return NULL; + PyObject *tmp; - cstr_obj = PyObject_Str(cstr_obj); - if (cstr_obj == NULL) - return NULL; - -/* - FIXME: raise encoding error exception (in both versions below) - if the key contains non-ascii chars, instead of an - InvalidRequestError without any message like in the - python version. 
-*/ - - -#if PY_MAJOR_VERSION >= 3 - bytes = PyUnicode_AsASCIIString(cstr_obj); - if (bytes == NULL) - return NULL; - cstr_key = PyBytes_AS_STRING(bytes); -#else - cstr_key = PyString_AsString(cstr_obj); -#endif - if (cstr_key == NULL) { - Py_DECREF(cstr_obj); + tmp = PyObject_CallMethod(self->parent, "_raise_for_ambiguous_column_name", "(O)", record); + if (tmp == NULL) { return NULL; } - Py_DECREF(cstr_obj); + Py_DECREF(tmp); - PyErr_Format(exception, - "Ambiguous column name '%.200s' in " - "result set column descriptions", cstr_key); return NULL; } @@ -363,6 +323,16 @@ BaseRow_getitem_by_object(BaseRow *self, PyObject *key) /* -1 can be either the actual value, or an error flag. */ return NULL; + if (!asmapping) { + PyObject *tmp; + + tmp = PyObject_CallMethod(self->parent, "_warn_for_nonint", "O", key); + if (tmp == NULL) { + return NULL; + } + Py_DECREF(tmp); + } + return BaseRow_getitem(self, index); } @@ -400,17 +370,7 @@ BaseRow_subscript_impl(BaseRow *self, PyObject *key, int asmapping) Py_DECREF(values); return result; } else { - /* - // if we want to warn for non-integer access by getitem, - // that would happen here. - if (!asmapping) { - tmp = PyObject_CallMethod(self->parent, "_warn_for_nonint", ""); - if (tmp == NULL) { - return NULL; - } - Py_DECREF(tmp); - }*/ - return BaseRow_getitem_by_object(self, key); + return BaseRow_getitem_by_object(self, key, asmapping); } } diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 4339551a3..a3855cc2c 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -685,7 +685,6 @@ import operator import re from . import information_schema as ischema -from ... import engine from ... import exc from ... import schema as sa_schema from ... import sql @@ -693,6 +692,7 @@ from ... import types as sqltypes from ... 
import util from ...engine import default from ...engine import reflection +from ...engine import result as _result from ...sql import compiler from ...sql import elements from ...sql import expression @@ -1431,8 +1431,9 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): class MSExecutionContext(default.DefaultExecutionContext): _enable_identity_insert = False _select_lastrowid = False - _result_proxy = None _lastrowid = None + _rowcount = None + _result_strategy = None def _opt_encode(self, statement): if not self.dialect.supports_unicode_statements: @@ -1500,6 +1501,10 @@ class MSExecutionContext(default.DefaultExecutionContext): """Disable IDENTITY_INSERT if enabled.""" conn = self.root_connection + + if self.isinsert or self.isupdate or self.isdelete: + self._rowcount = self.cursor.rowcount + if self._select_lastrowid: if self.dialect.use_scope_identity: conn._cursor_execute( @@ -1516,10 +1521,13 @@ class MSExecutionContext(default.DefaultExecutionContext): row = self.cursor.fetchall()[0] self._lastrowid = int(row[0]) - if ( + elif ( self.isinsert or self.isupdate or self.isdelete ) and self.compiled.returning: - self._result_proxy = engine.FullyBufferedResultProxy(self) + fbcr = _result.FullyBufferedCursorFetchStrategy + self._result_strategy = fbcr.create_from_buffer( + self.cursor, self.cursor.description, self.cursor.fetchall() + ) if self._enable_identity_insert: conn._cursor_execute( @@ -1537,6 +1545,13 @@ class MSExecutionContext(default.DefaultExecutionContext): def get_lastrowid(self): return self._lastrowid + @property + def rowcount(self): + if self._rowcount is not None: + return self._rowcount + else: + return self.cursor.rowcount + def handle_dbapi_exception(self, e): if self._enable_identity_insert: try: @@ -1551,11 +1566,13 @@ class MSExecutionContext(default.DefaultExecutionContext): except Exception: pass - def get_result_proxy(self): - if self._result_proxy: - return self._result_proxy + def get_result_cursor_strategy(self, result): + if 
self._result_strategy: + return self._result_strategy else: - return engine.ResultProxy(self) + return super(MSExecutionContext, self).get_result_cursor_strategy( + result + ) class MSSQLCompiler(compiler.SQLCompiler): @@ -2570,7 +2587,7 @@ class MSDialect(default.DefaultDialect): if self.server_version_info < MS_2005_VERSION: return [] - rp = connection.execute( + rp = connection.execution_options(future_result=True).execute( sql.text( "select ind.index_id, ind.is_unique, ind.name " "from sys.indexes as ind join sys.tables as tab on " @@ -2587,13 +2604,13 @@ class MSDialect(default.DefaultDialect): .columns(name=sqltypes.Unicode()) ) indexes = {} - for row in rp: + for row in rp.mappings(): indexes[row["index_id"]] = { "name": row["name"], "unique": row["is_unique"] == 1, "column_names": [], } - rp = connection.execute( + rp = connection.execution_options(future_result=True).execute( sql.text( "select ind_col.index_id, ind_col.object_id, col.name " "from sys.columns as col " @@ -2611,7 +2628,7 @@ class MSDialect(default.DefaultDialect): ) .columns(name=sqltypes.Unicode()) ) - for row in rp: + for row in rp.mappings(): if row["index_id"] in indexes: indexes[row["index_id"]]["column_names"].append(row["name"]) @@ -2657,12 +2674,10 @@ class MSDialect(default.DefaultDialect): [columns], whereclause, order_by=[columns.c.ordinal_position] ) - c = connection.execute(s) + c = connection.execution_options(future_result=True).execute(s) cols = [] - while True: - row = c.fetchone() - if row is None: - break + + for row in c.mappings(): ( name, type_, @@ -2785,9 +2800,9 @@ class MSDialect(default.DefaultDialect): C.c.table_schema == owner, ), ) - c = connection.execute(s) + c = connection.execution_options(future_result=True).execute(s) constraint_name = None - for row in c: + for row in c.mappings(): if "PRIMARY" in row[TC.c.constraint_type.name]: pkeys.append(row[0]) if constraint_name is None: diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py 
b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 962db750c..66a429d35 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -26,17 +26,11 @@ import re from .base import BIT from .base import MySQLCompiler from .base import MySQLDialect -from .base import MySQLExecutionContext from .base import MySQLIdentifierPreparer from ... import processors from ... import util -class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): - def get_lastrowid(self): - return self.cursor.lastrowid - - class MySQLCompiler_mysqlconnector(MySQLCompiler): def visit_mod_binary(self, binary, operator, **kw): if self.dialect._mysqlconnector_double_percents: @@ -100,7 +94,6 @@ class MySQLDialect_mysqlconnector(MySQLDialect): supports_native_decimal = True default_paramstyle = "format" - execution_ctx_cls = MySQLExecutionContext_mysqlconnector statement_compiler = MySQLCompiler_mysqlconnector preparer = MySQLIdentifierPreparer_mysqlconnector diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 8c69bf097..f6de4de68 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -931,6 +931,12 @@ class OracleCompiler(compiler.SQLCompiler): binds.append( self.bindparam_string(self._truncate_bindparam(outparam)) ) + + # ensure the ExecutionContext.get_out_parameters() method is + # *not* called; the cx_Oracle dialect wants to handle these + # parameters separately + self.has_out_parameters = False + columns.append(self.process(col_expr, within_columns_clause=False)) self._add_to_result_map( @@ -1454,9 +1460,11 @@ class OracleDialect(default.DefaultDialect): q += " AND ".join(clauses) - result = connection.execute(sql.text(q), **params) + result = connection.execution_options(future_result=True).execute( + sql.text(q), **params + ) if desired_owner: - row = result.first() + row = result.mappings().first() if row: return ( 
row["table_name"], @@ -1467,7 +1475,7 @@ class OracleDialect(default.DefaultDialect): else: return None, None, None, None else: - rows = result.fetchall() + rows = result.mappings().all() if len(rows) > 1: raise AssertionError( "There are multiple tables visible to the schema, you " @@ -1621,8 +1629,10 @@ class OracleDialect(default.DefaultDialect): row = result.first() if row: - if "compression" in row and enabled.get(row.compression, False): - if "compress_for" in row: + if "compression" in row._fields and enabled.get( + row.compression, False + ): + if "compress_for" in row._fields: options["oracle_compress"] = row.compress_for else: options["oracle_compress"] = True diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index b000a4615..69423992f 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -569,9 +569,10 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): param[toname] = param[fromname] del param[fromname] - def _handle_out_parameters(self): - # if a single execute, check for outparams - if len(self.compiled_parameters) == 1: + def _generate_out_parameter_vars(self): + # check for has_out_parameters or RETURNING, create cx_Oracle.var + # objects if so + if self.compiled.returning or self.compiled.has_out_parameters: quoted_bind_names = self.compiled._quoted_bind_names for bindparam in self.compiled.binds.values(): if bindparam.isoutparam: @@ -645,7 +646,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): include_types=self.dialect._include_setinputsizes, ) - self._handle_out_parameters() + self._generate_out_parameter_vars() self._generate_cursor_outputtype_handler() @@ -656,66 +657,51 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): return c - def get_result_proxy(self): - if self.out_parameters and self.compiled.returning: + def get_out_parameter_values(self, out_param_names): + # this method 
should not be called when the compiler has + # RETURNING as we've turned the has_out_parameters flag set to + # False. + assert not self.compiled.returning + + return [ + self.dialect._paramval(self.out_parameters[name]) + for name in out_param_names + ] + + def get_result_cursor_strategy(self, result): + if self.compiled and self.out_parameters and self.compiled.returning: + # create a fake cursor result from the out parameters. unlike + # get_out_parameter_values(), the result-row handlers here will be + # applied at the Result level returning_params = [ self.dialect._returningval(self.out_parameters["ret_%d" % i]) for i in range(len(self.out_parameters)) ] - return ReturningResultProxy(self, returning_params) - - result = _result.ResultProxy(self) - - if self.out_parameters: - if ( - self.compiled_parameters is not None - and len(self.compiled_parameters) == 1 - ): - result.out_parameters = out_parameters = {} - - for bind, name in self.compiled.bind_names.items(): - if name in self.out_parameters: - type_ = bind.type - impl_type = type_.dialect_impl(self.dialect) - dbapi_type = impl_type.get_dbapi_type( - self.dialect.dbapi - ) - result_processor = impl_type.result_processor( - self.dialect, dbapi_type - ) - if result_processor is not None: - out_parameters[name] = result_processor( - self.dialect._paramval( - self.out_parameters[name] - ) - ) - else: - out_parameters[name] = self.dialect._paramval( - self.out_parameters[name] - ) - else: - result.out_parameters = dict( - (k, self._dialect._paramval(v)) - for k, v in self.out_parameters.items() - ) - return result + return ReturningResultStrategy( + result, result.cursor, returning_params + ) + else: + return super( + OracleExecutionContext_cx_oracle, self + ).get_result_cursor_strategy(result) -class ReturningResultProxy(_result.FullyBufferedResultProxy): - """Result proxy which stuffs the _returning clause + outparams - into the fetch.""" +class 
ReturningResultStrategy(_result.FullyBufferedCursorFetchStrategy): + __slots__ = ("_returning_params",) - def __init__(self, context, returning_params): + def __init__(self, result, dbapi_cursor, returning_params): self._returning_params = returning_params - super(ReturningResultProxy, self).__init__(context) - def _cursor_description(self): - returning = self.context.compiled.returning - return [ + returning = result.context.compiled.returning + cursor_description = [ (getattr(col, "name", col.anon_label), None) for col in returning ] + super(ReturningResultStrategy, self).__init__( + dbapi_cursor, cursor_description + ) + def _buffer_rows(self): return collections.deque([tuple(self._returning_params)]) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index ee81fc020..b30e77704 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -3538,18 +3538,18 @@ class PGDialect(default.DefaultDialect): enums = [] enum_by_name = {} for enum in c.fetchall(): - key = (enum["schema"], enum["name"]) + key = (enum.schema, enum.name) if key in enum_by_name: - enum_by_name[key]["labels"].append(enum["label"]) + enum_by_name[key]["labels"].append(enum.label) else: enum_by_name[key] = enum_rec = { - "name": enum["name"], - "schema": enum["schema"], - "visible": enum["visible"], + "name": enum.name, + "schema": enum.schema, + "visible": enum.visible, "labels": [], } - if enum["label"] is not None: - enum_rec["labels"].append(enum["label"]) + if enum.label is not None: + enum_rec["labels"].append(enum.label) enums.append(enum_rec) return enums @@ -3568,10 +3568,11 @@ class PGDialect(default.DefaultDialect): """ s = sql.text(SQL_DOMAINS).columns(attname=sqltypes.Unicode) - c = connection.execute(s) + c = connection.execution_options(future_result=True).execute(s) domains = {} - for domain in c.fetchall(): + for domain in c.mappings(): + domain = domain # strip (30) from character 
varying(30) attype = re.search(r"([^\(]+)", domain["attype"]).group(1) # 'visible' just means whether or not the domain is in a diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index e4867fa0b..0b6afc337 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -458,7 +458,6 @@ from ... import exc from ... import processors from ... import types as sqltypes from ... import util -from ...engine import result as _result from ...util import collections_abc try: @@ -577,13 +576,12 @@ class PGExecutionContext_psycopg2(PGExecutionContext): ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:]) return self._dbapi_connection.cursor(ident) - def get_result_proxy(self): + def get_result_cursor_strategy(self, result): self._log_notices(self.cursor) - if self._is_server_side: - return _result.BufferedRowResultProxy(self) - else: - return _result.ResultProxy(self) + return super(PGExecutionContext, self).get_result_cursor_strategy( + result + ) def _log_notices(self, cursor): # check also that notices is an iterable, after it's already diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index a129069a4..7f0270b42 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -38,10 +38,23 @@ from .result import BufferedColumnResultProxy # noqa from .result import BufferedColumnRow # noqa from .result import BufferedRowResultProxy # noqa from .result import FullyBufferedResultProxy # noqa +from .result import LegacyRow # noqa +from .result import result_tuple # noqa from .result import ResultProxy # noqa from .result import Row # noqa +from .result import RowMapping # noqa from .util import connection_memoize # noqa from ..sql import ddl # noqa __all__ = ("create_engine", "engine_from_config", "create_mock_engine") + + +def __go(lcls): + from .. import future + from . 
import result + + result._future_Result = future.Result + + +__go(locals()) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 29df67dcb..ce6c2e9c6 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1297,12 +1297,7 @@ class Connection(Connectable): if context.compiled: context.post_exec() - if context.is_crud or context.is_text: - result = context._setup_crud_result_proxy() - else: - result = context.get_result_proxy() - if result._metadata is None: - result._soft_close() + result = context._setup_result_proxy() if context.should_autocommit and self._root.__transaction is None: self._root._commit_impl(autocommit=True) @@ -1310,6 +1305,8 @@ class Connection(Connectable): # for "connectionless" execution, we have to close this # Connection after the statement is complete. if self.should_close_with_result: + assert not context._is_future_result + # ResultProxy already exhausted rows / has no rows. # close us now if result._soft_closed: diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 5198c8cd6..3d50b0828 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -34,6 +34,13 @@ from ..sql import compiler 'expressions, or an "empty set" SELECT, at statement execution' "time.", ), + case_sensitive=( + "1.4", + "The :paramref:`.create_engine.case_sensitive` parameter " + "is deprecated and will be removed in a future release. " + "Applications should work with result column names in a case " + "sensitive fashion.", + ), ) def create_engine(url, **kwargs): """Create a new :class:`.Engine` instance. diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 7d36345fd..7efc4bda2 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -19,7 +19,7 @@ import re import weakref from . import interfaces -from . import result +from . import result as _result from .. import event from .. 
import exc from .. import pool @@ -201,6 +201,13 @@ class DefaultDialect(interfaces.Dialect): 'expressions, or an "empty set" SELECT, at statement execution' "time.", ), + case_sensitive=( + "1.4", + "The :paramref:`.create_engine.case_sensitive` parameter " + "is deprecated and will be removed in a future release. " + "Applications should work with result column names in a case " + "sensitive fashion.", + ), ) def __init__( self, @@ -667,6 +674,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext): returned_defaults = None _is_implicit_returning = False _is_explicit_returning = False + _is_future_result = False + _is_server_side = False # a hook for SQLite's translation of # result column names @@ -725,6 +734,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext): # we get here assert compiled.can_execute + self._is_future_result = connection._execution_options.get( + "future_result", False + ) self.execution_options = compiled.execution_options.union( connection._execution_options ) @@ -860,6 +872,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext): self.dialect = connection.dialect self.is_text = True + self._is_future_result = connection._execution_options.get( + "future_result", False + ) + # plain text statement self.execution_options = connection._execution_options @@ -1035,6 +1051,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext): def pre_exec(self): pass + def get_out_parameter_values(self, names): + raise NotImplementedError( + "This dialect does not support OUT parameters" + ) + def post_exec(self): pass @@ -1051,27 +1072,18 @@ class DefaultExecutionContext(interfaces.ExecutionContext): def get_lastrowid(self): """return self.cursor.lastrowid, or equivalent, after an INSERT. 
- This may involve calling special cursor functions, - issuing a new SELECT on the cursor (or a new one), - or returning a stored value that was + This may involve calling special cursor functions, issuing a new SELECT + on the cursor (or a new one), or returning a stored value that was calculated within post_exec(). - This function will only be called for dialects - which support "implicit" primary key generation, - keep preexecute_autoincrement_sequences set to False, - and when no explicit id value was bound to the - statement. + This function will only be called for dialects which support "implicit" + primary key generation, keep preexecute_autoincrement_sequences set to + False, and when no explicit id value was bound to the statement. - The function is called once, directly after - post_exec() and before the transaction is committed - or ResultProxy is generated. If the post_exec() - method assigns a value to `self._lastrowid`, the - value is used in place of calling get_lastrowid(). - - Note that this method is *not* equivalent to the - ``lastrowid`` method on ``ResultProxy``, which is a - direct proxy to the DBAPI ``lastrowid`` accessor - in all cases. + The function is called once for an INSERT statement that would need to + return the last inserted primary key for those dialects that make use + of the lastrowid concept. In these cases, it is called directly after + :meth:`.ExecutionContext.post_exec`. 
""" return self.cursor.lastrowid @@ -1079,11 +1091,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext): def handle_dbapi_exception(self, e): pass - def get_result_proxy(self): + def get_result_cursor_strategy(self, result): if self._is_server_side: - return result.BufferedRowResultProxy(self) + strat_cls = _result.BufferedRowCursorFetchStrategy else: - return result.ResultProxy(self) + strat_cls = _result.DefaultCursorFetchStrategy + + return strat_cls.create(result) @property def rowcount(self): @@ -1095,6 +1109,49 @@ class DefaultExecutionContext(interfaces.ExecutionContext): def supports_sane_multi_rowcount(self): return self.dialect.supports_sane_multi_rowcount + def _setup_result_proxy(self): + if self.is_crud or self.is_text: + result = self._setup_crud_result_proxy() + else: + result = _result.ResultProxy._create_for_context(self) + + if ( + self.compiled + and not self.isddl + and self.compiled.has_out_parameters + ): + self._setup_out_parameters(result) + + return result + + def _setup_out_parameters(self, result): + + out_bindparams = [ + (param, name) + for param, name in self.compiled.bind_names.items() + if param.isoutparam + ] + out_parameters = {} + + for bindparam, raw_value in zip( + [param for param, name in out_bindparams], + self.get_out_parameter_values( + [name for param, name in out_bindparams] + ), + ): + + type_ = bindparam.type + impl_type = type_.dialect_impl(self.dialect) + dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi) + result_processor = impl_type.result_processor( + self.dialect, dbapi_type + ) + if result_processor is not None: + raw_value = result_processor(raw_value) + out_parameters[bindparam.key] = raw_value + + result.out_parameters = out_parameters + def _setup_crud_result_proxy(self): if self.isinsert and not self.executemany: if ( @@ -1108,11 +1165,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext): elif not self._is_implicit_returning: self._setup_ins_pk_from_empty() - result = 
self.get_result_proxy() + result = _result.ResultProxy._create_for_context(self) if self.isinsert: if self._is_implicit_returning: - row = result.fetchone() + row = result._onerow() self.returned_defaults = row self._setup_ins_pk_from_implicit_returning(row) result._soft_close() @@ -1121,7 +1178,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext): result._soft_close() result._metadata = None elif self.isupdate and self._is_implicit_returning: - row = result.fetchone() + row = result._onerow() self.returned_defaults = row result._soft_close() result._metadata = None @@ -1179,8 +1236,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext): key_getter = self.compiled._key_getters_for_crud_column[2] table = self.compiled.statement.table compiled_params = self.compiled_parameters[0] + + # TODO: why are we using keyed index here? can't we get the ints? + # can compiler build up the structure here as far as what was + # explicit and what comes back in returning? + row_mapping = row._mapping self.inserted_primary_key = [ - row[col] if value is None else value + row_mapping[col] if value is None else value for col, value in [ (col, compiled_params.get(key_getter(col), None)) for col in table.primary_key diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 237eb0f2f..3d4308df2 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1044,6 +1044,44 @@ class ExecutionContext(object): raise NotImplementedError() + def get_out_parameter_values(self, out_param_names): + """Return a sequence of OUT parameter values from a cursor. + + For dialects that support OUT parameters, this method will be called + when there is a :class:`.SQLCompiler` object which has the + :attr:`.SQLCompiler.has_out_parameters` flag set. 
This flag in turn + will be set to True if the statement itself has :class:`.BindParameter` + objects that have the ``.isoutparam`` flag set which are consumed by + the :meth:`.SQLCompiler.visit_bindparam` method. If the dialect + compiler produces :class:`.BindParameter` objects with ``.isoutparam`` + set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it + should set this flag explicitly. + + The list of names that were rendered for each bound parameter + is passed to the method. The method should then return a sequence of + values corresponding to the list of parameter objects. Unlike in + previous SQLAlchemy versions, the values can be the **raw values** from + the DBAPI; the execution context will apply the appropriate type + handler based on what's present in self.compiled.binds and update the + values. The processed dictionary will then be made available via the + ``.out_parameters`` collection on the result object. Note that + SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0 + transition. + + .. versionadded:: 1.4 - added + :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked + automatically by the :class:`.DefaultExecutionContext` when there + are :class:`.BindParameter` objects with the ``.isoutparam`` flag + set. This replaces the practice of setting out parameters within + the now-removed ``get_result_proxy()`` method. + + .. seealso:: + + :meth:`.ExecutionContext.get_result_cursor_strategy` + + """ + raise NotImplementedError() + def post_exec(self): """Called after the execution of a compiled statement. @@ -1054,12 +1092,67 @@ class ExecutionContext(object): raise NotImplementedError() - def result(self): - """Return a result object corresponding to this ExecutionContext. + def get_result_cursor_strategy(self, result): + """Return a result cursor strategy for a given result object. - Returns a ResultProxy. 
- """ + This method is implemented by the :class:`.DefaultDialect` and is + only needed by implementing dialects in the case where some special + steps regarding the cursor must be taken, such as manufacturing + fake results from some other element of the cursor, or pre-buffering + the cursor's results. + + A simplified version of the default implementation is:: + + from sqlalchemy.engine.result import DefaultCursorFetchStrategy + + class MyExecutionContext(DefaultExecutionContext): + def get_result_cursor_strategy(self, result): + return DefaultCursorFetchStrategy.create(result) + + Above, the :class:`.DefaultCursorFetchStrategy` will be applied + to the result object. For results that are pre-buffered from a + cursor that might be closed, an implementation might be:: + + from sqlalchemy.engine.result import ( + FullyBufferedCursorFetchStrategy + ) + + class MyExecutionContext(DefaultExecutionContext): + _pre_buffered_result = None + + def pre_exec(self): + if self.special_condition_prebuffer_cursor(): + self._pre_buffered_result = ( + self.cursor.description, + self.cursor.fetchall() + ) + + def get_result_cursor_strategy(self, result): + if self._pre_buffered_result: + description, cursor_buffer = self._pre_buffered_result + return ( + FullyBufferedCursorFetchStrategy. + create_from_buffer( + result, description, cursor_buffer + ) + ) + else: + return DefaultCursorFetchStrategy.create(result) + + This method replaces the previous not-quite-documented + ``get_result_proxy()`` method. + + .. versionadded:: 1.4 - result objects now interpret cursor results + based on a pluggable "strategy" object, which is delivered + by the :class:`.ExecutionContext` via the + :meth:`.ExecutionContext.get_result_cursor_strategy` method. + + .. 
seealso:: + + :meth:`.ExecutionContext.get_out_parameter_values` + + """ raise NotImplementedError() def handle_dbapi_exception(self, e): diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 13738cb46..1a63c307b 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -5,13 +5,20 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Define result set constructs including :class:`.ResultProxy` -and :class:`.Row`.""" +"""Define result set constructs including :class:`.Result`""" import collections +import functools import operator +from .row import _baserow_usecext +from .row import BaseRow # noqa +from .row import LegacyRow # noqa +from .row import Row # noqa +from .row import RowMapping # noqa +from .row import RowProxy # noqa +from .row import rowproxy_reconstructor # noqa from .. import exc from .. import util from ..sql import expression @@ -21,264 +28,81 @@ from ..sql.compiler import RM_NAME from ..sql.compiler import RM_OBJECTS from ..sql.compiler import RM_RENDERED_NAME from ..sql.compiler import RM_TYPE -from ..util.compat import collections_abc +if _baserow_usecext: + from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter _UNPICKLED = util.symbol("unpickled") -# This reconstructor is necessary so that pickles with the C extension or -# without use the same Binary format. -try: - # We need a different reconstructor on the C extension so that we can - # add extra checks that fields have correctly been initialized by - # __setstate__. - from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor - - # The extra function embedding is needed so that the - # reconstructor function has the same signature whether or not - # the extension is present. 
- def rowproxy_reconstructor(cls, state): - return safe_rowproxy_reconstructor(cls, state) +# cyclical import for sqlalchemy.future +_future_Result = None +# metadata entry tuple indexes. +# using raw tuple is faster than namedtuple. +MD_INDEX = 0 # integer index in cursor.description +MD_OBJECTS = 1 # other string keys and ColumnElement obj that can match +MD_LOOKUP_KEY = 2 # string key we usually expect for key-based lookup +MD_RENDERED_NAME = 3 # name that is usually in cursor.description +MD_PROCESSOR = 4 # callable to process a result value into a row +MD_UNTRANSLATED = 5 # raw name from cursor.description -except ImportError: - - def rowproxy_reconstructor(cls, state): - obj = cls.__new__(cls) - obj.__setstate__(state) - return obj +class ResultMetaData(object): + __slots__ = () -try: - from sqlalchemy.cresultproxy import BaseRow - from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter + def _has_key(self, key): + return key in self._keymap - _baserow_usecext = True -except ImportError: - _baserow_usecext = False + def _key_fallback(self, key): + if isinstance(key, int): + raise IndexError(key) + else: + raise KeyError(key) - class BaseRow(object): - __slots__ = ("_parent", "_data", "_keymap") - def __init__(self, parent, processors, keymap, data): - """Row objects are constructed by ResultProxy objects.""" +class SimpleResultMetaData(ResultMetaData): + __slots__ = "keys", "_keymap", "_processors" - self._parent = parent + def __init__(self, keys, extra=None): + self.keys = list(keys) - self._data = tuple( - [ - proc(value) if proc else value - for proc, value in zip(processors, data) - ] - ) - self._keymap = keymap + len_keys = len(keys) - def __reduce__(self): - return ( - rowproxy_reconstructor, - (self.__class__, self.__getstate__()), + self._keymap = { + name: (index, name) for index, name in enumerate(self.keys) + } + if not _baserow_usecext: + self._keymap.update( + { + index: (index, None, self.keys[index]) + for index in range(len_keys) + } 
) - - def _values_impl(self): - return list(self) - - def __iter__(self): - return iter(self._data) - - def __len__(self): - return len(self._data) - - def __hash__(self): - return hash(self._data) - - def _get_by_key_impl(self, key): - try: + if extra: + for key, ex in zip(keys, extra): rec = self._keymap[key] - except KeyError: - rec = self._parent._key_fallback(key) - except TypeError: - # the non-C version detects a slice using TypeError. - # this is pretty inefficient for the slice use case - # but is more efficient for the integer use case since we - # don't have to check it up front. - if isinstance(key, slice): - return tuple(self._data[key]) - else: - raise - if rec[MD_INDEX] is None: - raise exc.InvalidRequestError( - "Ambiguous column name '%s' in " - "result set column descriptions" % rec[MD_LOOKUP_KEY] - ) - - return self._data[rec[MD_INDEX]] - - def _get_by_key_impl_mapping(self, key): - # the C code has two different methods so that we can distinguish - # between tuple-like keys (integers, slices) and mapping-like keys - # (strings, objects) - return self._get_by_key_impl(key) - - def __getattr__(self, name): - try: - return self._get_by_key_impl_mapping(name) - except KeyError as e: - raise AttributeError(e.args[0]) - - -class Row(BaseRow, collections_abc.Sequence): - """Represent a single result row. - - The :class:`.Row` object is retrieved from a database result, from the - :class:`.ResultProxy` object using methods like - :meth:`.ResultProxy.fetchall`. - - The :class:`.Row` object seeks to act mostly like a Python named - tuple, but also provides some Python dictionary behaviors at the same time. - - .. seealso:: - - :ref:`coretutorial_selecting` - includes examples of selecting - rows from SELECT statements. - - .. versionchanged 1.4:: - - Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a - "proxy" object in that it contains the final form of data within it. 
- - """ - - __slots__ = () - - def __contains__(self, key): - return self._parent._has_key(key) + self._keymap.update({e: rec for e in ex}) + self._processors = [None] * len(keys) def __getstate__(self): - return {"_parent": self._parent, "_data": self._data} + return {"keys": self.keys} def __setstate__(self, state): - self._parent = parent = state["_parent"] - self._data = state["_data"] - self._keymap = parent._keymap - - def _op(self, other, op): - return ( - op(tuple(self), tuple(other)) - if isinstance(other, Row) - else op(tuple(self), other) - ) - - __hash__ = BaseRow.__hash__ - - def __lt__(self, other): - return self._op(other, operator.lt) - - def __le__(self, other): - return self._op(other, operator.le) + self.__init__(state["keys"]) - def __ge__(self, other): - return self._op(other, operator.ge) - - def __gt__(self, other): - return self._op(other, operator.gt) - - def __eq__(self, other): - return self._op(other, operator.eq) - - def __ne__(self, other): - return self._op(other, operator.ne) - - def __repr__(self): - return repr(sql_util._repr_row(self)) - - def has_key(self, key): - """Return True if this :class:`.Row` contains the given key. - - Through the SQLAlchemy 1.x series, the ``__contains__()`` method - of :class:`.Row` also links to :meth:`.Row.has_key`, in that - an expression such as :: - - "some_col" in row - - Will return True if the row contains a column named ``"some_col"``, - in the way that a Python mapping works. - - However, it is planned that the 2.0 series of SQLAlchemy will reverse - this behavior so that ``__contains__()`` will refer to a value being - present in the row, in the way that a Python tuple works. - - """ - - return self._parent._has_key(key) - - def __getitem__(self, key): - return self._get_by_key_impl(key) - - def items(self): - """Return a list of tuples, each tuple containing a key/value pair. 
- - This method is analogous to the Python dictionary ``.items()`` method, - except that it returns a list, not an iterator. - - """ - - return [(key, self[key]) for key in self.keys()] - - def keys(self): - """Return the list of keys as strings represented by this - :class:`.Row`. - - This method is analogous to the Python dictionary ``.keys()`` method, - except that it returns a list, not an iterator. - - """ - - return [k for k in self._parent.keys if k is not None] - - def iterkeys(self): - """Return a an iterator against the :meth:`.Row.keys` method. - - This method is analogous to the Python-2-only dictionary - ``.iterkeys()`` method. - - """ - return iter(self._parent.keys) - - def itervalues(self): - """Return a an iterator against the :meth:`.Row.values` method. - - This method is analogous to the Python-2-only dictionary - ``.itervalues()`` method. - - """ - return iter(self) - - def values(self): - """Return the values represented by this :class:`.Row` as a list. - - This method is analogous to the Python dictionary ``.values()`` method, - except that it returns a list, not an iterator. - - """ + def _has_key(self, key): + return key in self._keymap - return self._values_impl() + def _contains(self, value, row): + return value in row._data -BaseRowProxy = BaseRow -RowProxy = Row +def result_tuple(fields, extra=None): + parent = SimpleResultMetaData(fields, extra) + return functools.partial(Row, parent, parent._processors, parent._keymap) -# metadata entry tuple indexes. -# using raw tuple is faster than namedtuple. 
-MD_INDEX = 0 # integer index in cursor.description -MD_OBJECTS = 1 # other string keys and ColumnElement obj that can match -MD_LOOKUP_KEY = 2 # string key we usually expect for key-based lookup -MD_RENDERED_NAME = 3 # name that is usually in cursor.description -MD_PROCESSOR = 4 # callable to process a result value into a row -MD_UNTRANSLATED = 5 # raw name from cursor.description - - -class ResultMetaData(object): +class CursorResultMetaData(ResultMetaData): """Handle cursor.description, applying additional info from an execution context.""" @@ -654,6 +478,7 @@ class ResultMetaData(object): match_map = self._create_description_match_map( result_columns, case_sensitive, loose_column_name_matching ) + self.matched_on_name = True for ( idx, @@ -671,15 +496,6 @@ class ResultMetaData(object): mapped_type = ctx_rec[2] yield idx, colname, mapped_type, coltype, obj, untranslated - def _merge_cols_by_none(self, context, cursor_description): - for ( - idx, - colname, - untranslated, - coltype, - ) in self._colnames_from_description(context, cursor_description): - yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated - @classmethod def _create_description_match_map( cls, @@ -695,6 +511,7 @@ class ResultMetaData(object): d = {} for elem in result_columns: key = elem[RM_RENDERED_NAME] + if not case_sensitive: key = key.lower() if key in d: @@ -717,13 +534,134 @@ class ResultMetaData(object): d.setdefault( r_key, (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE]) ) + return d + def _merge_cols_by_none(self, context, cursor_description): + for ( + idx, + colname, + untranslated, + coltype, + ) in self._colnames_from_description(context, cursor_description): + yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated + + def _key_fallback(self, key, raiseerr=True): + if raiseerr: + raise exc.NoSuchColumnError( + "Could not locate column in row for column '%s'" + % util.string_or_unprintable(key) + ) + else: + return None + + def 
_raise_for_ambiguous_column_name(self, rec): + raise exc.InvalidRequestError( + "Ambiguous column name '%s' in " + "result set column descriptions" % rec[MD_LOOKUP_KEY] + ) + + def _warn_for_nonint(self, key): + raise TypeError( + "TypeError: tuple indices must be integers or slices, not %s" + % type(key).__name__ + ) + + def _getter(self, key, raiseerr=True): + try: + rec = self._keymap[key] + except KeyError: + rec = self._key_fallback(key, raiseerr) + if rec is None: + return None + + index, obj = rec[0:2] + + if index is None: + self._raise_for_ambiguous_column_name(rec) + + return operator.methodcaller("_get_by_key_impl_mapping", index) + + def _tuple_getter(self, keys, raiseerr=True): + """Given a list of keys, return a callable that will deliver a tuple. + + This is strictly used by the ORM and the keys are Column objects. + However, this might be some nice-ish feature if we could find a very + clean way of presenting it. + + note that in the new world of "row._mapping", this is a mapping-getter. + maybe the name should indicate that somehow. 
+ + + """ + indexes = [] + for key in keys: + try: + rec = self._keymap[key] + except KeyError: + rec = self._key_fallback(key, raiseerr) + if rec is None: + return None + + index, obj = rec[0:2] + + if index is None: + self._raise_for_ambiguous_column_name(obj) + indexes.append(index) + + if _baserow_usecext: + return _tuplegetter(*indexes) + else: + return self._pure_py_tuplegetter(*indexes) + + def _pure_py_tuplegetter(self, *indexes): + getters = [ + operator.methodcaller("_get_by_key_impl_mapping", index) + for index in indexes + ] + return lambda rec: tuple(getter(rec) for getter in getters) + + def __getstate__(self): + return { + "_keymap": { + key: (rec[MD_INDEX], _UNPICKLED, key) + for key, rec in self._keymap.items() + if isinstance(key, util.string_types + util.int_types) + }, + "keys": self.keys, + "case_sensitive": self.case_sensitive, + "matched_on_name": self.matched_on_name, + } + + def __setstate__(self, state): + self._processors = [None for _ in range(len(state["keys"]))] + self._keymap = state["_keymap"] + + self.keys = state["keys"] + self.case_sensitive = state["case_sensitive"] + self.matched_on_name = state["matched_on_name"] + + +class LegacyCursorResultMetaData(CursorResultMetaData): + def _contains(self, value, row): + key = value + if key in self._keymap: + util.warn_deprecated( + "Using the 'in' operator to test for string or column " + "keys, or integer indexes, in a :class:`.Row` object is " + "deprecated and will " + "be removed in a future release. " + "Use the `Row._fields` or `Row._mapping` attribute, i.e. 
" + "'key in row._fields'" + ) + return True + else: + return self._key_fallback(key, False) is not None + def _key_fallback(self, key, raiseerr=True): map_ = self._keymap result = None - # lowercase col support will be deprecated, at the - # create_engine() / dialect level + if isinstance(key, util.string_types): result = map_.get(key if self.case_sensitive else key.lower()) elif isinstance(key, expression.ColumnElement): @@ -786,170 +724,392 @@ class ResultMetaData(object): map_[key] = result return result + def _warn_for_nonint(self, key): + util.warn_deprecated_20( + "Using non-integer/slice indices on Row is deprecated and will " + "be removed in version 2.0; please use row._mapping[<key>], or " + "the mappings() accessor on the sqlalchemy.future result object.", + stacklevel=4, + ) + def _has_key(self, key): if key in self._keymap: return True else: return self._key_fallback(key, False) is not None - def _getter(self, key, raiseerr=True): - try: - rec = self._keymap[key] - except KeyError: - rec = self._key_fallback(key, raiseerr) - if rec is None: - return None - index, obj = rec[0:2] +class CursorFetchStrategy(object): + """Define a cursor strategy for a result object. - if index is None: - raise exc.InvalidRequestError( - "Ambiguous column name '%s' in " - "result set column descriptions" % rec[MD_LOOKUP_KEY] - ) + Subclasses define different ways of fetching rows, typically but + not necessarily using a DBAPI cursor object. - return operator.methodcaller("_get_by_key_impl", index) + .. versionadded:: 1.4 - def _tuple_getter(self, keys, raiseerr=True): - """Given a list of keys, return a callable that will deliver a tuple. + """ - This is strictly used by the ORM and the keys are Column objects. - However, this might be some nice-ish feature if we could find a very - clean way of presenting it. + __slots__ = ("dbapi_cursor", "cursor_description") - note that in the new world of "row._mapping", this is a mapping-getter. 
- maybe the name should indicate that somehow. + def __init__(self, dbapi_cursor, cursor_description): + self.dbapi_cursor = dbapi_cursor + self.cursor_description = cursor_description + @classmethod + def create(cls, result): + raise NotImplementedError() - """ - indexes = [] - for key in keys: - try: - rec = self._keymap[key] - except KeyError: - rec = self._key_fallback(key, raiseerr) - if rec is None: - return None + def soft_close(self, result): + raise NotImplementedError() - index, obj = rec[0:2] + def hard_close(self, result): + raise NotImplementedError() - if index is None: - raise exc.InvalidRequestError( - "Ambiguous column name '%s' in " - "result set column descriptions" % obj - ) - indexes.append(index) + def fetchone(self): + raise NotImplementedError() - if _baserow_usecext: - return _tuplegetter(*indexes) + def fetchmany(self, size=None): + raise NotImplementedError() + + def fetchall(self): + raise NotImplementedError() + + +class NoCursorDQLFetchStrategy(CursorFetchStrategy): + """Cursor strategy for a DQL result that has no open cursor. + + This is a result set that can return rows, i.e. for a SELECT, or for an + INSERT, UPDATE, DELETE that includes RETURNING. However it is in the state + where the cursor is closed and no rows remain available. The owning result + object may or may not be "hard closed", which determines if the fetch + methods send empty results or raise for closed result. 
+ + """ + + __slots__ = ("closed",) + + def __init__(self, closed): + self.closed = closed + self.cursor_description = None + + def soft_close(self, result): + pass + + def hard_close(self, result): + self.closed = True + + def fetchone(self): + return self._non_result(None) + + def fetchmany(self, size=None): + return self._non_result([]) + + def fetchall(self): + return self._non_result([]) + + def _non_result(self, default): + if self.closed: + raise exc.ResourceClosedError("This result object is closed.") else: - return self._pure_py_tuplegetter(*indexes) + return default - def _pure_py_tuplegetter(self, *indexes): - getters = [ - operator.methodcaller("_get_by_key_impl", index) - for index in indexes - ] - return lambda rec: tuple(getter(rec) for getter in getters) - def __getstate__(self): - return { - "_keymap": { - key: (rec[MD_INDEX], _UNPICKLED, key) - for key, rec in self._keymap.items() - if isinstance(key, util.string_types + util.int_types) - }, - "keys": self.keys, - "case_sensitive": self.case_sensitive, - "matched_on_name": self.matched_on_name, - } +class NoCursorDMLFetchStrategy(CursorFetchStrategy): + """Cursor strategy for a DML result that has no open cursor. - def __setstate__(self, state): - self._processors = [None for _ in range(len(state["keys"]))] - self._keymap = state["_keymap"] + This is a result set that does not return rows, i.e. for an INSERT, + UPDATE, DELETE that does not include RETURNING. - self.keys = state["keys"] - self.case_sensitive = state["case_sensitive"] - self.matched_on_name = state["matched_on_name"] + """ + __slots__ = ("closed",) -class ResultProxy(object): - """A facade around a DBAPI cursor object. + def __init__(self, closed): + self.closed = closed + self.cursor_description = None - Returns database rows via the :class:`.Row` class, which provides - additional API features and behaviors on top of the raw data returned - by the DBAPI. 
+ def soft_close(self, result): + pass + + def hard_close(self, result): + self.closed = True + + def fetchone(self): + return self._non_result(None) + + def fetchmany(self, size=None): + return self._non_result([]) + + def fetchall(self): + return self._non_result([]) + + def _non_result(self, default): + raise exc.ResourceClosedError( + "This result object does not return rows. " + "It has been closed automatically." + ) + + +class DefaultCursorFetchStrategy(CursorFetchStrategy): + """Call fetch methods from a DBAPI cursor. + + Alternate versions of this class may instead buffer the rows from + cursors or not use cursors at all. + + """ + + @classmethod + def create(cls, result): + dbapi_cursor = result.cursor + description = dbapi_cursor.description + + if description is None: + return NoCursorDMLFetchStrategy(False) + else: + return cls(dbapi_cursor, description) + + def soft_close(self, result): + result.cursor_strategy = NoCursorDQLFetchStrategy(False) + + def hard_close(self, result): + result.cursor_strategy = NoCursorDQLFetchStrategy(True) + + def fetchone(self): + return self.dbapi_cursor.fetchone() + + def fetchmany(self, size=None): + if size is None: + return self.dbapi_cursor.fetchmany() + else: + return self.dbapi_cursor.fetchmany(size) + + def fetchall(self): + return self.dbapi_cursor.fetchall() + + +class BufferedRowCursorFetchStrategy(DefaultCursorFetchStrategy): + """A cursor fetch strategy with row buffering behavior. + + This strategy buffers the contents of a selection of rows + before ``fetchone()`` is called. This is to allow the results of + ``cursor.description`` to be available immediately, when + interfacing with a DB-API that requires rows to be consumed before + this information is available (currently psycopg2, when used with + server-side cursors). 
+ + The pre-fetching behavior fetches only one row initially, and then + grows its buffer size by a fixed amount with each successive need + for additional rows up the ``max_row_buffer`` size, which defaults + to 1000:: + + with psycopg2_engine.connect() as conn: + + result = conn.execution_options( + stream_results=True, max_row_buffer=50 + ).execute("select * from table") + + .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows. .. seealso:: - :ref:`coretutorial_selecting` - introductory material for accessing - :class:`.ResultProxy` and :class:`.Row` objects. + :ref:`psycopg2_execution_options` + """ + + __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize") + + def __init__( + self, max_row_buffer, dbapi_cursor, description, initial_buffer + ): + super(BufferedRowCursorFetchStrategy, self).__init__( + dbapi_cursor, description + ) + + self._max_row_buffer = max_row_buffer + self._growth_factor = 5 + self._rowbuffer = initial_buffer + + self._bufsize = min(self._max_row_buffer, self._growth_factor) + + @classmethod + def create(cls, result): + """Buffered row strategy has to buffer the first rows *before* + cursor.description is fetched so that it works with named cursors + correctly + + """ + + dbapi_cursor = result.cursor + + initial_buffer = collections.deque(dbapi_cursor.fetchmany(1)) + + description = dbapi_cursor.description + + if description is None: + return NoCursorDMLFetchStrategy(False) + else: + max_row_buffer = result.context.execution_options.get( + "max_row_buffer", 1000 + ) + return cls( + max_row_buffer, dbapi_cursor, description, initial_buffer + ) + + def __buffer_rows(self): + size = self._bufsize + self._rowbuffer = collections.deque(self.dbapi_cursor.fetchmany(size)) + if size < self._max_row_buffer: + self._bufsize = min( + self._max_row_buffer, size * self._growth_factor + ) + + def soft_close(self, result): + self._rowbuffer.clear() + super(BufferedRowCursorFetchStrategy, self).soft_close(result) + + def hard_close(self, 
result): + self._rowbuffer.clear() + super(BufferedRowCursorFetchStrategy, self).hard_close(result) + + def fetchone(self): + if not self._rowbuffer: + self.__buffer_rows() + if not self._rowbuffer: + return None + return self._rowbuffer.popleft() + + def fetchmany(self, size=None): + if size is None: + return self.fetchall() + result = [] + for x in range(0, size): + row = self.fetchone() + if row is None: + break + result.append(row) + return result + + def fetchall(self): + self._rowbuffer.extend(self.dbapi_cursor.fetchall()) + ret = self._rowbuffer + self._rowbuffer = collections.deque() + return ret + + +class FullyBufferedCursorFetchStrategy(DefaultCursorFetchStrategy): + """A cursor strategy that buffers rows fully upon creation. + + Used for operations where a result is to be delivered + after the database conversation can not be continued, + such as MSSQL INSERT...OUTPUT after an autocommit. + + """ + + __slots__ = ("_rowbuffer",) + + def __init__(self, dbapi_cursor, description, initial_buffer=None): + super(FullyBufferedCursorFetchStrategy, self).__init__( + dbapi_cursor, description + ) + if initial_buffer is not None: + self._rowbuffer = collections.deque(initial_buffer) + else: + self._rowbuffer = self._buffer_rows() + + @classmethod + def create_from_buffer(cls, dbapi_cursor, description, buffer): + return cls(dbapi_cursor, description, buffer) + + def _buffer_rows(self): + return collections.deque(self.dbapi_cursor.fetchall()) + + def soft_close(self, result): + self._rowbuffer.clear() + super(FullyBufferedCursorFetchStrategy, self).soft_close(result) + + def hard_close(self, result): + self._rowbuffer.clear() + super(FullyBufferedCursorFetchStrategy, self).hard_close(result) + + def fetchone(self): + if self._rowbuffer: + return self._rowbuffer.popleft() + else: + return None + + def fetchmany(self, size=None): + if size is None: + return self.fetchall() + result = [] + for x in range(0, size): + row = self.fetchone() + if row is None: + break + 
result.append(row) + return result + + def fetchall(self): + ret = self._rowbuffer + self._rowbuffer = collections.deque() + return ret + + +class BaseResult(object): + """Base class for database result objects. + + + :class:`.BaseResult` is the base class for the 1.x style + :class:`.ResultProxy` class as well as the 2.x style + :class:`.future.Result` class. """ - _process_row = Row out_parameters = None - _autoclose_connection = False _metadata = None _soft_closed = False closed = False + @classmethod + def _create_for_context(cls, context): + if context._is_future_result: + obj = object.__new__(_future_Result) + else: + obj = object.__new__(ResultProxy) + obj.__init__(context) + return obj + def __init__(self, context): self.context = context self.dialect = context.dialect - self.cursor = self._saved_cursor = context.cursor + self.cursor = context.cursor self.connection = context.root_connection self._echo = ( self.connection._echo and context.engine._should_log_debug() ) self._init_metadata() - def _getter(self, key, raiseerr=True): - try: - getter = self._metadata._getter - except AttributeError: - return self._non_result(None) - else: - return getter(key, raiseerr) - - def _tuple_getter(self, key, raiseerr=True): - try: - getter = self._metadata._tuple_getter - except AttributeError: - return self._non_result(None) - else: - return getter(key, raiseerr) - - def _has_key(self, key): - try: - has_key = self._metadata._has_key - except AttributeError: - return self._non_result(None) - else: - return has_key(key) - def _init_metadata(self): - cursor_description = self._cursor_description() - if cursor_description is not None: - if ( - self.context.compiled - and "compiled_cache" in self.context.execution_options - ): + self.cursor_strategy = strat = self.context.get_result_cursor_strategy( + self + ) + + if strat.cursor_description is not None: + if self.context.compiled: if self.context.compiled._cached_metadata: self._metadata = 
self.context.compiled._cached_metadata else: - # TODO: what we hope to do here is have "Legacy" be - # the default in 1.4 but a flag (somewhere?) will have it - # use non-legacy. ORM should be able to use non-legacy self._metadata = ( self.context.compiled._cached_metadata - ) = ResultMetaData(self, cursor_description) + ) = self._cursor_metadata(self, strat.cursor_description) else: - self._metadata = ResultMetaData(self, cursor_description) + self._metadata = self._cursor_metadata( + self, strat.cursor_description + ) if self._echo: self.context.engine.logger.debug( - "Col %r", tuple(x[0] for x in cursor_description) + "Col %r", tuple(x[0] for x in strat.cursor_description) ) + # leave cursor open so that execution context can continue + # setting up things like rowcount def keys(self): """Return the list of string keys that would represented by each @@ -960,109 +1120,31 @@ class ResultProxy(object): else: return [] - @util.memoized_property - def rowcount(self): - """Return the 'rowcount' for this result. - - The 'rowcount' reports the number of rows *matched* - by the WHERE criterion of an UPDATE or DELETE statement. - - .. note:: - - Notes regarding :attr:`.ResultProxy.rowcount`: - - - * This attribute returns the number of rows *matched*, - which is not necessarily the same as the number of rows - that were actually *modified* - an UPDATE statement, for example, - may have no net change on a given row if the SET values - given are the same as those present in the row already. - Such a row would be matched but not modified. - On backends that feature both styles, such as MySQL, - rowcount is configured by default to return the match - count in all cases. - - * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction - with an UPDATE or DELETE statement. 
Contrary to what the Python - DBAPI says, it does *not* return the - number of rows available from the results of a SELECT statement - as DBAPIs cannot support this functionality when rows are - unbuffered. - - * :attr:`.ResultProxy.rowcount` may not be fully implemented by - all dialects. In particular, most DBAPIs do not support an - aggregate rowcount result from an executemany call. - The :meth:`.ResultProxy.supports_sane_rowcount` and - :meth:`.ResultProxy.supports_sane_multi_rowcount` methods - will report from the dialect if each usage is known to be - supported. - - * Statements that use RETURNING may not return a correct - rowcount. - - """ + def _getter(self, key, raiseerr=True): try: - return self.context.rowcount - except BaseException as e: - self.connection._handle_dbapi_exception( - e, None, None, self.cursor, self.context - ) - - @property - def lastrowid(self): - """return the 'lastrowid' accessor on the DBAPI cursor. - - This is a DBAPI specific method and is only functional - for those backends which support it, for statements - where it is appropriate. It's behavior is not - consistent across backends. - - Usage of this method is normally unnecessary when - using insert() expression constructs; the - :attr:`~ResultProxy.inserted_primary_key` attribute provides a - tuple of primary key values for a newly inserted row, - regardless of database backend. + getter = self._metadata._getter + except AttributeError: + return self.cursor_strategy._non_result(None) + else: + return getter(key, raiseerr) - """ + def _tuple_getter(self, key, raiseerr=True): try: - return self._saved_cursor.lastrowid - except BaseException as e: - self.connection._handle_dbapi_exception( - e, None, None, self._saved_cursor, self.context - ) - - @property - def returns_rows(self): - """True if this :class:`.ResultProxy` returns rows. - - I.e. 
if it is legal to call the methods - :meth:`~.ResultProxy.fetchone`, - :meth:`~.ResultProxy.fetchmany` - :meth:`~.ResultProxy.fetchall`. - - """ - return self._metadata is not None - - @property - def is_insert(self): - """True if this :class:`.ResultProxy` is the result - of a executing an expression language compiled - :func:`.expression.insert` construct. - - When True, this implies that the - :attr:`inserted_primary_key` attribute is accessible, - assuming the statement did not include - a user defined "returning" construct. - - """ - return self.context.isinsert - - def _cursor_description(self): - """May be overridden by subclasses.""" + getter = self._metadata._tuple_getter + except AttributeError: + return self.cursor_strategy._non_result(None) + else: + return getter(key, raiseerr) - return self._saved_cursor.description + def _has_key(self, key): + try: + has_key = self._metadata._has_key + except AttributeError: + return self.cursor_strategy._non_result(None) + else: + return has_key(key) - def _soft_close(self): + def _soft_close(self, hard=False): """Soft close this :class:`.ResultProxy`. This releases all DBAPI cursor resources, but leaves the @@ -1085,80 +1167,21 @@ class ResultProxy(object): """ - if self._soft_closed: - return - self._soft_closed = True - cursor = self.cursor - self.connection._safe_close_cursor(cursor) - if self._autoclose_connection: - self.connection.close() - self.cursor = None - - def close(self): - """Close this ResultProxy. - - This closes out the underlying DBAPI cursor corresponding - to the statement execution, if one is still present. Note that the - DBAPI cursor is automatically released when the :class:`.ResultProxy` - exhausts all available rows. :meth:`.ResultProxy.close` is generally - an optional method except in the case when discarding a - :class:`.ResultProxy` that still has additional rows pending for fetch. 
- - In the case of a result that is the product of - :ref:`connectionless execution <dbengine_implicit>`, - the underlying :class:`.Connection` object is also closed, which - :term:`releases` DBAPI connection resources. - - After this method is called, it is no longer valid to call upon - the fetch methods, which will raise a :class:`.ResourceClosedError` - on subsequent use. - .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method - has been separated out from the process that releases the underlying - DBAPI cursor resource. The "auto close" feature of the - :class:`.Connection` now performs a so-called "soft close", which - releases the underlying DBAPI cursor, but allows the - :class:`.ResultProxy` to still behave as an open-but-exhausted - result set; the actual :meth:`.ResultProxy.close` method is never - called. It is still safe to discard a :class:`.ResultProxy` - that has been fully exhausted without calling this method. - - .. seealso:: - - :ref:`connections_toplevel` - - """ + if (not hard and self._soft_closed) or (hard and self.closed): + return - if not self.closed: - self._soft_close() + if hard: self.closed = True - - def __iter__(self): - """Implement iteration protocol.""" - - while True: - row = self.fetchone() - if row is None: - return - else: - yield row - - def __next__(self): - """Implement the Python next() protocol. - - This method, mirrored as both ``.next()`` and ``.__next__()``, is part - of Python's API for producing iterator-like behavior. - - .. 
versionadded:: 1.2 - - """ - row = self.fetchone() - if row is None: - raise StopIteration() + self.cursor_strategy.hard_close(self) else: - return row + self.cursor_strategy.soft_close(self) - next = __next__ + if not self._soft_closed: + cursor = self.cursor + self.cursor = None + self.connection._safe_close_cursor(cursor) + self._soft_closed = True @util.memoized_property def inserted_primary_key(self): @@ -1340,37 +1363,196 @@ class ResultProxy(object): return self.dialect.supports_sane_multi_rowcount - def _fetchone_impl(self): + @util.memoized_property + def rowcount(self): + """Return the 'rowcount' for this result. + + The 'rowcount' reports the number of rows *matched* + by the WHERE criterion of an UPDATE or DELETE statement. + + .. note:: + + Notes regarding :attr:`.ResultProxy.rowcount`: + + + * This attribute returns the number of rows *matched*, + which is not necessarily the same as the number of rows + that were actually *modified* - an UPDATE statement, for example, + may have no net change on a given row if the SET values + given are the same as those present in the row already. + Such a row would be matched but not modified. + On backends that feature both styles, such as MySQL, + rowcount is configured by default to return the match + count in all cases. + + * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction + with an UPDATE or DELETE statement. Contrary to what the Python + DBAPI says, it does *not* return the + number of rows available from the results of a SELECT statement + as DBAPIs cannot support this functionality when rows are + unbuffered. + + * :attr:`.ResultProxy.rowcount` may not be fully implemented by + all dialects. In particular, most DBAPIs do not support an + aggregate rowcount result from an executemany call. + The :meth:`.ResultProxy.supports_sane_rowcount` and + :meth:`.ResultProxy.supports_sane_multi_rowcount` methods + will report from the dialect if each usage is known to be + supported. 
+ + * Statements that use RETURNING may not return a correct + rowcount. + + """ try: - return self.cursor.fetchone() - except AttributeError: - return self._non_result(None) + return self.context.rowcount + except BaseException as e: + self.connection._handle_dbapi_exception( + e, None, None, self.cursor, self.context + ) + + @property + def lastrowid(self): + """return the 'lastrowid' accessor on the DBAPI cursor. + + This is a DBAPI specific method and is only functional + for those backends which support it, for statements + where it is appropriate. It's behavior is not + consistent across backends. + + Usage of this method is normally unnecessary when + using insert() expression constructs; the + :attr:`~ResultProxy.inserted_primary_key` attribute provides a + tuple of primary key values for a newly inserted row, + regardless of database backend. - def _fetchmany_impl(self, size=None): + """ try: - if size is None: - return self.cursor.fetchmany() + return self.context.get_lastrowid() + except BaseException as e: + self.connection._handle_dbapi_exception( + e, None, None, self.cursor, self.context + ) + + @property + def returns_rows(self): + """True if this :class:`.ResultProxy` returns rows. + + I.e. if it is legal to call the methods + :meth:`~.ResultProxy.fetchone`, + :meth:`~.ResultProxy.fetchmany` + :meth:`~.ResultProxy.fetchall`. + + """ + return self._metadata is not None + + @property + def is_insert(self): + """True if this :class:`.ResultProxy` is the result + of a executing an expression language compiled + :func:`.expression.insert` construct. + + When True, this implies that the + :attr:`inserted_primary_key` attribute is accessible, + assuming the statement did not include + a user defined "returning" construct. + + """ + return self.context.isinsert + + +class ResultProxy(BaseResult): + """A facade around a DBAPI cursor object. 
+ + Returns database rows via the :class:`.Row` class, which provides + additional API features and behaviors on top of the raw data returned + by the DBAPI. + + Within the scope of the 1.x series of SQLAlchemy, the :class:`.ResultProxy` + will in fact return instances of the :class:`.LegacyRow` class, which + maintains Python mapping (i.e. dictionary) like behaviors upon the object + itself. Going forward, the :attr:`.Row._mapping` attribute should be used + for dictionary behaviors. + + .. seealso:: + + :ref:`coretutorial_selecting` - introductory material for accessing + :class:`.ResultProxy` and :class:`.Row` objects. + + """ + + _autoclose_connection = False + _process_row = LegacyRow + _cursor_metadata = LegacyCursorResultMetaData + _cursor_strategy_cls = DefaultCursorFetchStrategy + + def __iter__(self): + """Implement iteration protocol.""" + + while True: + row = self.fetchone() + if row is None: + return else: - return self.cursor.fetchmany(size) - except AttributeError: - return self._non_result([]) + yield row - def _fetchall_impl(self): - try: - return self.cursor.fetchall() - except AttributeError: - return self._non_result([]) + def close(self): + """Close this ResultProxy. - def _non_result(self, default): - if self._metadata is None: - raise exc.ResourceClosedError( - "This result object does not return rows. " - "It has been closed automatically." - ) - elif self.closed: - raise exc.ResourceClosedError("This result object is closed.") + This closes out the underlying DBAPI cursor corresponding + to the statement execution, if one is still present. Note that the + DBAPI cursor is automatically released when the :class:`.ResultProxy` + exhausts all available rows. :meth:`.ResultProxy.close` is generally + an optional method except in the case when discarding a + :class:`.ResultProxy` that still has additional rows pending for fetch. 
+ + In the case of a result that is the product of + :ref:`connectionless execution <dbengine_implicit>`, + the underlying :class:`.Connection` object is also closed, which + :term:`releases` DBAPI connection resources. + + .. deprecated:: 2.0 "connectionless" execution is deprecated and will + be removed in version 2.0. Version 2.0 will feature the + :class:`.Result` object that will no longer affect the status + of the originating connection in any case. + + After this method is called, it is no longer valid to call upon + the fetch methods, which will raise a :class:`.ResourceClosedError` + on subsequent use. + + .. seealso:: + + :ref:`connections_toplevel` + + """ + self._soft_close(hard=True) + + def _soft_close(self, hard=False): + soft_closed = self._soft_closed + super(ResultProxy, self)._soft_close(hard=hard) + if ( + not soft_closed + and self._soft_closed + and self._autoclose_connection + ): + self.connection.close() + + def __next__(self): + """Implement the Python next() protocol. + + This method, mirrored as both ``.next()`` and ``.__next__()``, is part + of Python's API for producing iterator-like behavior. + + .. 
versionadded:: 1.2 + + """ + row = self.fetchone() + if row is None: + raise StopIteration() else: - return default + return row + + next = __next__ def process_rows(self, rows): process_row = self._process_row @@ -1406,7 +1588,7 @@ class ResultProxy(object): """ try: - l = self.process_rows(self._fetchall_impl()) + l = self.process_rows(self.cursor_strategy.fetchall()) self._soft_close() return l except BaseException as e: @@ -1432,7 +1614,7 @@ class ResultProxy(object): """ try: - l = self.process_rows(self._fetchmany_impl(size)) + l = self.process_rows(self.cursor_strategy.fetchmany(size)) if len(l) == 0: self._soft_close() return l @@ -1441,6 +1623,9 @@ class ResultProxy(object): e, None, None, self.cursor, self.context ) + def _onerow(self): + return self.fetchone() + def fetchone(self): """Fetch one row, just like DB-API ``cursor.fetchone()``. @@ -1457,7 +1642,7 @@ class ResultProxy(object): """ try: - row = self._fetchone_impl() + row = self.cursor_strategy.fetchone() if row is not None: return self.process_rows([row])[0] else: @@ -1477,11 +1662,8 @@ class ResultProxy(object): :return: a :class:`.Row` object, or None if no rows remain """ - if self._metadata is None: - return self._non_result(None) - try: - row = self._fetchone_impl() + row = self.cursor_strategy.fetchone() except BaseException as e: self.connection._handle_dbapi_exception( e, None, None, self.cursor, self.context @@ -1514,128 +1696,26 @@ class ResultProxy(object): class BufferedRowResultProxy(ResultProxy): """A ResultProxy with row buffering behavior. - ``ResultProxy`` that buffers the contents of a selection of rows - before ``fetchone()`` is called. This is to allow the results of - ``cursor.description`` to be available immediately, when - interfacing with a DB-API that requires rows to be consumed before - this information is available (currently psycopg2, when used with - server-side cursors). + .. deprecated:: 1.4 this class is now supplied using a strategy object. 
+ See :class:`.BufferedRowCursorFetchStrategy`. - The pre-fetching behavior fetches only one row initially, and then - grows its buffer size by a fixed amount with each successive need - for additional rows up the ``max_row_buffer`` size, which defaults - to 1000:: - - with psycopg2_engine.connect() as conn: - - result = conn.execution_options( - stream_results=True, max_row_buffer=50 - ).execute("select * from table") - - .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows. - - .. seealso:: - - :ref:`psycopg2_execution_options` """ - def _init_metadata(self): - self._max_row_buffer = self.context.execution_options.get( - "max_row_buffer", 1000 - ) - self._growth_factor = 5 - self.__buffer_rows() - super(BufferedRowResultProxy, self)._init_metadata() - - def __buffer_rows(self): - if self.cursor is None: - return - size = getattr(self, "_bufsize", 1) - self.__rowbuffer = collections.deque(self.cursor.fetchmany(size)) - if size < self._max_row_buffer: - self._bufsize = min( - self._max_row_buffer, size * self._growth_factor - ) - - def _soft_close(self, **kw): - self.__rowbuffer.clear() - super(BufferedRowResultProxy, self)._soft_close(**kw) - - def _fetchone_impl(self): - if self.cursor is None: - return self._non_result(None) - if not self.__rowbuffer: - self.__buffer_rows() - if not self.__rowbuffer: - return None - return self.__rowbuffer.popleft() - - def _fetchmany_impl(self, size=None): - if size is None: - return self._fetchall_impl() - result = [] - for x in range(0, size): - row = self._fetchone_impl() - if row is None: - break - result.append(row) - return result - - def _fetchall_impl(self): - if self.cursor is None: - return self._non_result([]) - self.__rowbuffer.extend(self.cursor.fetchall()) - ret = self.__rowbuffer - self.__rowbuffer = collections.deque() - return ret + _cursor_strategy_cls = BufferedRowCursorFetchStrategy class FullyBufferedResultProxy(ResultProxy): """A result proxy that buffers rows fully upon creation. 
- Used for operations where a result is to be delivered - after the database conversation can not be continued, - such as MSSQL INSERT...OUTPUT after an autocommit. + .. deprecated:: 1.4 this class is now supplied using a strategy object. + See :class:`.FullyBufferedCursorFetchStrategy`. """ - def _init_metadata(self): - super(FullyBufferedResultProxy, self)._init_metadata() - self.__rowbuffer = self._buffer_rows() - - def _buffer_rows(self): - return collections.deque(self.cursor.fetchall()) - - def _soft_close(self, **kw): - self.__rowbuffer.clear() - super(FullyBufferedResultProxy, self)._soft_close(**kw) - - def _fetchone_impl(self): - if self.__rowbuffer: - return self.__rowbuffer.popleft() - else: - return self._non_result(None) - - def _fetchmany_impl(self, size=None): - if size is None: - return self._fetchall_impl() - result = [] - for x in range(0, size): - row = self._fetchone_impl() - if row is None: - break - result.append(row) - return result - - def _fetchall_impl(self): - if not self.cursor: - return self._non_result([]) - ret = self.__rowbuffer - self.__rowbuffer = collections.deque() - return ret + _cursor_strategy_cls = FullyBufferedCursorFetchStrategy -class BufferedColumnRow(Row): +class BufferedColumnRow(LegacyRow): """Row is now BufferedColumn in all cases""" diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py new file mode 100644 index 000000000..b4347a598 --- /dev/null +++ b/lib/sqlalchemy/engine/row.py @@ -0,0 +1,501 @@ +# engine/row.py +# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors +# <see AUTHORS file> +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Define row constructs including :class:`.Row`.""" + + +import operator + +from .. 
import util +from ..sql import util as sql_util +from ..util.compat import collections_abc + + +MD_INDEX = 0 # integer index in cursor.description + +# This reconstructor is necessary so that pickles with the C extension or +# without use the same Binary format. +try: + # We need a different reconstructor on the C extension so that we can + # add extra checks that fields have correctly been initialized by + # __setstate__. + from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor + + # The extra function embedding is needed so that the + # reconstructor function has the same signature whether or not + # the extension is present. + def rowproxy_reconstructor(cls, state): + return safe_rowproxy_reconstructor(cls, state) + + +except ImportError: + + def rowproxy_reconstructor(cls, state): + obj = cls.__new__(cls) + obj.__setstate__(state) + return obj + + +try: + from sqlalchemy.cresultproxy import BaseRow + + _baserow_usecext = True +except ImportError: + _baserow_usecext = False + + class BaseRow(object): + __slots__ = ("_parent", "_data", "_keymap") + + def __init__(self, parent, processors, keymap, data): + """Row objects are constructed by ResultProxy objects.""" + + self._parent = parent + + self._data = tuple( + [ + proc(value) if proc else value + for proc, value in zip(processors, data) + ] + ) + self._keymap = keymap + + def __reduce__(self): + return ( + rowproxy_reconstructor, + (self.__class__, self.__getstate__()), + ) + + def _values_impl(self): + return list(self) + + def __iter__(self): + return iter(self._data) + + def __len__(self): + return len(self._data) + + def __hash__(self): + return hash(self._data) + + def _subscript_impl(self, key, ismapping): + try: + rec = self._keymap[key] + except KeyError: + rec = self._parent._key_fallback(key) + except TypeError: + # the non-C version detects a slice using TypeError. 
+ # this is pretty inefficient for the slice use case + # but is more efficient for the integer use case since we + # don't have to check it up front. + if isinstance(key, slice): + return tuple(self._data[key]) + else: + raise + + mdindex = rec[MD_INDEX] + if mdindex is None: + self._parent._raise_for_ambiguous_column_name(rec) + elif not ismapping and mdindex != key and not isinstance(key, int): + self._parent._warn_for_nonint(key) + + # TODO: warn for non-int here, RemovedIn20Warning when available + + return self._data[mdindex] + + def _get_by_key_impl(self, key): + return self._subscript_impl(key, False) + + def _get_by_key_impl_mapping(self, key): + # the C code has two different methods so that we can distinguish + # between tuple-like keys (integers, slices) and mapping-like keys + # (strings, objects) + return self._subscript_impl(key, True) + + def __getattr__(self, name): + try: + return self._get_by_key_impl_mapping(name) + except KeyError as e: + raise AttributeError(e.args[0]) + + +class Row(BaseRow, collections_abc.Sequence): + """Represent a single result row. + + The :class:`.Row` object represents a row of a database result. It is + typically associated in the 1.x series of SQLAlchemy with the + :class:`.ResultProxy` object, however is also used by the ORM for + tuple-like results as of SQLAlchemy 1.4. + + The :class:`.Row` object seeks to act as much like a Python named + tuple as possible. For mapping (i.e. dictionary) behavior on a row, + such as testing for containment of keys, refer to the :attr:`.Row._mapping` + attribute. + + .. seealso:: + + :ref:`coretutorial_selecting` - includes examples of selecting + rows from SELECT statements. + + :class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy + 1.4. + + .. versionchanged:: 1.4 + + Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a + "proxy" object in that it contains the final form of data within it, + and now acts mostly like a named tuple. 
Mapping-like functionality is + moved to the :attr:`.Row._mapping` attribute, but will remain available + in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used + by :class:`.ResultProxy`. See :ref:`change_4710_core` for background + on this change. + + """ + + __slots__ = () + + @property + def _mapping(self): + """Return a :class:`.RowMapping` for this :class:`.Row`. + + This object provides a consistent Python mapping (i.e. dictionary) + interface for the data contained within the row. The :class:`.Row` + by itself behaves like a named tuple, however in the 1.4 series of + SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which + continues to have mapping-like behaviors against the row object + itself. + + .. seealso:: + + :attr:`.Row._fields` + + .. versionadded:: 1.4 + + """ + + return RowMapping(self) + + def __contains__(self, key): + return key in self._data + + def __getitem__(self, key): + return self._data[key] + + def __getstate__(self): + return {"_parent": self._parent, "_data": self._data} + + def __setstate__(self, state): + self._parent = parent = state["_parent"] + self._data = state["_data"] + self._keymap = parent._keymap + + def _op(self, other, op): + return ( + op(tuple(self), tuple(other)) + if isinstance(other, Row) + else op(tuple(self), other) + ) + + __hash__ = BaseRow.__hash__ + + def __lt__(self, other): + return self._op(other, operator.lt) + + def __le__(self, other): + return self._op(other, operator.le) + + def __ge__(self, other): + return self._op(other, operator.ge) + + def __gt__(self, other): + return self._op(other, operator.gt) + + def __eq__(self, other): + return self._op(other, operator.eq) + + def __ne__(self, other): + return self._op(other, operator.ne) + + def __repr__(self): + return repr(sql_util._repr_row(self)) + + @util.deprecated( + "1.4", + "The :meth:`.Row.keys` method is deprecated and will be removed in a " + "future release. 
Use the namedtuple standard accessor " + ":attr:`.Row._fields`, or for full mapping behavior use " + "row._mapping.keys() ", + ) + def keys(self): + """Return the list of keys as strings represented by this + :class:`.Row`. + + This method is analogous to the Python dictionary ``.keys()`` method, + except that it returns a list, not an iterator. + + .. seealso:: + + :attr:`.Row._fields` + + :attr:`.Row._mapping` + + """ + return [k for k in self._parent.keys if k is not None] + + @property + def _fields(self): + """Return a tuple of string keys as represented by this + :class:`.Row`. + + This attribute is analogous to the Python named tuple ``._fields`` + attribute. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`.Row._mapping` + + """ + return tuple([k for k in self._parent.keys if k is not None]) + + def _asdict(self): + """Return a new dict which maps field names to their corresponding + values. + + This method is analogous to the Python named tuple ``._asdict()`` + method, and works by applying the ``dict()`` constructor to the + :attr:`.Row._mapping` attribute. + + .. versionadded:: 1.4 + + .. seealso:: + + :attr:`.Row._mapping` + + """ + return dict(self._mapping) + + def _replace(self): + raise NotImplementedError() + + @property + def _field_defaults(self): + raise NotImplementedError() + + +class LegacyRow(Row): + """A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors + for Core. + + The :class:`.LegacyRow` class is where most of the Python mapping + (i.e. dictionary-like) + behaviors are implemented for the row object. The mapping behavior + of :class:`.Row` going forward is accessible via the :class:`.Row._mapping` + attribute. + + .. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most + of the deprecated behaviors of :class:`.Row`. 
+ + """ + + def __contains__(self, key): + return self._parent._contains(key, self) + + def __getitem__(self, key): + return self._get_by_key_impl(key) + + @util.deprecated( + "1.4", + "The :meth:`.LegacyRow.has_key` method is deprecated and will be " + "removed in a future release. To test for key membership, use " + "the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping`.", + ) + def has_key(self, key): + """Return True if this :class:`.LegacyRow` contains the given key. + + Through the SQLAlchemy 1.x series, the ``__contains__()`` method of + :class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4) also links + to :meth:`.Row.has_key`, in that an expression such as :: + + "some_col" in row + + Will return True if the row contains a column named ``"some_col"``, + in the way that a Python mapping works. + + However, it is planned that the 2.0 series of SQLAlchemy will reverse + this behavior so that ``__contains__()`` will refer to a value being + present in the row, in the way that a Python tuple works. + + .. seealso:: + + :ref:`change_4710_core` + + """ + + return self._parent._has_key(key) + + @util.deprecated( + "1.4", + "The :meth:`.LegacyRow.items` method is deprecated and will be " + "removed in a future release. Use the :attr:`Row._mapping` " + "attribute, i.e., 'row._mapping.items()'.", + ) + def items(self): + """Return a list of tuples, each tuple containing a key/value pair. + + This method is analogous to the Python dictionary ``.items()`` method, + except that it returns a list, not an iterator. + + """ + + return [(key, self[key]) for key in self.keys()] + + @util.deprecated( + "1.4", + "The :meth:`.LegacyRow.iterkeys` method is deprecated and will be " + "removed in a future release. Use the :attr:`Row._mapping` " + "attribute, i.e., 'row._mapping.keys()'.", + ) + def iterkeys(self): + """Return a an iterator against the :meth:`.Row.keys` method. + + This method is analogous to the Python-2-only dictionary + ``.iterkeys()`` method. 
+ + """ + return iter(self._parent.keys) + + @util.deprecated( + "1.4", + "The :meth:`.LegacyRow.itervalues` method is deprecated and will be " + "removed in a future release. Use the :attr:`Row._mapping` " + "attribute, i.e., 'row._mapping.values()'.", + ) + def itervalues(self): + """Return a an iterator against the :meth:`.Row.values` method. + + This method is analogous to the Python-2-only dictionary + ``.itervalues()`` method. + + """ + return iter(self) + + @util.deprecated( + "1.4", + "The :meth:`.LegacyRow.values` method is deprecated and will be " + "removed in a future release. Use the :attr:`Row._mapping` " + "attribute, i.e., 'row._mapping.values()'.", + ) + def values(self): + """Return the values represented by this :class:`.Row` as a list. + + This method is analogous to the Python dictionary ``.values()`` method, + except that it returns a list, not an iterator. + + """ + + return self._values_impl() + + +BaseRowProxy = BaseRow +RowProxy = Row + + +class ROMappingView( + collections_abc.KeysView, + collections_abc.ValuesView, + collections_abc.ItemsView, +): + __slots__ = ( + "_mapping", + "_items", + ) + + def __init__(self, mapping, items): + self._mapping = mapping + self._items = items + + def __len__(self): + return len(self._items) + + def __repr__(self): + return "{0.__class__.__name__}({0._mapping!r})".format(self) + + def __iter__(self): + return iter(self._items) + + def __contains__(self, item): + return item in self._items + + def __eq__(self, other): + return list(other) == list(self) + + def __ne__(self, other): + return list(other) != list(self) + + +class RowMapping(collections_abc.Mapping): + """A ``Mapping`` that maps column names and objects to :class:`.Row` values. + + The :class:`.RowMapping` is available from a :class:`.Row` via the + :attr:`.Row._mapping` attribute and supplies Python mapping (i.e. + dictionary) access to the contents of the row. 
This includes support + for testing of containment of specific keys (string column names or + objects), as well as iteration of keys, values, and items:: + + for row in result: + if 'a' in row._mapping: + print("Column 'a': %s" % row._mapping['a']) + + print("Column b: %s" % row._mapping[table.c.b]) + + + .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the + mapping-like access previously provided by a database result row, + which now seeks to behave mostly like a named tuple. + + """ + + __slots__ = ("row",) + + def __init__(self, row): + self.row = row + + def __getitem__(self, key): + return self.row._get_by_key_impl_mapping(key) + + def __iter__(self): + return (k for k in self.row._parent.keys if k is not None) + + def __len__(self): + return len(self.row) + + def __contains__(self, key): + return self.row._parent._has_key(key) + + def items(self): + """Return a view of key/value tuples for the elements in the + underlying :class:`.Row`. + + """ + return ROMappingView(self, [(key, self[key]) for key in self.keys()]) + + def keys(self): + """Return a view of 'keys' for string column names represented + by the underlying :class:`.Row`. + + """ + return ROMappingView( + self, [k for k in self.row._parent.keys if k is not None] + ) + + def values(self): + """Return a view of values for the values represented in the + underlying :class:`.Row`. 
+ + """ + return ROMappingView(self, self.row._values_impl()) diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 7d9b964ac..cafe69093 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -25,6 +25,7 @@ from ..orm.session import Session from ..sql import func from ..sql import literal_column from ..sql import util as sql_util +from ..util import collections_abc log = logging.getLogger(__name__) @@ -472,7 +473,7 @@ class Result(object): """ try: ret = self.one() - if not isinstance(ret, tuple): + if not isinstance(ret, collections_abc.Sequence): return ret return ret[0] except orm_exc.NoResultFound: diff --git a/lib/sqlalchemy/future/__init__.py b/lib/sqlalchemy/future/__init__.py index 808ef076a..f8836112e 100644 --- a/lib/sqlalchemy/future/__init__.py +++ b/lib/sqlalchemy/future/__init__.py @@ -9,7 +9,8 @@ """ +from .result import Result # noqa from ..sql.selectable import Select from ..util.langhelpers import public_factory -select = public_factory(Select._create_select, ".expression.select") +select = public_factory(Select._create_select, ".future.select") diff --git a/lib/sqlalchemy/future/result.py b/lib/sqlalchemy/future/result.py new file mode 100644 index 000000000..583ff957a --- /dev/null +++ b/lib/sqlalchemy/future/result.py @@ -0,0 +1,171 @@ +import operator + +from .. import util +from ..engine.result import _baserow_usecext +from ..engine.result import BaseResult +from ..engine.result import CursorResultMetaData +from ..engine.result import DefaultCursorFetchStrategy +from ..engine.result import Row +from ..sql import util as sql_util +from ..sql.base import _generative +from ..sql.base import Generative + + +class Result(Generative, BaseResult): + """Interim "future" result proxy so that dialects can build on + upcoming 2.0 patterns. 
+ + + """ + + _process_row = Row + _cursor_metadata = CursorResultMetaData + _cursor_strategy_cls = DefaultCursorFetchStrategy + + _column_slice_filter = None + _post_creational_filter = None + + def close(self): + """Close this :class:`.Result`. + + This closes out the underlying DBAPI cursor corresponding + to the statement execution, if one is still present. Note that the + DBAPI cursor is automatically released when the :class:`.Result` + exhausts all available rows. :meth:`.Result.close` is generally + an optional method except in the case when discarding a + :class:`.Result` that still has additional rows pending for fetch. + + After this method is called, it is no longer valid to call upon + the fetch methods, which will raise a :class:`.ResourceClosedError` + on subsequent use. + + .. seealso:: + + :ref:`connections_toplevel` + + """ + self._soft_close(hard=True) + + def columns(self, *col_expressions): + indexes = [] + for key in col_expressions: + try: + rec = self._keymap[key] + except KeyError: + rec = self._key_fallback(key, True) + if rec is None: + return None + + index, obj = rec[0:2] + + if index is None: + self._metadata._raise_for_ambiguous_column_name(obj) + indexes.append(index) + return self._column_slices(indexes) + + def scalars(self): + result = self._column_slices(0) + result._post_creational_filter = operator.itemgetter(0) + return result + + @_generative + def _column_slices(self, indexes): + if _baserow_usecext: + self._column_slice_filter = self._metadata._tuplegetter(*indexes) + else: + self._column_slice_filter = self._metadata._pure_py_tuplegetter( + *indexes + ) + + @_generative + def mappings(self): + self._post_creational_filter = operator.attrgetter("_mapping") + + def _row_getter(self): + process_row = self._process_row + metadata = self._metadata + keymap = metadata._keymap + processors = metadata._processors + + fns = () + + if self._echo: + log = self.context.engine.logger.debug + + def log_row(row): + log("Row %r", 
sql_util._repr_row(row)) + return row + + fns += (log_row,) + + if self._column_slice_filter: + fns += (self._column_slice_filter,) + + if self._post_creational_filter: + fns += (self._post_creational_filter,) + + def make_row(row): + row = process_row(metadata, processors, keymap, row) + for fn in fns: + row = fn(row) + return row + + return make_row + + def _safe_fetchone_impl(self): + try: + return self.cursor_strategy.fetchone() + except BaseException as e: + self.connection._handle_dbapi_exception( + e, None, None, self.cursor, self.context + ) + + def _safe_fetchall_impl(self): + try: + result = self.cursor_strategy.fetchall() + self._soft_close() + return result + except BaseException as e: + self.connection._handle_dbapi_exception( + e, None, None, self.cursor, self.context + ) + + def _safe_fetchmany_impl(self, size=None): + try: + l = self.process_rows(self.cursor_strategy.fetchmany(size)) + if len(l) == 0: + self._soft_close() + return l + except BaseException as e: + self.connection._handle_dbapi_exception( + e, None, None, self.cursor, self.context + ) + + def __iter__(self): + getter = self._row_getter() + return (getter(r) for r in self._safe_fetchall_impl()) + + def _onerow(self): + getter = self._row_getter() + row = self._safe_fetchone_impl() + if row is None: + return None + else: + return getter(row) + + def all(self): + getter = self._row_getter() + return [getter(r) for r in self._safe_fetchall_impl()] + + def first(self): + getter = self._row_getter() + row = self._safe_fetchone_impl() + if row is None: + return None + else: + row = getter(row) + second_row = self._safe_fetchone_impl() + if second_row is not None: + self._soft_close() + util.warn("Additional rows remain") + return row diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index 617f027d9..193980e6c 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -28,6 +28,7 @@ from .util import aliased from .util import state_str from .. 
import exc as sa_exc from .. import util +from ..engine import result_tuple from ..sql import util as sql_util @@ -56,7 +57,7 @@ def instances(query, cursor, context): ) try: - (process, labels) = list( + (process, labels, extra) = list( zip( *[ query_entity.row_processor(query, context, cursor) @@ -66,7 +67,7 @@ def instances(query, cursor, context): ) if not single_entity: - keyed_tuple = util.lightweight_named_tuple("result", labels) + keyed_tuple = result_tuple(labels, extra) while True: context.partials = {} @@ -138,7 +139,9 @@ def merge_result(querylib, query, iterator, load=True): ] result = [] keys = [ent._label_name for ent in query._entities] - keyed_tuple = util.lightweight_named_tuple("result", keys) + keyed_tuple = result_tuple( + keys, [ent.entities for ent in query._entities] + ) for row in iterator: newrow = list(row) for i in mapped_entities: @@ -190,7 +193,6 @@ def load_on_ident( query, key, refresh_state=None, with_for_update=None, only_load_props=None ): """Load the given identity key from the database.""" - if key is not None: ident = key[1] identity_token = key[2] @@ -452,10 +454,19 @@ def _instance_processor( instance_state = attributes.instance_state instance_dict = attributes.instance_dict session_id = context.session.hash_key - version_check = context.version_check runid = context.runid identity_token = context.identity_token + version_check = context.version_check + if version_check: + version_id_col = mapper.version_id_col + if version_id_col is not None: + if adapter: + version_id_col = adapter.columns[version_id_col] + version_id_getter = result._getter(version_id_col) + else: + version_id_getter = None + if not refresh_state and _polymorphic_from is not None: key = ("loader", path.path) if key in context.attributes and context.attributes[key].strategy == ( @@ -539,8 +550,10 @@ def _instance_processor( currentload = not isnew loaded_instance = False - if version_check and not currentload: - _validate_version_id(mapper, state, dict_, 
row, adapter) + if version_check and version_id_getter and not currentload: + _validate_version_id( + mapper, state, dict_, row, version_id_getter + ) else: # create a new instance @@ -667,7 +680,7 @@ def _instance_processor( def ensure_no_pk(row): identitykey = ( identity_class, - tuple([row[column] for column in pk_cols]), + tuple_getter(row), identity_token, ) if not is_not_primary_key(identitykey[1]): @@ -812,20 +825,11 @@ def _populate_partial( return to_load -def _validate_version_id(mapper, state, dict_, row, adapter): +def _validate_version_id(mapper, state, dict_, row, getter): - version_id_col = mapper.version_id_col - - if version_id_col is None: - return - - if adapter: - version_id_col = adapter.columns[version_id_col] - - if ( - mapper._get_state_attr_by_column(state, dict_, mapper.version_id_col) - != row[version_id_col] - ): + if mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col + ) != getter(row): raise orm_exc.StaleDataError( "Instance '%s' has version id '%s' which " "does not match database-loaded version id '%s'." @@ -834,7 +838,7 @@ def _validate_version_id(mapper, state, dict_, row, adapter): mapper._get_state_attr_by_column( state, dict_, mapper.version_id_col ), - row[version_id_col], + getter(row), ) ) diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index 82e68fd07..b84d41260 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -2631,7 +2631,7 @@ class Mapper(sql_base.HasCacheKey, InspectionAttr): """Return an identity-map key for use in storing/retrieving an item from the identity map. - :param row: A :class:`.RowProxy` instance. The columns which are + :param row: A :class:`.Row` instance. 
The columns which are mapped by this :class:`.Mapper` should be locatable in the row, preferably via the :class:`.Column` object directly (as is the case when a :func:`.select` construct is executed), or via string names of diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 31b8b0a20..95c5f8fa2 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1522,7 +1522,7 @@ def _postfetch( if returning_cols: row = result.context.returned_defaults if row is not None: - for col in returning_cols: + for row_value, col in zip(row, returning_cols): # pk cols returned from insert are handled # distinctly, don't step on the values here if col.primary_key and result.context.isinsert: @@ -1534,7 +1534,7 @@ def _postfetch( # when using declarative w/ single table inheritance prop = mapper._columntoproperty.get(col) if prop: - dict_[prop.key] = row[col] + dict_[prop.key] = row_value if refresh_flush: load_evt_attrs.append(prop.key) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index f19ec5673..d237aa3bf 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -47,6 +47,7 @@ from .. import inspection from .. import log from .. import sql from .. 
import util +from ..engine import result_tuple from ..sql import coercions from ..sql import expression from ..sql import roles @@ -56,6 +57,7 @@ from ..sql.base import _generative from ..sql.base import ColumnCollection from ..sql.base import Generative from ..sql.selectable import ForUpdateArg +from ..util import collections_abc __all__ = ["Query", "QueryContext", "aliased"] @@ -3320,7 +3322,7 @@ class Query(Generative): """ try: ret = self.one() - if not isinstance(ret, tuple): + if not isinstance(ret, collections_abc.Sequence): return ret return ret[0] except orm_exc.NoResultFound: @@ -4259,7 +4261,7 @@ class _MapperEntity(_QueryEntity): polymorphic_discriminator=self._polymorphic_discriminator, ) - return _instance, self._label_name + return _instance, self._label_name, self.entities def setup_context(self, query, context): adapter = self._get_entity_clauses(query, context) @@ -4414,7 +4416,7 @@ class Bundle(InspectionAttr): :ref:`bundles` - includes an example of subclassing. """ - keyed_tuple = util.lightweight_named_tuple("result", labels) + keyed_tuple = result_tuple(labels, [() for l in labels]) def proc(row): return keyed_tuple([proc(row) for proc in procs]) @@ -4517,7 +4519,7 @@ class _BundleEntity(_QueryEntity): ent.setup_context(query, context) def row_processor(self, query, context, result): - procs, labels = zip( + procs, labels, extra = zip( *[ ent.row_processor(query, context, result) for ent in self._entities @@ -4526,7 +4528,7 @@ class _BundleEntity(_QueryEntity): proc = self.bundle.create_row_processor(query, procs, labels) - return proc, self._label_name + return proc, self._label_name, () class _ColumnEntity(_QueryEntity): @@ -4675,7 +4677,8 @@ class _ColumnEntity(_QueryEntity): column = context.adapter.columns[column] getter = result._getter(column) - return getter, self._label_name + + return getter, self._label_name, (self.expr, self.column) def setup_context(self, query, context): column = query._adapt_clause(self.column, False, True) 
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index ed463ebe3..2e1809b07 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -647,6 +647,10 @@ class SQLCompiler(Compiled): """ + has_out_parameters = False + """if True, there are bindparam() objects that have the isoutparam + flag set.""" + insert_prefetch = update_prefetch = () def __init__( @@ -1006,7 +1010,7 @@ class SQLCompiler(Compiled): @util.dependencies("sqlalchemy.engine.result") def _create_result_map(self, result): """utility method used for unit tests only.""" - return result.ResultMetaData._create_description_match_map( + return result.CursorResultMetaData._create_description_match_map( self._result_columns ) @@ -1901,6 +1905,8 @@ class SQLCompiler(Compiled): ) self.binds[bindparam.key] = self.binds[name] = bindparam + if bindparam.isoutparam: + self.has_out_parameters = True if post_compile: if render_postcompile: diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index b2ec32c13..2cb5f8390 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -3180,6 +3180,36 @@ class Select( @classmethod def _create_select(cls, *entities): + r"""Construct a new :class:`.Select` using the 2.x style API. + + .. versionadded:: 2.0 - the :func:`.future.select` construct is + the same construct as the one returned by + :func:`.sql.expression.select`, except that the function only + accepts the "columns clause" entities up front; the rest of the + state of the SELECT should be built up using generative methods. + + Similar functionality is also available via the + :meth:`.FromClause.select` method on any :class:`.FromClause`. + + .. seealso:: + + :ref:`coretutorial_selecting` - Core Tutorial description of + :func:`.select`. + + :param \*entities: + Entities to SELECT from. 
For Core usage, this is typically a series + of :class:`.ColumnElement` and / or :class:`.FromClause` + objects which will form the columns clause of the resulting + statement. For those objects that are instances of + :class:`.FromClause` (typically :class:`.Table` or :class:`.Alias` + objects), the :attr:`.FromClause.c` collection is extracted + to form a collection of :class:`.ColumnElement` objects. + + This parameter will also accept :class:`.Text` constructs as + given, as well as ORM-mapped classes. + + """ + self = cls.__new__(cls) self._raw_columns = [ coercions.expect(roles.ColumnsClauseRole, ent) @@ -3430,7 +3460,8 @@ class Select( "The select() function in SQLAlchemy 2.0 will accept a " "series of columns / tables and other entities only, " "passed positionally. For forwards compatibility, use the " - "sqlalchemy.future.select() construct." + "sqlalchemy.future.select() construct.", + stacklevel=4, ) self._auto_correlate = correlate diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index 0026b5f8c..05a0fde49 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -289,7 +289,6 @@ def count_functions(variance=0.05): print(("Pstats calls: %d Expected %s" % (callcount, expected_count))) stats.sort_stats(_profile_stats.sort) stats.print_stats() - if _profile_stats.force_write: _profile_stats.replace(callcount) elif expected_count: diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index d77d13efa..5186e189c 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -51,13 +51,21 @@ class RowFetchTest(fixtures.TablesTest): [{"id": 1, "today": datetime.datetime(2006, 5, 12, 12, 0, 0)}], ) + def test_via_attr(self): + row = config.db.execute( + self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id) + ).first() + + eq_(row.id, 1) + eq_(row.data, "d1") + def 
test_via_string(self): row = config.db.execute( self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id) ).first() - eq_(row["id"], 1) - eq_(row["data"], "d1") + eq_(row._mapping["id"], 1) + eq_(row._mapping["data"], "d1") def test_via_int(self): row = config.db.execute( @@ -72,8 +80,8 @@ class RowFetchTest(fixtures.TablesTest): self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id) ).first() - eq_(row[self.tables.plain_pk.c.id], 1) - eq_(row[self.tables.plain_pk.c.data], "d1") + eq_(row._mapping[self.tables.plain_pk.c.id], 1) + eq_(row._mapping[self.tables.plain_pk.c.data], "d1") @requirements.duplicate_names_in_cursor_description def test_row_with_dupe_names(self): @@ -102,7 +110,7 @@ class RowFetchTest(fixtures.TablesTest): s2 = select([datetable.c.id, s.label("somelabel")]) row = config.db.execute(s2).first() - eq_(row["somelabel"], datetime.datetime(2006, 5, 12, 12, 0, 0)) + eq_(row.somelabel, datetime.datetime(2006, 5, 12, 12, 0, 0)) class PercentSchemaNamesTest(fixtures.TablesTest): @@ -191,11 +199,11 @@ class PercentSchemaNamesTest(fixtures.TablesTest): row = config.db.execute( table.select().order_by(table.c["percent%"]) ).first() - eq_(row["percent%"], 5) - eq_(row["spaces % more spaces"], 12) + eq_(row._mapping["percent%"], 5) + eq_(row._mapping["spaces % more spaces"], 12) - eq_(row[table.c["percent%"]], 5) - eq_(row[table.c["spaces % more spaces"]], 12) + eq_(row._mapping[table.c["percent%"]], 5) + eq_(row._mapping[table.c["spaces % more spaces"]], 12) config.db.execute( percent_table.update().values( diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index 08f543b47..cc11e556c 100644 --- a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -31,6 +31,7 @@ def setup_filters(): "ignore", category=DeprecationWarning, message=".*inspect.get.*argspec" ) + # ignore 2.0 warnings unless we are explicitly testing for them warnings.filterwarnings("ignore", 
category=sa_exc.RemovedIn20Warning) diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index 434c5cb79..b0ceb802a 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -23,8 +23,6 @@ from ._collections import IdentitySet # noqa from ._collections import ImmutableContainer # noqa from ._collections import immutabledict # noqa from ._collections import ImmutableProperties # noqa -from ._collections import KeyedTuple # noqa -from ._collections import lightweight_named_tuple # noqa from ._collections import LRUCache # noqa from ._collections import ordered_column_set # noqa from ._collections import OrderedDict # noqa diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index ac8d0aa82..2770cc239 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -24,108 +24,6 @@ from .compat import threading EMPTY_SET = frozenset() -class AbstractKeyedTuple(tuple): - __slots__ = () - - def keys(self): - """Return a list of string key names for this :class:`.KeyedTuple`. - - .. seealso:: - - :attr:`.KeyedTuple._fields` - - """ - - return list(self._fields) - - -class KeyedTuple(AbstractKeyedTuple): - """``tuple`` subclass that adds labeled names. - - E.g.:: - - >>> k = KeyedTuple([1, 2, 3], labels=["one", "two", "three"]) - >>> k.one - 1 - >>> k.two - 2 - - Result rows returned by :class:`.Query` that contain multiple - ORM entities and/or column expressions make use of this - class to return rows. - - The :class:`.KeyedTuple` exhibits similar behavior to the - ``collections.namedtuple()`` construct provided in the Python - standard library, however is architected very differently. - Unlike ``collections.namedtuple()``, :class:`.KeyedTuple` is - does not rely on creation of custom subtypes in order to represent - a new series of keys, instead each :class:`.KeyedTuple` instance - receives its list of keys in place. 
The subtype approach - of ``collections.namedtuple()`` introduces significant complexity - and performance overhead, which is not necessary for the - :class:`.Query` object's use case. - - .. seealso:: - - :ref:`ormtutorial_querying` - - """ - - def __new__(cls, vals, labels=None): - t = tuple.__new__(cls, vals) - if labels: - t.__dict__.update(zip(labels, vals)) - else: - labels = [] - t.__dict__["_labels"] = labels - return t - - @property - def _fields(self): - """Return a tuple of string key names for this :class:`.KeyedTuple`. - - This method provides compatibility with ``collections.namedtuple()``. - - .. seealso:: - - :meth:`.KeyedTuple.keys` - - """ - return tuple([l for l in self._labels if l is not None]) - - def __setattr__(self, key, value): - raise AttributeError("Can't set attribute: %s" % key) - - def _asdict(self): - """Return the contents of this :class:`.KeyedTuple` as a dictionary. - - This method provides compatibility with ``collections.namedtuple()``, - with the exception that the dictionary returned is **not** ordered. 
- - """ - return {key: self.__dict__[key] for key in self.keys()} - - -class _LW(AbstractKeyedTuple): - __slots__ = () - - def __new__(cls, vals): - return tuple.__new__(cls, vals) - - def __reduce__(self): - # for pickling, degrade down to the regular - # KeyedTuple, thus avoiding anonymous class pickling - # difficulties - return KeyedTuple, (list(self), self._real_fields) - - def _asdict(self): - """Return the contents of this :class:`.KeyedTuple` as a dictionary.""" - - d = dict(zip(self._real_fields, self)) - d.pop(None, None) - return d - - class ImmutableContainer(object): def _immutable(self, *arg, **kw): raise TypeError("%s object is immutable" % self.__class__.__name__) @@ -965,35 +863,6 @@ class LRUCache(dict): self._mutex.release() -_lw_tuples = LRUCache(100) - - -def lightweight_named_tuple(name, fields): - hash_ = (name,) + tuple(fields) - tp_cls = _lw_tuples.get(hash_) - if tp_cls: - return tp_cls - - tp_cls = type( - name, - (_LW,), - dict( - [ - (field, _property_getters[idx]) - for idx, field in enumerate(fields) - if field is not None - ] - + [("__slots__", ())] - ), - ) - - tp_cls._real_fields = fields - tp_cls._fields = tuple([f for f in fields if f is not None]) - - _lw_tuples[hash_] = tp_cls - return tp_cls - - class ScopedRegistry(object): """A Registry that can store one or multiple instances of a single class on the basis of a "scope" function. |
