summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/future/result.py
blob: 583ff957a17b9373ad8aafde5c691209393a307b (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
import operator

from .. import util
from ..engine.result import _baserow_usecext
from ..engine.result import BaseResult
from ..engine.result import CursorResultMetaData
from ..engine.result import DefaultCursorFetchStrategy
from ..engine.result import Row
from ..sql import util as sql_util
from ..sql.base import _generative
from ..sql.base import Generative


class Result(Generative, BaseResult):
    """Interim "future" result proxy so that dialects can build on
    upcoming 2.0 patterns.


    """

    _process_row = Row
    _cursor_metadata = CursorResultMetaData
    _cursor_strategy_cls = DefaultCursorFetchStrategy

    # Optional per-result row filters applied by _row_getter().  Both
    # default to "disabled" at the class level; generative methods such
    # as columns(), scalars() and mappings() set them on copies.
    _column_slice_filter = None
    _post_creational_filter = None

    def close(self):
        """Close this :class:`.Result`.

        This closes out the underlying DBAPI cursor corresponding
        to the statement execution, if one is still present.  Note that the
        DBAPI cursor is automatically released when the :class:`.Result`
        exhausts all available rows.  :meth:`.Result.close` is generally
        an optional method except in the case when discarding a
        :class:`.Result` that still has additional rows pending for fetch.

        After this method is called, it is no longer valid to call upon
        the fetch methods, which will raise a :class:`.ResourceClosedError`
        on subsequent use.

        .. seealso::

            :ref:`connections_toplevel`

        """
        self._soft_close(hard=True)

    def columns(self, *col_expressions):
        """Return a generative copy of this :class:`.Result` limited to
        the given column expressions.

        Each expression in ``col_expressions`` is resolved to a column
        index; ambiguous names raise via the metadata object.

        """
        # NOTE(review): this reads self._keymap / self._key_fallback,
        # while _row_getter() reads the same keymap via self._metadata —
        # presumably both resolve to the same mapping; confirm against
        # BaseResult.
        indexes = []
        for key in col_expressions:
            try:
                rec = self._keymap[key]
            except KeyError:
                rec = self._key_fallback(key, True)
                if rec is None:
                    return None

            index, obj = rec[0:2]

            if index is None:
                self._metadata._raise_for_ambiguous_column_name(obj)
            indexes.append(index)
        return self._column_slices(indexes)

    def scalars(self):
        """Return a generative copy of this :class:`.Result` that
        delivers the first column of each row as a scalar value.

        """
        # _column_slices() star-unpacks its argument into the tuple
        # getter, so it must receive an iterable of indexes; the previous
        # bare integer 0 would raise TypeError when unpacked.
        result = self._column_slices([0])
        result._post_creational_filter = operator.itemgetter(0)
        return result

    @_generative
    def _column_slices(self, indexes):
        # Install a tuple-getter limiting each row to the given column
        # indexes; the C-extension and pure-Python getters are selected
        # to match how the Row class itself was compiled.
        if _baserow_usecext:
            self._column_slice_filter = self._metadata._tuplegetter(*indexes)
        else:
            self._column_slice_filter = self._metadata._pure_py_tuplegetter(
                *indexes
            )

    @_generative
    def mappings(self):
        """Return a generative copy of this :class:`.Result` that
        delivers each row as its ``._mapping`` view.

        """
        self._post_creational_filter = operator.attrgetter("_mapping")

    def _row_getter(self):
        """Build and return a callable that converts one raw DBAPI row
        into the final row object, applying logging and any configured
        column-slice / post-creational filters in order.

        """
        process_row = self._process_row
        metadata = self._metadata
        keymap = metadata._keymap
        processors = metadata._processors

        fns = ()

        if self._echo:
            log = self.context.engine.logger.debug

            def log_row(row):
                log("Row %r", sql_util._repr_row(row))
                return row

            fns += (log_row,)

        if self._column_slice_filter:
            fns += (self._column_slice_filter,)

        if self._post_creational_filter:
            fns += (self._post_creational_filter,)

        def make_row(row):
            row = process_row(metadata, processors, keymap, row)
            for fn in fns:
                row = fn(row)
            return row

        return make_row

    def _safe_fetchone_impl(self):
        # Fetch one raw DBAPI row, routing any error through the
        # connection's DBAPI exception handler.
        try:
            return self.cursor_strategy.fetchone()
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def _safe_fetchall_impl(self):
        # Fetch all remaining raw DBAPI rows and soft-close, routing any
        # error through the connection's DBAPI exception handler.
        try:
            result = self.cursor_strategy.fetchall()
            self._soft_close()
            return result
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def _safe_fetchmany_impl(self, size=None):
        # Return raw DBAPI rows, consistent with _safe_fetchone_impl()
        # and _safe_fetchall_impl(); callers apply _row_getter() to build
        # Row objects, so the previous self.process_rows() call (a method
        # not defined on this class) would have processed rows twice.
        try:
            rows = self.cursor_strategy.fetchmany(size)
            if len(rows) == 0:
                self._soft_close()
            return rows
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def __iter__(self):
        getter = self._row_getter()
        return (getter(r) for r in self._safe_fetchall_impl())

    def _onerow(self):
        # Return the next row, or None when the result is exhausted.
        getter = self._row_getter()
        row = self._safe_fetchone_impl()
        if row is None:
            return None
        else:
            return getter(row)

    def all(self):
        """Return all remaining rows as a list."""
        getter = self._row_getter()
        return [getter(r) for r in self._safe_fetchall_impl()]

    def first(self):
        """Return the first row, or None if there are no rows.

        If additional rows remain beyond the first, the result is
        soft-closed and a warning is emitted.

        """
        getter = self._row_getter()
        row = self._safe_fetchone_impl()
        if row is None:
            return None
        else:
            row = getter(row)
            second_row = self._safe_fetchone_impl()
            if second_row is not None:
                self._soft_close()
                util.warn("Additional rows remain")
            return row