| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2016-04-27 11:37:58 -0500 |
|---|---|---|
| committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2016-04-27 11:37:58 -0500 |
| commit | f3bc60bdd809235cbeb3f414717ac0e273269cf9 | |
| tree | eec41353d94d79aae07ebae6c42fe0df66526702 | |
| parent | 84ba09a7d7b0160d8e3e52269519d0df0cd175ac | |
| download | sqlalchemy-f3bc60bdd809235cbeb3f414717ac0e273269cf9.tar.gz | |
Don't double-process ResultMetaData for BufferedColumnResultProxy
Fixed a bug in the result proxy used mainly by Oracle when binary and
other LOB types are in play: when query / statement caching was used,
the type-level result processors, notably the one required by the
binary type itself but also any other processor, would be lost after
the first run of the statement, because they were removed from the
cached result metadata.
Change-Id: I751940866cffb4f48de46edc8137482eab59790c
Fixes: #3699
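
The failure mode can be illustrated outside of SQLAlchemy. The sketch below is a minimal, hypothetical model of the mechanism; `FakeCachedMetaData`, `decode_blob`, and the other names are invented for illustration and are not SQLAlchemy's API. When the pre-fix initialization unconditionally stashes and blanks out the processors on a metadata object that is reused from a statement cache, the second execution stashes the already-blanked list, and the type-level processors are gone.

```python
class FakeCachedMetaData:
    """Stand-in for a result-metadata object that a statement cache reuses."""

    def __init__(self, processors):
        self._processors = processors      # type-level converters, e.g. LOB decoding
        self._orig_processors = None


def decode_blob(value):
    """Pretend type-level result processor for a binary/LOB column."""
    return value.decode("utf-8")


def buggy_init(metadata):
    # pre-fix behaviour: unconditionally stash the processors and blank them
    # out, even if this metadata object was already set up on a previous run
    metadata._orig_processors = metadata._processors
    metadata._processors = [None for _ in metadata._processors]


def run_statement(metadata, raw_row):
    buggy_init(metadata)
    return tuple(
        proc(value) if proc else value
        for proc, value in zip(metadata._orig_processors, raw_row)
    )


cached = FakeCachedMetaData([decode_blob])
print(run_statement(cached, (b"first",)))   # ('first',)   -- processor applied
print(run_statement(cached, (b"second",)))  # (b'second',) -- processor lost
```

Running this prints the decoded string on the first execution and the raw bytes on the second, mirroring the lost binary processor described above.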
Diffstat (limited to 'lib/sqlalchemy')
| -rw-r--r-- | lib/sqlalchemy/engine/result.py | 24 |
1 file changed, 15 insertions, 9 deletions
```diff
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 773022ed2..c9eb53eb1 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -198,6 +198,7 @@ class ResultMetaData(object):
         dialect = context.dialect
         self.case_sensitive = dialect.case_sensitive
         self.matched_on_name = False
+        self._orig_processors = None
 
         if context.result_column_struct:
             result_columns, cols_are_ordered, textual_ordered = \
@@ -1394,16 +1395,21 @@ class BufferedColumnResultProxy(ResultProxy):
 
     def _init_metadata(self):
         super(BufferedColumnResultProxy, self)._init_metadata()
+
         metadata = self._metadata
-        # orig_processors will be used to preprocess each row when they are
-        # constructed.
-        metadata._orig_processors = metadata._processors
-        # replace the all type processors by None processors.
-        metadata._processors = [None for _ in range(len(metadata.keys))]
-        keymap = {}
-        for k, (func, obj, index) in metadata._keymap.items():
-            keymap[k] = (None, obj, index)
-        self._metadata._keymap = keymap
+
+        # don't double-replace the processors, in the case
+        # of a cached ResultMetaData
+        if metadata._orig_processors is None:
+            # orig_processors will be used to preprocess each row when
+            # they are constructed.
+            metadata._orig_processors = metadata._processors
+            # replace the all type processors by None processors.
+            metadata._processors = [None for _ in range(len(metadata.keys))]
+            keymap = {}
+            for k, (func, obj, index) in metadata._keymap.items():
+                keymap[k] = (None, obj, index)
+            metadata._keymap = keymap
 
     def fetchall(self):
         # can't call cursor.fetchall(), since rows must be
```
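
For comparison with the sketch above, here is a minimal, self-contained model (again with invented stand-in names, not SQLAlchemy's API) of the pattern the patch applies: the one-time processor swap is guarded by a sentinel check on `_orig_processors`, so a cached, already-initialized metadata object is left untouched on later executions.

```python
class FakeCachedMetaData:
    """Stand-in for a result-metadata object that a statement cache reuses."""

    def __init__(self, processors):
        self._processors = processors
        self._orig_processors = None       # sentinel: not yet buffered-initialized


def decode_blob(value):
    """Pretend type-level result processor for a binary/LOB column."""
    return value.decode("utf-8")


def guarded_init(metadata):
    # mirrors the patched _init_metadata: perform the swap only once
    if metadata._orig_processors is None:
        metadata._orig_processors = metadata._processors
        metadata._processors = [None for _ in metadata._processors]


def run_statement(metadata, raw_row):
    guarded_init(metadata)
    return tuple(
        proc(value) if proc else value
        for proc, value in zip(metadata._orig_processors, raw_row)
    )


cached = FakeCachedMetaData([decode_blob])
print(run_statement(cached, (b"first",)))   # ('first',)
print(run_statement(cached, (b"second",)))  # ('second',) -- processors survive reuse
```

Initializing the sentinel to None in the metadata constructor (the first hunk of the diff) is what makes the guard reliable: any metadata object that has never been through the buffered-column path reads as "not yet swapped", whether or not it came from the cache.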
