Diffstat (limited to 'lib/sqlalchemy/ext')
-rw-r--r--  lib/sqlalchemy/ext/baked.py              93
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py   14
-rw-r--r--  lib/sqlalchemy/ext/serializer.py         20
3 files changed, 80 insertions(+), 47 deletions(-)
diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py
index a9c79d6bd..24af454b6 100644
--- a/lib/sqlalchemy/ext/baked.py
+++ b/lib/sqlalchemy/ext/baked.py
@@ -13,19 +13,18 @@ compiled result to be fully cached.
"""
-import copy
import logging
from .. import exc as sa_exc
from .. import util
from ..orm import exc as orm_exc
from ..orm import strategy_options
+from ..orm.context import QueryContext
from ..orm.query import Query
from ..orm.session import Session
from ..sql import func
from ..sql import literal_column
from ..sql import util as sql_util
-from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from ..util import collections_abc
@@ -209,9 +208,7 @@ class BakedQuery(object):
key += cache_key
self.add_criteria(
- lambda q: q._with_current_path(
- effective_path
- )._conditional_options(*options),
+ lambda q: q._with_current_path(effective_path).options(*options),
cache_path.path,
key,
)
@@ -228,14 +225,21 @@ class BakedQuery(object):
def _bake(self, session):
query = self._as_query(session)
- context = query._compile_context()
+ compile_state = query._compile_state()
- self._bake_subquery_loaders(session, context)
- context.session = None
- context.query = query = context.query.with_session(None)
+ self._bake_subquery_loaders(session, compile_state)
+
+ # TODO: compile_state clearly needs to be simplified here.
+ # if the session remains, fails memusage test
+ compile_state.orm_query = (
+ query
+ ) = (
+ compile_state.select_statement
+ ) = compile_state.query = compile_state.orm_query.with_session(None)
query._execution_options = query._execution_options.union(
{"compiled_cache": self._bakery}
)
+
# we'll be holding onto the query for some of its state,
# so delete some compilation-use-only attributes that can take up
# space
@@ -251,10 +255,10 @@ class BakedQuery(object):
# if the query is not safe to cache, we still do everything as though
# we did cache it, since the receiver of _bake() assumes subqueryload
# context was set up, etc.
- if context.query._bake_ok:
- self._bakery[self._effective_key(session)] = context
+ if compile_state.compile_options._bake_ok:
+ self._bakery[self._effective_key(session)] = compile_state
- return context
+ return compile_state
def to_query(self, query_or_session):
"""Return the :class:`_query.Query` object for use as a subquery.
@@ -314,9 +318,10 @@ class BakedQuery(object):
for step in self.steps[1:]:
query = step(query)
+
return query
- def _bake_subquery_loaders(self, session, context):
+ def _bake_subquery_loaders(self, session, compile_state):
"""convert subquery eager loaders in the cache into baked queries.
For subquery eager loading to work, all we need here is that the
@@ -325,28 +330,30 @@ class BakedQuery(object):
a "baked" query so that we save on performance too.
"""
- context.attributes["baked_queries"] = baked_queries = []
- for k, v in list(context.attributes.items()):
- if isinstance(v, Query):
- if "subquery" in k:
- bk = BakedQuery(self._bakery, lambda *args: v)
+ compile_state.attributes["baked_queries"] = baked_queries = []
+ for k, v in list(compile_state.attributes.items()):
+ if isinstance(v, dict) and "query" in v:
+ if "subqueryload_data" in k:
+ query = v["query"]
+ bk = BakedQuery(self._bakery, lambda *args: query)
bk._cache_key = self._cache_key + k
bk._bake(session)
baked_queries.append((k, bk._cache_key, v))
- del context.attributes[k]
+ del compile_state.attributes[k]
def _unbake_subquery_loaders(
- self, session, context, params, post_criteria
+ self, session, compile_state, context, params, post_criteria
):
"""Retrieve subquery eager loaders stored by _bake_subquery_loaders
and turn them back into Result objects that will iterate just
like a Query object.
"""
- if "baked_queries" not in context.attributes:
+ if "baked_queries" not in compile_state.attributes:
return
- for k, cache_key, query in context.attributes["baked_queries"]:
+ for k, cache_key, v in compile_state.attributes["baked_queries"]:
+ query = v["query"]
bk = BakedQuery(
self._bakery, lambda sess, q=query: q.with_session(sess)
)
@@ -354,7 +361,9 @@ class BakedQuery(object):
q = bk.for_session(session)
for fn in post_criteria:
q = q.with_post_criteria(fn)
- context.attributes[k] = q.params(**params)
+ v = dict(v)
+ v["query"] = q.params(**params)
+ context.attributes[k] = v
class Result(object):
@@ -432,26 +441,37 @@ class Result(object):
if not self.session.enable_baked_queries or bq._spoiled:
return self._as_query()._iter()
- baked_context = bq._bakery.get(bq._effective_key(self.session), None)
- if baked_context is None:
- baked_context = bq._bake(self.session)
+ baked_compile_state = bq._bakery.get(
+ bq._effective_key(self.session), None
+ )
+ if baked_compile_state is None:
+ baked_compile_state = bq._bake(self.session)
- context = copy.copy(baked_context)
+ context = QueryContext(baked_compile_state, self.session)
context.session = self.session
- context.attributes = context.attributes.copy()
bq._unbake_subquery_loaders(
- self.session, context, self._params, self._post_criteria
+ self.session,
+ baked_compile_state,
+ context,
+ self._params,
+ self._post_criteria,
)
- context.statement._label_style = LABEL_STYLE_TABLENAME_PLUS_COL
+ # asserts true
+ # if isinstance(baked_compile_state.statement, expression.Select):
+ # assert baked_compile_state.statement._label_style == \
+ # LABEL_STYLE_TABLENAME_PLUS_COL
+
if context.autoflush and not context.populate_existing:
self.session._autoflush()
- q = context.query.params(self._params).with_session(self.session)
+ q = context.orm_query.params(self._params).with_session(self.session)
for fn in self._post_criteria:
q = fn(q)
- return q._execute_and_instances(context)
+ params = q.load_options._params
+
+ return q._execute_and_instances(context, params=params)
def count(self):
"""return the 'count'.
@@ -566,7 +586,7 @@ class Result(object):
def _load_on_pk_identity(self, query, primary_key_identity):
"""Load the given primary key identity from the database."""
- mapper = query._mapper_zero()
+ mapper = query._only_full_mapper_zero("load_on_pk_identity")
_get_clause, _get_params = mapper._get_clause
@@ -592,8 +612,11 @@ class Result(object):
_lcl_get_clause, nones
)
- _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
- q._criterion = _lcl_get_clause
+ # TODO: can mapper._get_clause be pre-adapted?
+ q._where_criteria = (
+ sql_util._deep_annotate(_lcl_get_clause, {"_orm_adapt": True}),
+ )
+
for fn in self._post_criteria:
q = fn(q)
return q
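
Note: the hunks above rework baked.py internals only (QueryContext / compile_state caching); the public bakery API is unchanged. A minimal usage sketch for orientation, assuming a mapped User class with a name column (not part of this commit):

from sqlalchemy import bindparam
from sqlalchemy.ext import baked

bakery = baked.bakery()

def search_for_user(session, username):
    # the lambdas are cached against the bakery keyed on their code
    # objects, so Query construction/compilation happens only once;
    # `User` is an assumed mapped ORM class with a `name` column
    baked_query = bakery(lambda session: session.query(User))
    baked_query += lambda q: q.filter(User.name == bindparam("username"))
    return baked_query(session).params(username=username).all()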
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 931f45699..919f4409a 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -43,7 +43,10 @@ class ShardedQuery(Query):
q._shard_id = shard_id
return q
- def _execute_and_instances(self, context):
+ def _execute_and_instances(self, context, params=None):
+ if params is None:
+ params = self.load_options._params
+
def iter_for_shard(shard_id):
# shallow copy, so that each context may be used by
# ORM load events and similar.
@@ -54,8 +57,11 @@ class ShardedQuery(Query):
"shard_id"
] = copied_context.identity_token = shard_id
result_ = self._connection_from_session(
- mapper=self._bind_mapper(), shard_id=shard_id
- ).execute(copied_context.statement, self._params)
+ mapper=context.compile_state._bind_mapper(), shard_id=shard_id
+ ).execute(
+ copied_context.compile_state.statement,
+ self.load_options._params,
+ )
return self.instances(result_, copied_context)
if context.identity_token is not None:
@@ -78,7 +84,7 @@ class ShardedQuery(Query):
clause=stmt,
close_with_result=True,
)
- result = conn.execute(stmt, self._params)
+ result = conn.execute(stmt, self.load_options._params)
return result
if self._shard_id is not None:
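
Note: ShardedQuery._execute_and_instances(), updated above to accept a params argument, runs once per shard id returned by the configured choosers. A minimal ShardedSession wiring sketch, assuming two SQLite shards and a mapped WeatherLocation class with a continent column (not part of this commit):

from sqlalchemy import create_engine
from sqlalchemy.ext.horizontal_shard import ShardedSession

engines = {
    "na": create_engine("sqlite:///shard_na.db"),
    "eu": create_engine("sqlite:///shard_eu.db"),
}

def shard_chooser(mapper, instance, clause=None):
    # route flushes of new objects based on an attribute of the instance
    return "na" if instance.continent == "North America" else "eu"

def id_chooser(query, ident):
    # a primary key alone doesn't identify a shard, so search all of them
    return ["na", "eu"]

def query_chooser(query):
    # without inspecting criteria, fan the query out to every shard;
    # ShardedQuery then calls _execute_and_instances() per shard id
    return ["na", "eu"]

session = ShardedSession(
    shard_chooser=shard_chooser,
    id_chooser=id_chooser,
    query_chooser=query_chooser,
    shards=engines,
)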
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index ec5e8985c..afd44ca3d 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -59,7 +59,6 @@ from .. import Column
from .. import Table
from ..engine import Engine
from ..orm import class_mapper
-from ..orm.attributes import QueryableAttribute
from ..orm.interfaces import MapperProperty
from ..orm.mapper import Mapper
from ..orm.session import Session
@@ -78,11 +77,7 @@ def Serializer(*args, **kw):
def persistent_id(obj):
# print "serializing:", repr(obj)
- if isinstance(obj, QueryableAttribute):
- cls = obj.impl.class_
- key = obj.impl.key
- id_ = "attribute:" + key + ":" + b64encode(pickle.dumps(cls))
- elif isinstance(obj, Mapper) and not obj.non_primary:
+ if isinstance(obj, Mapper) and not obj.non_primary:
id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
id_ = (
@@ -92,7 +87,12 @@ def Serializer(*args, **kw):
+ obj.key
)
elif isinstance(obj, Table):
- id_ = "table:" + text_type(obj.key)
+ if "parententity" in obj._annotations:
+ id_ = "mapper_selectable:" + b64encode(
+ pickle.dumps(obj._annotations["parententity"].class_)
+ )
+ else:
+ id_ = "table:" + text_type(obj.key)
elif isinstance(obj, Column) and isinstance(obj.table, Table):
id_ = (
"column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
@@ -110,7 +110,8 @@ def Serializer(*args, **kw):
our_ids = re.compile(
- r"(mapperprop|mapper|table|column|session|attribute|engine):(.*)"
+ r"(mapperprop|mapper|mapper_selectable|table|column|"
+ r"session|attribute|engine):(.*)"
)
@@ -140,6 +141,9 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
elif type_ == "mapper":
cls = pickle.loads(b64decode(args))
return class_mapper(cls)
+ elif type_ == "mapper_selectable":
+ cls = pickle.loads(b64decode(args))
+ return class_mapper(cls).__clause_element__()
elif type_ == "mapperprop":
mapper, keyname = args.split(":")
cls = pickle.loads(b64decode(mapper))
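
Note: the new "mapper_selectable" token lets a mapper-annotated Table round-trip through the serializer; end-user usage is unchanged. A minimal sketch, assuming a mapped User class, a MetaData object named metadata, and a scoped_session registry named Session (not part of this commit):

from sqlalchemy.ext.serializer import dumps, loads

# pickle a Query; mapped Tables/Columns are reduced to symbolic ids
# such as "mapper:...", "mapper_selectable:..." or "table:..."
serialized = dumps(Session.query(User).filter(User.name == "ed"))

# rebuild the Query against the current metadata / session registry
query2 = loads(serialized, metadata, Session)
print(query2.all())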