summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/sqlalchemy/ext/serializer.py129
-rw-r--r--lib/sqlalchemy/orm/query.py12
-rw-r--r--lib/sqlalchemy/orm/util.py13
-rw-r--r--lib/sqlalchemy/sql/expression.py17
-rw-r--r--lib/sqlalchemy/sql/util.py29
5 files changed, 190 insertions, 10 deletions
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
new file mode 100644
index 000000000..b62ee0ce6
--- /dev/null
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -0,0 +1,129 @@
+"""Serializer/Deserializer objects for usage with SQLAlchemy structures.
+
+Any SQLAlchemy structure, including Tables, Columns, expressions, mappers,
+Query objects etc. can be serialized in a minimally-sized format,
+and deserialized when given a Metadata and optional ScopedSession object
+to use as context on the way out.
+
+Usage is nearly the same as that of the standard Python pickle module::
+
+ from sqlalchemy.ext.serializer import loads, dumps
+ metadata = MetaData(bind=some_engine)
+ Session = scoped_session(sessionmaker())
+
+ # ... define mappers
+
+ query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
+
+ # pickle the query
+ serialized = dumps(query)
+
+ # unpickle. Pass in metadata + scoped_session
+ query2 = loads(serialized, metadata, Session)
+
+ print query2.all()
+
+Similar restrictions as when using raw pickle apply; mapped classes must
+themselves be pickleable, meaning they are importable from a module-level
+namespace.
+
+Note that instances of user-defined classes do not require this extension
+in order to be pickled; these contain no references to engines, sessions
+or expression constructs in the typical case and can be serialized directly.
+This module is specifically for ORM and expression constructs.
+
+"""
+
+from sqlalchemy.orm import class_mapper, Query
+from sqlalchemy.orm.session import Session
+from sqlalchemy.orm.mapper import Mapper
+from sqlalchemy.orm.attributes import QueryableAttribute
+from sqlalchemy import Table, Column
+from sqlalchemy.engine import Engine
+from sqlalchemy.util import pickle
+import re
+import base64
+from cStringIO import StringIO
+
+__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
+
def Serializer(*args, **kw):
    """Return a ``pickle.Pickler`` whose ``persistent_id`` hook replaces
    SQLAlchemy constructs with compact string tokens.

    Tables and columns are tokenized by name, mappers and instrumented
    attributes by their (pickled, base64-encoded) class, and sessions and
    engines as bare markers to be re-bound by the matching Deserializer.
    All arguments are passed through to ``pickle.Pickler``.
    """
    pickler = pickle.Pickler(*args, **kw)

    def persistent_id(obj):
        # Return a token string for objects handled specially; returning
        # None tells pickle to serialize the object normally.
        # NOTE: renamed local from ``id`` to ``ident`` to avoid shadowing
        # the builtin ``id()``.
        if isinstance(obj, QueryableAttribute):
            cls = obj.impl.class_
            key = obj.impl.key
            ident = "attribute:" + key + ":" + base64.b64encode(pickle.dumps(cls))
        elif isinstance(obj, Mapper) and not obj.non_primary:
            ident = "mapper:" + base64.b64encode(pickle.dumps(obj.class_))
        elif isinstance(obj, Table):
            ident = "table:" + str(obj)
        elif isinstance(obj, Column) and isinstance(obj.table, Table):
            ident = "column:" + str(obj.table) + ":" + obj.key
        elif isinstance(obj, Session):
            # context-dependent; restored from the Deserializer's
            # scoped_session argument
            ident = "session:"
        elif isinstance(obj, Engine):
            ident = "engine:"
        else:
            return None
        return ident

    pickler.persistent_id = persistent_id
    return pickler
+
+our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
+
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    """Return a ``pickle.Unpickler`` whose ``persistent_load`` hook
    resolves the string tokens emitted by :func:`Serializer` back into
    live objects, using *metadata*, *scoped_session* and *engine* as the
    surrounding context.
    """
    unpickler = pickle.Unpickler(file)

    def get_engine():
        # Resolution order: explicit engine, then the scoped session's
        # bind, then the MetaData's bind; None if nothing is available.
        if engine:
            return engine
        if scoped_session and scoped_session().bind:
            return scoped_session().bind
        if metadata and metadata.bind:
            return metadata.bind
        return None

    def persistent_load(id):
        m = our_ids.match(id)
        if not m:
            # not one of our tokens; let pickle handle it normally
            return None
        type_, args = m.group(1, 2)
        if type_ == 'attribute':
            key, clsarg = args.split(":")
            cls = pickle.loads(base64.b64decode(clsarg))
            return getattr(cls, key)
        if type_ == "mapper":
            cls = pickle.loads(base64.b64decode(args))
            return class_mapper(cls)
        if type_ == "table":
            return metadata.tables[args]
        if type_ == "column":
            tablename, colname = args.split(':')
            return metadata.tables[tablename].c[colname]
        if type_ == "session":
            return scoped_session()
        if type_ == "engine":
            return get_engine()
        raise Exception("Unknown token: %s" % type_)

    unpickler.persistent_load = persistent_load
    return unpickler
+
def dumps(obj):
    """Serialize *obj* to a string using the SQLAlchemy-aware pickler."""
    out = StringIO()
    Serializer(out).dump(obj)
    return out.getvalue()
+
def loads(data, metadata=None, scoped_session=None, engine=None):
    """Reconstruct an object serialized by :func:`dumps`, resolving
    SQLAlchemy constructs against the given context objects."""
    unpickler = Deserializer(StringIO(data), metadata, scoped_session, engine)
    return unpickler.load()
+
+ \ No newline at end of file
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index aa30f1517..51165287f 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -195,12 +195,12 @@ class Query(object):
if as_filter and self._filter_aliases:
adapters.append(self._filter_aliases.replace)
- if self._polymorphic_adapters:
- adapters.append(self.__adapt_polymorphic_element)
-
if self._from_obj_alias:
adapters.append(self._from_obj_alias.replace)
+ if self._polymorphic_adapters:
+ adapters.append(self.__adapt_polymorphic_element)
+
if not adapters:
return clause
@@ -1707,9 +1707,9 @@ class _MapperEntity(_QueryEntity):
if context.order_by is False and self.mapper.order_by:
context.order_by = self.mapper.order_by
- if context.order_by and adapter:
- context.order_by = adapter.adapt_list(util.to_list(context.order_by))
-
+ if adapter:
+ context.order_by = adapter.adapt_list(util.to_list(context.order_by))
+
for value in self.mapper._iterate_polymorphic_properties(self._with_polymorphic):
if query._only_load_props and value.key not in query._only_load_props:
continue
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 689df8d86..405acda15 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -281,6 +281,19 @@ class AliasedClass(object):
self._sa_label_name = name
self.__name__ = 'AliasedClass_' + str(self.__target)
    def __getstate__(self):
        # Pickle only the primary state (mapper, alias, label name); the
        # adapter and __name__ are rederived in __setstate__.
        return {'mapper':self.__mapper, 'alias':self.__alias, 'name':self._sa_label_name}

    def __setstate__(self, state):
        # Restore mapper/alias, then rebuild the derived attributes the
        # same way the constructor does.
        self.__mapper = state['mapper']
        self.__target = self.__mapper.class_
        alias = state['alias']
        # the ClauseAdapter is not pickled; recreate it from the alias
        self.__adapter = sql_util.ClauseAdapter(alias, equivalents=self.__mapper._equivalent_columns)
        self.__alias = alias
        name = state['name']
        self._sa_label_name = name
        self.__name__ = 'AliasedClass_' + str(self.__target)
+
def __adapt_element(self, elem):
return self.__adapter.traverse(elem)._annotate({'parententity': self})
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 85f229ba0..5206dc5fa 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1002,6 +1002,11 @@ class ClauseElement(Visitable):
yield f
f = getattr(f, '_is_clone_of', None)
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d.pop('_is_clone_of', None)
+ return d
+
def _get_from_objects(self, **modifiers):
"""Return objects represented in this ``ClauseElement`` that
should be added to the ``FROM`` list of a query, when this
@@ -1959,7 +1964,17 @@ class _BindParamClause(ColumnElement):
"""
return isinstance(other, _BindParamClause) and other.type.__class__ == self.type.__class__
-
+
+ def __getstate__(self):
+ """execute a deferred value for serialization purposes."""
+
+ d = self.__dict__.copy()
+ v = self.value
+ if callable(v):
+ v = v()
+ d['value'] = v
+ return d
+
def __repr__(self):
return "_BindParamClause(%s, %s, type_=%s)" % (repr(self.key), repr(self.value), repr(self.type))
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 2a510906b..d5f2417c2 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -121,6 +121,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False):
else:
return sql.and_(*crit)
+
class Annotated(object):
"""clones a ClauseElement and applies an 'annotations' dictionary.
@@ -133,14 +134,17 @@ class Annotated(object):
hash value may be reused, causing conflicts.
"""
+
def __new__(cls, *args):
if not args:
+ # clone constructor
return object.__new__(cls)
else:
element, values = args
- return object.__new__(
- type.__new__(type, "Annotated%s" % element.__class__.__name__, (Annotated, element.__class__), {})
- )
+ # pull appropriate subclass from this module's
+ # namespace (see below for rationale)
+ cls = eval("Annotated%s" % element.__class__.__name__)
+ return object.__new__(cls)
def __init__(self, element, values):
# force FromClause to generate their internal
@@ -180,6 +184,17 @@ class Annotated(object):
def __cmp__(self, other):
return cmp(hash(self.__element), hash(other))
+# hard-generate Annotated subclasses. this technique
+# is used instead of on-the-fly types (i.e. type.__new__())
+# so that the resulting objects are pickleable.
+from sqlalchemy.sql import expression
+for cls in expression.__dict__.values() + [schema.Column, schema.Table]:
+ if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
+ exec "class Annotated%s(Annotated, cls):\n" \
+ " __visit_name__ = cls.__visit_name__\n"\
+ " pass" % (cls.__name__, ) in locals()
+
+
def _deep_annotate(element, annotations, exclude=None):
"""Deep copy the given ClauseElement, annotating each element with the given annotations dictionary.
@@ -495,3 +510,11 @@ class ColumnAdapter(ClauseAdapter):
def adapted_row(self, row):
return AliasedRow(row, self.columns)
    def __getstate__(self):
        # 'columns' is a lazily-populated lookup cache (PopulateDict);
        # it is dropped here and rebuilt in __setstate__.
        d = self.__dict__.copy()
        del d['columns']
        return d

    def __setstate__(self, state):
        self.__dict__.update(state)
        # recreate the column-lookup cache backed by _locate_col
        self.columns = util.PopulateDict(self._locate_col)