summaryrefslogtreecommitdiff
path: root/examples/dogpile_caching
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2012-10-21 16:54:42 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2012-10-21 16:54:42 -0400
commit39d17c76df542d0040c2c8db2d2e3dc897b5cce5 (patch)
treef5aeba8be0f61c0db8d5ba0e76efdaa593cd85c4 /examples/dogpile_caching
parentf2bc0ddcb496e6a0cb0a0ad88c7c055dbf0c11a7 (diff)
downloadsqlalchemy-39d17c76df542d0040c2c8db2d2e3dc897b5cce5.tar.gz
- converted beaker demo to dogpile.cache, [ticket:2589]
Diffstat (limited to 'examples/dogpile_caching')
-rw-r--r--examples/dogpile_caching/__init__.py84
-rw-r--r--examples/dogpile_caching/advanced.py79
-rw-r--r--examples/dogpile_caching/caching_query.py255
-rw-r--r--examples/dogpile_caching/environment.py83
-rw-r--r--examples/dogpile_caching/fixture_data.py52
-rw-r--r--examples/dogpile_caching/helloworld.py62
-rw-r--r--examples/dogpile_caching/local_session_caching.py99
-rw-r--r--examples/dogpile_caching/model.py106
-rw-r--r--examples/dogpile_caching/relation_caching.py25
9 files changed, 845 insertions, 0 deletions
diff --git a/examples/dogpile_caching/__init__.py b/examples/dogpile_caching/__init__.py
new file mode 100644
index 000000000..00c386bda
--- /dev/null
+++ b/examples/dogpile_caching/__init__.py
@@ -0,0 +1,84 @@
+"""
+Illustrates how to embed `dogpile.cache <http://dogpilecache.readthedocs.org/>`_
+functionality within
+the :class:`.Query` object, allowing full cache control as well as the
+ability to pull "lazy loaded" attributes from long term cache
+as well.
+
+.. versionchanged:: 0.8 The example was modernized to use
+ dogpile.cache, replacing Beaker as the caching library in
+ use.
+
+In this demo, the following techniques are illustrated:
+
+* Using custom subclasses of :class:`.Query`
+* Basic technique of circumventing Query to pull from a
+ custom cache source instead of the database.
+* Rudimentary caching with dogpile.cache, using "regions" which allow
+  global control over a fixed set of configurations.
+* Using custom :class:`.MapperOption` objects to configure options on
+ a Query, including the ability to invoke the options
+ deep within an object graph when lazy loads occur.
+
+E.g.::
+
+ # query for Person objects, specifying cache
+ q = Session.query(Person).options(FromCache("default"))
+
+ # specify that each Person's "addresses" collection comes from
+ # cache too
+ q = q.options(RelationshipCache(Person.addresses, "default"))
+
+ # query
+ print q.all()
+
+To run, both SQLAlchemy and dogpile.cache must be
+installed or on the current PYTHONPATH. The demo will create a local
+directory for datafiles, insert initial data, and run. Running the
+demo a second time will utilize the cache files already present, and
+exactly one SQL statement against two tables will be emitted - the
+displayed result however will utilize dozens of lazyloads that all
+pull from cache.
+
+The demo scripts themselves, in order of complexity, are run as follows::
+
+ python examples/dogpile_caching/helloworld.py
+
+   python examples/dogpile_caching/relation_caching.py
+
+ python examples/dogpile_caching/advanced.py
+
+ python examples/dogpile_caching/local_session_caching.py
+
+
+Listing of files:
+
+ environment.py - Establish the Session, a dictionary
+ of "regions", a sample cache region against a .dbm
+ file, data / cache file paths, and configurations,
+ bootstrap fixture data if necessary.
+
+ caching_query.py - Represent functions and classes
+ which allow the usage of Dogpile caching with SQLAlchemy.
+ Introduces a query option called FromCache.
+
+ model.py - The datamodel, which represents Person that has multiple
+ Address objects, each with PostalCode, City, Country
+
+ fixture_data.py - creates demo PostalCode, Address, Person objects
+ in the database.
+
+ helloworld.py - the basic idea.
+
+    relation_caching.py - Illustrates how to add cache options on
+    relationship endpoints, so that lazyloads load from cache.
+
+ advanced.py - Further examples of how to use FromCache. Combines
+ techniques from the first two scripts.
+
+    local_session_caching.py - Grok everything so far? This example
+ creates a new dogpile.cache backend that will persist data in a dictionary
+ which is local to the current session. remove() the session
+ and the cache is gone.
+
+"""
diff --git a/examples/dogpile_caching/advanced.py b/examples/dogpile_caching/advanced.py
new file mode 100644
index 000000000..6bfacfcf0
--- /dev/null
+++ b/examples/dogpile_caching/advanced.py
@@ -0,0 +1,79 @@
+"""advanced.py
+
+Illustrate usage of Query combined with the FromCache option,
+including front-end loading, cache invalidation, namespace techniques
+and collection caching.
+
+"""
+
+from environment import Session
+from model import Person, Address, cache_address_bits
+from caching_query import FromCache, RelationshipCache
+from sqlalchemy.orm import joinedload
+
+def load_name_range(start, end, invalidate=False):
+ """Load Person objects on a range of names.
+
+ start/end are integers, range is then
+ "person <start>" - "person <end>".
+
+ The cache option we set up is called "name_range", indicating
+ a range of names for the Person class.
+
+    The `Person.addresses` collections are also cached. It's basically
+ another level of tuning here, as that particular cache option
+ can be transparently replaced with joinedload(Person.addresses).
+ The effect is that each Person and his/her Address collection
+ is cached either together or separately, affecting the kind of
+ SQL that emits for unloaded Person objects as well as the distribution
+ of data within the cache.
+ """
+ q = Session.query(Person).\
+ filter(Person.name.between("person %.2d" % start, "person %.2d" % end)).\
+ options(cache_address_bits).\
+ options(FromCache("default", "name_range"))
+
+ # have the "addresses" collection cached separately
+ # each lazyload of Person.addresses loads from cache.
+ q = q.options(RelationshipCache(Person.addresses, "default"))
+
+ # alternatively, eagerly load the "addresses" collection, so that they'd
+ # be cached together. This issues a bigger SQL statement and caches
+ # a single, larger value in the cache per person rather than two
+ # separate ones.
+ #q = q.options(joinedload(Person.addresses))
+
+ # if requested, invalidate the cache on current criterion.
+ if invalidate:
+ q.invalidate()
+
+ return q.all()
+
+print "two through twelve, possibly from cache:\n"
+print ", ".join([p.name for p in load_name_range(2, 12)])
+
+print "\ntwenty five through forty, possibly from cache:\n"
+print ", ".join([p.name for p in load_name_range(25, 40)])
+
+# loading them again, no SQL is emitted
+print "\ntwo through twelve, from the cache:\n"
+print ", ".join([p.name for p in load_name_range(2, 12)])
+
+# but with invalidate, they are
+print "\ntwenty five through forty, invalidate first:\n"
+print ", ".join([p.name for p in load_name_range(25, 40, True)])
+
+# illustrate the address loading from either cache/already
+# on the Person
+print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache"
+for p in load_name_range(2, 12):
+ print p.format_full()
+
+# illustrate the address loading from either cache/already
+# on the Person
+print "\n\nPeople plus addresses, two through twelve, addresses from cache"
+for p in load_name_range(2, 12):
+ print p.format_full()
+
+print "\n\nIf this was the first run of advanced.py, try "\
+ "a second run. Only one SQL statement will be emitted."
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
new file mode 100644
index 000000000..fb532fa63
--- /dev/null
+++ b/examples/dogpile_caching/caching_query.py
@@ -0,0 +1,255 @@
+"""caching_query.py
+
+Represent persistence structures which allow the usage of
+dogpile.cache caching with SQLAlchemy.
+
+The four new concepts introduced here are:
+
+ * CachingQuery - a Query subclass that caches and
+   retrieves results in/from dogpile.cache.
+ * FromCache - a query option that establishes caching
+   parameters on a Query
+ * RelationshipCache - a variant of FromCache which is specific
+   to a query invoked during a lazy load.
+ * _key_from_query - a function that creates a cache key
+   from a Query.
+
+The rest of what's here are standard SQLAlchemy and
+dogpile.cache constructs.
+
+"""
+from sqlalchemy.orm.interfaces import MapperOption
+from sqlalchemy.orm.query import Query
+from sqlalchemy.sql import visitors
+from dogpile.cache.api import NO_VALUE
+
+class CachingQuery(Query):
+ """A Query subclass which optionally loads full results from a dogpile
+ cache region.
+
+ The CachingQuery optionally stores additional state that allows it to consult
+ a dogpile.cache cache before accessing the database, in the form
+ of a FromCache or RelationshipCache object. Each of these objects
+ refer to the name of a :class:`dogpile.cache.Region` that's been configured
+ and stored in a lookup dictionary. When such an object has associated
+ itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
+ is used to locate a cached result. If none is present, then the
+ Query is invoked normally, the results being cached.
+
+ The FromCache and RelationshipCache mapper options below represent
+ the "public" method of configuring this state upon the CachingQuery.
+
+ """
+
+ def __init__(self, regions, *args, **kw):
+ self.cache_regions = regions
+ Query.__init__(self, *args, **kw)
+
+ def __iter__(self):
+ """override __iter__ to pull results from dogpile
+ if particular attributes have been configured.
+
+ Note that this approach does *not* detach the loaded objects from
+ the current session. If the cache backend is an in-process cache
+ (like "memory") and lives beyond the scope of the current session's
+ transaction, those objects may be expired. The method here can be
+ modified to first expunge() each loaded item from the current
+ session before returning the list of items, so that the items
+ in the cache are not the same ones in the current Session.
+
+ """
+ if hasattr(self, '_cache_region'):
+ return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
+ else:
+ return Query.__iter__(self)
+
+ def _get_cache_plus_key(self):
+ """Return a cache region plus key."""
+
+ dogpile_region = self.cache_regions[self._cache_region.region]
+ if self._cache_region.cache_key:
+ key = self._cache_region.cache_key
+ else:
+ key = _key_from_query(self)
+ return dogpile_region, key
+
+ def invalidate(self):
+ """Invalidate the cache value represented by this Query."""
+
+ dogpile_region, cache_key = self._get_cache_plus_key()
+ dogpile_region.delete(cache_key)
+
+ def get_value(self, merge=True, createfunc=None,
+ expiration_time=None, ignore_expiration=False):
+ """Return the value from the cache for this query.
+
+ Raise KeyError if no value present and no
+ createfunc specified.
+
+ """
+ dogpile_region, cache_key = self._get_cache_plus_key()
+
+ # ignore_expiration means, if the value is in the cache
+ # but is expired, return it anyway. This doesn't make sense
+ # with createfunc, which says, if the value is expired, generate
+ # a new value.
+ assert not ignore_expiration or not createfunc, \
+ "Can't ignore expiration and also provide createfunc"
+
+ if ignore_expiration or not createfunc:
+ cached_value = dogpile_region.get(cache_key,
+ expiration_time=expiration_time,
+ ignore_expiration=ignore_expiration)
+ else:
+ cached_value = dogpile_region.get_or_create(
+ cache_key,
+ createfunc,
+ expiration_time=expiration_time
+ )
+ if cached_value is NO_VALUE:
+ raise KeyError(cache_key)
+ if merge:
+ cached_value = self.merge_result(cached_value, load=False)
+ return cached_value
+
+ def set_value(self, value):
+ """Set the value in the cache for this query."""
+
+ dogpile_region, cache_key = self._get_cache_plus_key()
+ dogpile_region.set(cache_key, value)
+
+def query_callable(regions, query_cls=CachingQuery):
+ def query(*arg, **kw):
+ return query_cls(regions, *arg, **kw)
+ return query
+
+def _key_from_query(query, qualifier=None):
+ """Given a Query, create a cache key.
+
+ There are many approaches to this; here we use the simplest,
+ which is to create an md5 hash of the text of the SQL statement,
+ combined with stringified versions of all the bound parameters
+ within it. There's a bit of a performance hit with
+ compiling out "query.statement" here; other approaches include
+ setting up an explicit cache key with a particular Query,
+ then combining that with the bound parameter values.
+
+ """
+
+ v = []
+ def visit_bindparam(bind):
+
+ if bind.key in query._params:
+ value = query._params[bind.key]
+ elif bind.callable:
+ value = bind.callable()
+ else:
+ value = bind.value
+
+ v.append(unicode(value))
+
+ stmt = query.statement
+ visitors.traverse(stmt, {}, {'bindparam': visit_bindparam})
+
+ # here we return the key as a long string. our "key mangler"
+ # set up with the region will boil it down to an md5.
+ return " ".join([unicode(stmt)] + v)
+
+class FromCache(MapperOption):
+ """Specifies that a Query should load results from a cache."""
+
+ propagate_to_loaders = False
+
+ def __init__(self, region="default", cache_key=None):
+ """Construct a new FromCache.
+
+ :param region: the cache region. Should be a
+ region configured in the dictionary of dogpile
+ regions.
+
+ :param cache_key: optional. A string cache key
+ that will serve as the key to the query. Use this
+ if your query has a huge amount of parameters (such
+ as when using in_()) which correspond more simply to
+ some other identifier.
+
+ """
+ self.region = region
+ self.cache_key = cache_key
+
+ def process_query(self, query):
+ """Process a Query during normal loading operation."""
+ query._cache_region = self
+
+class RelationshipCache(MapperOption):
+ """Specifies that a Query as called within a "lazy load"
+ should load results from a cache."""
+
+ propagate_to_loaders = True
+
+ def __init__(self, attribute, region="default"):
+ self.region = region
+ self.cls_ = attribute.property.parent.class_
+ self.key = attribute.property.key
+
+ def process_query_conditionally(self, query):
+ if query._current_path:
+ mapper, key = query._current_path[-2:]
+ if issubclass(mapper.class_, self.cls_) and \
+ key == self.key:
+ query._cache_region = self
+
+class RelationshipCache(MapperOption):
+ """Specifies that a Query as called within a "lazy load"
+ should load results from a cache."""
+
+ propagate_to_loaders = True
+
+ def __init__(self, attribute, region="default", cache_key=None):
+ """Construct a new RelationshipCache.
+
+ :param attribute: A Class.attribute which
+ indicates a particular class relationship() whose
+ lazy loader should be pulled from the cache.
+
+ :param region: name of the cache region.
+
+ :param cache_key: optional. A string cache key
+ that will serve as the key to the query, bypassing
+ the usual means of forming a key from the Query itself.
+
+ """
+ self.region = region
+ self.cache_key = cache_key
+ self._relationship_options = {
+ (attribute.property.parent.class_, attribute.property.key): self
+ }
+
+ def process_query_conditionally(self, query):
+ """Process a Query that is used within a lazy loader.
+
+ (the process_query_conditionally() method is a SQLAlchemy
+ hook invoked only within lazyload.)
+
+ """
+ if query._current_path:
+ mapper, key = query._current_path[-2:]
+
+ for cls in mapper.class_.__mro__:
+ if (cls, key) in self._relationship_options:
+ relationship_option = self._relationship_options[(cls, key)]
+ query._cache_region = relationship_option
+ break
+
+ def and_(self, option):
+ """Chain another RelationshipCache option to this one.
+
+ While many RelationshipCache objects can be specified on a single
+ Query separately, chaining them together allows for a more efficient
+ lookup during load.
+
+ """
+ self._relationship_options.update(option._relationship_options)
+ return self
+
+
diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py
new file mode 100644
index 000000000..f210d26ac
--- /dev/null
+++ b/examples/dogpile_caching/environment.py
@@ -0,0 +1,83 @@
+"""environment.py
+
+Establish data / cache file paths, and configurations,
+bootstrap fixture data if necessary.
+
+"""
+import caching_query
+from sqlalchemy import create_engine
+from sqlalchemy.orm import scoped_session, sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from dogpile.cache.region import make_region
+import os
+import md5
+
+# dogpile cache regions. A home base for cache configurations.
+regions = {}
+
+
+# scoped_session. Apply our custom CachingQuery class to it,
+# using a callable that will associate the dictionary
+# of regions with the Query.
+Session = scoped_session(
+ sessionmaker(
+ query_cls=caching_query.query_callable(regions)
+ )
+ )
+
+# global declarative base class.
+Base = declarative_base()
+
+root = "./dogpile_data/"
+
+if not os.path.exists(root):
+ raw_input("Will create datafiles in %r.\n"
+ "To reset the cache + database, delete this directory.\n"
+ "Press enter to continue.\n" % root
+ )
+ os.makedirs(root)
+
+dbfile = os.path.join(root, "dogpile_demo.db")
+engine = create_engine('sqlite:///%s' % dbfile, echo=True)
+Session.configure(bind=engine)
+
+
+def md5_key_mangler(key):
+ """Receive cache keys as long concatenated strings;
+ distill them into an md5 hash.
+
+ """
+ return md5.md5(key).hexdigest()
+
+# configure the "default" cache region.
+regions['default'] = make_region(
+ # the "dbm" backend needs
+ # string-encoded keys
+ key_mangler=md5_key_mangler
+ ).configure(
+ # using type 'file' to illustrate
+ # serialized persistence. Normally
+ # memcached or similar is a better choice
+ # for caching.
+ 'dogpile.cache.dbm',
+ expiration_time=3600,
+ arguments={
+ "filename": os.path.join(root, "cache.dbm")
+ }
+ )
+
+# optional; call invalidate() on the region
+# once created so that all data is fresh when
+# the app is restarted. Good for development,
+# on a production system needs to be used carefully
+# regions['default'].invalidate()
+
+
+installed = False
+
+def bootstrap():
+ global installed
+ import fixture_data
+ if not os.path.exists(dbfile):
+ fixture_data.install()
+ installed = True \ No newline at end of file
diff --git a/examples/dogpile_caching/fixture_data.py b/examples/dogpile_caching/fixture_data.py
new file mode 100644
index 000000000..1db75ea05
--- /dev/null
+++ b/examples/dogpile_caching/fixture_data.py
@@ -0,0 +1,52 @@
+"""fixture_data.py
+
+Installs some sample data. Here we have a handful of postal codes for a few US/
+Canadian cities. Then, 50 Person records are installed, each with a
+randomly selected postal code.
+
+"""
+from environment import Session, Base
+from model import City, Country, PostalCode, Person, Address
+import random
+
+def install():
+ Base.metadata.create_all(Session().bind)
+
+ data = [
+ ('Chicago', 'United States', ('60601', '60602', '60603', '60604')),
+ ('Montreal', 'Canada', ('H2S 3K9', 'H2B 1V4', 'H7G 2T8')),
+ ('Edmonton', 'Canada', ('T5J 1R9', 'T5J 1Z4', 'T5H 1P6')),
+ ('New York', 'United States',
+ ('10001', '10002', '10003', '10004', '10005', '10006')),
+ ('San Francisco', 'United States',
+ ('94102', '94103', '94104', '94105', '94107', '94108'))
+ ]
+
+ countries = {}
+ all_post_codes = []
+ for city, country, postcodes in data:
+ try:
+ country = countries[country]
+ except KeyError:
+ countries[country] = country = Country(country)
+
+ city = City(city, country)
+ pc = [PostalCode(code, city) for code in postcodes]
+ Session.add_all(pc)
+ all_post_codes.extend(pc)
+
+ for i in xrange(1, 51):
+ person = Person(
+ "person %.2d" % i,
+ Address(
+ street="street %.2d" % i,
+ postal_code=all_post_codes[
+ random.randint(0, len(all_post_codes) - 1)]
+ )
+ )
+ Session.add(person)
+
+ Session.commit()
+
+ # start the demo fresh
+ Session.remove() \ No newline at end of file
diff --git a/examples/dogpile_caching/helloworld.py b/examples/dogpile_caching/helloworld.py
new file mode 100644
index 000000000..e2e4d4f78
--- /dev/null
+++ b/examples/dogpile_caching/helloworld.py
@@ -0,0 +1,62 @@
+"""helloworld.py
+
+Illustrate how to load some data, and cache the results.
+
+"""
+
+from environment import Session
+from model import Person
+from caching_query import FromCache
+
+# load Person objects. cache the result under the namespace "all_people".
+print "loading people...."
+people = Session.query(Person).options(FromCache("default")).all()
+
+# remove the Session. next query starts from scratch.
+Session.remove()
+
+# load again, using the same FromCache option. now they're cached
+# under "all_people", no SQL is emitted.
+print "loading people....again!"
+people = Session.query(Person).options(FromCache("default")).all()
+
+# want to load on some different kind of query ? change the namespace
+# you send to FromCache
+print "loading people two through twelve"
+people_two_through_twelve = Session.query(Person).\
+ options(FromCache("default")).\
+ filter(Person.name.between("person 02", "person 12")).\
+ all()
+
+# the data is cached under the "namespace" you send to FromCache, *plus*
+# the bind parameters of the query. So this query, having
+# different literal parameters under "Person.name.between()" than the
+# previous one, issues new SQL...
+print "loading people five through fifteen"
+people_five_through_fifteen = Session.query(Person).\
+ options(FromCache("default")).\
+ filter(Person.name.between("person 05", "person 15")).\
+ all()
+
+
+# ... but using the same params as are already cached, no SQL
+print "loading people two through twelve...again!"
+people_two_through_twelve = Session.query(Person).\
+ options(FromCache("default")).\
+ filter(Person.name.between("person 02", "person 12")).\
+ all()
+
+
+# invalidate the cache for the three queries we've done. Recreate
+# each Query, which includes at the very least the same FromCache,
+# same list of objects to be loaded, and the same parameters in the
+# same order, then call invalidate().
+print "invalidating everything"
+Session.query(Person).options(FromCache("default")).invalidate()
+Session.query(Person).\
+ options(FromCache("default")).\
+ filter(Person.name.between("person 02", "person 12")).invalidate()
+Session.query(Person).\
+ options(FromCache("default", "people_on_range")).\
+ filter(Person.name.between("person 05", "person 15")).invalidate()
+
diff --git a/examples/dogpile_caching/local_session_caching.py b/examples/dogpile_caching/local_session_caching.py
new file mode 100644
index 000000000..383b31c11
--- /dev/null
+++ b/examples/dogpile_caching/local_session_caching.py
@@ -0,0 +1,99 @@
+"""local_session_caching.py
+
+Create a new Dogpile cache backend that will store
+cached data local to the current Session.
+
+This is an advanced example which assumes familiarity
+with the basic operation of CachingQuery.
+
+"""
+
+from dogpile.cache.api import CacheBackend, NO_VALUE
+from dogpile.cache.region import register_backend
+
+class ScopedSessionBackend(CacheBackend):
+ """A dogpile backend which will cache objects locally on
+ the current session.
+
+ When used with the query_cache system, the effect is that the objects
+ in the cache are the same as that within the session - the merge()
+ is a formality that doesn't actually create a second instance.
+ This makes it safe to use for updates of data from an identity
+ perspective (still not ideal for deletes though).
+
+ When the session is removed, the cache is gone too, so the cache
+ is automatically disposed upon session.remove().
+
+ """
+
+ def __init__(self, arguments):
+ self.scoped_session = arguments['scoped_session']
+
+ def get(self, key):
+ return self._cache_dictionary.get(key, NO_VALUE)
+
+ def set(self, key, value):
+ self._cache_dictionary[key] = value
+
+ def delete(self, key):
+ self._cache_dictionary.pop(key, None)
+
+ @property
+ def _cache_dictionary(self):
+ """Return the cache dictionary linked to the current Session."""
+
+ sess = self.scoped_session()
+ try:
+ cache_dict = sess._cache_dictionary
+ except AttributeError:
+ sess._cache_dictionary = cache_dict = {}
+ return cache_dict
+
+register_backend("sqlalchemy.session", __name__, "ScopedSessionBackend")
+
+
+if __name__ == '__main__':
+ from environment import Session, regions
+ from caching_query import FromCache
+ from dogpile.cache import make_region
+
+ # set up a region based on the ScopedSessionBackend,
+ # pointing to the scoped_session declared in the example
+ # environment.
+ regions['local_session'] = make_region().configure(
+ 'sqlalchemy.session',
+ arguments={
+ "scoped_session": Session
+ }
+ )
+
+ from model import Person
+
+ # query to load Person by name, with criterion
+ # of "person 10"
+ q = Session.query(Person).\
+ options(FromCache("local_session")).\
+ filter(Person.name == "person 10")
+
+ # load from DB
+ person10 = q.one()
+
+ # next call, the query is cached.
+ person10 = q.one()
+
+ # clear out the Session. The "_cache_dictionary" dictionary
+ # disappears with it.
+ Session.remove()
+
+ # query calls from DB again
+ person10 = q.one()
+
+ # identity is preserved - person10 is the *same* object that's
+ # ultimately inside the cache. So it is safe to manipulate
+ # the not-queried-for attributes of objects when using such a
+ # cache without the need to invalidate - however, any change
+ # that would change the results of a cached query, such as
+ # inserts, deletes, or modification to attributes that are
+ # part of query criterion, still require careful invalidation.
+ cache, key = q._get_cache_plus_key()
+ assert person10 is cache.get(key)[0]
diff --git a/examples/dogpile_caching/model.py b/examples/dogpile_caching/model.py
new file mode 100644
index 000000000..6f1cffedf
--- /dev/null
+++ b/examples/dogpile_caching/model.py
@@ -0,0 +1,106 @@
+"""Model. We are modeling Person objects with a collection
+of Address objects. Each Address has a PostalCode, which
+in turn references a City and then a Country:
+
+Person --(1..n)--> Address
+Address --(has a)--> PostalCode
+PostalCode --(has a)--> City
+City --(has a)--> Country
+
+"""
+from sqlalchemy import Column, Integer, String, ForeignKey
+from sqlalchemy.orm import relationship
+from caching_query import FromCache, RelationshipCache
+from environment import Base, bootstrap
+
+class Country(Base):
+ __tablename__ = 'country'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(100), nullable=False)
+
+ def __init__(self, name):
+ self.name = name
+
+class City(Base):
+ __tablename__ = 'city'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(100), nullable=False)
+ country_id = Column(Integer, ForeignKey('country.id'), nullable=False)
+ country = relationship(Country)
+
+ def __init__(self, name, country):
+ self.name = name
+ self.country = country
+
+class PostalCode(Base):
+ __tablename__ = 'postal_code'
+
+ id = Column(Integer, primary_key=True)
+ code = Column(String(10), nullable=False)
+ city_id = Column(Integer, ForeignKey('city.id'), nullable=False)
+ city = relationship(City)
+
+ @property
+ def country(self):
+ return self.city.country
+
+ def __init__(self, code, city):
+ self.code = code
+ self.city = city
+
+class Address(Base):
+ __tablename__ = 'address'
+
+ id = Column(Integer, primary_key=True)
+ person_id = Column(Integer, ForeignKey('person.id'), nullable=False)
+ street = Column(String(200), nullable=False)
+ postal_code_id = Column(Integer, ForeignKey('postal_code.id'))
+ postal_code = relationship(PostalCode)
+
+ @property
+ def city(self):
+ return self.postal_code.city
+
+ @property
+ def country(self):
+ return self.postal_code.country
+
+ def __str__(self):
+ return "%s\t"\
+ "%s, %s\t"\
+ "%s" % (self.street, self.city.name,
+ self.postal_code.code, self.country.name)
+
+class Person(Base):
+ __tablename__ = 'person'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(100), nullable=False)
+ addresses = relationship(Address, collection_class=set)
+
+ def __init__(self, name, *addresses):
+ self.name = name
+ self.addresses = set(addresses)
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return "Person(name=%r)" % self.name
+
+ def format_full(self):
+ return "\t".join([str(x) for x in [self] + list(self.addresses)])
+
+# Caching options. A set of three RelationshipCache options
+# which can be applied to Query(), causing the "lazy load"
+# of these attributes to be loaded from cache.
+cache_address_bits = RelationshipCache(PostalCode.city, "default").\
+ and_(
+ RelationshipCache(City.country, "default")
+ ).and_(
+ RelationshipCache(Address.postal_code, "default")
+ )
+
+bootstrap() \ No newline at end of file
diff --git a/examples/dogpile_caching/relation_caching.py b/examples/dogpile_caching/relation_caching.py
new file mode 100644
index 000000000..7a5779620
--- /dev/null
+++ b/examples/dogpile_caching/relation_caching.py
@@ -0,0 +1,25 @@
+"""relation_caching.py
+
+Load a set of Person and Address objects, specifying that
+related PostalCode, City, Country objects should be pulled from long
+term cache.
+
+"""
+from environment import Session, root
+from model import Person, cache_address_bits
+from sqlalchemy.orm import joinedload
+import os
+
+for p in Session.query(Person).options(joinedload(Person.addresses), cache_address_bits):
+ print p.format_full()
+
+
+print "\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
+ "load postal codes, cities, countries.\n"\
+ "If run a second time, assuming the cache is still valid, "\
+ "only a single SQL statement will run - all "\
+ "related data is pulled from cache.\n"\
+ "To clear the cache, delete the file %r. \n"\
+ "This will cause a re-load of cities, postal codes and countries on "\
+ "the next run.\n"\
+ % os.path.join(root, 'cache.dbm')