summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2010-01-19 00:53:12 +0000
committerMike Bayer <mike_mp@zzzcomputing.com>2010-01-19 00:53:12 +0000
commit40f8aadd582776524d3b98da1f577c2fc95619e7 (patch)
tree753eec3802734f397953976824a252bb60829189
parent56fe538cc7d81ce264fc6504feb1ead5e17d0f55 (diff)
downloadsqlalchemy-40f8aadd582776524d3b98da1f577c2fc95619e7.tar.gz
- mega example cleanup
- added READMEs to all examples in each __init__.py and added to sphinx documentation - added versioning example - removed vertical/vertical.py, the dictlikes are more straightforward
-rw-r--r--doc/build/conf.py3
-rw-r--r--doc/build/examples.rst121
-rw-r--r--doc/build/index.rst1
-rw-r--r--doc/build/mappers.rst2
-rw-r--r--examples/adjacency_list/__init__.py16
-rw-r--r--examples/adjacency_list/adjacency_list.py6
-rw-r--r--examples/association/__init__.py25
-rw-r--r--examples/beaker_caching/README62
-rw-r--r--examples/beaker_caching/__init__.py115
-rw-r--r--examples/beaker_caching/advanced.py2
-rw-r--r--examples/beaker_caching/environment.py44
-rw-r--r--examples/beaker_caching/helloworld.py2
-rw-r--r--examples/beaker_caching/local_session_caching.py2
-rw-r--r--examples/beaker_caching/relation_caching.py4
-rw-r--r--examples/custom_attributes/__init__.py8
-rw-r--r--examples/custom_attributes/custom_management.py1
-rw-r--r--examples/derived_attributes/__init__.py10
-rw-r--r--examples/derived_attributes/attributes.py3
-rw-r--r--examples/dynamic_dict/__init__.py5
-rw-r--r--examples/dynamic_dict/dynamic_dict.py9
-rw-r--r--examples/elementtree/__init__.py30
-rw-r--r--examples/elementtree/adjacency_list.py10
-rw-r--r--examples/elementtree/optimized_al.py10
-rw-r--r--examples/elementtree/pickle.py9
-rw-r--r--examples/graphs/__init__.py9
-rw-r--r--examples/graphs/directed_graph.py (renamed from examples/graphs/graph1.py)0
-rw-r--r--examples/inheritance/__init__.py4
-rw-r--r--examples/inheritance/concrete.py (renamed from examples/polymorph/concrete.py)0
-rw-r--r--examples/inheritance/polymorph.py (renamed from examples/polymorph/polymorph.py)5
-rw-r--r--examples/inheritance/single.py (renamed from examples/polymorph/single.py)6
-rw-r--r--examples/large_collection/__init__.py8
-rw-r--r--examples/large_collection/large_collection.py6
-rw-r--r--examples/nested_sets/__init__.py4
-rw-r--r--examples/poly_assoc/__init__.py10
-rw-r--r--examples/polymorph/__init__.py0
-rw-r--r--examples/postgis/__init__.py34
-rw-r--r--examples/postgis/postgis.py36
-rw-r--r--examples/sharding/__init__.py20
-rw-r--r--examples/sharding/attribute_shard.py18
-rw-r--r--examples/versioning/__init__.py67
-rw-r--r--examples/versioning/history_meta.py164
-rw-r--r--examples/versioning/test_versioning.py248
-rw-r--r--examples/vertical/__init__.py27
-rw-r--r--examples/vertical/dictlike-polymorphic.py2
-rw-r--r--examples/vertical/dictlike.py1
-rw-r--r--examples/vertical/vertical.py201
-rw-r--r--lib/sqlalchemy/orm/session.py2
-rw-r--r--lib/sqlalchemy/test/entities.py83
-rw-r--r--setup.cfg1
-rw-r--r--test/orm/_base.py83
50 files changed, 1029 insertions, 510 deletions
diff --git a/doc/build/conf.py b/doc/build/conf.py
index eaf51a628..8b7650e14 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -17,6 +17,7 @@ import sys, os
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../lib'))
+sys.path.insert(0, os.path.abspath('../../examples'))
sys.path.insert(0, os.path.abspath('.'))
import sqlalchemy
@@ -44,7 +45,7 @@ master_doc = 'index'
# General information about the project.
project = u'SQLAlchemy'
-copyright = u'2007, 2008, 2009, the SQLAlchemy authors and contributors'
+copyright = u'2007, 2008, 2009, 2010, the SQLAlchemy authors and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
diff --git a/doc/build/examples.rst b/doc/build/examples.rst
new file mode 100644
index 000000000..f91b8a69a
--- /dev/null
+++ b/doc/build/examples.rst
@@ -0,0 +1,121 @@
+.. _examples_toplevel:
+
+Examples
+========
+
+The SQLAlchemy distribution includes a variety of code examples illustrating a select set of patterns, some typical and some not so typical. All are runnable and can be found in the ``/examples`` directory of the distribution. Each example contains a README in its ``__init__.py`` file, each of which are listed below.
+
+Additional SQLAlchemy examples, some user contributed, are available on the wiki at `<http://www.sqlalchemy.org/trac/wiki/UsageRecipes>`_.
+
+Adjacency List
+--------------
+
+Location: /examples/adjacency_list/
+
+.. automodule:: adjacency_list
+
+Associations
+------------
+
+Location: /examples/association/
+
+.. automodule:: association
+
+Attribute Instrumentation
+-------------------------
+
+Location: /examples/custom_attributes/
+
+.. automodule:: custom_attributes
+
+Beaker Caching
+--------------
+
+Location: /examples/beaker_caching/
+
+.. automodule:: beaker_caching
+
+Derived Attributes
+------------------
+
+Location: /examples/derived_attributes/
+
+.. automodule:: derived_attributes
+
+
+Directed Graphs
+---------------
+
+Location: /examples/graphs/
+
+.. automodule:: graphs
+
+Dynamic Relations as Dictionaries
+----------------------------------
+
+Location: /examples/dynamic_dict/
+
+.. automodule:: dynamic_dict
+
+Horizontal Sharding
+-------------------
+
+Location: /examples/sharding
+
+.. automodule:: sharding
+
+Inheritance Mappings
+--------------------
+
+Location: /examples/inheritance/
+
+.. automodule:: inheritance
+
+Large Collections
+-----------------
+
+Location: /examples/large_collection/
+
+.. automodule:: large_collection
+
+Nested Sets
+-----------
+
+Location: /examples/nested_sets/
+
+.. automodule:: nested_sets
+
+Polymorphic Associations
+------------------------
+
+Location: /examples/poly_assoc/
+
+.. automodule:: poly_assoc
+
+PostGIS Integration
+-------------------
+
+Location: /examples/postgis
+
+.. automodule:: postgis
+
+Versioned Objects
+-----------------
+
+Location: /examples/versioning
+
+.. automodule:: versioning
+
+Vertical Attribute Mapping
+--------------------------
+
+Location: /examples/vertical
+
+.. automodule:: vertical
+
+XML Persistence
+---------------
+
+Location: /examples/elementtree/
+
+.. automodule:: elementtree
diff --git a/doc/build/index.rst b/doc/build/index.rst
index c3385d4e0..e2338c098 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -10,6 +10,7 @@ Table of Contents
session
dbengine
metadata
+ examples
reference/index
Indices and tables
diff --git a/doc/build/mappers.rst b/doc/build/mappers.rst
index 9017cf2d5..4b0ff6f9f 100644
--- a/doc/build/mappers.rst
+++ b/doc/build/mappers.rst
@@ -337,6 +337,8 @@ Ordering for rows loaded through ``Query`` is usually specified using the ``orde
Above, a ``Query`` issued for the ``User`` class will use the value of the mapper's ``order_by`` setting if the ``Query`` itself has no ordering specified.
+.. _datamapping_inheritance:
+
Mapping Class Inheritance Hierarchies
--------------------------------------
diff --git a/examples/adjacency_list/__init__.py b/examples/adjacency_list/__init__.py
index e69de29bb..44f27090b 100644
--- a/examples/adjacency_list/__init__.py
+++ b/examples/adjacency_list/__init__.py
@@ -0,0 +1,16 @@
+"""
+An example of a dictionary-of-dictionaries structure mapped using
+an adjacency list model.
+
+E.g.::
+
+ node = TreeNode('rootnode')
+ node.append('node1')
+ node.append('node3')
+ session.add(node)
+ session.commit()
+
+ dump_tree(node)
+
+"""
+
diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py
index 3bf20dc5f..cc539c054 100644
--- a/examples/adjacency_list/adjacency_list.py
+++ b/examples/adjacency_list/adjacency_list.py
@@ -1,9 +1,3 @@
-"""
-An example of a dictionary-of-dictionaries structure mapped using
-an adjacency list model
-
-"""
-
from sqlalchemy import MetaData, Table, Column, Sequence, ForeignKey,\
Integer, String, create_engine
diff --git a/examples/association/__init__.py b/examples/association/__init__.py
index e69de29bb..46b596991 100644
--- a/examples/association/__init__.py
+++ b/examples/association/__init__.py
@@ -0,0 +1,25 @@
+"""
+
+Examples illustrating the usage of the "association object" pattern,
+where an intermediary object associates two endpoint objects together.
+
+The first example illustrates a basic association from a User object
+to a collection of Order objects, each which references a collection of Item objects.
+
+The second example builds upon the first to add the Association Proxy extension.
+
+E.g.::
+
+ # create an order
+ order = Order('john smith')
+
+ # append an OrderItem association via the "itemassociations"
+ # collection with a custom price.
+ order.itemassociations.append(OrderItem(item('MySQL Crowbar'), 10.99))
+
+ # append two more Items via the transparent "items" proxy, which
+ # will create OrderItems automatically using the default price.
+ order.items.append(item('SA Mug'))
+ order.items.append(item('SA Hat'))
+
+""" \ No newline at end of file
diff --git a/examples/beaker_caching/README b/examples/beaker_caching/README
deleted file mode 100644
index 702690036..000000000
--- a/examples/beaker_caching/README
+++ /dev/null
@@ -1,62 +0,0 @@
-Illustrates how to embed Beaker cache functionality within
-the Query object, allowing full cache control as well as the
-ability to pull "lazy loaded" attributes from long term cache
-as well.
-
-In this demo, the following techniques are illustrated:
-
- * Using custom subclasses of Query
- * Basic technique of circumventing Query to pull from a
- custom cache source instead of the database.
- * Rudimental caching with Beaker, using "regions" which allow
- global control over a fixed set of configurations.
- * Using custom MapperOption objects to configure options on
- a Query, including the ability to invoke the options
- deep within an object graph when lazy loads occur.
-
-To run, both SQLAlchemy and Beaker (1.4 or greater) must be
-installed or on the current PYTHONPATH. The demo will create a local
-directory for datafiles, insert initial data, and run. Running the
-demo a second time will utilize the cache files already present, and
-exactly one SQL statement against two tables will be emitted - the
-displayed result however will utilize dozens of lazyloads that all
-pull from cache.
-
-Three endpoint scripts, in order of complexity, are run as follows:
-
- python examples/beaker_caching/helloworld.py
-
- python examples/beaker_caching/relation_caching.py
-
- python examples/beaker_caching/advanced.py
-
- python examples/beaker_caching/local_session_caching.py
-
-
-Listing of files:
-
-__init__.py - Establish data / cache file paths, and configurations,
-bootstrap fixture data if necessary.
-
-meta.py - Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy. Introduces a query option called
-FromCache.
-
-model.py - The datamodel, which represents Person that has multiple
-Address objects, each with PostalCode, City, Country
-
-fixture_data.py - creates demo PostalCode, Address, Person objects
-in the database.
-
-helloworld.py - the basic idea.
-
-relation_caching.py - Illustrates how to add cache options on
-relation endpoints, so that lazyloads load from cache.
-
-advanced.py - Further examples of how to use FromCache. Combines
-techniques from the first two scripts.
-
-local_session_caching.py - Grok everything so far ? This example
-creates a new Beaker container that will persist data in a dictionary
-which is local to the current session. remove() the session
-and the cache is gone. \ No newline at end of file
diff --git a/examples/beaker_caching/__init__.py b/examples/beaker_caching/__init__.py
index 40652136e..ba06c6ee8 100644
--- a/examples/beaker_caching/__init__.py
+++ b/examples/beaker_caching/__init__.py
@@ -1,44 +1,77 @@
-"""__init__.py
+"""
+Illustrates how to embed Beaker cache functionality within
+the Query object, allowing full cache control as well as the
+ability to pull "lazy loaded" attributes from long term cache
+as well.
-Establish data / cache file paths, and configurations,
-bootstrap fixture data if necessary.
+In this demo, the following techniques are illustrated:
-"""
-import meta, model, fixture_data
-from sqlalchemy import create_engine
-import os
-
-root = "./beaker_data/"
-
-if not os.path.exists(root):
- raw_input("Will create datafiles in %r.\n"
- "To reset the cache + database, delete this directory.\n"
- "Press enter to continue.\n" % root
- )
- os.makedirs(root)
+* Using custom subclasses of Query
+* Basic technique of circumventing Query to pull from a
+ custom cache source instead of the database.
+* Rudimental caching with Beaker, using "regions" which allow
+ global control over a fixed set of configurations.
+* Using custom MapperOption objects to configure options on
+ a Query, including the ability to invoke the options
+ deep within an object graph when lazy loads occur.
+
+E.g.::
+
+ # query for Person objects, specifying cache
+ q = Session.query(Person).options(FromCache("default", "all_people"))
+
+ # specify that each Person's "addresses" collection comes from
+ # cache too
+ q = q.options(FromCache("default", "by_person", Person.addresses))
+
+ # query
+ print q.all()
-dbfile = os.path.join(root, "beaker_demo.db")
-engine = create_engine('sqlite:///%s' % dbfile, echo=True)
-meta.Session.configure(bind=engine)
-
-# configure the "default" cache region.
-meta.cache_manager.regions['default'] ={
-
- # using type 'file' to illustrate
- # serialized persistence. In reality,
- # use memcached. Other backends
- # are much, much slower.
- 'type':'file',
- 'data_dir':root,
- 'expire':3600,
-
- # set start_time to current time
- # to re-cache everything
- # upon application startup
- #'start_time':time.time()
- }
-
-installed = False
-if not os.path.exists(dbfile):
- fixture_data.install()
- installed = True \ No newline at end of file
+To run, both SQLAlchemy and Beaker (1.4 or greater) must be
+installed or on the current PYTHONPATH. The demo will create a local
+directory for datafiles, insert initial data, and run. Running the
+demo a second time will utilize the cache files already present, and
+exactly one SQL statement against two tables will be emitted - the
+displayed result however will utilize dozens of lazyloads that all
+pull from cache.
+
+Three endpoint scripts, in order of complexity, are run as follows::
+
+ python examples/beaker_caching/helloworld.py
+
+ python examples/beaker_caching/relation_caching.py
+
+ python examples/beaker_caching/advanced.py
+
+ python examples/beaker_caching/local_session_caching.py
+
+
+Listing of files:
+
+ environment.py - Establish data / cache file paths, and configurations,
+ bootstrap fixture data if necessary.
+
+ meta.py - Represent persistence structures which allow the usage of
+ Beaker caching with SQLAlchemy. Introduces a query option called
+ FromCache.
+
+ model.py - The datamodel, which represents Person that has multiple
+ Address objects, each with PostalCode, City, Country
+
+ fixture_data.py - creates demo PostalCode, Address, Person objects
+ in the database.
+
+ helloworld.py - the basic idea.
+
+ relation_caching.py - Illustrates how to add cache options on
+ relation endpoints, so that lazyloads load from cache.
+
+ advanced.py - Further examples of how to use FromCache. Combines
+ techniques from the first two scripts.
+
+    local_session_caching.py - Grok everything so far? This example
+ creates a new Beaker container that will persist data in a dictionary
+ which is local to the current session. remove() the session
+ and the cache is gone.
+
+"""
diff --git a/examples/beaker_caching/advanced.py b/examples/beaker_caching/advanced.py
index 6a8db082c..ebb17cfe5 100644
--- a/examples/beaker_caching/advanced.py
+++ b/examples/beaker_caching/advanced.py
@@ -6,7 +6,7 @@ and collection caching.
"""
-import __init__ # if running as a script
+import environment
from model import Person, Address, cache_address_bits
from meta import Session, FromCache
from sqlalchemy.orm import eagerload
diff --git a/examples/beaker_caching/environment.py b/examples/beaker_caching/environment.py
new file mode 100644
index 000000000..cdf1794fd
--- /dev/null
+++ b/examples/beaker_caching/environment.py
@@ -0,0 +1,44 @@
+"""environment.py
+
+Establish data / cache file paths, and configurations,
+bootstrap fixture data if necessary.
+
+"""
+import meta, model, fixture_data
+from sqlalchemy import create_engine
+import os
+
+root = "./beaker_data/"
+
+if not os.path.exists(root):
+ raw_input("Will create datafiles in %r.\n"
+ "To reset the cache + database, delete this directory.\n"
+ "Press enter to continue.\n" % root
+ )
+ os.makedirs(root)
+
+dbfile = os.path.join(root, "beaker_demo.db")
+engine = create_engine('sqlite:///%s' % dbfile, echo=True)
+meta.Session.configure(bind=engine)
+
+# configure the "default" cache region.
+meta.cache_manager.regions['default'] ={
+
+ # using type 'file' to illustrate
+ # serialized persistence. In reality,
+ # use memcached. Other backends
+ # are much, much slower.
+ 'type':'file',
+ 'data_dir':root,
+ 'expire':3600,
+
+ # set start_time to current time
+ # to re-cache everything
+ # upon application startup
+ #'start_time':time.time()
+ }
+
+installed = False
+if not os.path.exists(dbfile):
+ fixture_data.install()
+ installed = True \ No newline at end of file
diff --git a/examples/beaker_caching/helloworld.py b/examples/beaker_caching/helloworld.py
index 33454cf80..3d37777d7 100644
--- a/examples/beaker_caching/helloworld.py
+++ b/examples/beaker_caching/helloworld.py
@@ -4,7 +4,7 @@ Illustrate how to load some data, and cache the results.
"""
-import __init__ # if running as a script
+import environment
from model import Person
from meta import Session, FromCache
diff --git a/examples/beaker_caching/local_session_caching.py b/examples/beaker_caching/local_session_caching.py
index c422600d8..b5ce31483 100644
--- a/examples/beaker_caching/local_session_caching.py
+++ b/examples/beaker_caching/local_session_caching.py
@@ -53,7 +53,7 @@ class ScopedSessionNamespace(container.MemoryNamespaceManager):
if __name__ == '__main__':
- import __init__ # if running as a script
+ import environment
import meta
# create a Beaker container type called "ext:local_session".
diff --git a/examples/beaker_caching/relation_caching.py b/examples/beaker_caching/relation_caching.py
index b4508ba1e..2aeb87bc0 100644
--- a/examples/beaker_caching/relation_caching.py
+++ b/examples/beaker_caching/relation_caching.py
@@ -5,7 +5,7 @@ related PostalCode, City, Country objects should be pulled from long
term cache.
"""
-import __init__ # if running as a script
+import environment
from model import Person, Address, cache_address_bits
from meta import Session
from sqlalchemy.orm import eagerload
@@ -22,4 +22,4 @@ print "\n\nIf this was the first run of relation_caching.py, SQL was likely emit
"To clear the cache, delete the directory %r. \n"\
"This will cause a re-load of cities, postal codes and countries on "\
"the next run.\n"\
- % os.path.join(__init__.root, 'container_file')
+ % os.path.join(environment.root, 'container_file')
diff --git a/examples/custom_attributes/__init__.py b/examples/custom_attributes/__init__.py
index e69de29bb..6f7613e5c 100644
--- a/examples/custom_attributes/__init__.py
+++ b/examples/custom_attributes/__init__.py
@@ -0,0 +1,8 @@
+"""
+Two examples illustrating modifications to SQLAlchemy's attribute management system.
+
+``listen_for_events.py`` illustrates the usage of :class:`~sqlalchemy.orm.interfaces.AttributeExtension` to intercept attribute events. It additionally illustrates a way to automatically attach these listeners to all class attributes using a :class:`~sqlalchemy.orm.interfaces.InstrumentationManager`.
+
+``custom_management.py`` illustrates much deeper usage of :class:`~sqlalchemy.orm.interfaces.InstrumentationManager` as well as collection adaptation, to completely change the underlying method used to store state on an object. This example was developed to illustrate techniques which would be used by other third party object instrumentation systems to interact with SQLAlchemy's event system and is only intended for very intricate framework integrations.
+
+""" \ No newline at end of file
diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py
index b8ea70dfd..3c80183e2 100644
--- a/examples/custom_attributes/custom_management.py
+++ b/examples/custom_attributes/custom_management.py
@@ -82,6 +82,7 @@ class MyClass(object):
class MyCollectionAdapter(object):
"""An wholly alternative instrumentation implementation."""
+
def __init__(self, key, state, collection):
self.key = key
self.state = state
diff --git a/examples/derived_attributes/__init__.py b/examples/derived_attributes/__init__.py
index e69de29bb..98c946fca 100644
--- a/examples/derived_attributes/__init__.py
+++ b/examples/derived_attributes/__init__.py
@@ -0,0 +1,10 @@
+"""Illustrates a clever technique using Python descriptors to create custom attributes representing SQL expressions when used at the class level, and Python expressions when used at the instance level. In some cases this technique replaces the need to configure the attribute in the mapping, instead relying upon ordinary Python behavior to create custom expression components.
+
+E.g.::
+
+ class BaseInterval(object):
+ @hybrid
+ def contains(self,point):
+ return (self.start <= point) & (point < self.end)
+
+"""
diff --git a/examples/derived_attributes/attributes.py b/examples/derived_attributes/attributes.py
index 4a1618985..15af7b2ad 100644
--- a/examples/derived_attributes/attributes.py
+++ b/examples/derived_attributes/attributes.py
@@ -1,6 +1,3 @@
-"""A couple of helper descriptors to allow to use the same code as query
-criterion creators and as instance code. As this doesn't do advanced
-magic recompiling, you can only use basic expression-like code."""
import new
diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py
index e69de29bb..69ac40952 100644
--- a/examples/dynamic_dict/__init__.py
+++ b/examples/dynamic_dict/__init__.py
@@ -0,0 +1,5 @@
+"""Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so
+that dictionary operations (assuming simple string keys) can operate upon a large
+collection without loading the full collection at once.
+
+""" \ No newline at end of file
diff --git a/examples/dynamic_dict/dynamic_dict.py b/examples/dynamic_dict/dynamic_dict.py
index f58c19c00..d94c2426a 100644
--- a/examples/dynamic_dict/dynamic_dict.py
+++ b/examples/dynamic_dict/dynamic_dict.py
@@ -1,12 +1,3 @@
-"""Illustrates how to place a dictionary-like facade on top of a dynamic_loader, so
-that dictionary operations (assuming simple string keys) can operate upon a large
-collection without loading the full collection at once.
-
-Similar approaches could be taken towards sets and dictionaries with non-string keys
-although the hash policy of the members would need to be distilled into a filter() criterion.
-
-"""
-
class ProxyDict(object):
def __init__(self, parent, collection_name, childclass, keyname):
self.parent = parent
diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py
index e69de29bb..70554f5c9 100644
--- a/examples/elementtree/__init__.py
+++ b/examples/elementtree/__init__.py
@@ -0,0 +1,30 @@
+"""
+Illustrates three strategies for persisting and querying XML documents as represented by
+ElementTree in a relational database. The techniques do not apply any mappings to the ElementTree objects directly, so are compatible with the native cElementTree as well as lxml, and can be adapted to suit any kind of DOM representation system. Querying along xpath-like strings is illustrated as well.
+
+In order of complexity:
+
+* ``pickle.py`` - Quick and dirty, serialize the whole DOM into a BLOB column. While the example
+ is very brief, it has very limited functionality.
+* ``adjacency_list.py`` - Each DOM node is stored in an individual table row, with attributes
+ represented in a separate table. The nodes are associated in a hierarchy using an adjacency list
+ structure. A query function is introduced which can search for nodes along any path with a given
+ structure of attributes, basically a (very narrow) subset of xpath.
+* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but adds a
+ :class:`~sqlalchemy.orm.interfaces.MapperExtension` which optimizes how the hierarchical structure
+ is loaded, such that the full set of DOM nodes are loaded within a single table result set, and
+ are organized hierarchically as they are received during a load.
+
+E.g.::
+
+ # parse an XML file and persist in the database
+ doc = ElementTree.parse("test.xml")
+ session.add(Document(file, doc))
+ session.commit()
+
+ # locate documents with a certain path/attribute structure
+ for document in find_document('/somefile/header/field2[@attr=foo]'):
+ # dump the XML
+ print document
+
+""" \ No newline at end of file
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index 58156dcb6..d2151d6ef 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -13,16 +13,6 @@ from sqlalchemy.orm import mapper, relation, create_session, lazyload
import sys, os, StringIO, re
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
-
from xml.etree import ElementTree
meta = MetaData()
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index c03acee1d..dcc3c00ba 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -12,16 +12,6 @@ from sqlalchemy.orm import mapper, relation, create_session, lazyload
import sys, os, StringIO, re
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
-
from xml.etree import ElementTree
meta = MetaData()
diff --git a/examples/elementtree/pickle.py b/examples/elementtree/pickle.py
index 2176512cf..4eaaa2f8d 100644
--- a/examples/elementtree/pickle.py
+++ b/examples/elementtree/pickle.py
@@ -12,15 +12,6 @@ from sqlalchemy.orm import mapper, create_session
import sys, os
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
from xml.etree import ElementTree
engine = create_engine('sqlite://')
diff --git a/examples/graphs/__init__.py b/examples/graphs/__init__.py
index e69de29bb..28a064bad 100644
--- a/examples/graphs/__init__.py
+++ b/examples/graphs/__init__.py
@@ -0,0 +1,9 @@
+"""
+An example of persistence for a directed graph structure. The graph is stored as a collection of edges, each referencing both a "lower" and an "upper" node in a table of nodes. Basic persistence and querying for lower- and upper- neighbors are illustrated::
+
+ n2 = Node(2)
+ n5 = Node(5)
+ n2.add_neighbor(n5)
+ print n2.higher_neighbors()
+
+""" \ No newline at end of file
diff --git a/examples/graphs/graph1.py b/examples/graphs/directed_graph.py
index 6122e65f1..6122e65f1 100644
--- a/examples/graphs/graph1.py
+++ b/examples/graphs/directed_graph.py
diff --git a/examples/inheritance/__init__.py b/examples/inheritance/__init__.py
new file mode 100644
index 000000000..a3b85460c
--- /dev/null
+++ b/examples/inheritance/__init__.py
@@ -0,0 +1,4 @@
+"""
+Working examples of single-table, joined-table, and concrete-table inheritance as described in :ref:`datamapping_inheritance`.
+
+""" \ No newline at end of file
diff --git a/examples/polymorph/concrete.py b/examples/inheritance/concrete.py
index c50513b55..c50513b55 100644
--- a/examples/polymorph/concrete.py
+++ b/examples/inheritance/concrete.py
diff --git a/examples/polymorph/polymorph.py b/examples/inheritance/polymorph.py
index 60ef98f57..4cf13985b 100644
--- a/examples/polymorph/polymorph.py
+++ b/examples/inheritance/polymorph.py
@@ -56,8 +56,6 @@ class Company(object):
return "Company %s" % self.name
-person_join = people.outerjoin(engineers).outerjoin(managers)
-
person_mapper = mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer')
mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager')
@@ -75,7 +73,6 @@ c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2
c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2'))
session.add(c)
-print session.new
session.flush()
session.expunge_all()
@@ -96,7 +93,7 @@ session.expunge_all()
c = session.query(Company).get(1)
for e in c.employees:
- print e, e._sa_instance_state.key
+ print e
session.delete(c)
session.flush()
diff --git a/examples/polymorph/single.py b/examples/inheritance/single.py
index 5a4f5c841..00feaefe8 100644
--- a/examples/polymorph/single.py
+++ b/examples/inheritance/single.py
@@ -45,8 +45,6 @@ person_mapper = mapper(Person, employees_table, polymorphic_on=employees_table.c
manager_mapper = mapper(Manager, inherits=person_mapper, polymorphic_identity='manager')
engineer_mapper = mapper(Engineer, inherits=person_mapper, polymorphic_identity='engineer')
-
-
mapper(Company, companies, properties={
'employees': relation(Person, lazy=True, backref='company')
})
@@ -65,7 +63,7 @@ session.expunge_all()
c = session.query(Company).get(1)
for e in c.employees:
- print e, e._sa_instance_state.key, e.company
+ print e, e.company
print "\n"
@@ -80,7 +78,7 @@ session.expunge_all()
c = session.query(Company).get(1)
for e in c.employees:
- print e, e._sa_instance_state.key
+ print e
session.delete(c)
session.flush()
diff --git a/examples/large_collection/__init__.py b/examples/large_collection/__init__.py
index e69de29bb..a6c6179b8 100644
--- a/examples/large_collection/__init__.py
+++ b/examples/large_collection/__init__.py
@@ -0,0 +1,8 @@
+"""Large collection example.
+
+Illustrates the options to use with :func:`~sqlalchemy.orm.relation()` when the list of related objects is very large, including:
+
+* "dynamic" relations which query slices of data as accessed
+* how to use ON DELETE CASCADE in conjunction with ``passive_deletes=True`` to greatly improve the performance of related collection deletion.
+
+"""
diff --git a/examples/large_collection/large_collection.py b/examples/large_collection/large_collection.py
index 4d98eed2b..c6adf1310 100644
--- a/examples/large_collection/large_collection.py
+++ b/examples/large_collection/large_collection.py
@@ -1,9 +1,3 @@
-"""Large collection example.
-
-Illustrates the options to use on relation() when the list of related objects
-is very large.
-
-"""
from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
create_engine)
diff --git a/examples/nested_sets/__init__.py b/examples/nested_sets/__init__.py
index e69de29bb..b730f7173 100644
--- a/examples/nested_sets/__init__.py
+++ b/examples/nested_sets/__init__.py
@@ -0,0 +1,4 @@
+"""
+Illustrates a rudimentary way to implement the "nested sets" pattern for hierarchical data using the SQLAlchemy ORM.
+
+""" \ No newline at end of file
diff --git a/examples/poly_assoc/__init__.py b/examples/poly_assoc/__init__.py
index e69de29bb..19836a742 100644
--- a/examples/poly_assoc/__init__.py
+++ b/examples/poly_assoc/__init__.py
@@ -0,0 +1,10 @@
+"""
+Illustrates polymorphic associations, a method of associating a particular child object with many different types of parent object.
+
+This example is based off the original blog post at `<http://techspot.zzzeek.org/?p=13>`_ and illustrates three techniques:
+
+* ``poly_assoc.py`` - imitates the non-foreign-key schema used by Ruby on Rails' Active Record.
+* ``poly_assoc_fk.py`` - Adds a polymorphic association table so that referential integrity can be maintained.
+* ``poly_assoc_generic.py`` - further automates the approach of ``poly_assoc_fk.py`` to also generate the association table definitions automatically.
+
+""" \ No newline at end of file
diff --git a/examples/polymorph/__init__.py b/examples/polymorph/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/examples/polymorph/__init__.py
+++ /dev/null
diff --git a/examples/postgis/__init__.py b/examples/postgis/__init__.py
index e69de29bb..1a351f7ae 100644
--- a/examples/postgis/__init__.py
+++ b/examples/postgis/__init__.py
@@ -0,0 +1,34 @@
+"""A naive example illustrating techniques to help
+embed PostGIS functionality.
+
+This example was originally developed in the hopes that it would be extrapolated into a comprehensive PostGIS integration layer. We are pleased to announce that this has come to fruition as `GeoAlchemy <http://www.geoalchemy.org/>`_.
+
+The example illustrates:
+
+* a DDL extension which allows CREATE/DROP to work in
+ conjunction with AddGeometryColumn/DropGeometryColumn
+
+* a Geometry type, as well as a few subtypes, which
+ convert result row values to a GIS-aware object,
+ and also integrates with the DDL extension.
+
+* a GIS-aware object which stores a raw geometry value
+ and provides a factory for functions such as AsText().
+
+* an ORM comparator which can override standard column
+ methods on mapped objects to produce GIS operators.
+
+* an attribute event listener that intercepts strings
+ and converts to GeomFromText().
+
+* a standalone operator example.
+
+The implementation is limited to only public, well known
+and simple to use extension points.
+
+E.g.::
+
+ print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
+
+"""
+
diff --git a/examples/postgis/postgis.py b/examples/postgis/postgis.py
index d84648a95..d3f728293 100644
--- a/examples/postgis/postgis.py
+++ b/examples/postgis/postgis.py
@@ -1,39 +1,3 @@
-"""A naive example illustrating techniques to help
-embed PostGIS functionality.
-
-The techniques here could be used by a capable developer
-as the basis for a comprehensive PostGIS SQLAlchemy extension.
-Please note this is an entirely incomplete proof of concept
-only, and PostGIS support is *not* a supported feature
-of SQLAlchemy.
-
-Includes:
-
- * a DDL extension which allows CREATE/DROP to work in
- conjunction with AddGeometryColumn/DropGeometryColumn
-
- * a Geometry type, as well as a few subtypes, which
- convert result row values to a GIS-aware object,
- and also integrates with the DDL extension.
-
- * a GIS-aware object which stores a raw geometry value
- and provides a factory for functions such as AsText().
-
- * an ORM comparator which can override standard column
- methods on mapped objects to produce GIS operators.
-
- * an attribute event listener that intercepts strings
- and converts to GeomFromText().
-
- * a standalone operator example.
-
-The implementation is limited to only public, well known
-and simple to use extension points. Future SQLAlchemy
-expansion points may allow more seamless integration of
-some features.
-
-"""
-
from sqlalchemy.orm.interfaces import AttributeExtension
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.types import TypeEngine
diff --git a/examples/sharding/__init__.py b/examples/sharding/__init__.py
index e69de29bb..19f6db245 100644
--- a/examples/sharding/__init__.py
+++ b/examples/sharding/__init__.py
@@ -0,0 +1,20 @@
+"""a basic example of using the SQLAlchemy Sharding API.
+Sharding refers to horizontally scaling data across multiple
+databases.
+
+The basic components of a "sharded" mapping are:
+
+* multiple databases, each assigned a 'shard id'
+* a function which can return a single shard id, given an instance
+ to be saved; this is called "shard_chooser"
+* a function which can return a list of shard ids which apply to a particular
+ instance identifier; this is called "id_chooser". If it returns all shard ids,
+ all shards will be searched.
+* a function which can return a list of shard ids to try, given a particular
+ Query ("query_chooser"). If it returns all shard ids, all shards will be
+ queried and the results joined together.
+
+In this example, four sqlite databases will store information about
+weather data on a database-per-continent basis. We provide example shard_chooser, id_chooser and query_chooser functions. The query_chooser illustrates inspection of the SQL expression element in order to attempt to determine a single shard being requested.
+
+"""
diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py
index 2ac0c88ff..89d4243fc 100644
--- a/examples/sharding/attribute_shard.py
+++ b/examples/sharding/attribute_shard.py
@@ -1,21 +1,3 @@
-"""a basic example of using the SQLAlchemy Sharding API.
-Sharding refers to horizontally scaling data across multiple
-databases.
-
-In this example, four sqlite databases will store information about
-weather data on a database-per-continent basis.
-
-To set up a sharding system, you need:
- 1. multiple databases, each assined a 'shard id'
- 2. a function which can return a single shard id, given an instance
- to be saved; this is called "shard_chooser"
- 3. a function which can return a list of shard ids which apply to a particular
- instance identifier; this is called "id_chooser". If it returns all shard ids,
- all shards will be searched.
- 4. a function which can return a list of shard ids to try, given a particular
- Query ("query_chooser"). If it returns all shard ids, all shards will be
- queried and the results joined together.
-"""
# step 1. imports
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
diff --git a/examples/versioning/__init__.py b/examples/versioning/__init__.py
new file mode 100644
index 000000000..f0e1cf543
--- /dev/null
+++ b/examples/versioning/__init__.py
@@ -0,0 +1,67 @@
+"""
+Illustrates an extension which creates version tables for entities and stores records for each change. The same idea as Elixir's versioned extension, but more efficient (uses attribute API to get history) and handles class inheritance. The given extensions generate an anonymous "history" class which represents historical versions of the target object.
+
+Usage is illustrated via a unit test module ``test_versioning.py``, which can be run via nose::
+
+ nosetests -w examples/versioning/
+
+A fragment of example usage, using declarative::
+
+ from history_meta import VersionedMeta, VersionedListener
+
+ Base = declarative_base(metaclass=VersionedMeta, bind=engine)
+ Session = sessionmaker(extension=VersionedListener())
+
+ class SomeClass(Base):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ def __eq__(self, other):
+ assert type(other) is SomeClass and other.id == self.id
+
+ sess = Session()
+ sc = SomeClass(name='sc1')
+ sess.add(sc)
+ sess.commit()
+
+ sc.name = 'sc1modified'
+ sess.commit()
+
+ assert sc.version == 2
+
+ SomeClassHistory = SomeClass.__history_mapper__.class_
+
+ assert sess.query(SomeClassHistory).\\
+ filter(SomeClassHistory.version == 1).\\
+ all() \\
+ == [SomeClassHistory(version=1, name='sc1')]
+
+To apply ``VersionedMeta`` to a subset of classes (probably more typical), the metaclass can be applied on a per-class basis::
+
+ from history_meta import VersionedMeta, VersionedListener
+
+ Base = declarative_base(bind=engine)
+
+ class SomeClass(Base):
+ __tablename__ = 'sometable'
+
+ # ...
+
+ class SomeVersionedClass(Base):
+ __metaclass__ = VersionedMeta
+ __tablename__ = 'someothertable'
+
+ # ...
+
+The ``VersionedMeta`` is a declarative metaclass - to use the extension with plain mappers, the ``_history_mapper`` function can be applied::
+
+ from history_meta import _history_mapper
+
+ m = mapper(SomeClass, sometable)
+ _history_mapper(m)
+
+ SomeHistoryClass = SomeClass.__history_mapper__.class_
+
+""" \ No newline at end of file
diff --git a/examples/versioning/history_meta.py b/examples/versioning/history_meta.py
new file mode 100644
index 000000000..8a916791c
--- /dev/null
+++ b/examples/versioning/history_meta.py
@@ -0,0 +1,164 @@
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
+from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
+from sqlalchemy import Table, Column, ForeignKeyConstraint, Integer
+from sqlalchemy.orm.interfaces import SessionExtension
+
+def col_references_table(col, table):
+ for fk in col.foreign_keys:
+ if fk.references(table):
+ return True
+ return False
+
+def _history_mapper(local_mapper):
+ cls = local_mapper.class_
+
+ # SLIGHT SQLA HACK #1 - set the "active_history" flag
+    # on column-mapped attributes so that the old version
+ # of the info is always loaded (currently sets it on all attributes)
+ for prop in local_mapper.iterate_properties:
+ getattr(local_mapper.class_, prop.key).impl.active_history = True
+
+ super_mapper = local_mapper.inherits
+ super_history_mapper = getattr(cls, '__history_mapper__', None)
+
+ polymorphic_on = None
+ super_fks = []
+ if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
+ cols = []
+ for column in local_mapper.local_table.c:
+ if column.name == 'version':
+ continue
+
+ col = column.copy()
+
+ if super_mapper and col_references_table(column, super_mapper.local_table):
+ super_fks.append((col.key, list(super_history_mapper.base_mapper.local_table.primary_key)[0]))
+
+ cols.append(col)
+
+ if column is local_mapper.polymorphic_on:
+ polymorphic_on = col
+
+ if super_mapper:
+ super_fks.append(('version', super_history_mapper.base_mapper.local_table.c.version))
+ cols.append(Column('version', Integer, primary_key=True))
+ else:
+ cols.append(Column('version', Integer, primary_key=True))
+
+ if super_fks:
+ cols.append(ForeignKeyConstraint(*zip(*super_fks)))
+
+ table = Table(local_mapper.local_table.name + '_history', local_mapper.local_table.metadata,
+ *cols
+ )
+ else:
+ # single table inheritance. take any additional columns that may have
+ # been added and add them to the history table.
+ for column in local_mapper.local_table.c:
+ if column.key not in super_history_mapper.local_table.c:
+ col = column.copy()
+ super_history_mapper.local_table.append_column(col)
+ table = None
+
+ if super_history_mapper:
+ bases = (super_history_mapper.class_,)
+ else:
+ bases = local_mapper.base_mapper.class_.__bases__
+ versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
+
+ m = mapper(
+ versioned_cls,
+ table,
+ inherits=super_history_mapper,
+ polymorphic_on=polymorphic_on,
+ polymorphic_identity=local_mapper.polymorphic_identity
+ )
+ cls.__history_mapper__ = m
+
+ if not super_history_mapper:
+ cls.version = Column('version', Integer, default=1, nullable=False)
+
+
+class VersionedMeta(DeclarativeMeta):
+ def __init__(cls, classname, bases, dict_):
+ DeclarativeMeta.__init__(cls, classname, bases, dict_)
+
+ try:
+ mapper = class_mapper(cls)
+ _history_mapper(mapper)
+ except UnmappedClassError:
+ pass
+
+
+def versioned_objects(iter):
+ for obj in iter:
+ if hasattr(obj, '__history_mapper__'):
+ yield obj
+
+def create_version(obj, session, deleted = False):
+ obj_mapper = object_mapper(obj)
+ history_mapper = obj.__history_mapper__
+ history_cls = history_mapper.class_
+
+ obj_state = attributes.instance_state(obj)
+
+ attr = {}
+
+ obj_changed = False
+
+ for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
+ if hm.single:
+ continue
+
+ for hist_col in hm.local_table.c:
+ if hist_col.key == 'version':
+ continue
+
+ obj_col = om.local_table.c[hist_col.key]
+
+ # SLIGHT SQLA HACK #3 - get the value of the
+ # attribute based on the MapperProperty related to the
+ # mapped column. this will allow usage of MapperProperties
+ # that have a different keyname than that of the mapped column.
+ try:
+ prop = obj_mapper._get_col_to_prop(obj_col)
+ except UnmappedColumnError:
+ # in the case of single table inheritance, there may be
+ # columns on the mapped table intended for the subclass only.
+ # the "unmapped" status of the subclass column on the
+ # base class is a feature of the declarative module as of sqla 0.5.2.
+ continue
+
+ # expired object attributes and also deferred cols might not be in the
+ # dict. force it to load no matter what by using getattr().
+ if prop.key not in obj_state.dict:
+ getattr(obj, prop.key)
+
+ a, u, d = attributes.get_history(obj, prop.key)
+
+ if d:
+ attr[hist_col.key] = d[0]
+ obj_changed = True
+ elif u:
+ attr[hist_col.key] = u[0]
+ else:
+ raise Exception("TODO: what makes us arrive here ?")
+
+ if not obj_changed and not deleted:
+ return
+
+ attr['version'] = obj.version
+ hist = history_cls()
+ for key, value in attr.iteritems():
+ setattr(hist, key, value)
+ session.add(hist)
+ obj.version += 1
+
+class VersionedListener(SessionExtension):
+ def before_flush(self, session, flush_context, instances):
+ for obj in versioned_objects(session.dirty):
+ create_version(obj, session)
+ for obj in versioned_objects(session.deleted):
+ create_version(obj, session, deleted = True)
+
diff --git a/examples/versioning/test_versioning.py b/examples/versioning/test_versioning.py
new file mode 100644
index 000000000..1a72a0659
--- /dev/null
+++ b/examples/versioning/test_versioning.py
@@ -0,0 +1,248 @@
+from sqlalchemy.ext.declarative import declarative_base
+from history_meta import VersionedMeta, VersionedListener
+from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import clear_mappers, compile_mappers, sessionmaker, deferred
+from sqlalchemy.test.testing import TestBase, eq_
+from sqlalchemy.test.entities import ComparableEntity
+
+def setup():
+ global engine
+ engine = create_engine('sqlite://', echo=True)
+
+class TestVersioning(TestBase):
+ def setup(self):
+ global Base, Session
+ Base = declarative_base(metaclass=VersionedMeta, bind=engine)
+ Session = sessionmaker(extension=VersionedListener())
+
+ def teardown(self):
+ clear_mappers()
+ Base.metadata.drop_all()
+
+ def create_tables(self):
+ Base.metadata.create_all()
+
+ def test_plain(self):
+ class SomeClass(Base, ComparableEntity):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ self.create_tables()
+ sess = Session()
+ sc = SomeClass(name='sc1')
+ sess.add(sc)
+ sess.commit()
+
+ sc.name = 'sc1modified'
+ sess.commit()
+
+ assert sc.version == 2
+
+ SomeClassHistory = SomeClass.__history_mapper__.class_
+
+ eq_(
+ sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ [SomeClassHistory(version=1, name='sc1')]
+ )
+
+ sc.name = 'sc1modified2'
+
+ eq_(
+ sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ [
+ SomeClassHistory(version=1, name='sc1'),
+ SomeClassHistory(version=2, name='sc1modified')
+ ]
+ )
+
+ assert sc.version == 3
+
+ sess.commit()
+
+ sc.name = 'temp'
+ sc.name = 'sc1modified2'
+
+ sess.commit()
+
+ eq_(
+ sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ [
+ SomeClassHistory(version=1, name='sc1'),
+ SomeClassHistory(version=2, name='sc1modified')
+ ]
+ )
+
+ sess.delete(sc)
+ sess.commit()
+
+ eq_(
+ sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ [
+ SomeClassHistory(version=1, name='sc1'),
+ SomeClassHistory(version=2, name='sc1modified'),
+ SomeClassHistory(version=3, name='sc1modified2')
+ ]
+ )
+
+
+
+
+ def test_deferred(self):
+ """test versioning of unloaded, deferred columns."""
+
+ class SomeClass(Base, ComparableEntity):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ data = deferred(Column(String(25)))
+
+ self.create_tables()
+ sess = Session()
+ sc = SomeClass(name='sc1', data='somedata')
+ sess.add(sc)
+ sess.commit()
+ sess.close()
+
+ sc = sess.query(SomeClass).first()
+ assert 'data' not in sc.__dict__
+
+ sc.name = 'sc1modified'
+ sess.commit()
+
+ assert sc.version == 2
+
+ SomeClassHistory = SomeClass.__history_mapper__.class_
+
+ eq_(
+ sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ [SomeClassHistory(version=1, name='sc1', data='somedata')]
+ )
+
+
+ def test_joined_inheritance(self):
+ class BaseClass(Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {'polymorphic_on':type, 'polymorphic_identity':'base'}
+
+ class SubClassSeparatePk(BaseClass):
+ __tablename__ = 'subtable1'
+
+ id = Column(Integer, primary_key=True)
+ base_id = Column(Integer, ForeignKey('basetable.id'))
+ subdata1 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity':'sep'}
+
+ class SubClassSamePk(BaseClass):
+ __tablename__ = 'subtable2'
+
+ id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+ subdata2 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity':'same'}
+
+ self.create_tables()
+ sess = Session()
+
+ sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
+ base1 = BaseClass(name='base1')
+ same1 = SubClassSamePk(name='same1', subdata2='same1subdata')
+ sess.add_all([sep1, base1, same1])
+ sess.commit()
+
+ base1.name = 'base1mod'
+ same1.subdata2 = 'same1subdatamod'
+ sep1.name ='sep1mod'
+ sess.commit()
+
+ BaseClassHistory = BaseClass.__history_mapper__.class_
+ SubClassSeparatePkHistory = SubClassSeparatePk.__history_mapper__.class_
+ SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_
+ eq_(
+ sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
+ [
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)
+ ]
+ )
+
+ same1.subdata2 = 'same1subdatamod2'
+
+ eq_(
+ sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ [
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
+ ]
+ )
+
+ base1.name = 'base1mod2'
+ eq_(
+ sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ [
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
+ ]
+ )
+
+ def test_single_inheritance(self):
+ class BaseClass(Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(50))
+ __mapper_args__ = {'polymorphic_on':type, 'polymorphic_identity':'base'}
+
+ class SubClass(BaseClass):
+
+ subname = Column(String(50))
+ __mapper_args__ = {'polymorphic_identity':'sub'}
+
+ self.create_tables()
+ sess = Session()
+
+ b1 = BaseClass(name='b1')
+ sc = SubClass(name='s1', subname='sc1')
+
+ sess.add_all([b1, sc])
+
+ sess.commit()
+
+ b1.name='b1modified'
+
+ BaseClassHistory = BaseClass.__history_mapper__.class_
+ SubClassHistory = SubClass.__history_mapper__.class_
+
+ eq_(
+ sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ [BaseClassHistory(id=1, name=u'b1', type=u'base', version=1)]
+ )
+
+ sc.name ='s1modified'
+ b1.name='b1modified2'
+
+ eq_(
+ sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ [
+ BaseClassHistory(id=1, name=u'b1', type=u'base', version=1),
+ BaseClassHistory(id=1, name=u'b1modified', type=u'base', version=2),
+ SubClassHistory(id=2, name=u's1', type=u'sub', version=1)
+ ]
+ )
+
+
diff --git a/examples/vertical/__init__.py b/examples/vertical/__init__.py
index e69de29bb..61ba2228b 100644
--- a/examples/vertical/__init__.py
+++ b/examples/vertical/__init__.py
@@ -0,0 +1,27 @@
+"""
+Illustrates "vertical table" mappings.
+
+A "vertical table" refers to a technique where individual attributes of an object are stored as distinct rows in a table.
+The "vertical table" technique is used to persist objects which can have a varied set of attributes, at the expense of simple query control and brevity. It is commonly found in content/document management systems in order to represent user-created structures flexibly.
+
+Two variants on the approach are given. In the second, each row references a "datatype" which contains information about the type of information stored in the attribute, such as integer, string, or date.
+
+
+Example::
+
+ shrew = Animal(u'shrew')
+ shrew[u'cuteness'] = 5
+ shrew[u'weasel-like'] = False
+ shrew[u'poisonous'] = True
+
+ session.add(shrew)
+ session.flush()
+
+ q = (session.query(Animal).
+ filter(Animal.facts.any(
+ and_(AnimalFact.key == u'weasel-like',
+ AnimalFact.value == True))))
+ print 'weasel-like animals', q.all()
+
+
+""" \ No newline at end of file
diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py
index 6180fa303..a66576945 100644
--- a/examples/vertical/dictlike-polymorphic.py
+++ b/examples/vertical/dictlike-polymorphic.py
@@ -25,8 +25,6 @@ we'll use a Python @property to build a smart '.value' attribute that wraps up
reading and writing those various '_value' columns and keeps the '.type' up to
date.
-Note: Something much like 'comparable_property' is slated for inclusion in a
- future version of SQLAlchemy.
"""
from sqlalchemy.orm.interfaces import PropComparator
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py
index 1e1635ed7..683eda029 100644
--- a/examples/vertical/dictlike.py
+++ b/examples/vertical/dictlike.py
@@ -28,6 +28,7 @@ entity, and another related table holding key/value pairs::
Because the key/value pairs in a vertical scheme are not fixed in advance,
accessing them like a Python dict can be very convenient. The example below
can be used with many common vertical schemas as-is or with minor adaptations.
+
"""
class VerticalProperty(object):
diff --git a/examples/vertical/vertical.py b/examples/vertical/vertical.py
deleted file mode 100644
index 4a8bf7787..000000000
--- a/examples/vertical/vertical.py
+++ /dev/null
@@ -1,201 +0,0 @@
-"""this example illustrates a "vertical table". an object is stored with individual attributes
-represented in distinct database rows. This allows objects to be created with dynamically changing
-fields that are all persisted in a normalized fashion."""
-
-from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String,
- ForeignKey, PickleType, DateTime, and_)
-from sqlalchemy.orm import mapper, relation, sessionmaker, scoped_session
-from sqlalchemy.orm.collections import mapped_collection
-import datetime
-
-engine = create_engine('sqlite://', echo=False)
-meta = MetaData(engine)
-
-Session = scoped_session(sessionmaker())
-
-# represent Entity objects
-entities = Table('entities', meta,
- Column('entity_id', Integer, primary_key=True),
- Column('title', String(100), nullable=False),
- )
-
-# represent named, typed fields
-entity_fields = Table('entity_fields', meta,
- Column('field_id', Integer, primary_key=True),
- Column('name', String(40), nullable=False),
- Column('datatype', String(30), nullable=False))
-
-# associate a field row with an entity row, including a typed value
-entity_values = Table('entity_values', meta,
- Column('value_id', Integer, primary_key=True),
- Column('field_id', Integer, ForeignKey('entity_fields.field_id'), nullable=False),
- Column('entity_id', Integer, ForeignKey('entities.entity_id'), nullable=False),
- Column('int_value', Integer),
- Column('string_value', String(500)),
- Column('binary_value', PickleType),
- Column('datetime_value', DateTime))
-
-meta.create_all()
-
-class Entity(object):
- """a persistable dynamic object.
-
- Marshalls attributes into a dictionary which is
- mapped to the database.
-
- """
- def __init__(self, **kwargs):
- for k in kwargs:
- setattr(self, k, kwargs[k])
-
- def __getattr__(self, key):
- """Proxy requests for attributes to the underlying _entities dictionary."""
-
- if key[0] == '_':
- return super(Entity, self).__getattr__(key)
- try:
- return self._entities[key].value
- except KeyError:
- raise AttributeError(key)
-
- def __setattr__(self, key, value):
- """Proxy requests for attribute set operations to the underlying _entities dictionary."""
-
- if key[0] == "_" or hasattr(Entity, key):
- object.__setattr__(self, key, value)
- return
-
- try:
- ev = self._entities[key]
- ev.value = value
- except KeyError:
- ev = _EntityValue(key, value)
- self._entities[key] = ev
-
-class _EntityField(object):
- """Represents a field of a particular name and datatype."""
-
- def __init__(self, name, datatype):
- self.name = name
- self.datatype = datatype
-
-class _EntityValue(object):
- """Represents an individual value."""
-
- def __init__(self, key, value):
- datatype = self._figure_datatype(value)
- field = \
- Session.query(_EntityField).filter(
- and_(_EntityField.name==key, _EntityField.datatype==datatype)
- ).first()
-
- if not field:
- field = _EntityField(key, datatype)
- Session.add(field)
-
- self.field = field
- setattr(self, self.field.datatype + "_value", value)
-
- def _figure_datatype(self, value):
- typemap = {
- int:'int',
- str:'string',
- datetime.datetime:'datetime',
- }
- for k in typemap:
- if isinstance(value, k):
- return typemap[k]
- else:
- return 'binary'
-
- def _get_value(self):
- return getattr(self, self.field.datatype + "_value")
-
- def _set_value(self, value):
- setattr(self, self.field.datatype + "_value", value)
- value = property(_get_value, _set_value)
-
- def name(self):
- return self.field.name
- name = property(name)
-
-
-# the mappers are a straightforward eager chain of
-# Entity--(1->many)->EntityValue-(many->1)->EntityField
-# notice that we are identifying each mapper to its connecting
-# relation by just the class itself.
-mapper(_EntityField, entity_fields)
-mapper(
- _EntityValue, entity_values,
- properties = {
- 'field' : relation(_EntityField, lazy=False, cascade='all')
- }
-)
-
-mapper(Entity, entities, properties = {
- '_entities' : relation(
- _EntityValue,
- lazy=False,
- cascade='all',
- collection_class=mapped_collection(lambda entityvalue: entityvalue.field.name)
- )
-})
-
-session = Session()
-entity1 = Entity(
- title = 'this is the first entity',
- name = 'this is the name',
- price = 43,
- data = ('hello', 'there')
-)
-
-entity2 = Entity(
- title = 'this is the second entity',
- name = 'this is another name',
- price = 50,
- data = ('hoo', 'ha')
-)
-
-session.add_all([entity1, entity2])
-session.commit()
-
-for entity in session.query(Entity):
- print "Entity id %d:" % entity.entity_id, entity.title, entity.name, entity.price, entity.data
-
-# perform some changes, add a new Entity
-
-entity1.price = 90
-entity1.title = 'another new title'
-entity2.data = {'oof':5,'lala':8}
-
-entity3 = Entity(
- title = 'third entity',
- name = 'new name',
- price = '$1.95', # note we change 'price' to be a string.
- # this creates a new _EntityField separate from the
- # one used by integer 'price'.
- data = 'some data'
-)
-session.add(entity3)
-
-session.commit()
-
-print "----------------"
-for entity in session.query(Entity):
- print "Entity id %d:" % entity.entity_id, entity.title, entity.name, entity.price, entity.data
-
-print "----------------"
-# illustrate each _EntityField that's been created and list each Entity which uses it
-for ent_id, name, datatype in session.query(_EntityField.field_id, _EntityField.name, _EntityField.datatype):
- print name, datatype, "(Enitites:", ",".join([
- str(entid) for entid in session.query(Entity.entity_id).\
- join(
- (_EntityValue, _EntityValue.entity_id==Entity.entity_id),
- (_EntityField, _EntityField.field_id==_EntityValue.field_id)
- ).filter(_EntityField.field_id==ent_id)
- ]), ")"
-
-# delete all the Entity objects
-for entity in session.query(Entity):
- session.delete(entity)
-session.commit()
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 6590864d6..77e15be5a 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -864,7 +864,7 @@ class Session(object):
context.append('mapper %s' % c_mapper)
if clause is not None:
context.append('SQL expression')
-
+
raise sa_exc.UnboundExecutionError(
"Could not locate a bind configured on %s or this Session" % (
', '.join(context)))
diff --git a/lib/sqlalchemy/test/entities.py b/lib/sqlalchemy/test/entities.py
new file mode 100644
index 000000000..0ec677eea
--- /dev/null
+++ b/lib/sqlalchemy/test/entities.py
@@ -0,0 +1,83 @@
+import sqlalchemy as sa
+from sqlalchemy import exc as sa_exc
+
+_repr_stack = set()
+class BasicEntity(object):
+ def __init__(self, **kw):
+ for key, value in kw.iteritems():
+ setattr(self, key, value)
+
+ def __repr__(self):
+ if id(self) in _repr_stack:
+ return object.__repr__(self)
+ _repr_stack.add(id(self))
+ try:
+ return "%s(%s)" % (
+ (self.__class__.__name__),
+ ', '.join(["%s=%r" % (key, getattr(self, key))
+ for key in sorted(self.__dict__.keys())
+ if not key.startswith('_')]))
+ finally:
+ _repr_stack.remove(id(self))
+
+_recursion_stack = set()
+class ComparableEntity(BasicEntity):
+ def __hash__(self):
+ return hash(self.__class__)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ """'Deep, sparse compare.
+
+ Deeply compare two entities, following the non-None attributes of the
+ non-persisted object, if possible.
+
+ """
+ if other is self:
+ return True
+ elif not self.__class__ == other.__class__:
+ return False
+
+ if id(self) in _recursion_stack:
+ return True
+ _recursion_stack.add(id(self))
+
+ try:
+ # pick the entity thats not SA persisted as the source
+ try:
+ self_key = sa.orm.attributes.instance_state(self).key
+ except sa.orm.exc.NO_STATE:
+ self_key = None
+
+ if other is None:
+ a = self
+ b = other
+ elif self_key is not None:
+ a = other
+ b = self
+ else:
+ a = self
+ b = other
+
+ for attr in a.__dict__.keys():
+ if attr.startswith('_'):
+ continue
+ value = getattr(a, attr)
+
+ try:
+ # handle lazy loader errors
+ battr = getattr(b, attr)
+ except (AttributeError, sa_exc.UnboundExecutionError):
+ return False
+
+ if hasattr(value, '__iter__'):
+ if list(value) != list(battr):
+ return False
+ else:
+ if value is not None and value != battr:
+ return False
+ return True
+ finally:
+ _recursion_stack.remove(id(self))
diff --git a/setup.cfg b/setup.cfg
index 517492791..640111803 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,3 +4,4 @@ tag_svn_revision = true
[nosetests]
with-sqlalchemy = true
+exclude = ^examples \ No newline at end of file
diff --git a/test/orm/_base.py b/test/orm/_base.py
index f08d253d5..4d0031f5a 100644
--- a/test/orm/_base.py
+++ b/test/orm/_base.py
@@ -7,91 +7,10 @@ from sqlalchemy.test import config, testing
from sqlalchemy.test.testing import resolve_artifact_names, adict
from sqlalchemy.test.engines import drop_all_tables
from sqlalchemy.util import function_named
-
-
-_repr_stack = set()
-class BasicEntity(object):
- def __init__(self, **kw):
- for key, value in kw.iteritems():
- setattr(self, key, value)
-
- def __repr__(self):
- if id(self) in _repr_stack:
- return object.__repr__(self)
- _repr_stack.add(id(self))
- try:
- return "%s(%s)" % (
- (self.__class__.__name__),
- ', '.join(["%s=%r" % (key, getattr(self, key))
- for key in sorted(self.__dict__.keys())
- if not key.startswith('_')]))
- finally:
- _repr_stack.remove(id(self))
+from sqlalchemy.test.entities import BasicEntity, ComparableEntity
Entity = BasicEntity
-_recursion_stack = set()
-class ComparableEntity(BasicEntity):
- def __hash__(self):
- return hash(self.__class__)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- """'Deep, sparse compare.
-
- Deeply compare two entities, following the non-None attributes of the
- non-persisted object, if possible.
-
- """
- if other is self:
- return True
- elif not self.__class__ == other.__class__:
- return False
-
- if id(self) in _recursion_stack:
- return True
- _recursion_stack.add(id(self))
-
- try:
- # pick the entity thats not SA persisted as the source
- try:
- self_key = sa.orm.attributes.instance_state(self).key
- except sa.orm.exc.NO_STATE:
- self_key = None
-
- if other is None:
- a = self
- b = other
- elif self_key is not None:
- a = other
- b = self
- else:
- a = self
- b = other
-
- for attr in a.__dict__.keys():
- if attr.startswith('_'):
- continue
- value = getattr(a, attr)
-
- try:
- # handle lazy loader errors
- battr = getattr(b, attr)
- except (AttributeError, sa_exc.UnboundExecutionError):
- return False
-
- if hasattr(value, '__iter__'):
- if list(value) != list(battr):
- return False
- else:
- if value is not None and value != battr:
- return False
- return True
- finally:
- _recursion_stack.remove(id(self))
-
class ORMTest(testing.TestBase, testing.AssertsExecutionResults):
__requires__ = ('subqueries',)