path: root/test/orm
author     Jason Kirtland <jek@discorporate.us>  2008-05-09 20:26:09 +0000
committer  Jason Kirtland <jek@discorporate.us>  2008-05-09 20:26:09 +0000
commit     e41c0f4107a132b2feac83ba07a25a336e7eae0b (patch)
tree       09c785fd5ef9557c3fc926afd7e0a78702dd8023 /test/orm
parent     a2122a89f6d4f2d3ccc4ba7665cd588c2b0b93b0 (diff)
download   sqlalchemy-e41c0f4107a132b2feac83ba07a25a336e7eae0b.tar.gz
Test suite modernization in progress. Big changes:
- @unsupported now only accepts a single target and demands a reason for not running the test.
- @exclude also demands an exclusion reason.
- Greatly expanded @testing.requires.<feature>, eliminating many decorators in the suite and significantly easing integration of multi-driver support.
- New ORM test base class, and a featureful base for mapped tests.
- Usage of 'global' for shared setup going away, * imports as well.
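As a rough illustration of the new style (a sketch only: RoundTripTest, Stuff and the 'stuff' table are hypothetical, while MappedTest, define_tables, setup_classes, fixtures and testing.resolve_artifact_names are the hooks introduced in test/orm/_base.py below):

    from testlib import testing
    from testlib.sa import Table, Column, Integer, String
    from testlib.sa.orm import mapper, create_session
    from orm import _base

    class RoundTripTest(_base.MappedTest):
        # lifecycle flags ('once', 'each' or None) control how often each hook runs
        run_inserts = 'each'

        def define_tables(self, metadata):
            # tables created here are collected into self.tables, keyed by name
            Table('stuff', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('data', String(30)))

        def setup_classes(self):
            # BasicEntity subclasses defined here are registered on the test class
            class Stuff(_base.BasicEntity):
                pass

        def fixtures(self):
            # a header tuple followed by rows, loaded before each test
            return {'stuff': (('id', 'data'),
                              (1, 'one'),
                              (2, 'two'))}

        @testing.resolve_artifact_names
        def test_roundtrip(self):
            # resolve_artifact_names makes registered tables/classes visible as
            # bare names ('stuff', 'Stuff'), replacing the old 'global' setup
            mapper(Stuff, stuff)
            sess = create_session()
            assert sess.query(Stuff).count() == 2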
Diffstat (limited to 'test/orm')
-rw-r--r--  test/orm/_base.py           277
-rw-r--r--  test/orm/_fixtures.py       394
-rw-r--r--  test/orm/association.py     179
-rw-r--r--  test/orm/assorted_eager.py   74
-rw-r--r--  test/orm/attributes.py      339
-rw-r--r--  test/orm/cascade.py         653
-rw-r--r--  test/orm/collection.py      214
-rw-r--r--  test/orm/compile.py          12
-rw-r--r--  test/orm/cycles.py           17
-rw-r--r--  test/orm/deprecations.py    167
-rw-r--r--  test/orm/dynamic.py         136
-rw-r--r--  test/orm/eager_relations.py 598
-rw-r--r--  test/orm/entity.py            6
-rw-r--r--  test/orm/expire.py          188
-rw-r--r--  test/orm/extendedattr.py      4
-rw-r--r--  test/orm/generative.py      365
-rw-r--r--  test/orm/instrumentation.py  18
-rw-r--r--  test/orm/lazy_relations.py  122
-rw-r--r--  test/orm/lazytest1.py        91
-rw-r--r--  test/orm/manytomany.py      217
-rw-r--r--  test/orm/mapper.py            4
-rw-r--r--  test/orm/memusage.py         57
-rw-r--r--  test/orm/merge.py           334
-rw-r--r--  test/orm/naturalpks.py      115
-rw-r--r--  test/orm/onetoone.py         95
-rw-r--r--  test/orm/pickled.py          52
-rw-r--r--  test/orm/query.py             4
-rw-r--r--  test/orm/relationships.py  1443
-rw-r--r--  test/orm/scoping.py         176
-rw-r--r--  test/orm/selectable.py       68
-rw-r--r--  test/orm/session.py         544
-rw-r--r--  test/orm/unitofwork.py     2839
32 files changed, 5557 insertions, 4245 deletions
diff --git a/test/orm/_base.py b/test/orm/_base.py
new file mode 100644
index 000000000..bfce00d0f
--- /dev/null
+++ b/test/orm/_base.py
@@ -0,0 +1,277 @@
+import gc
+import inspect
+import sys
+import types
+from testlib import config, sa, testing
+from testlib.testing import resolve_artifact_names, adict
+from testlib.compat import set, sorted, _function_named
+
+
+_repr_stack = set()
+class BasicEntity(object):
+ def __init__(self, **kw):
+ for key, value in kw.iteritems():
+ setattr(self, key, value)
+
+ def __repr__(self):
+ if id(self) in _repr_stack:
+ return object.__repr__(self)
+ _repr_stack.add(id(self))
+ try:
+ return "%s(%s)" % (
+ (self.__class__.__name__),
+ ', '.join(["%s=%r" % (key, getattr(self, key))
+ for key in sorted(self.__dict__.keys())
+ if not key.startswith('_')]))
+ finally:
+ _repr_stack.remove(id(self))
+
+Entity = BasicEntity
+
+_recursion_stack = set()
+class ComparableEntity(BasicEntity):
+ def __hash__(self):
+ return hash(self.__class__)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ """'Deep, sparse compare.
+
+ Deeply compare two entities, following the non-None attributes of the
+ non-persisted object, if possible.
+
+ """
+ if other is self:
+ return True
+ elif not self.__class__ == other.__class__:
+ return False
+
+ if id(self) in _recursion_stack:
+ return True
+ _recursion_stack.add(id(self))
+
+ try:
+ # pick the entity that's not SA persisted as the source
+ try:
+ self_key = sa.orm.attributes.instance_state(self).key
+ except (KeyError, AttributeError):
+ self_key = None
+ try:
+ other_key = sa.orm.attributes.instance_state(other).key
+ except (KeyError, AttributeError):
+ other_key = None
+
+ if other_key is None and self_key is not None:
+ a, b = other, self
+ else:
+ a, b = self, other
+
+ for attr in a.__dict__.keys():
+ if attr.startswith('_'):
+ continue
+ value = getattr(a, attr)
+ if (hasattr(value, '__iter__') and
+ not isinstance(value, basestring)):
+ try:
+ # catch AttributeError so that lazy loaders trigger
+ battr = getattr(b, attr)
+ except AttributeError:
+ return False
+
+ if list(value) != list(battr):
+ return False
+ else:
+ if value is not None:
+ if value != getattr(b, attr, None):
+ return False
+ return True
+ finally:
+ _recursion_stack.remove(id(self))
+
+
+class ORMTest(testing.TestBase, testing.AssertsExecutionResults):
+ __requires__ = ('subqueries',)
+
+ def tearDownAll(self):
+ sa.orm.session.Session.close_all()
+ sa.orm.clear_mappers()
+ # TODO: ensure mapper registry is empty
+ # TODO: ensure instrumentation registry is empty
+
+class MappedTest(ORMTest):
+ # 'once', 'each', None
+ run_define_tables = 'once'
+
+ # 'once', 'each', None
+ run_setup_classes = 'once'
+
+ # 'once', 'each', None
+ run_setup_mappers = 'each'
+
+ # 'once', 'each', None
+ run_inserts = 'each'
+
+ # 'each', None
+ run_deletes = 'each'
+
+ metadata = None
+
+ _artifact_registries = ('tables', 'classes', 'other_artifacts')
+ tables = None
+ classes = None
+ other_artifacts = None
+
+ def setUpAll(self):
+ if self.run_setup_classes == 'each':
+ assert self.run_setup_mappers != 'once'
+
+ assert self.run_deletes in (None, 'each')
+ if self.run_inserts == 'once':
+ assert self.run_deletes is None
+
+ assert not hasattr(self, 'keep_mappers')
+ assert not hasattr(self, 'keep_data')
+
+ cls = self.__class__
+ if cls.tables is None:
+ cls.tables = adict()
+ if cls.classes is None:
+ cls.classes = adict()
+ if cls.other_artifacts is None:
+ cls.other_artifacts = adict()
+
+ if self.metadata is None:
+ setattr(type(self), 'metadata', sa.MetaData())
+
+ if self.metadata.bind is None:
+ self.metadata.bind = getattr(self, 'engine', config.db)
+
+ if self.run_define_tables:
+ self.define_tables(self.metadata)
+ self.metadata.create_all()
+ self.tables.update(self.metadata.tables)
+
+ if self.run_setup_classes:
+ baseline = subclasses(BasicEntity)
+ self.setup_classes()
+ self._register_new_class_artifacts(baseline)
+
+ if self.run_setup_mappers:
+ baseline = subclasses(BasicEntity)
+ self.setup_mappers()
+ self._register_new_class_artifacts(baseline)
+
+ if self.run_inserts:
+ self._load_fixtures()
+ self.insert_data()
+
+ def setUp(self):
+ if self._sa_first_test:
+ return
+
+ if self.run_define_tables == 'each':
+ self.tables.clear()
+ self.metadata.drop_all()
+ self.define_tables(self.metadata)
+ self.metadata.create_all()
+
+ if self.run_setup_classes == 'each':
+ self.classes.clear()
+ baseline = subclasses(BasicEntity)
+ self.setup_classes()
+ self._register_new_class_artifacts(baseline)
+
+ if self.run_setup_mappers == 'each':
+ baseline = subclasses(BasicEntity)
+ self.setup_mappers()
+ self._register_new_class_artifacts(baseline)
+
+ if self.run_inserts == 'each':
+ self._load_fixtures()
+ self.insert_data()
+
+ def tearDown(self):
+ sa.orm.session.Session.close_all()
+
+ if self.run_setup_mappers == 'each':
+ sa.orm.clear_mappers()
+
+ if self.run_deletes:
+ for table in self.metadata.table_iterator(reverse=True):
+ try:
+ table.delete().execute().close()
+ except sa.orm.DBAPIError, ex:
+ print >> sys.stderr, "Error emptying table %s: %r" % (
+ table, ex)
+
+ def tearDownAll(self):
+ for cls in self.classes.values():
+ self.unregister_class(cls)
+ ORMTest.tearDownAll(self)
+ self.metadata.drop_all()
+ self.metadata.bind = None
+
+ def define_tables(self, metadata):
+ raise NotImplementedError()
+
+ def setup_classes(self):
+ pass
+
+ def setup_mappers(self):
+ pass
+
+ def fixtures(self):
+ return {}
+
+ def insert_data(self):
+ pass
+
+ def sql_count_(self, count, fn):
+ self.assert_sql_count(self.metadata.bind, fn, count)
+
+ @classmethod
+ def _register_new_class_artifacts(cls, baseline):
+ for class_ in subclasses(BasicEntity) - baseline:
+ cls.register_class(class_)
+
+ @classmethod
+ def register_class(cls, class_):
+ name = class_.__name__
+ if name[0].isupper():
+ setattr(cls, name, class_)
+ cls.classes[name] = class_
+
+ @classmethod
+ def unregister_class(cls, class_):
+ name = class_.__name__
+ if name[0].isupper():
+ delattr(cls, name)
+ del cls.classes[name]
+
+ def _load_fixtures(self):
+ headers, rows = {}, {}
+ for table, data in self.fixtures().iteritems():
+ if isinstance(table, basestring):
+ table = self.tables[table]
+ headers[table] = data[0]
+ rows[table] = data[1:]
+ for table in self.metadata.table_iterator(reverse=False):
+ if table not in headers:
+ continue
+ table.bind.execute(
+ table.insert(),
+ [dict(zip(headers[table], column_values))
+ for column_values in rows[table]])
+
+
+def subclasses(cls):
+ subs, process = set(), set(cls.__subclasses__())
+ while process:
+ cls = process.pop()
+ if cls not in subs:
+ subs.add(cls)
+ process |= set(cls.__subclasses__())
+ return subs
+
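A side note on the ComparableEntity equality above, since most of the converted assertions lean on it: comparison walks the attributes actually set on the non-persisted object and ignores everything else, so sparse "expected" objects compare equal to fully loaded instances. A minimal, self-contained sketch (Parent/Child are hypothetical names, not fixtures from this suite):

    from orm import _base

    class Parent(_base.ComparableEntity):
        pass

    class Child(_base.ComparableEntity):
        pass

    # only the attributes we care about are set on the expected value
    expected = Parent(id=1, children=[Child(id=10), Child(id=11)])

    # extra attributes on the other side are ignored: comparison follows the
    # attributes present on the non-persisted (here: left-hand) object
    loaded = Parent(id=1, name='irrelevant', children=[Child(id=10), Child(id=11)])

    assert expected == loaded          # 'name' is never consulted
    assert expected != Parent(id=2)    # a mismatched attribute fails the compare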
diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py
new file mode 100644
index 000000000..07367895f
--- /dev/null
+++ b/test/orm/_fixtures.py
@@ -0,0 +1,394 @@
+from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import attributes
+from testlib.compat import set
+from testlib.testing import fixture
+from orm import _base
+
+__all__ = ()
+
+fixture_metadata = MetaData()
+
+def fixture_table(table, columns, *rows):
+ def load_fixture(bind=None):
+ bind = bind or table.bind
+ bind.execute(
+ table.insert(),
+ [dict(zip(columns, column_values)) for column_values in rows])
+ table.info[('fixture', 'loader')] = load_fixture
+ table.info[('fixture', 'columns')] = columns
+ table.info[('fixture', 'rows')] = rows
+ return table
+
+users = fixture_table(
+ Table('users', fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30), nullable=False),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'name'),
+ (7, 'jack'),
+ (8, 'ed'),
+ (9, 'fred'),
+ (10, 'chuck'))
+
+addresses = fixture_table(
+ Table('addresses', fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', None, ForeignKey('users.id')),
+ Column('email_address', String(50), nullable=False),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'user_id', 'email_address'),
+ (1, 7, "jack@bean.com"),
+ (2, 8, "ed@wood.com"),
+ (3, 8, "ed@bettyboop.com"),
+ (4, 8, "ed@lala.com"),
+ (5, 9, "fred@fred.com"))
+
+orders = fixture_table(
+ Table('orders', fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', None, ForeignKey('users.id')),
+ Column('address_id', None, ForeignKey('addresses.id')),
+ Column('description', String(30)),
+ Column('isopen', Integer),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'user_id', 'description', 'isopen', 'address_id'),
+ (1, 7, 'order 1', 0, 1),
+ (2, 9, 'order 2', 0, 4),
+ (3, 7, 'order 3', 1, 1),
+ (4, 9, 'order 4', 1, 4),
+ (5, 7, 'order 5', 0, None))
+
+dingalings = fixture_table(
+ Table("dingalings", fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('address_id', None, ForeignKey('addresses.id')),
+ Column('data', String(30)),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'address_id', 'data'),
+ (1, 2, 'ding 1/2'),
+ (2, 5, 'ding 2/5'))
+
+items = fixture_table(
+ Table('items', fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('description', String(30), nullable=False),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'description'),
+ (1, 'item 1'),
+ (2, 'item 2'),
+ (3, 'item 3'),
+ (4, 'item 4'),
+ (5, 'item 5'))
+
+order_items = fixture_table(
+ Table('order_items', fixture_metadata,
+ Column('item_id', None, ForeignKey('items.id')),
+ Column('order_id', None, ForeignKey('orders.id')),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('item_id', 'order_id'),
+ (1, 1),
+ (2, 1),
+ (3, 1),
+
+ (1, 2),
+ (2, 2),
+ (3, 2),
+
+ (3, 3),
+ (4, 3),
+ (5, 3),
+
+ (1, 4),
+ (5, 4),
+
+ (5, 5))
+
+keywords = fixture_table(
+ Table('keywords', fixture_metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30), nullable=False),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('id', 'name'),
+ (1, 'blue'),
+ (2, 'red'),
+ (3, 'green'),
+ (4, 'big'),
+ (5, 'small'),
+ (6, 'round'),
+ (7, 'square'))
+
+item_keywords = fixture_table(
+ Table('item_keywords', fixture_metadata,
+ Column('item_id', None, ForeignKey('items.id')),
+ Column('keyword_id', None, ForeignKey('keywords.id')),
+ test_needs_acid=True,
+ test_needs_fk=True),
+ ('keyword_id', 'item_id'),
+ (2, 1),
+ (2, 2),
+ (4, 1),
+ (6, 1),
+ (5, 2),
+ (3, 3),
+ (4, 3),
+ (7, 2),
+ (6, 3))
+
+
+def _load_fixtures():
+ for table in fixture_metadata.table_iterator(reverse=False):
+ table.info[('fixture', 'loader')]()
+
+def run_inserts_for(table, bind=None):
+ table.info[('fixture', 'loader')](bind)
+
+
+class Base(_base.ComparableEntity):
+ pass
+
+_recursion_stack = set()
+class ZBase(_base.BasicEntity):
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ """'passively' compare this object to another.
+
+ only look at attributes that are present on the source object.
+
+ """
+ if self in _recursion_stack:
+ return True
+ _recursion_stack.add(self)
+ try:
+ # pick the entity that's not SA persisted as the source
+ try:
+ state = attributes.instance_state(self)
+ key = state.key
+ except (KeyError, AttributeError):
+ key = None
+ if other is None:
+ a = self
+ b = other
+ elif key is not None:
+ a = other
+ b = self
+ else:
+ a = self
+ b = other
+
+ for attr in a.__dict__.keys():
+ if attr[0] == '_':
+ continue
+ value = getattr(a, attr)
+ #print "looking at attr:", attr, "start value:", value
+ if hasattr(value, '__iter__') and not isinstance(value, basestring):
+ try:
+ # catch AttributeError so that lazy loaders trigger
+ battr = getattr(b, attr)
+ except AttributeError:
+ #print "b class does not have attribute named '%s'" % attr
+ #raise
+ return False
+
+ if list(value) == list(battr):
+ continue
+ else:
+ return False
+ else:
+ if value is not None:
+ if value != getattr(b, attr, None):
+ #print "2. Attribute named '%s' does not match that of b" % attr
+ return False
+ else:
+ return True
+ finally:
+ _recursion_stack.remove(self)
+
+class User(Base):
+ pass
+class Order(Base):
+ pass
+class Item(Base):
+ pass
+class Keyword(Base):
+ pass
+class Address(Base):
+ pass
+class Dingaling(Base):
+ pass
+
+
+class FixtureTest(_base.MappedTest):
+ """A MappedTest pre-configured for fixtures.
+
+ All fixture tables are pre-loaded into cls.tables, as are all fixture
+ classes in cls.classes and as cls.ClassName.
+
+ Fixture.mapper() still functions and will register non-fixture classes into
+ cls.classes.
+
+ """
+
+ run_define_tables = 'once'
+ run_setup_classes = 'once'
+ run_setup_mappers = 'each'
+ run_inserts = 'each'
+ run_deletes = 'each'
+
+ metadata = fixture_metadata
+ fixture_classes = dict(User=User,
+ Order=Order,
+ Item=Item,
+ Keyword=Keyword,
+ Address=Address,
+ Dingaling=Dingaling)
+
+ def setUpAll(self):
+ assert not hasattr(self, 'refresh_data')
+ assert not hasattr(self, 'only_tables')
+ #refresh_data = False
+ #only_tables = False
+
+ #if type(self) is not FixtureTest:
+ # setattr(type(self), 'classes', _base.adict(self.classes))
+
+ #if self.run_setup_classes:
+ # for cls in self.classes.values():
+ # self.register_class(cls)
+ super(FixtureTest, self).setUpAll()
+
+ #if not self.only_tables and self.keep_data:
+ # _registry.load()
+
+ def define_tables(self, metadata):
+ pass
+
+ def setup_classes(self):
+ for cls in self.fixture_classes.values():
+ self.register_class(cls)
+
+ def setup_mappers(self):
+ pass
+
+ def insert_data(self):
+ _load_fixtures()
+
+
+class CannedResults(object):
+ """Built on demand, instances use mappers in effect at time of call."""
+
+ @property
+ def user_address_result(self):
+ return [
+ User(id=7, addresses=[
+ Address(id=1)
+ ]),
+ User(id=8, addresses=[
+ Address(id=2, email_address='ed@wood.com'),
+ Address(id=3, email_address='ed@bettyboop.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=10, addresses=[])]
+
+ @property
+ def user_all_result(self):
+ return [
+ User(id=7,
+ addresses=[
+ Address(id=1)],
+ orders=[
+ Order(description='order 1',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 2'),
+ Item(description='item 3')]),
+ Order(description='order 3'),
+ Order(description='order 5')]),
+ User(id=8,
+ addresses=[
+ Address(id=2),
+ Address(id=3),
+ Address(id=4)]),
+ User(id=9,
+ addresses=[
+ Address(id=5)],
+ orders=[
+ Order(description='order 2',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 2'),
+ Item(description='item 3')]),
+ Order(description='order 4',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 5')])]),
+ User(id=10, addresses=[])]
+
+ @property
+ def user_order_result(self):
+ return [
+ User(id=7,
+ orders=[
+ Order(id=1,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)]),
+ Order(id=3,
+ items=[
+ Item(id=3),
+ Item(id=4),
+ Item(id=5)]),
+ Order(id=5,
+ items=[
+ Item(id=5)])]),
+ User(id=8,
+ orders=[]),
+ User(id=9,
+ orders=[
+ Order(id=2,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)]),
+ Order(id=4,
+ items=[
+ Item(id=1),
+ Item(id=5)])]),
+ User(id=10)]
+
+ @property
+ def item_keyword_result(self):
+ return [
+ Item(id=1,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='big'),
+ Keyword(name='round')]),
+ Item(id=2,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='small'),
+ Keyword(name='square')]),
+ Item(id=3,
+ keywords=[
+ Keyword(name='green'),
+ Keyword(name='big'),
+ Keyword(name='round')]),
+ Item(id=4,
+ keywords=[]),
+ Item(id=5,
+ keywords=[])]
+FixtureTest.static = CannedResults()
+
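The CannedResults attached as FixtureTest.static are meant to be compared directly against query results. A hedged sketch of how a converted test might use them (CannedResultTest is a hypothetical name, and it assumes resolve_artifact_names exposes the registered fixture tables and classes as bare names, as in the converted modules below):

    from testlib import testing
    from testlib.sa.orm import mapper, relation, create_session
    from orm import _fixtures

    class CannedResultTest(_fixtures.FixtureTest):

        @testing.resolve_artifact_names
        def setup_mappers(self):
            mapper(Address, addresses)
            mapper(User, users, properties={
                'addresses': relation(Address, order_by=addresses.c.id)})

        @testing.resolve_artifact_names
        def test_user_addresses(self):
            sess = create_session()
            result = sess.query(User).order_by(users.c.id).all()
            # ComparableEntity comparison lets the sparse canned objects match
            # the fully loaded instances attribute by attribute
            assert result == self.static.user_address_result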
diff --git a/test/orm/association.py b/test/orm/association.py
index 1115849d2..38c6dc02f 100644
--- a/test/orm/association.py
+++ b/test/orm/association.py
@@ -1,61 +1,67 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-
-class AssociationTest(TestBase):
- def setUpAll(self):
- global items, item_keywords, keywords, metadata, Item, Keyword, KeywordAssociation
- metadata = MetaData(testing.db)
- items = Table('items', metadata,
+from testlib import testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from orm import _base
+from testlib.testing import eq_
+
+class AssociationTest(_base.MappedTest):
+ run_setup_classes = 'once'
+ run_setup_mappers = 'once'
+
+ def define_tables(self, metadata):
+ Table('items', metadata,
Column('item_id', Integer, primary_key=True),
- Column('name', String(40)),
- )
- item_keywords = Table('item_keywords', metadata,
+ Column('name', String(40)))
+ Table('item_keywords', metadata,
Column('item_id', Integer, ForeignKey('items.item_id')),
Column('keyword_id', Integer, ForeignKey('keywords.keyword_id')),
- Column('data', String(40))
- )
- keywords = Table('keywords', metadata,
+ Column('data', String(40)))
+ Table('keywords', metadata,
Column('keyword_id', Integer, primary_key=True),
- Column('name', String(40))
- )
- metadata.create_all()
+ Column('name', String(40)))
- class Item(object):
+ def setup_classes(self):
+ class Item(_base.BasicEntity):
def __init__(self, name):
self.name = name
def __repr__(self):
- return "Item id=%d name=%s keywordassoc=%s" % (self.item_id, self.name, repr(self.keywords))
- class Keyword(object):
+ return "Item id=%d name=%s keywordassoc=%r" % (
+ self.item_id, self.name, self.keywords)
+
+ class Keyword(_base.BasicEntity):
def __init__(self, name):
self.name = name
def __repr__(self):
return "Keyword id=%d name=%s" % (self.keyword_id, self.name)
- class KeywordAssociation(object):
+
+ class KeywordAssociation(_base.BasicEntity):
def __init__(self, keyword, data):
self.keyword = keyword
self.data = data
def __repr__(self):
- return "KeywordAssociation itemid=%d keyword=%s data=%s" % (self.item_id, repr(self.keyword), self.data)
+ return "KeywordAssociation itemid=%d keyword=%r data=%s" % (
+ self.item_id, self.keyword, self.data)
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ items, item_keywords, keywords = self.tables.get_all(
+ 'items', 'item_keywords', 'keywords')
mapper(Keyword, keywords)
mapper(KeywordAssociation, item_keywords, properties={
- 'keyword':relation(Keyword, lazy=False)
- }, primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id], order_by=[item_keywords.c.data])
+ 'keyword':relation(Keyword, lazy=False)},
+ primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
+ order_by=[item_keywords.c.data])
+
mapper(Item, items, properties={
- 'keywords' : relation(KeywordAssociation, cascade="all, delete-orphan")
+ 'keywords' : relation(KeywordAssociation,
+ cascade="all, delete-orphan")
})
- def tearDown(self):
- for t in metadata.table_iterator(reverse=True):
- t.delete().execute()
- def tearDownAll(self):
- clear_mappers()
- metadata.drop_all()
-
- def testinsert(self):
+ @testing.resolve_artifact_names
+ def test_insert(self):
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
@@ -69,11 +75,10 @@ class AssociationTest(TestBase):
sess.clear()
l = sess.query(Item).all()
loaded = repr(l)
- print saved
- print loaded
- self.assert_(saved == loaded)
+ eq_(saved, loaded)
- def testreplace(self):
+ @testing.resolve_artifact_names
+ def test_replace(self):
sess = create_session()
item1 = Item('item1')
item1.keywords.append(KeywordAssociation(Keyword('blue'), 'blue_assoc'))
@@ -89,11 +94,10 @@ class AssociationTest(TestBase):
sess.clear()
l = sess.query(Item).all()
loaded = repr(l)
- print saved
- print loaded
- self.assert_(saved == loaded)
+ eq_(saved, loaded)
- def testmodify(self):
+ @testing.resolve_artifact_names
+ def test_modify(self):
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
@@ -118,11 +122,10 @@ class AssociationTest(TestBase):
sess.clear()
l = sess.query(Item).all()
loaded = repr(l)
- print saved
- print loaded
- self.assert_(saved == loaded)
+ eq_(saved, loaded)
- def testdelete(self):
+ @testing.resolve_artifact_names
+ def test_delete(self):
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
@@ -132,92 +135,12 @@ class AssociationTest(TestBase):
sess.save(item1)
sess.save(item2)
sess.flush()
- self.assert_(item_keywords.count().scalar() == 3)
+ eq_(self.tables.item_keywords.count().scalar(), 3)
sess.delete(item1)
sess.delete(item2)
sess.flush()
- self.assert_(item_keywords.count().scalar() == 0)
-
-class AssociationTest2(TestBase):
- def setUpAll(self):
- global table_originals, table_people, table_isauthor, metadata, Originals, People, IsAuthor
- metadata = MetaData(testing.db)
- table_originals = Table('Originals', metadata,
- Column('ID', Integer, primary_key=True),
- Column('Title', String(200), nullable=False),
- Column('Date', Date ),
- )
- table_people = Table('People', metadata,
- Column('ID', Integer, primary_key=True),
- Column('Name', String(140), nullable=False),
- Column('Country', CHAR(2), default='es'),
- )
- table_isauthor = Table('IsAuthor', metadata,
- Column('OriginalsID', Integer, ForeignKey('Originals.ID'),
-default=None),
- Column('PeopleID', Integer, ForeignKey('People.ID'),
-default=None),
- Column('Kind', CHAR(1), default='A'),
- )
- metadata.create_all()
-
- class Base(object):
- def __init__(self, **kw):
- for k,v in kw.iteritems():
- setattr(self, k, v)
- def display(self):
- c = [ "%s=%s" % (col.key, repr(getattr(self, col.key))) for col
-in self.c ]
- return "%s(%s)" % (self.__class__.__name__, ', '.join(c))
- def __repr__(self):
- return self.display()
- def __str__(self):
- return self.display()
- class Originals(Base):
- order = [table_originals.c.Title, table_originals.c.Date]
- class People(Base):
- order = [table_people.c.Name]
- class IsAuthor(Base):
- pass
-
- mapper(Originals, table_originals, order_by=Originals.order,
- properties={
- 'people': relation(IsAuthor, cascade="all, delete-orphan"),
- 'authors': relation(People, secondary=table_isauthor, backref='written',
- primaryjoin=and_(table_originals.c.ID==table_isauthor.c.OriginalsID,
- table_isauthor.c.Kind=='A')),
- 'title': table_originals.c.Title,
- 'date': table_originals.c.Date,
- })
- mapper(People, table_people, order_by=People.order, properties= {
- 'originals': relation(IsAuthor, cascade="all, delete-orphan"),
- 'name': table_people.c.Name,
- 'country': table_people.c.Country,
- })
- mapper(IsAuthor, table_isauthor,
- primary_key=[table_isauthor.c.OriginalsID, table_isauthor.c.PeopleID,
-table_isauthor.c.Kind],
- properties={
- 'original': relation(Originals, lazy=False),
- 'person': relation(People, lazy=False),
- 'kind': table_isauthor.c.Kind,
- })
-
- def tearDown(self):
- for t in metadata.table_iterator(reverse=True):
- t.delete().execute()
- def tearDownAll(self):
- clear_mappers()
- metadata.drop_all()
-
- def testinsert(self):
- # this test is sure to get more complex...
- p = People(name='name', country='es')
- sess = create_session()
- sess.save(p)
- sess.flush()
-
+ eq_(self.tables.item_keywords.count().scalar(), 0)
if __name__ == "__main__":
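The commit message's @testing.requires.<feature> appears in _base.py above as a class-level __requires__ tuple (ORMTest requires 'subqueries'); per the message, the same feature names can also gate individual tests. A hedged sketch of both spellings (SubqueryDependentTest and the test name are hypothetical; the per-test form follows the @testing.requires.<feature> spelling from the commit message):

    from testlib import testing
    from orm import _base

    class SubqueryDependentTest(_base.ORMTest):
        # class-level: skip every test here on drivers without the feature
        __requires__ = ('subqueries',)

        # per-test: gate a single test on the same feature registry
        @testing.requires.subqueries
        def test_correlated_count(self):
            pass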
diff --git a/test/orm/assorted_eager.py b/test/orm/assorted_eager.py
index 731a9f916..87c71bc5a 100644
--- a/test/orm/assorted_eager.py
+++ b/test/orm/assorted_eager.py
@@ -5,9 +5,9 @@ import random, datetime
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
-from testlib import fixtures
+from orm import _base
-class EagerTest(TestBase, AssertsExecutionResults):
+class EagerTest(_base.ORMTest):
def setUpAll(self):
global dbmeta, owners, categories, tests, options, Owner, Category, Test, Option, false
dbmeta = MetaData(testing.db)
@@ -22,20 +22,20 @@ class EagerTest(TestBase, AssertsExecutionResults):
else:
false = str(False)
- owners = Table ( 'owners', dbmeta ,
- Column ( 'id', Integer, primary_key=True, nullable=False ),
- Column('data', String(30)) )
- categories=Table( 'categories', dbmeta,
- Column ( 'id', Integer,primary_key=True, nullable=False ),
- Column ( 'name', VARCHAR(20), index=True ) )
- tests = Table ( 'tests', dbmeta ,
- Column ( 'id', Integer, primary_key=True, nullable=False ),
- Column ( 'owner_id',Integer, ForeignKey('owners.id'), nullable=False,index=True ),
- Column ( 'category_id', Integer, ForeignKey('categories.id'),nullable=False,index=True ))
- options = Table ( 'options', dbmeta ,
- Column ( 'test_id', Integer, ForeignKey ( 'tests.id' ), primary_key=True, nullable=False ),
- Column ( 'owner_id', Integer, ForeignKey ( 'owners.id' ), primary_key=True, nullable=False ),
- Column ( 'someoption', Boolean, PassiveDefault(false), nullable=False ) )
+ owners = Table('owners', dbmeta ,
+ Column('id', Integer, primary_key=True, nullable=False),
+ Column('data', String(30)))
+ categories=Table('categories', dbmeta,
+ Column('id', Integer,primary_key=True, nullable=False),
+ Column('name', VARCHAR(20), index=True))
+ tests = Table('tests', dbmeta ,
+ Column('id', Integer, primary_key=True, nullable=False ),
+ Column('owner_id',Integer, ForeignKey('owners.id'), nullable=False,index=True ),
+ Column('category_id', Integer, ForeignKey('categories.id'),nullable=False,index=True ))
+ options = Table('options', dbmeta ,
+ Column('test_id', Integer, ForeignKey('tests.id'), primary_key=True, nullable=False ),
+ Column('owner_id', Integer, ForeignKey('owners.id'), primary_key=True, nullable=False ),
+ Column('someoption', Boolean, PassiveDefault(false), nullable=False ) )
dbmeta.create_all()
@@ -154,7 +154,7 @@ class EagerTest(TestBase, AssertsExecutionResults):
print result
assert result == [u'1 Some Category', u'3 Some Category']
- @testing.unsupported('sybase')
+ @testing.unsupported('sybase', 'FIXME: unknown, verify not fails_on')
def test_withoutouterjoin_literal(self):
s = create_session()
q = s.query(Test).options(eagerload('category'))
@@ -175,7 +175,7 @@ class EagerTest(TestBase, AssertsExecutionResults):
print result
assert result == [u'3 Some Category']
-class EagerTest2(TestBase, AssertsExecutionResults):
+class EagerTest2(_base.ORMTest):
def setUpAll(self):
global metadata, middle, left, right
metadata = MetaData(testing.db)
@@ -230,22 +230,22 @@ class EagerTest2(TestBase, AssertsExecutionResults):
obj = session.query(Left).filter_by(tag='tag1').one()
print obj.middle.right[0]
-class EagerTest3(ORMTest):
+class EagerTest3(_base.MappedTest):
"""test eager loading combined with nested SELECT statements, functions, and aggregates"""
def define_tables(self, metadata):
global datas, foo, stats
- datas=Table( 'datas',metadata,
- Column ( 'id', Integer, primary_key=True,nullable=False ),
- Column ( 'a', Integer , nullable=False ) )
+ datas=Table('datas',metadata,
+ Column('id', Integer, primary_key=True,nullable=False ),
+ Column('a', Integer , nullable=False ) )
foo=Table('foo',metadata,
- Column ( 'data_id', Integer, ForeignKey('datas.id'),nullable=False,primary_key=True ),
- Column ( 'bar', Integer ) )
+ Column('data_id', Integer, ForeignKey('datas.id'),nullable=False,primary_key=True ),
+ Column('bar', Integer))
- stats=Table('stats',metadata,
- Column ( 'id', Integer, primary_key=True, nullable=False ),
- Column ( 'data_id', Integer, ForeignKey('datas.id')),
- Column ( 'somedata', Integer, nullable=False ))
+ stats=Table('stats', metadata,
+ Column('id', Integer, primary_key=True, nullable=False ),
+ Column('data_id', Integer, ForeignKey('datas.id')),
+ Column('somedata', Integer, nullable=False ))
@testing.fails_on('maxdb')
def test_nesting_with_functions(self):
@@ -300,7 +300,7 @@ class EagerTest3(ORMTest):
# algorithms and there are repeated 'somedata' values in the list)
assert verify_result == arb_result
-class EagerTest4(ORMTest):
+class EagerTest4(_base.MappedTest):
def define_tables(self, metadata):
global departments, employees
departments = Table('departments', metadata,
@@ -353,7 +353,7 @@ class EagerTest4(ORMTest):
assert q.count() == 2
assert q[0] is d2
-class EagerTest5(ORMTest):
+class EagerTest5(_base.MappedTest):
"""test the construction of AliasedClauses for the same eager load property but different
parent mappers, due to inheritance"""
def define_tables(self, metadata):
@@ -444,7 +444,7 @@ class EagerTest5(ORMTest):
# eager load had to succeed
assert len([c for c in d2.comments]) == 1
-class EagerTest6(ORMTest):
+class EagerTest6(_base.MappedTest):
def define_tables(self, metadata):
global designType, design, part, inheritedPart
designType = Table('design_types', metadata,
@@ -502,7 +502,7 @@ class EagerTest6(ORMTest):
x = sess.query(Design).get(1)
x.inheritedParts
-class EagerTest7(ORMTest):
+class EagerTest7(_base.MappedTest):
def define_tables(self, metadata):
global companies_table, addresses_table, invoice_table, phones_table, items_table, ctx
global Company, Address, Phone, Item,Invoice
@@ -707,7 +707,7 @@ class EagerTest7(ORMTest):
assert repr(i.company) == repr(c), repr(i.company) + " does not match " + repr(c)
-class EagerTest8(ORMTest):
+class EagerTest8(_base.MappedTest):
def define_tables(self, metadata):
global project_t, task_t, task_status_t, task_type_t, message_t, message_type_t
@@ -803,7 +803,7 @@ class EagerTest8(ORMTest):
for t in session.query(cls.mapper).limit(10).offset(0).all():
print t.id, t.title, t.props_cnt
-class EagerTest9(ORMTest):
+class EagerTest9(_base.MappedTest):
"""test the usage of query options to eagerly load specific paths.
this relies upon the 'path' construct used by PropertyOption to relate
@@ -830,13 +830,13 @@ class EagerTest9(ORMTest):
@testing.fails_on('maxdb')
def test_eagerload_on_path(self):
- class Account(fixtures.Base):
+ class Account(_base.BasicEntity):
pass
- class Transaction(fixtures.Base):
+ class Transaction(_base.BasicEntity):
pass
- class Entry(fixtures.Base):
+ class Entry(_base.BasicEntity):
pass
mapper(Account, accounts_table)
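The tightened decorator contracts from the commit message are visible in the hunk above: @unsupported now takes a single target plus a mandatory reason string. A small hypothetical sketch (OuterJoinVariantsTest and the test bodies are illustrative only; fails_on is unchanged and shown for contrast):

    from testlib import testing
    from orm import _base

    class OuterJoinVariantsTest(_base.ORMTest):

        # one target and a reason, matching the new @unsupported signature
        @testing.unsupported('sybase', 'FIXME: unknown, verify not fails_on')
        def test_literal_variant(self):
            pass

        # fails_on marks an expected failure on a single dialect, as before
        @testing.fails_on('maxdb')
        def test_aggregate_variant(self):
            pass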
diff --git a/test/orm/attributes.py b/test/orm/attributes.py
index d31030f35..26828ecac 100644
--- a/test/orm/attributes.py
+++ b/test/orm/attributes.py
@@ -5,13 +5,15 @@ from sqlalchemy.orm.collections import collection
from sqlalchemy.orm.interfaces import AttributeExtension
from sqlalchemy import exc as sa_exc
from testlib import *
-from testlib import fixtures
+from testlib.testing import eq_
+from orm import _base
# global for pickling tests
MyTest = None
MyTest2 = None
-class AttributesTest(TestBase):
+
+class AttributesTest(_base.ORMTest):
def setUp(self):
global MyTest, MyTest2
class MyTest(object): pass
@@ -25,9 +27,9 @@ class AttributesTest(TestBase):
class User(object):pass
attributes.register_class(User)
- attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
- attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
- attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
+ attributes.register_attribute(User, 'user_id', uselist=False, useobject=False)
+ attributes.register_attribute(User, 'user_name', uselist=False, useobject=False)
+ attributes.register_attribute(User, 'email_address', uselist=False, useobject=False)
u = User()
u.user_id = 7
@@ -45,11 +47,11 @@ class AttributesTest(TestBase):
def test_pickleness(self):
attributes.register_class(MyTest)
attributes.register_class(MyTest2)
- attributes.register_attribute(MyTest, 'user_id', uselist = False, useobject=False)
- attributes.register_attribute(MyTest, 'user_name', uselist = False, useobject=False)
- attributes.register_attribute(MyTest, 'email_address', uselist = False, useobject=False)
- attributes.register_attribute(MyTest2, 'a', uselist = False, useobject=False)
- attributes.register_attribute(MyTest2, 'b', uselist = False, useobject=False)
+ attributes.register_attribute(MyTest, 'user_id', uselist=False, useobject=False)
+ attributes.register_attribute(MyTest, 'user_name', uselist=False, useobject=False)
+ attributes.register_attribute(MyTest, 'email_address', uselist=False, useobject=False)
+ attributes.register_attribute(MyTest2, 'a', uselist=False, useobject=False)
+ attributes.register_attribute(MyTest2, 'b', uselist=False, useobject=False)
# shouldnt be pickling callables at the class level
def somecallable(*args):
return None
@@ -118,30 +120,30 @@ class AttributesTest(TestBase):
f = Foo()
attributes.instance_state(f).expire_attributes(None)
- self.assertEquals(f.a, "this is a")
- self.assertEquals(f.b, 12)
+ eq_(f.a, "this is a")
+ eq_(f.b, 12)
f.a = "this is some new a"
attributes.instance_state(f).expire_attributes(None)
- self.assertEquals(f.a, "this is a")
- self.assertEquals(f.b, 12)
+ eq_(f.a, "this is a")
+ eq_(f.b, 12)
attributes.instance_state(f).expire_attributes(None)
f.a = "this is another new a"
- self.assertEquals(f.a, "this is another new a")
- self.assertEquals(f.b, 12)
+ eq_(f.a, "this is another new a")
+ eq_(f.b, 12)
attributes.instance_state(f).expire_attributes(None)
- self.assertEquals(f.a, "this is a")
- self.assertEquals(f.b, 12)
+ eq_(f.a, "this is a")
+ eq_(f.b, 12)
del f.a
- self.assertEquals(f.a, None)
- self.assertEquals(f.b, 12)
+ eq_(f.a, None)
+ eq_(f.b, 12)
attributes.instance_state(f).commit_all()
- self.assertEquals(f.a, None)
- self.assertEquals(f.b, 12)
+ eq_(f.a, None)
+ eq_(f.b, 12)
def test_deferred_pickleable(self):
data = {'a':'this is a', 'b':12}
@@ -161,8 +163,8 @@ class AttributesTest(TestBase):
assert 'a' not in m.__dict__
m2 = pickle.loads(pickle.dumps(m))
assert 'a' not in m2.__dict__
- self.assertEquals(m2.a, "this is a")
- self.assertEquals(m2.b, 12)
+ eq_(m2.a, "this is a")
+ eq_(m2.b, 12)
def test_list(self):
class User(object):pass
@@ -170,11 +172,11 @@ class AttributesTest(TestBase):
attributes.register_class(User)
attributes.register_class(Address)
- attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
- attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
+ attributes.register_attribute(User, 'user_id', uselist=False, useobject=False)
+ attributes.register_attribute(User, 'user_name', uselist=False, useobject=False)
attributes.register_attribute(User, 'addresses', uselist = True, useobject=True)
- attributes.register_attribute(Address, 'address_id', uselist = False, useobject=False)
- attributes.register_attribute(Address, 'email_address', uselist = False, useobject=False)
+ attributes.register_attribute(Address, 'address_id', uselist=False, useobject=False)
+ attributes.register_attribute(Address, 'email_address', uselist=False, useobject=False)
u = User()
u.user_id = 7
@@ -225,7 +227,7 @@ class AttributesTest(TestBase):
f.y = [4,5,6]
del f.y
- self.assertEquals(results, [
+ eq_(results, [
('set', f, 5, None),
('set', f, 17, 5),
('remove', f, 17),
@@ -313,8 +315,8 @@ class AttributesTest(TestBase):
attributes.register_class(Bar)
b = Bar()
- self.assertEquals(len(states), 1)
- self.assertEquals(list(states)[0].obj(), b)
+ eq_(len(states), 1)
+ eq_(list(states)[0].obj(), b)
def test_inheritance2(self):
@@ -332,7 +334,7 @@ class AttributesTest(TestBase):
el = Element()
x = Bar()
x.element = el
- self.assertEquals(attributes.get_history(attributes.instance_state(x), 'element'), ([el],[], []))
+ eq_(attributes.get_history(attributes.instance_state(x), 'element'), ([el],[], []))
attributes.instance_state(x).commit_all()
(added, unchanged, deleted) = attributes.get_history(attributes.instance_state(x), 'element')
@@ -342,9 +344,9 @@ class AttributesTest(TestBase):
def test_lazyhistory(self):
"""tests that history functions work with lazy-loading attributes"""
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
attributes.register_class(Foo)
@@ -363,7 +365,7 @@ class AttributesTest(TestBase):
x = Foo()
attributes.instance_state(x).commit_all()
x.col2.append(bar4)
- self.assertEquals(attributes.get_history(attributes.instance_state(x), 'col2'), ([bar4], [bar1, bar2, bar3], []))
+ eq_(attributes.get_history(attributes.instance_state(x), 'col2'), ([bar4], [bar1, bar2, bar3], []))
def test_parenttrack(self):
class Foo(object):pass
@@ -492,7 +494,7 @@ class AttributesTest(TestBase):
assert False
-class BackrefTest(TestBase):
+class BackrefTest(_base.ORMTest):
def test_manytomany(self):
class Student(object):pass
@@ -565,7 +567,7 @@ class BackrefTest(TestBase):
j.port = None
self.assert_(p.jack is None)
-class PendingBackrefTest(TestBase):
+class PendingBackrefTest(_base.ORMTest):
def setUp(self):
global Post, Blog, called, lazy_load
@@ -627,7 +629,7 @@ class PendingBackrefTest(TestBase):
p4 = Post("post 5")
p4.blog = b
assert called[0] == 0
- self.assertEquals(attributes.instance_state(b).get_history('posts'), ([p, p4], [p1, p2, p3], []))
+ eq_(attributes.instance_state(b).get_history('posts'), ([p, p4], [p1, p2, p3], []))
assert called[0] == 1
def test_lazy_remove(self):
@@ -669,7 +671,7 @@ class PendingBackrefTest(TestBase):
called[0] = 0
lazy_load = (p1, p2, p3) = [Post("post 1"), Post("post 2"), Post("post 3")]
-
+
def test_commit_removes_pending(self):
global lazy_load
lazy_load = (p1, ) = [Post("post 1"), ]
@@ -680,32 +682,31 @@ class PendingBackrefTest(TestBase):
attributes.instance_state(b).commit_all()
attributes.instance_state(p1).commit_all()
assert b.posts == [Post("post 1")]
-
-
-
-class HistoryTest(TestBase):
+
+class HistoryTest(_base.ORMTest):
+
def test_get_committed_value(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=False)
f = Foo()
- self.assertEquals(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
+ eq_(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
f.someattr = 3
- self.assertEquals(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
+ eq_(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
f = Foo()
f.someattr = 3
- self.assertEquals(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
+ eq_(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), None)
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), 3)
+ eq_(Foo.someattr.impl.get_committed_value(attributes.instance_state(f)), 3)
def test_scalar(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
attributes.register_class(Foo)
@@ -713,61 +714,61 @@ class HistoryTest(TestBase):
# case 1. new object
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
f.someattr = "hi"
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['hi'], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['hi'], [], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['hi'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['hi'], []))
f.someattr = 'there'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['there'], [], ['hi']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['there'], [], ['hi']))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['there'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['there'], []))
del f.someattr
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], ['there']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], ['there']))
# case 2. object with direct dictionary settings (similar to a load operation)
f = Foo()
f.__dict__['someattr'] = 'new'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
f.someattr = 'old'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['old'], [], ['new']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['old'], [], ['new']))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['old'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['old'], []))
# setting None on uninitialized is currently a change for a scalar attribute
# no lazyload occurs so this allows overwrite operation to proceed
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
print f._foostate.committed_state
f.someattr = None
print f._foostate.committed_state, f._foostate.dict
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], []))
f = Foo()
f.__dict__['someattr'] = 'new'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
f.someattr = None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], ['new']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], ['new']))
# set same value twice
f = Foo()
attributes.instance_state(f).commit(['someattr'])
f.someattr = 'one'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['one'], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['one'], [], []))
f.someattr = 'two'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['two'], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['two'], [], []))
def test_mutable_scalar(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
attributes.register_class(Foo)
@@ -775,40 +776,40 @@ class HistoryTest(TestBase):
# case 1. new object
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
f.someattr = {'foo':'hi'}
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'hi'}], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'hi'}], [], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'hi'}], []))
- self.assertEquals(attributes.instance_state(f).committed_state['someattr'], {'foo':'hi'})
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'hi'}], []))
+ eq_(attributes.instance_state(f).committed_state['someattr'], {'foo':'hi'})
f.someattr['foo'] = 'there'
- self.assertEquals(attributes.instance_state(f).committed_state['someattr'], {'foo':'hi'})
+ eq_(attributes.instance_state(f).committed_state['someattr'], {'foo':'hi'})
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'there'}], [], [{'foo':'hi'}]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'there'}], [], [{'foo':'hi'}]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'there'}], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'there'}], []))
# case 2. object with direct dictionary settings (similar to a load operation)
f = Foo()
f.__dict__['someattr'] = {'foo':'new'}
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'new'}], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'new'}], []))
f.someattr = {'foo':'old'}
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'old'}], [], [{'foo':'new'}]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([{'foo':'old'}], [], [{'foo':'new'}]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'old'}], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [{'foo':'old'}], []))
def test_use_object(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
_state = None
def __nonzero__(self):
assert False
@@ -823,61 +824,61 @@ class HistoryTest(TestBase):
# case 1. new object
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
f.someattr = hi
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
f.someattr = there
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], [hi]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], [hi]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [there], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [there], []))
del f.someattr
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], [there]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], [there]))
# case 2. object with direct dictionary settings (similar to a load operation)
f = Foo()
f.__dict__['someattr'] = 'new'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
f.someattr = old
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [], ['new']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [], ['new']))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old], []))
# setting None on uninitialized is currently not a change for an object attribute
# (this is different than scalar attribute). a lazyload has occured so if its
# None, its really None
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
f.someattr = None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [None], []))
f = Foo()
f.__dict__['someattr'] = 'new'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], ['new'], []))
f.someattr = None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], ['new']))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([None], [], ['new']))
# set same value twice
f = Foo()
attributes.instance_state(f).commit(['someattr'])
f.someattr = 'one'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['one'], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['one'], [], []))
f.someattr = 'two'
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), (['two'], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), (['two'], [], []))
def test_object_collections_set(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
def __nonzero__(self):
assert False
@@ -891,44 +892,44 @@ class HistoryTest(TestBase):
# case 1. new object
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
f.someattr = [hi]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
f.someattr = [there]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], [hi]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], [hi]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [there], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [there], []))
f.someattr = [hi]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], [there]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], [there]))
f.someattr = [old, new]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old, new], [], [there]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old, new], [], [there]))
# case 2. object with direct settings (similar to a load operation)
f = Foo()
collection = attributes.init_collection(attributes.instance_state(f), 'someattr')
collection.append_without_event(new)
attributes.instance_state(f).commit_all()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
f.someattr = [old]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [], [new]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [], [new]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old], []))
def test_dict_collections(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
from sqlalchemy.orm.collections import attribute_mapped_collection
@@ -942,21 +943,21 @@ class HistoryTest(TestBase):
new = Bar(name='new')
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
f.someattr['hi'] = hi
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
f.someattr['there'] = there
- self.assertEquals(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set([hi, there]), set([]), set([])))
+ eq_(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set([hi, there]), set([]), set([])))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set([]), set([hi, there]), set([])))
+ eq_(tuple([set(x) for x in attributes.get_history(attributes.instance_state(f), 'someattr')]), (set([]), set([hi, there]), set([])))
def test_object_collections_mutate(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
attributes.register_class(Foo)
@@ -970,32 +971,32 @@ class HistoryTest(TestBase):
# case 1. new object
f = Foo(id=1)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], []))
f.someattr.append(hi)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], []))
f.someattr.append(there)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [hi], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [hi], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi, there], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi, there], []))
f.someattr.remove(there)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], [there]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi], [there]))
f.someattr.append(old)
f.someattr.append(new)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old, new], [hi], [there]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old, new], [hi], [there]))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi, old, new], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [hi, old, new], []))
f.someattr.pop(0)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old, new], [hi]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [old, new], [hi]))
# case 2. object with direct settings (similar to a load operation)
f = Foo()
@@ -1003,37 +1004,37 @@ class HistoryTest(TestBase):
collection = attributes.init_collection(attributes.instance_state(f), 'someattr')
collection.append_without_event(new)
attributes.instance_state(f).commit_all()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
f.someattr.append(old)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [new], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([old], [new], []))
attributes.instance_state(f).commit(['someattr'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new, old], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new, old], []))
f = Foo()
collection = attributes.init_collection(attributes.instance_state(f), 'someattr')
collection.append_without_event(new)
attributes.instance_state(f).commit_all()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [new], []))
f.id = 1
f.someattr.remove(new)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], [new]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([], [], [new]))
# case 3. mixing appends with sets
f = Foo()
f.someattr.append(hi)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi], [], []))
f.someattr.append(there)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi, there], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([hi, there], [], []))
f.someattr = [there]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'someattr'), ([there], [], []))
def test_collections_via_backref(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
attributes.register_class(Foo)
@@ -1043,24 +1044,24 @@ class HistoryTest(TestBase):
f1 = Foo()
b1 = Bar()
- self.assertEquals(attributes.get_history(attributes.instance_state(f1), 'bars'), ([], [], []))
- self.assertEquals(attributes.get_history(attributes.instance_state(b1), 'foo'), ([], [None], []))
+ eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ([], [], []))
+ eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ([], [None], []))
#b1.foo = f1
f1.bars.append(b1)
- self.assertEquals(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1], [], []))
- self.assertEquals(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1], [], []))
+ eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], [], []))
b2 = Bar()
f1.bars.append(b2)
- self.assertEquals(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1, b2], [], []))
- self.assertEquals(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], [], []))
- self.assertEquals(attributes.get_history(attributes.instance_state(b2), 'foo'), ([f1], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f1), 'bars'), ([b1, b2], [], []))
+ eq_(attributes.get_history(attributes.instance_state(b1), 'foo'), ([f1], [], []))
+ eq_(attributes.get_history(attributes.instance_state(b2), 'foo'), ([f1], [], []))
def test_lazy_backref_collections(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
lazy_load = []
@@ -1080,22 +1081,22 @@ class HistoryTest(TestBase):
f = Foo()
bar4 = Bar()
bar4.foo = f
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar2, bar3], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar2, bar3], []))
lazy_load = None
f = Foo()
bar4 = Bar()
bar4.foo = f
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [], []))
lazy_load = [bar1, bar2, bar3]
attributes.instance_state(f).expire_attributes(['bars'])
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar2, bar3], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar2, bar3], []))
def test_collections_via_lazyload(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
lazy_load = []
@@ -1113,29 +1114,29 @@ class HistoryTest(TestBase):
f = Foo()
f.bars = []
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [], [bar1, bar2, bar3]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [], [bar1, bar2, bar3]))
f = Foo()
f.bars.append(bar4)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar2, bar3], []) )
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar2, bar3], []) )
f = Foo()
f.bars.remove(bar2)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar3], [bar2]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar3], [bar2]))
f.bars.append(bar4)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar3], [bar2]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar4], [bar1, bar3], [bar2]))
f = Foo()
del f.bars[1]
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar3], [bar2]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([], [bar1, bar3], [bar2]))
lazy_load = None
f = Foo()
f.bars.append(bar2)
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar2], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bars'), ([bar2], [], []))
def test_scalar_via_lazyload(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
lazy_load = None
@@ -1152,30 +1153,30 @@ class HistoryTest(TestBase):
# operations
f = Foo()
- self.assertEquals(f.bar, "hi")
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([], ["hi"], []))
+ eq_(f.bar, "hi")
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([], ["hi"], []))
f = Foo()
f.bar = None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], []))
f = Foo()
f.bar = "there"
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), (["there"], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), (["there"], [], []))
f.bar = "hi"
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), (["hi"], [], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), (["hi"], [], []))
f = Foo()
- self.assertEquals(f.bar, "hi")
+ eq_(f.bar, "hi")
del f.bar
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [], ["hi"]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [], ["hi"]))
assert f.bar is None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], ["hi"]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], ["hi"]))
def test_scalar_object_via_lazyload(self):
- class Foo(fixtures.Base):
+ class Foo(_base.BasicEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.BasicEntity):
pass
lazy_load = None
@@ -1194,24 +1195,24 @@ class HistoryTest(TestBase):
# operations
f = Foo()
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [bar1], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [bar1], []))
f = Foo()
f.bar = None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
f = Foo()
f.bar = bar2
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([bar2], [], [bar1]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([bar2], [], [bar1]))
f.bar = bar1
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [bar1], []))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([], [bar1], []))
f = Foo()
- self.assertEquals(f.bar, bar1)
+ eq_(f.bar, bar1)
del f.bar
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
assert f.bar is None
- self.assertEquals(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
+ eq_(attributes.get_history(attributes.instance_state(f), 'bar'), ([None], [], [bar1]))
if __name__ == "__main__":
diff --git a/test/orm/cascade.py b/test/orm/cascade.py
index 4a2dc4419..5a86596a3 100644
--- a/test/orm/cascade.py
+++ b/test/orm/cascade.py
@@ -1,46 +1,52 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes, exc as orm_exc
-from testlib import *
-from testlib import fixtures
-
-class O2MCascadeTest(fixtures.FixtureTest):
- keep_mappers = True
- keep_data = False
- refresh_data = False
+from testlib.sa import Table, Column, Integer, String, ForeignKey, Sequence
+from testlib.sa.orm import mapper, relation, create_session, class_mapper
+from testlib.sa.orm import attributes, exc as orm_exc
+from testlib import testing
+from testlib.testing import eq_
+from orm import _base, _fixtures
+
+class O2MCascadeTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @testing.resolve_artifact_names
def setup_mappers(self):
- global User, Address, Order, users, orders, addresses
- from testlib.fixtures import User, Address, Order, users, orders, addresses
-
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relation(Address, cascade="all, delete-orphan"),
orders = relation(
mapper(Order, orders), cascade="all, delete-orphan")
))
-
+
+ @testing.resolve_artifact_names
def test_list_assignment(self):
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack', orders=[
+ Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
sess.clear()
-
+
u = sess.query(User).get(u.id)
- self.assertEquals(u, User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')]))
-
+ eq_(u, User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')]))
+
u.orders=[Order(description="order 3"), Order(description="order 4")]
sess.flush()
sess.clear()
-
+
u = sess.query(User).get(u.id)
- self.assertEquals(u, User(name='jack', orders=[Order(description="order 3"), Order(description="order 4")]))
+ eq_(u, User(name='jack',
+ orders=[Order(description="order 3"),
+ Order(description="order 4")]))
+
+ eq_(sess.query(Order).all(),
+ [Order(description="order 3"), Order(description="order 4")])
- self.assertEquals(sess.query(Order).all(), [Order(description="order 3"), Order(description="order 4")])
o5 = Order(description="order 5")
sess.save(o5)
try:
@@ -49,9 +55,12 @@ class O2MCascadeTest(fixtures.FixtureTest):
except orm_exc.FlushError, e:
assert "is an orphan" in str(e)
+ @testing.resolve_artifact_names
def test_delete(self):
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
@@ -60,34 +69,39 @@ class O2MCascadeTest(fixtures.FixtureTest):
assert users.count().scalar() == 0
assert orders.count().scalar() == 0
+ @testing.resolve_artifact_names
def test_delete_unloaded_collections(self):
- """test that unloaded collections are still included in a delete-cascade by default."""
-
+ """Unloaded collections are still included in a delete-cascade by default."""
sess = create_session()
- u = User(name='jack', addresses=[Address(email_address="address1"), Address(email_address="address2")])
+ u = User(name='jack',
+ addresses=[Address(email_address="address1"),
+ Address(email_address="address2")])
sess.save(u)
sess.flush()
sess.clear()
assert addresses.count().scalar() == 2
assert users.count().scalar() == 1
-
+
u = sess.query(User).get(u.id)
-
+
assert 'addresses' not in u.__dict__
sess.delete(u)
sess.flush()
assert addresses.count().scalar() == 0
assert users.count().scalar() == 0
+ @testing.resolve_artifact_names
def test_cascades_onlycollection(self):
- """test that cascade only reaches instances that are still part of the collection,
+ """Cascade only reaches instances that are still part of the collection,
not those that have been removed"""
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
-
+
o = u.orders[0]
del u.orders[0]
sess.delete(u)
@@ -101,11 +115,16 @@ class O2MCascadeTest(fixtures.FixtureTest):
sess.clear()
assert users.count().scalar() == 1
assert orders.count().scalar() == 1
- self.assertEquals(sess.query(User).all(), [User(name='newuser', orders=[Order(description='someorder')])])
+ eq_(sess.query(User).all(),
+ [User(name='newuser',
+ orders=[Order(description='someorder')])])
+ @testing.resolve_artifact_names
def test_cascade_delete_plusorphans(self):
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
assert users.count().scalar() == 1
@@ -116,10 +135,13 @@ class O2MCascadeTest(fixtures.FixtureTest):
sess.flush()
assert users.count().scalar() == 0
assert orders.count().scalar() == 0
-
+
+ @testing.resolve_artifact_names
def test_collection_orphans(self):
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
@@ -133,64 +155,62 @@ class O2MCascadeTest(fixtures.FixtureTest):
assert users.count().scalar() == 1
assert orders.count().scalar() == 0
-class O2MCascadeNoOrphanTest(fixtures.FixtureTest):
- keep_mappers = True
- keep_data = False
- refresh_data = False
+class O2MCascadeNoOrphanTest(_fixtures.FixtureTest):
+ run_inserts = None
+ @testing.resolve_artifact_names
def setup_mappers(self):
- global User, Address, Order, users, orders, addresses
- from testlib.fixtures import User, Address, Order, users, orders, addresses
-
mapper(User, users, properties = dict(
orders = relation(
mapper(Order, orders), cascade="all")
))
-
+
+ @testing.resolve_artifact_names
def test_cascade_delete_noorphans(self):
sess = create_session()
- u = User(name='jack', orders=[Order(description='someorder'), Order(description='someotherorder')])
+ u = User(name='jack',
+ orders=[Order(description='someorder'),
+ Order(description='someotherorder')])
sess.save(u)
sess.flush()
assert users.count().scalar() == 1
assert orders.count().scalar() == 2
-
+
del u.orders[0]
sess.delete(u)
sess.flush()
assert users.count().scalar() == 0
assert orders.count().scalar() == 1
-
-class M2OCascadeTest(ORMTest):
- keep_mappers = True
-
- def define_tables(self, metadata):
- global extra, prefs, users
- extra = Table("extra", metadata,
- Column("id", Integer, Sequence("extra_id_seq", optional=True), primary_key=True),
- Column("prefs_id", Integer, ForeignKey("prefs.id"))
- )
- prefs = Table('prefs', metadata,
- Column('id', Integer, Sequence('prefs_id_seq', optional=True), primary_key=True),
+class M2OCascadeTest(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table("extra", metadata,
+ Column("id", Integer, Sequence("extra_id_seq", optional=True),
+ primary_key=True),
+ Column("prefs_id", Integer, ForeignKey("prefs.id")))
+
+ Table('prefs', metadata,
+ Column('id', Integer, Sequence('prefs_id_seq', optional=True),
+ primary_key=True),
Column('data', String(40)))
- users = Table('users', metadata,
- Column('id', Integer, Sequence('user_id_seq', optional=True), primary_key = True),
+ Table('users', metadata,
+ Column('id', Integer, Sequence('user_id_seq', optional=True),
+ primary_key=True),
Column('name', String(40)),
- Column('pref_id', Integer, ForeignKey('prefs.id'))
- )
-
- def setup_mappers(self):
- global User, Pref, Extra
- class User(fixtures.Base):
+ Column('pref_id', Integer, ForeignKey('prefs.id')))
+
+ def setup_classes(self):
+ class User(_fixtures.Base):
pass
- class Pref(fixtures.Base):
+ class Pref(_fixtures.Base):
pass
- class Extra(fixtures.Base):
+ class Extra(_fixtures.Base):
pass
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
mapper(Extra, extra)
mapper(Pref, prefs, properties=dict(
extra = relation(Extra, cascade="all, delete")
@@ -199,18 +219,18 @@ class M2OCascadeTest(ORMTest):
pref = relation(Pref, lazy=False, cascade="all, delete-orphan")
))
- def setUp(self):
+ @testing.resolve_artifact_names
+ def insert_data(self):
u1 = User(name='ed', pref=Pref(data="pref 1", extra=[Extra()]))
u2 = User(name='jack', pref=Pref(data="pref 2", extra=[Extra()]))
u3 = User(name="foo", pref=Pref(data="pref 3", extra=[Extra()]))
sess = create_session()
- sess.save(u1)
- sess.save(u2)
- sess.save(u3)
+ sess.add_all((u1, u2, u3))
sess.flush()
sess.close()
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_orphan(self):
sess = create_session()
assert prefs.count().scalar() == 3
@@ -222,6 +242,7 @@ class M2OCascadeTest(ORMTest):
assert extra.count().scalar() == 2
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_orphan_on_update(self):
sess = create_session()
jack = sess.query(User).filter_by(name="jack").one()
@@ -238,7 +259,8 @@ class M2OCascadeTest(ORMTest):
sess.flush()
assert prefs.count().scalar() == 2
assert extra.count().scalar() == 2
-
+
+ @testing.resolve_artifact_names
def test_pending_expunge(self):
sess = create_session()
someuser = User(name='someuser')
@@ -249,11 +271,12 @@ class M2OCascadeTest(ORMTest):
someuser.pref = Pref(data='someotherpref')
assert p1 not in sess
sess.flush()
- self.assertEquals(sess.query(Pref).with_parent(someuser).all(), [Pref(data="someotherpref")])
+ eq_(sess.query(Pref).with_parent(someuser).all(),
+ [Pref(data="someotherpref")])
-
+ @testing.resolve_artifact_names
def test_double_assignment(self):
- """test that double assignment doesn't accidentally reset the 'parent' flag."""
+ """Double assignment will not accidentally reset the 'parent' flag."""
sess = create_session()
jack = sess.query(User).filter_by(name="jack").one()
@@ -262,43 +285,53 @@ class M2OCascadeTest(ORMTest):
jack.pref = newpref
jack.pref = newpref
sess.flush()
- self.assertEquals(sess.query(Pref).all(), [Pref(data="pref 1"), Pref(data="pref 3"), Pref(data="newpref")])
+ eq_(sess.query(Pref).all(),
+ [Pref(data="pref 1"), Pref(data="pref 3"), Pref(data="newpref")])
-class M2OCascadeDeleteTest(ORMTest):
- keep_mappers = True
-
+class M2OCascadeDeleteTest(_base.MappedTest):
def define_tables(self, metadata):
- global t1, t2, t3
- t1 = Table('t1', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)), Column('t2id', Integer, ForeignKey('t2.id')))
- t2 = Table('t2', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)), Column('t3id', Integer, ForeignKey('t3.id')))
- t3 = Table('t3', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)))
-
+ Table('t1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
+ Table('t2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t3id', Integer, ForeignKey('t3.id')))
+ Table('t3', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
+
+ def setup_classes(self):
+ class T1(_fixtures.Base):
+ pass
+ class T2(_fixtures.Base):
+ pass
+ class T3(_fixtures.Base):
+ pass
+
+ @testing.resolve_artifact_names
def setup_mappers(self):
- global T1, T2, T3
- class T1(fixtures.Base):pass
- class T2(fixtures.Base):pass
- class T3(fixtures.Base):pass
-
- mapper(T1, t1, properties={'t2':relation(T2, cascade="all")})
- mapper(T2, t2, properties={'t3':relation(T3, cascade="all")})
+ mapper(T1, t1, properties={'t2': relation(T2, cascade="all")})
+ mapper(T2, t2, properties={'t3': relation(T3, cascade="all")})
mapper(T3, t3)
+ @testing.resolve_artifact_names
def test_cascade_delete(self):
sess = create_session()
-
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
sess.save(x)
sess.flush()
-
+
sess.delete(x)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+ @testing.resolve_artifact_names
def test_cascade_delete_postappend_onelevel(self):
sess = create_session()
-
x1 = T1(data='t1', )
x2 = T2(data='t2')
x3 = T3(data='t3')
@@ -306,18 +339,18 @@ class M2OCascadeDeleteTest(ORMTest):
sess.save(x2)
sess.save(x3)
sess.flush()
-
+
sess.delete(x1)
x1.t2 = x2
x2.t3 = x3
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+ @testing.resolve_artifact_names
def test_cascade_delete_postappend_twolevel(self):
sess = create_session()
-
x1 = T1(data='t1', t2=T2(data='t2'))
x3 = T3(data='t3')
sess.save(x1)
@@ -327,13 +360,13 @@ class M2OCascadeDeleteTest(ORMTest):
sess.delete(x1)
x1.t2.t3 = x3
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
-
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+
+ @testing.resolve_artifact_names
def test_preserves_orphans_onelevel(self):
sess = create_session()
-
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
sess.save(x2)
sess.flush()
@@ -341,14 +374,14 @@ class M2OCascadeDeleteTest(ORMTest):
sess.delete(x2)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [T2()])
- self.assertEquals(sess.query(T3).all(), [T3()])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [T2()])
+ eq_(sess.query(T3).all(), [T3()])
@testing.future
+ @testing.resolve_artifact_names
def test_preserves_orphans_onelevel_postremove(self):
sess = create_session()
-
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
sess.save(x2)
sess.flush()
@@ -356,59 +389,72 @@ class M2OCascadeDeleteTest(ORMTest):
sess.delete(x2)
x2.t2 = None
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [T2()])
- self.assertEquals(sess.query(T3).all(), [T3()])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [T2()])
+ eq_(sess.query(T3).all(), [T3()])
+ @testing.resolve_artifact_names
def test_preserves_orphans_twolevel(self):
sess = create_session()
-
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
sess.save(x)
sess.flush()
-
+
x.t2.t3 = None
sess.delete(x)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [T3()])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [T3()])
-class M2OCascadeDeleteOrphanTest(ORMTest):
- keep_mappers = True
+
+class M2OCascadeDeleteOrphanTest(_base.MappedTest):
def define_tables(self, metadata):
- global t1, t2, t3
- t1 = Table('t1', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)), Column('t2id', Integer, ForeignKey('t2.id')))
- t2 = Table('t2', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)), Column('t3id', Integer, ForeignKey('t3.id')))
- t3 = Table('t3', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)))
+ Table('t1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
+ Table('t2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t3id', Integer, ForeignKey('t3.id')))
+ Table('t3', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
+
+ def setup_classes(self):
+ class T1(_fixtures.Base):
+ pass
+ class T2(_fixtures.Base):
+ pass
+ class T3(_fixtures.Base):
+ pass
+ @testing.resolve_artifact_names
def setup_mappers(self):
- global T1, T2, T3
- class T1(fixtures.Base):pass
- class T2(fixtures.Base):pass
- class T3(fixtures.Base):pass
-
- mapper(T1, t1, properties={'t2':relation(T2, cascade="all, delete-orphan")})
- mapper(T2, t2, properties={'t3':relation(T3, cascade="all, delete-orphan")})
+ mapper(T1, t1, properties=dict(
+ t2=relation(T2, cascade="all, delete-orphan")))
+ mapper(T2, t2, properties=dict(
+ t3=relation(T3, cascade="all, delete-orphan")))
mapper(T3, t3)
+ @testing.resolve_artifact_names
def test_cascade_delete(self):
sess = create_session()
-
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
sess.save(x)
sess.flush()
sess.delete(x)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+ @testing.resolve_artifact_names
def test_deletes_orphans_onelevel(self):
sess = create_session()
-
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
sess.save(x2)
sess.flush()
@@ -416,13 +462,13 @@ class M2OCascadeDeleteOrphanTest(ORMTest):
sess.delete(x2)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+ @testing.resolve_artifact_names
def test_deletes_orphans_twolevel(self):
sess = create_session()
-
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
sess.save(x)
sess.flush()
@@ -430,53 +476,53 @@ class M2OCascadeDeleteOrphanTest(ORMTest):
x.t2.t3 = None
sess.delete(x)
sess.flush()
- self.assertEquals(sess.query(T1).all(), [])
- self.assertEquals(sess.query(T2).all(), [])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [])
+ eq_(sess.query(T2).all(), [])
+ eq_(sess.query(T3).all(), [])
+ @testing.resolve_artifact_names
def test_finds_orphans_twolevel(self):
sess = create_session()
-
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
sess.save(x)
sess.flush()
x.t2.t3 = None
sess.flush()
- self.assertEquals(sess.query(T1).all(), [T1()])
- self.assertEquals(sess.query(T2).all(), [T2()])
- self.assertEquals(sess.query(T3).all(), [])
+ eq_(sess.query(T1).all(), [T1()])
+ eq_(sess.query(T2).all(), [T2()])
+ eq_(sess.query(T3).all(), [])
-class M2MCascadeTest(ORMTest):
+class M2MCascadeTest(_base.MappedTest):
def define_tables(self, metadata):
- global a, b, atob, c
- a = Table('a', metadata,
+ Table('a', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(30))
- )
- b = Table('b', metadata,
+ Column('data', String(30)))
+ Table('b', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(30))
- )
- atob = Table('atob', metadata,
+ Column('data', String(30)))
+ Table('atob', metadata,
Column('aid', Integer, ForeignKey('a.id')),
- Column('bid', Integer, ForeignKey('b.id'))
- )
- c = Table('c', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)),
- Column('bid', Integer, ForeignKey('b.id'))
- )
-
- def test_delete_orphan(self):
- class A(fixtures.Base):
+ Column('bid', Integer, ForeignKey('b.id')))
+ Table('c', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)),
+ Column('bid', Integer, ForeignKey('b.id')))
+
+ def setup_classes(self):
+ class A(_fixtures.Base):
+ pass
+ class B(_fixtures.Base):
pass
- class B(fixtures.Base):
+ class C(_fixtures.Base):
pass
+ @testing.resolve_artifact_names
+ def test_delete_orphan(self):
mapper(A, a, properties={
- # if no backref here, delete-orphan failed until [ticket:427] was fixed
- 'bs':relation(B, secondary=atob, cascade="all, delete-orphan")
+ # if no backref here, delete-orphan failed until [ticket:427] was
+ # fixed
+ 'bs': relation(B, secondary=atob, cascade="all, delete-orphan")
})
mapper(B, b)
@@ -492,16 +538,11 @@ class M2MCascadeTest(ORMTest):
assert b.count().scalar() == 0
assert a.count().scalar() == 1
+ @testing.resolve_artifact_names
def test_delete_orphan_cascades(self):
- class A(fixtures.Base):
- pass
- class B(fixtures.Base):
- pass
- class C(fixtures.Base):
- pass
-
mapper(A, a, properties={
- # if no backref here, delete-orphan failed until [ticket:427] was fixed
+ # if no backref here, delete-orphan failed until [ticket:427] was
+ # fixed
'bs':relation(B, secondary=atob, cascade="all, delete-orphan")
})
mapper(B, b, properties={'cs':relation(C, cascade="all, delete-orphan")})
@@ -519,13 +560,9 @@ class M2MCascadeTest(ORMTest):
assert b.count().scalar() == 0
assert a.count().scalar() == 1
assert c.count().scalar() == 0
-
- def test_cascade_delete(self):
- class A(fixtures.Base):
- pass
- class B(fixtures.Base):
- pass
+ @testing.resolve_artifact_names
+ def test_cascade_delete(self):
mapper(A, a, properties={
'bs':relation(B, secondary=atob, cascade="all, delete-orphan")
})
@@ -542,26 +579,33 @@ class M2MCascadeTest(ORMTest):
assert b.count().scalar() == 0
assert a.count().scalar() == 0
-class UnsavedOrphansTest(ORMTest):
- """tests regarding pending entities that are orphans"""
- def define_tables(self, metadata):
- global users, addresses, User, Address
- users = Table('users', metadata,
- Column('user_id', Integer, Sequence('user_id_seq', optional=True), primary_key = True),
- Column('name', String(40)),
- )
+class UnsavedOrphansTest(_base.MappedTest):
+ """Pending entities that are orphans"""
- addresses = Table('email_addresses', metadata,
- Column('address_id', Integer, Sequence('address_id_seq', optional=True), primary_key = True),
- Column('user_id', Integer, ForeignKey(users.c.user_id)),
- Column('email_address', String(40)),
- )
- class User(fixtures.Base):pass
- class Address(fixtures.Base):pass
+ def define_tables(self, metadata):
+ Table('users', metadata,
+ Column('user_id', Integer,
+ Sequence('user_id_seq', optional=True),
+ primary_key=True),
+ Column('name', String(40)))
+
+ Table('addresses', metadata,
+ Column('address_id', Integer,
+ Sequence('address_id_seq', optional=True),
+ primary_key=True),
+ Column('user_id', Integer, ForeignKey('users.user_id')),
+ Column('email_address', String(40)))
+
+ def setup_classes(self):
+ class User(_fixtures.Base):
+ pass
+ class Address(_fixtures.Base):
+ pass
+ @testing.resolve_artifact_names
def test_pending_standalone_orphan(self):
- """test that an entity that never had a parent on a delete-orphan cascade cant be saved."""
+ """An entity that never had a parent on a delete-orphan cascade can't be saved."""
mapper(Address, addresses)
mapper(User, users, properties=dict(
@@ -576,8 +620,9 @@ class UnsavedOrphansTest(ORMTest):
pass
assert a.address_id is None, "Error: address should not be persistent"
+ @testing.resolve_artifact_names
def test_pending_collection_expunge(self):
- """test that removing a pending item from a collection expunges it from the session."""
+ """Removing a pending item from a collection expunges it from the session."""
mapper(Address, addresses)
mapper(User, users, properties=dict(
@@ -592,15 +637,16 @@ class UnsavedOrphansTest(ORMTest):
u.addresses.append(a)
assert a in s
-
+
u.addresses.remove(a)
assert a not in s
s.delete(u)
- s.flush()
+ s.flush()
assert a.address_id is None, "Error: address should not be persistent"
-
+
+ @testing.resolve_artifact_names
def test_nonorphans_ok(self):
mapper(Address, addresses)
mapper(User, users, properties=dict(
@@ -614,37 +660,39 @@ class UnsavedOrphansTest(ORMTest):
assert a1 in s
s.flush()
s.clear()
- self.assertEquals(s.query(Address).all(), [Address(email_address='ad1')])
-
-class UnsavedOrphansTest2(ORMTest):
- """same test as UnsavedOrphans only three levels deep"""
-
- def define_tables(self, meta):
- global orders, items, attributes
- orders = Table('orders', meta,
- Column('id', Integer, Sequence('order_id_seq'), primary_key = True),
- Column('name', VARCHAR(50)),
+ eq_(s.query(Address).all(), [Address(email_address='ad1')])
- )
- items = Table('items', meta,
- Column('id', Integer, Sequence('item_id_seq'), primary_key = True),
- Column('order_id', Integer, ForeignKey(orders.c.id), nullable=False),
- Column('name', VARCHAR(50)),
- )
- attributes = Table('attributes', meta,
- Column('id', Integer, Sequence('attribute_id_seq'), primary_key = True),
- Column('item_id', Integer, ForeignKey(items.c.id), nullable=False),
- Column('name', VARCHAR(50)),
-
- )
+class UnsavedOrphansTest2(_base.MappedTest):
+ """same test as UnsavedOrphans only three levels deep"""
+ def define_tables(self, meta):
+ Table('orders', meta,
+ Column('id', Integer, Sequence('order_id_seq'),
+ primary_key=True),
+ Column('name', String(50)))
+
+ Table('items', meta,
+ Column('id', Integer, Sequence('item_id_seq'),
+ primary_key=True),
+ Column('order_id', Integer, ForeignKey('orders.id'),
+ nullable=False),
+ Column('name', String(50)))
+
+ Table('attributes', meta,
+ Column('id', Integer, Sequence('attribute_id_seq'),
+ primary_key=True),
+ Column('item_id', Integer, ForeignKey('items.id'),
+ nullable=False),
+ Column('name', String(50)))
+
+ @testing.resolve_artifact_names
def test_pending_expunge(self):
- class Order(fixtures.Base):
+ class Order(_fixtures.Base):
pass
- class Item(fixtures.Base):
+ class Item(_fixtures.Base):
pass
- class Attribute(fixtures.Base):
+ class Attribute(_fixtures.Base):
pass
mapper(Attribute, attributes)
@@ -663,104 +711,114 @@ class UnsavedOrphansTest2(ORMTest):
item = Item(name="item1", attributes=[attr])
order.items.append(item)
- order.items.remove(item)
-
+ order.items.remove(item)
+
assert item not in s
assert attr not in s
-
+
s.flush()
assert orders.count().scalar() == 1
assert items.count().scalar() == 0
assert attributes.count().scalar() == 0
-class UnsavedOrphansTest3(ORMTest):
- """test not expuning double parents"""
+class UnsavedOrphansTest3(_base.MappedTest):
+ """test not expunging double parents"""
def define_tables(self, meta):
- global sales_reps, accounts, customers
- sales_reps = Table('sales_reps', meta,
- Column('sales_rep_id', Integer, Sequence('sales_rep_id_seq'), primary_key = True),
+ Table('sales_reps', meta,
+ Column('sales_rep_id', Integer,
+ Sequence('sales_rep_id_seq'),
+ primary_key=True),
+ Column('name', String(50)))
+ Table('accounts', meta,
+ Column('account_id', Integer,
+ Sequence('account_id_seq'),
+ primary_key=True),
+ Column('balance', Integer))
+ Table('customers', meta,
+ Column('customer_id', Integer,
+ Sequence('customer_id_seq'),
+ primary_key=True),
Column('name', String(50)),
- )
- accounts = Table('accounts', meta,
- Column('account_id', Integer, Sequence('account_id_seq'), primary_key = True),
- Column('balance', Integer),
- )
- customers = Table('customers', meta,
- Column('customer_id', Integer, Sequence('customer_id_seq'), primary_key = True),
- Column('name', String(50)),
- Column('sales_rep_id', Integer, ForeignKey('sales_reps.sales_rep_id')),
- Column('account_id', Integer, ForeignKey('accounts.account_id')),
- )
+ Column('sales_rep_id', Integer,
+ ForeignKey('sales_reps.sales_rep_id')),
+ Column('account_id', Integer,
+ ForeignKey('accounts.account_id')))
+ @testing.resolve_artifact_names
def test_double_parent_expunge(self):
- """test that removing a pending item from a collection expunges it from the session."""
- class Customer(fixtures.Base):
+ """Removing a pending item from a collection expunges it from the session."""
+
+ class Customer(_fixtures.Base):
pass
- class Account(fixtures.Base):
+ class Account(_fixtures.Base):
pass
- class SalesRep(fixtures.Base):
+ class SalesRep(_fixtures.Base):
pass
mapper(Customer, customers)
mapper(Account, accounts, properties=dict(
- customers=relation(Customer, cascade="all,delete-orphan", backref="account")
- ))
+ customers=relation(Customer,
+ cascade="all,delete-orphan",
+ backref="account")))
mapper(SalesRep, sales_reps, properties=dict(
- customers=relation(Customer, cascade="all,delete-orphan", backref="sales_rep")
- ))
+ customers=relation(Customer,
+ cascade="all,delete-orphan",
+ backref="sales_rep")))
s = create_session()
a = Account(balance=0)
sr = SalesRep(name="John")
[s.save(x) for x in [a,sr]]
s.flush()
-
+
c = Customer(name="Jane")
a.customers.append(c)
sr.customers.append(c)
assert c in s
-
+
a.customers.remove(c)
assert c in s, "Should not expunge customer yet, still has one parent"
sr.customers.remove(c)
assert c not in s, "Should expunge customer when both parents are gone"
-class DoubleParentOrphanTest(ORMTest):
+class DoubleParentOrphanTest(_base.MappedTest):
"""test orphan detection for an entity with two parent relations"""
def define_tables(self, metadata):
- global address_table, businesses, homes
- address_table = Table('addresses', metadata,
+ Table('addresses', metadata,
Column('address_id', Integer, primary_key=True),
Column('street', String(30)),
)
- homes = Table('homes', metadata,
+ Table('homes', metadata,
Column('home_id', Integer, primary_key=True, key="id"),
Column('description', String(30)),
- Column('address_id', Integer, ForeignKey('addresses.address_id'), nullable=False),
+ Column('address_id', Integer, ForeignKey('addresses.address_id'),
+ nullable=False),
)
- businesses = Table('businesses', metadata,
+ Table('businesses', metadata,
Column('business_id', Integer, primary_key=True, key="id"),
Column('description', String(30), key="description"),
- Column('address_id', Integer, ForeignKey('addresses.address_id'), nullable=False),
+ Column('address_id', Integer, ForeignKey('addresses.address_id'),
+ nullable=False),
)
-
+
+ @testing.resolve_artifact_names
def test_non_orphan(self):
"""test that an entity can have two parent delete-orphan cascades, and persists normally."""
- class Address(fixtures.Base):
+ class Address(_fixtures.Base):
pass
- class Home(fixtures.Base):
+ class Home(_fixtures.Base):
pass
- class Business(fixtures.Base):
+ class Business(_fixtures.Base):
pass
-
- mapper(Address, address_table)
+
+ mapper(Address, addresses)
mapper(Home, homes, properties={'address':relation(Address, cascade="all,delete-orphan")})
mapper(Business, businesses, properties={'address':relation(Address, cascade="all,delete-orphan")})
@@ -770,22 +828,23 @@ class DoubleParentOrphanTest(ORMTest):
[session.save(x) for x in [h1,b1]]
session.flush()
session.clear()
-
- self.assertEquals(session.query(Home).get(h1.id), Home(description='home1', address=Address(street='address1')))
- self.assertEquals(session.query(Business).get(b1.id), Business(description='business1', address=Address(street='address2')))
+ eq_(session.query(Home).get(h1.id), Home(description='home1', address=Address(street='address1')))
+ eq_(session.query(Business).get(b1.id), Business(description='business1', address=Address(street='address2')))
+
+ @testing.resolve_artifact_names
def test_orphan(self):
"""test that an entity can have two parent delete-orphan cascades, and is detected as an orphan
when saved without a parent."""
- class Address(fixtures.Base):
+ class Address(_fixtures.Base):
pass
- class Home(fixtures.Base):
+ class Home(_fixtures.Base):
pass
- class Business(fixtures.Base):
+ class Business(_fixtures.Base):
pass
-
- mapper(Address, address_table)
+
+ mapper(Address, addresses)
mapper(Home, homes, properties={'address':relation(Address, cascade="all,delete-orphan")})
mapper(Business, businesses, properties={'address':relation(Address, cascade="all,delete-orphan")})
@@ -798,22 +857,21 @@ class DoubleParentOrphanTest(ORMTest):
except orm_exc.FlushError, e:
assert True
-class CollectionAssignmentOrphanTest(ORMTest):
+class CollectionAssignmentOrphanTest(_base.MappedTest):
def define_tables(self, metadata):
- global table_a, table_b
-
- table_a = Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(30)))
- table_b = Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(30)),
- Column('a_id', Integer, ForeignKey('a.id')))
-
+ Table('table_a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30)))
+ Table('table_b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30)),
+ Column('a_id', Integer, ForeignKey('table_a.id')))
+
+ @testing.resolve_artifact_names
def test_basic(self):
- class A(fixtures.Base):
+ class A(_fixtures.Base):
pass
- class B(fixtures.Base):
+ class B(_fixtures.Base):
pass
mapper(A, table_a, properties={
@@ -828,17 +886,20 @@ class CollectionAssignmentOrphanTest(ORMTest):
sess.flush()
sess.clear()
-
- self.assertEquals(sess.query(A).get(a1.id), A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
+
+ eq_(sess.query(A).get(a1.id),
+ A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
a1 = sess.query(A).get(a1.id)
- assert not class_mapper(B)._is_orphan(attributes.instance_state(a1.bs[0]))
+ assert not class_mapper(B)._is_orphan(
+ attributes.instance_state(a1.bs[0]))
a1.bs[0].foo='b2modified'
a1.bs[1].foo='b3modified'
sess.flush()
sess.clear()
- self.assertEquals(sess.query(A).get(a1.id), A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
+ eq_(sess.query(A).get(a1.id),
+ A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
if __name__ == "__main__":
testenv.main()
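
The cascade.py hunks above replace the old 'global' setup idiom with the new @testing.resolve_artifact_names decorator, which makes a test's tables and mapped classes available as bare names inside the decorated method. A minimal sketch of how such a decorator could work is below; the attribute names 'tables' and 'classes' are illustrative assumptions, not the actual testlib bookkeeping.

    def resolve_artifact_names(fn):
        """Hypothetical sketch: expose a test's tables/classes as bare names."""
        def decorated(self, *args, **kw):
            # 'tables' and 'classes' are assumed dict attributes on the test
            # instance; the real testlib implementation may track them
            # differently.
            artifacts = {}
            artifacts.update(getattr(self, 'tables', {}))
            artifacts.update(getattr(self, 'classes', {}))
            fn_globals = fn.func_globals
            added = []
            for name, value in artifacts.items():
                if name not in fn_globals:
                    fn_globals[name] = value
                    added.append(name)
            try:
                return fn(self, *args, **kw)
            finally:
                # restore the function's globals after the test body runs
                for name in added:
                    del fn_globals[name]
        decorated.__name__ = fn.__name__
        return decorated

With something like this in place, a decorated test such as test_list_assignment can refer to users, orders, User and Order directly, rather than importing them and declaring them 'global' in setup_mappers.
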
diff --git a/test/orm/collection.py b/test/orm/collection.py
index 94e36f366..1fdcb8bdc 100644
--- a/test/orm/collection.py
+++ b/test/orm/collection.py
@@ -1,22 +1,27 @@
import testenv; testenv.configure_for_tests()
import sys
from operator import and_
-from sqlalchemy import *
-import sqlalchemy.exc as sa_exc
-from sqlalchemy.orm import create_session, mapper, relation, \
- interfaces, attributes
+
import sqlalchemy.orm.collections as collections
from sqlalchemy.orm.collections import collection
-from sqlalchemy import util
-from testlib import *
+
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa import util, exc as sa_exc
+from testlib.sa.orm import create_session, mapper, relation, \
+ attributes
+from testlib.compat import set, frozenset
+from orm import _base
+
try:
- py_set = __builtins__.set
-except AttributeError:
+ py_set = set
+except NameError:
import sets
py_set = sets.Set
-class Canary(interfaces.AttributeExtension):
+
+class Canary(sa.orm.interfaces.AttributeExtension):
def __init__(self):
self.data = set()
self.added = set()
@@ -34,30 +39,39 @@ class Canary(interfaces.AttributeExtension):
self.remove(obj, oldvalue, None)
self.append(obj, value, None)
-class Entity(object):
- def __init__(self, a=None, b=None, c=None):
- self.a = a
- self.b = b
- self.c = c
- def __repr__(self):
- return str((id(self), self.a, self.b, self.c))
-
-attributes.register_class(Entity)
-
-_id = 1
-def entity_maker():
- global _id
- _id += 1
- return Entity(_id)
-def dictable_entity(a=None, b=None, c=None):
- global _id
- _id += 1
- return Entity(a or str(_id), b or 'value %s' % _id, c)
-
-
-class CollectionsTest(TestBase):
- def _test_adapter(self, typecallable, creator=entity_maker,
- to_set=None):
+
+class CollectionsTest(_base.ORMTest):
+ class Entity(object):
+ def __init__(self, a=None, b=None, c=None):
+ self.a = a
+ self.b = b
+ self.c = c
+ def __repr__(self):
+ return str((id(self), self.a, self.b, self.c))
+
+ def setUpAll(self):
+ attributes.register_class(self.Entity)
+
+ def tearDownAll(self):
+ attributes.unregister_class(self.Entity)
+ _base.ORMTest.tearDownAll(self)
+
+ _entity_id = 1
+
+ @classmethod
+ def entity_maker(cls):
+ cls._entity_id += 1
+ return cls.Entity(cls._entity_id)
+
+ @classmethod
+ def dictable_entity(cls, a=None, b=None, c=None):
+ id = cls._entity_id = (cls._entity_id + 1)
+ return cls.Entity(a or str(id), b or 'value %s' % id, c)
+
+ def _test_adapter(self, typecallable, creator=None, to_set=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -95,7 +109,10 @@ class CollectionsTest(TestBase):
adapter.remove_with_event(e1)
assert_eq()
- def _test_list(self, typecallable, creator=entity_maker):
+ def _test_list(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -262,7 +279,10 @@ class CollectionsTest(TestBase):
control *= 2
assert_eq()
- def _test_list_bulk(self, typecallable, creator=entity_maker):
+ def _test_list_bulk(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -386,7 +406,10 @@ class CollectionsTest(TestBase):
self._test_list_bulk(ListIsh)
self.assert_(getattr(ListIsh, '_sa_instrumented') == id(ListIsh))
- def _test_set(self, typecallable, creator=entity_maker):
+ def _test_set(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -641,7 +664,10 @@ class CollectionsTest(TestBase):
except TypeError:
assert True
- def _test_set_bulk(self, typecallable, creator=entity_maker):
+ def _test_set_bulk(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -746,7 +772,10 @@ class CollectionsTest(TestBase):
self._test_set_bulk(SetIsh)
self.assert_(getattr(SetIsh, '_sa_instrumented') == id(SetIsh))
- def _test_dict(self, typecallable, creator=dictable_entity):
+ def _test_dict(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.dictable_entity
+
class Foo(object):
pass
@@ -865,7 +894,10 @@ class CollectionsTest(TestBase):
control.update(**kw)
assert_eq()
- def _test_dict_bulk(self, typecallable, creator=dictable_entity):
+ def _test_dict_bulk(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.dictable_entity
+
class Foo(object):
pass
@@ -930,7 +962,7 @@ class CollectionsTest(TestBase):
def test_dict(self):
try:
- self._test_adapter(dict, dictable_entity,
+ self._test_adapter(dict, self.dictable_entity,
to_set=lambda c: set(c.values()))
self.assert_(False)
except sa_exc.ArgumentError, e:
@@ -953,7 +985,7 @@ class CollectionsTest(TestBase):
def _remove(self, item, _sa_initiator=None):
self.__delitem__(item.a, _sa_initiator=_sa_initiator)
- self._test_adapter(MyDict, dictable_entity,
+ self._test_adapter(MyDict, self.dictable_entity,
to_set=lambda c: set(c.values()))
self._test_dict(MyDict)
self._test_dict_bulk(MyDict)
@@ -964,7 +996,7 @@ class CollectionsTest(TestBase):
def __init__(self):
super(MyEasyDict, self).__init__(lambda e: e.a)
- self._test_adapter(MyEasyDict, dictable_entity,
+ self._test_adapter(MyEasyDict, self.dictable_entity,
to_set=lambda c: set(c.values()))
self._test_dict(MyEasyDict)
self._test_dict_bulk(MyEasyDict)
@@ -976,7 +1008,7 @@ class CollectionsTest(TestBase):
collections.MappedCollection.__init__(self, lambda e: e.a)
util.OrderedDict.__init__(self)
- self._test_adapter(MyOrdered, dictable_entity,
+ self._test_adapter(MyOrdered, self.dictable_entity,
to_set=lambda c: set(c.values()))
self._test_dict(MyOrdered)
self._test_dict_bulk(MyOrdered)
@@ -1014,7 +1046,7 @@ class CollectionsTest(TestBase):
def __repr__(self):
return 'DictLike(%s)' % repr(self.data)
- self._test_adapter(DictLike, dictable_entity,
+ self._test_adapter(DictLike, self.dictable_entity,
to_set=lambda c: set(c.itervalues()))
self._test_dict(DictLike)
self._test_dict_bulk(DictLike)
@@ -1053,13 +1085,16 @@ class CollectionsTest(TestBase):
def __repr__(self):
return 'DictIsh(%s)' % repr(self.data)
- self._test_adapter(DictIsh, dictable_entity,
+ self._test_adapter(DictIsh, self.dictable_entity,
to_set=lambda c: set(c.itervalues()))
self._test_dict(DictIsh)
self._test_dict_bulk(DictIsh)
self.assert_(getattr(DictIsh, '_sa_instrumented') == id(DictIsh))
- def _test_object(self, typecallable, creator=entity_maker):
+ def _test_object(self, typecallable, creator=None):
+ if creator is None:
+ creator = self.entity_maker
+
class Foo(object):
pass
@@ -1210,7 +1245,7 @@ class CollectionsTest(TestBase):
self.assert_(set(direct) == canary.data)
self.assert_(set(adapter) == canary.data)
self.assert_(list(direct) == control)
- creator = entity_maker
+ creator = self.entity_maker
e1 = creator()
direct.put(e1)
@@ -1267,7 +1302,7 @@ class CollectionsTest(TestBase):
pass
canary = Canary()
- creator = entity_maker
+ creator = self.entity_maker
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'attr', True, extension=canary, useobject=True)
@@ -1295,36 +1330,37 @@ class CollectionsTest(TestBase):
obj.attr[0] = e3
self.assert_(e3 in canary.data)
-class DictHelpersTest(ORMTest):
- def define_tables(self, metadata):
- global parents, children, Parent, Child
-
- parents = Table('parents', metadata,
- Column('id', Integer, primary_key=True),
- Column('label', String(128)))
- children = Table('children', metadata,
- Column('id', Integer, primary_key=True),
- Column('parent_id', Integer, ForeignKey('parents.id'),
- nullable=False),
- Column('a', String(128)),
- Column('b', String(128)),
- Column('c', String(128)))
+class DictHelpersTest(_base.MappedTest):
- class Parent(object):
+ def define_tables(self, metadata):
+ Table('parents', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('label', String(128)))
+ Table('children', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', Integer, ForeignKey('parents.id'),
+ nullable=False),
+ Column('a', String(128)),
+ Column('b', String(128)),
+ Column('c', String(128)))
+
+ def setup_classes(self):
+ class Parent(_base.BasicEntity):
def __init__(self, label=None):
self.label = label
- class Child(object):
+
+ class Child(_base.BasicEntity):
def __init__(self, a=None, b=None, c=None):
self.a = a
self.b = b
self.c = c
+ @testing.resolve_artifact_names
def _test_scalar_mapped(self, collection_class):
mapper(Child, children)
mapper(Parent, parents, properties={
'children': relation(Child, collection_class=collection_class,
- cascade="all, delete-orphan")
- })
+ cascade="all, delete-orphan")})
p = Parent()
p.children['foo'] = Child('foo', 'value')
@@ -1377,6 +1413,7 @@ class DictHelpersTest(ORMTest):
self.assert_(len(list(collections.collection_adapter(p.children))) == 0)
+ @testing.resolve_artifact_names
def _test_composite_mapped(self, collection_class):
mapper(Child, children)
mapper(Parent, parents, properties={
@@ -1424,13 +1461,16 @@ class DictHelpersTest(ORMTest):
collection_class = collections.attribute_mapped_collection('a')
self._test_scalar_mapped(collection_class)
+ @testing.resolve_artifact_names
def test_column_mapped_collection(self):
- collection_class = collections.column_mapped_collection(children.c.a)
+ collection_class = collections.column_mapped_collection(
+ children.c.a)
self._test_scalar_mapped(collection_class)
+ @testing.resolve_artifact_names
def test_column_mapped_collection2(self):
- collection_class = collections.column_mapped_collection((children.c.a,
- children.c.b))
+ collection_class = collections.column_mapped_collection(
+ (children.c.a, children.c.b))
self._test_composite_mapped(collection_class)
def test_mixin(self):
@@ -1451,17 +1491,19 @@ class DictHelpersTest(ORMTest):
# TODO: are these tests redundant vs. the above tests ?
# remove if so
-class CustomCollectionsTest(ORMTest):
+class CustomCollectionsTest(_base.MappedTest):
+
def define_tables(self, metadata):
- global sometable, someothertable
- sometable = Table('sometable', metadata,
- Column('col1',Integer, primary_key=True),
- Column('data', String(30)))
- someothertable = Table('someothertable', metadata,
- Column('col1', Integer, primary_key=True),
- Column('scol1', Integer, ForeignKey(sometable.c.col1)),
- Column('data', String(20))
- )
+ Table('sometable', metadata,
+ Column('col1',Integer, primary_key=True),
+ Column('data', String(30)))
+ Table('someothertable', metadata,
+ Column('col1', Integer, primary_key=True),
+ Column('scol1', Integer,
+ ForeignKey('sometable.col1')),
+ Column('data', String(20)))
+
+ @testing.resolve_artifact_names
def test_basic(self):
class MyList(list):
pass
@@ -1469,13 +1511,15 @@ class CustomCollectionsTest(ORMTest):
pass
class Bar(object):
pass
+
mapper(Foo, sometable, properties={
'bars':relation(Bar, collection_class=MyList)
})
mapper(Bar, someothertable)
f = Foo()
assert isinstance(f.bars, MyList)
-
+
+ @testing.resolve_artifact_names
def test_lazyload(self):
"""test that a 'set' can be used as a collection and can lazyload."""
class Foo(object):
@@ -1497,6 +1541,7 @@ class CustomCollectionsTest(ORMTest):
assert len(list(f.bars)) == 2
f.bars.clear()
+ @testing.resolve_artifact_names
def test_dict(self):
"""test that a 'dict' can be used as a collection and can lazyload."""
@@ -1528,6 +1573,7 @@ class CustomCollectionsTest(ORMTest):
assert len(list(f.bars)) == 2
f.bars.clear()
+ @testing.resolve_artifact_names
def test_dict_wrapper(self):
"""test that the supplied 'dict' wrapper can be used as a collection and can lazyload."""
@@ -1538,7 +1584,8 @@ class CustomCollectionsTest(ORMTest):
mapper(Foo, sometable, properties={
'bars':relation(Bar,
- collection_class=collections.column_mapped_collection(someothertable.c.data))
+ collection_class=collections.column_mapped_collection(
+ someothertable.c.data))
})
mapper(Bar, someothertable)
@@ -1566,6 +1613,7 @@ class CustomCollectionsTest(ORMTest):
replaced = set([id(b) for b in f.bars.values()])
self.assert_(existing != replaced)
+ @testing.resolve_artifact_names
def test_list(self):
class Parent(object):
pass
@@ -1686,6 +1734,7 @@ class CustomCollectionsTest(ORMTest):
assert control == p.children
assert control == list(p.children)
+ @testing.resolve_artifact_names
def test_custom(self):
class Parent(object):
pass
@@ -1738,8 +1787,7 @@ class CustomCollectionsTest(ORMTest):
assert len(o) == 3
-class InstrumentationTest(TestBase):
-
+class InstrumentationTest(_base.ORMTest):
def test_uncooperative_descriptor_in_sweep(self):
class DoNotTouch(object):
def __get__(self, obj, owner):
diff --git a/test/orm/compile.py b/test/orm/compile.py
index 59d636bae..aa429bc29 100644
--- a/test/orm/compile.py
+++ b/test/orm/compile.py
@@ -3,15 +3,16 @@ from sqlalchemy import *
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import *
from testlib import *
+from orm import _base
-class CompileTest(TestBase, AssertsExecutionResults):
+class CompileTest(_base.ORMTest):
"""test various mapper compilation scenarios"""
+
def tearDown(self):
clear_mappers()
def testone(self):
- global metadata, order, employee, product, tax, orderproduct
metadata = MetaData(testing.db)
order = Table('orders', metadata,
@@ -66,13 +67,13 @@ class CompileTest(TestBase, AssertsExecutionResults):
mapper(OrderProduct, orderproduct)
- # this requires that the compilation of order_mapper's "surrogate mapper" occur after
- # the initial setup of MapperProperty objects on the mapper.
+ # this requires that the compilation of order_mapper's "surrogate
+ # mapper" occur after the initial setup of MapperProperty objects on
+ # the mapper.
class_mapper(Product).compile()
def testtwo(self):
"""test that conflicting backrefs raises an exception"""
- global metadata, order, employee, product, tax, orderproduct
metadata = MetaData(testing.db)
order = Table('orders', metadata,
@@ -180,5 +181,6 @@ class CompileTest(TestBase, AssertsExecutionResults):
except sa_exc.ArgumentError, e:
assert str(e).index("Error creating backref") > -1
+
if __name__ == '__main__':
testenv.main()
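(A minimal standalone sketch of the explicit compile step these CompileTest changes exercise; the Parent class and table below are hypothetical and not part of this suite. Mapper configuration is resolved lazily, and calling compile() forces that resolution so configuration errors surface at a known point.)

    from sqlalchemy import MetaData, Table, Column, Integer
    from sqlalchemy.orm import mapper, class_mapper

    metadata = MetaData()
    parent = Table('parent', metadata,
                   Column('id', Integer, primary_key=True))

    class Parent(object):
        pass

    mapper(Parent, parent)
    # relation()/backref wiring is resolved lazily; forcing it here makes
    # configuration errors (e.g. conflicting backrefs) appear predictably.
    class_mapper(Parent).compile()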
diff --git a/test/orm/cycles.py b/test/orm/cycles.py
index 8b5173d3c..7a6c3d31e 100644
--- a/test/orm/cycles.py
+++ b/test/orm/cycles.py
@@ -3,6 +3,7 @@ from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
from testlib.tables import *
+from orm import _base
"""
Tests cyclical mapper relationships.
@@ -20,7 +21,7 @@ class Tester(object):
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self.data))
-class SelfReferentialTest(TestBase, AssertsExecutionResults):
+class SelfReferentialTest(_base.ORMTest):
"""tests a self-referential mapper, with an additional list of child objects."""
def setUpAll(self):
global t1, t2, metadata
@@ -152,7 +153,7 @@ class SelfReferentialNoPKTest(TestBase, AssertsExecutionResults):
assert t.uuid == t2.uuid
assert t.parent.uuid == t1.uuid
-class InheritTestOne(TestBase, AssertsExecutionResults):
+class InheritTestOne(_base.ORMTest):
def setUpAll(self):
global parent, child1, child2, meta
meta = MetaData(testing.db)
@@ -216,7 +217,7 @@ class InheritTestOne(TestBase, AssertsExecutionResults):
# attached to a task corresponding to c1, since "child1_id" is not nullable
session.flush()
-class InheritTestTwo(ORMTest):
+class InheritTestTwo(_base.MappedTest):
"""the fix in BiDirectionalManyToOneTest raised this issue, regarding
the 'circular sort' containing UOWTasks that were still polymorphic, which could
create duplicate entries in the final sort
@@ -264,7 +265,7 @@ class InheritTestTwo(ORMTest):
sess.flush()
-class BiDirectionalManyToOneTest(ORMTest):
+class BiDirectionalManyToOneTest(_base.MappedTest):
def define_tables(self, metadata):
global t1, t2, t3, t4
t1 = Table('t1', metadata,
@@ -400,7 +401,7 @@ class BiDirectionalOneToManyTest(TestBase, AssertsExecutionResults):
[sess.save(x) for x in [a,b,c,d,e,f]]
sess.flush()
-class BiDirectionalOneToManyTest2(TestBase, AssertsExecutionResults):
+class BiDirectionalOneToManyTest2(_base.ORMTest):
"""tests two mappers with a one-to-many relation to each other, with a second one-to-many on one of the mappers"""
def setUpAll(self):
global t1, t2, t3, metadata
@@ -460,7 +461,7 @@ class BiDirectionalOneToManyTest2(TestBase, AssertsExecutionResults):
sess.delete(c)
sess.flush()
-class OneToManyManyToOneTest(TestBase, AssertsExecutionResults):
+class OneToManyManyToOneTest(_base.ORMTest):
"""tests two mappers, one has a one-to-many on the other mapper, the other has a separate many-to-one relationship to the first.
two tests will have a row for each item that is dependent on the other. without the "post_update" flag, such relationships
raise an exception when dependencies are sorted."""
@@ -751,7 +752,7 @@ class OneToManyManyToOneTest(TestBase, AssertsExecutionResults):
)
])
-class SelfReferentialPostUpdateTest(TestBase, AssertsExecutionResults):
+class SelfReferentialPostUpdateTest(_base.ORMTest):
"""test using post_update on a single self-referential mapper"""
def setUpAll(self):
global metadata, node_table
@@ -855,7 +856,7 @@ class SelfReferentialPostUpdateTest(TestBase, AssertsExecutionResults):
),
])
-class SelfReferentialPostUpdateTest2(TestBase, AssertsExecutionResults):
+class SelfReferentialPostUpdateTest2(_base.ORMTest):
def setUpAll(self):
global metadata, a_table
metadata = MetaData(testing.db)
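(For context on the post_update behavior these cyclical tests exercise: when two mappers each depend on the other's row, post_update breaks the circular dependency at flush time. A minimal sketch with hypothetical person/ball tables; the flag is real relation() API, the schema is illustrative only.)

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relation

    metadata = MetaData()
    person = Table('person', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(30)),
        Column('favorite_ball_id', Integer, ForeignKey('ball.id')))
    ball = Table('ball', metadata,
        Column('id', Integer, primary_key=True),
        Column('person_id', Integer, ForeignKey('person.id')))

    class Person(object): pass
    class Ball(object): pass

    mapper(Ball, ball)
    mapper(Person, person, properties={
        # post_update=True tells the unit of work to INSERT both rows first,
        # then set the circular foreign key with a second UPDATE, instead of
        # failing on the circular dependency sort.
        'favorite': relation(Ball, post_update=True,
                             primaryjoin=person.c.favorite_ball_id == ball.c.id),
    })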
diff --git a/test/orm/deprecations.py b/test/orm/deprecations.py
index d6caaa196..3bc38da2a 100644
--- a/test/orm/deprecations.py
+++ b/test/orm/deprecations.py
@@ -6,24 +6,13 @@ be migrated directly to the wiki, docs, etc.
"""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
+from testlib import testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey, func
+from testlib.sa.orm import mapper, relation, create_session
+from orm import _base
-users, addresses = None, None
-session = None
-
-class Base(object):
- def __init__(self, **kw):
- for k, v in kw.iteritems():
- setattr(self, k, v)
-
-class User(Base): pass
-class Address(Base): pass
-
-
-class QueryAlternativesTest(ORMTest):
+class QueryAlternativesTest(_base.MappedTest):
'''Collects modern idioms for Queries
The docstring for each test case serves as miniature documentation about
@@ -51,64 +40,64 @@ class QueryAlternativesTest(ORMTest):
assert output is correct
'''
- keep_mappers = True
- keep_data = True
+
+ run_inserts = 'once'
+ run_deletes = None
def define_tables(self, metadata):
- global users_table, addresses_table
- users_table = Table(
- 'users', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(64)))
-
- addresses_table = Table(
- 'addresses', metadata,
- Column('id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey('users.id')),
- Column('email_address', String(128)),
- Column('purpose', String(16)),
- Column('bounces', Integer, default=0))
+ Table('users_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(64)))
+ Table('addresses_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey('users_table.id')),
+ Column('email_address', String(128)),
+ Column('purpose', String(16)),
+ Column('bounces', Integer, default=0))
+
+ def setup_classes(self):
+ class User(_base.BasicEntity):
+ pass
+
+ class Address(_base.BasicEntity):
+ pass
+
+ @testing.resolve_artifact_names
def setup_mappers(self):
mapper(User, users_table, properties=dict(
addresses=relation(Address, backref='user'),
))
mapper(Address, addresses_table)
- def insert_data(self):
- user_cols = ('id', 'name')
- user_rows = ((1, 'jack'), (2, 'ed'), (3, 'fred'), (4, 'chuck'))
- users_table.insert().execute(
- [dict(zip(user_cols, row)) for row in user_rows])
-
- add_cols = ('id', 'user_id', 'email_address', 'purpose', 'bounces')
- add_rows = (
+ def fixtures(self):
+ return dict(
+ users_table=(
+ ('id', 'name'),
+ (1, 'jack'),
+ (2, 'ed'),
+ (3, 'fred'),
+ (4, 'chuck')),
+
+ addresses_table=(
+ ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
(1, 1, 'jack@jack.home', 'Personal', 0),
(2, 1, 'jack@jack.bizz', 'Work', 1),
(3, 2, 'ed@foo.bar', 'Personal', 0),
- (4, 3, 'fred@the.fred', 'Personal', 10))
-
- addresses_table.insert().execute(
- [dict(zip(add_cols, row)) for row in add_rows])
+ (4, 3, 'fred@the.fred', 'Personal', 10)))
- def setUp(self):
- super(QueryAlternativesTest, self).setUp()
- global session
- if session is None:
- session = create_session()
-
- def tearDown(self):
- super(QueryAlternativesTest, self).tearDown()
- session.clear()
######################################################################
+ @testing.resolve_artifact_names
def test_apply_max(self):
"""Query.apply_max(col)
max = session.query(Address).apply_max(Address.bounces)
"""
+ session = create_session()
+
# 0.5.0
maxes = list(session.query(Address).values(func.max(Address.bounces)))
max = maxes[0][0]
@@ -117,12 +106,15 @@ class QueryAlternativesTest(ORMTest):
max = session.query(func.max(Address.bounces)).one()[0]
assert max == 10
+ @testing.resolve_artifact_names
def test_apply_min(self):
"""Query.apply_min(col)
min = session.query(Address).apply_min(Address.bounces)
"""
+ session = create_session()
+
# 0.5.0
mins = list(session.query(Address).values(func.min(Address.bounces)))
min = mins[0][0]
@@ -131,12 +123,15 @@ class QueryAlternativesTest(ORMTest):
min = session.query(func.min(Address.bounces)).one()[0]
assert min == 0
+ @testing.resolve_artifact_names
def test_apply_avg(self):
"""Query.apply_avg(col)
avg = session.query(Address).apply_avg(Address.bounces)
"""
+ session = create_session()
+
avgs = list(session.query(Address).values(func.avg(Address.bounces)))
avg = avgs[0][0]
assert avg > 0 and avg < 10
@@ -144,12 +139,15 @@ class QueryAlternativesTest(ORMTest):
avg = session.query(func.avg(Address.bounces)).one()[0]
assert avg > 0 and avg < 10
+ @testing.resolve_artifact_names
def test_apply_sum(self):
"""Query.apply_sum(col)
avg = session.query(Address).apply_avg(Address.bounces)
"""
+ session = create_session()
+
avgs = list(session.query(Address).values(func.avg(Address.bounces)))
avg = avgs[0][0]
assert avg > 0 and avg < 10
@@ -157,6 +155,7 @@ class QueryAlternativesTest(ORMTest):
avg = session.query(func.avg(Address.bounces)).one()[0]
assert avg > 0 and avg < 10
+ @testing.resolve_artifact_names
def test_count_by(self):
"""Query.count_by(*args, **params)
@@ -166,6 +165,8 @@ class QueryAlternativesTest(ORMTest):
num = session.query(User).count_by(purpose='Personal')
"""
+ session = create_session()
+
num = session.query(Address).filter_by(purpose='Personal').count()
assert num == 3, num
@@ -173,24 +174,31 @@ class QueryAlternativesTest(ORMTest):
filter(Address.purpose=='Personal')).count()
assert num == 3, num
+ @testing.resolve_artifact_names
def test_count_whereclause(self):
"""Query.count(whereclause=None, params=None, **kwargs)
num = session.query(Address).count(address_table.c.bounces > 1)
"""
+ session = create_session()
+
num = session.query(Address).filter(Address.bounces > 1).count()
assert num == 1, num
+ @testing.resolve_artifact_names
def test_execute(self):
"""Query.execute(clauseelement, params=None, *args, **kwargs)
users = session.query(User).execute(users_table.select())
"""
+ session = create_session()
+
users = session.query(User).from_statement(users_table.select()).all()
assert len(users) == 4
+ @testing.resolve_artifact_names
def test_get_by(self):
"""Query.get_by(*args, **params)
@@ -200,6 +208,8 @@ class QueryAlternativesTest(ORMTest):
user = session.query(User).get_by(email_addresss='fred@the.fred')
"""
+ session = create_session()
+
user = session.query(User).filter_by(name='ed').first()
assert user.name == 'ed'
@@ -211,6 +221,7 @@ class QueryAlternativesTest(ORMTest):
User.addresses.any(Address.email_address=='fred@the.fred')).first()
assert user.name == 'fred'
+ @testing.resolve_artifact_names
def test_instances_entities(self):
"""Query.instances(cursor, *mappers_or_columns, **kwargs)
@@ -218,6 +229,8 @@ class QueryAlternativesTest(ORMTest):
res = session.query(User).instances(sel.execute(), Address)
"""
+ session = create_session()
+
sel = users_table.join(addresses_table).select(use_labels=True)
res = session.query(User, Address).instances(sel.execute())
@@ -225,52 +238,70 @@ class QueryAlternativesTest(ORMTest):
cola, colb = res[0]
assert isinstance(cola, User) and isinstance(colb, Address)
-
+ @testing.resolve_artifact_names
def test_join_by(self):
"""Query.join_by(*args, **params)
TODO
"""
+ session = create_session()
+
+ @testing.resolve_artifact_names
def test_join_to(self):
"""Query.join_to(key)
TODO
"""
+ session = create_session()
+
+ @testing.resolve_artifact_names
def test_join_via(self):
"""Query.join_via(keys)
TODO
"""
+ session = create_session()
+
+ @testing.resolve_artifact_names
def test_list(self):
"""Query.list()
users = session.query(User).list()
"""
+ session = create_session()
+
users = session.query(User).all()
assert len(users) == 4
+ @testing.resolve_artifact_names
def test_scalar(self):
"""Query.scalar()
user = session.query(User).filter(User.id==1).scalar()
"""
+ session = create_session()
+
user = session.query(User).filter(User.id==1).first()
assert user.id==1
+ @testing.resolve_artifact_names
def test_select(self):
"""Query.select(arg=None, **kwargs)
users = session.query(User).select(users_table.c.name != None)
"""
+ session = create_session()
+
users = session.query(User).filter(User.name != None).all()
assert len(users) == 4
+ @testing.resolve_artifact_names
def test_select_by(self):
"""Query.select_by(*args, **params)
@@ -280,6 +311,8 @@ class QueryAlternativesTest(ORMTest):
users = session.query(User).select_by(email_address='fred@the.fred')
"""
+ session = create_session()
+
users = session.query(User).filter_by(name='fred').all()
assert len(users) == 1
@@ -294,6 +327,7 @@ class QueryAlternativesTest(ORMTest):
Address.email_address == 'fred@the.fred')).all()
assert len(users) == 1
+ @testing.resolve_artifact_names
def test_selectfirst(self):
"""Query.selectfirst(arg=None, **kwargs)
@@ -301,9 +335,12 @@ class QueryAlternativesTest(ORMTest):
addresses_table.c.bounces > 0)
"""
+ session = create_session()
+
bounced = session.query(Address).filter(Address.bounces > 0).first()
assert bounced.bounces > 0
+ @testing.resolve_artifact_names
def test_selectfirst_by(self):
"""Query.selectfirst_by(*args, **params)
@@ -313,6 +350,8 @@ class QueryAlternativesTest(ORMTest):
onebounce_user = session.query(User).selectfirst_by(bounces=1)
"""
+ session = create_session()
+
onebounce = session.query(Address).filter_by(bounces=1).first()
assert onebounce.bounces == 1
@@ -328,15 +367,18 @@ class QueryAlternativesTest(ORMTest):
Address.bounces == 1)).first()
assert onebounce_user.name == 'jack'
-
+ @testing.resolve_artifact_names
def test_selectone(self):
"""Query.selectone(arg=None, **kwargs)
ed = session.query(User).selectone(users_table.c.name == 'ed')
"""
+ session = create_session()
+
ed = session.query(User).filter(User.name == 'jack').one()
+ @testing.resolve_artifact_names
def test_selectone_by(self):
"""Query.selectone_by
@@ -346,6 +388,8 @@ class QueryAlternativesTest(ORMTest):
ed = session.query(User).selectone_by(email_address='ed@foo.bar')
"""
+ session = create_session()
+
ed = session.query(User).filter_by(name='jack').one()
ed = session.query(User).filter(User.name == 'jack').one()
@@ -356,24 +400,32 @@ class QueryAlternativesTest(ORMTest):
ed = session.query(User).filter(User.addresses.any(
Address.email_address == 'ed@foo.bar')).one()
+ @testing.resolve_artifact_names
def test_select_statement(self):
"""Query.select_statement(statement, **params)
users = session.query(User).select_statement(users_table.select())
"""
+ session = create_session()
+
users = session.query(User).from_statement(users_table.select()).all()
assert len(users) == 4
+ @testing.resolve_artifact_names
def test_select_text(self):
"""Query.select_text(text, **params)
- users = session.query(User).select_text('SELECT * FROM users')
+ users = session.query(User).select_text('SELECT * FROM users_table')
"""
- users = session.query(User).from_statement('SELECT * FROM users').all()
+ session = create_session()
+
+ users = (session.query(User).
+ from_statement('SELECT * FROM users_table')).all()
assert len(users) == 4
+ @testing.resolve_artifact_names
def test_select_whereclause(self):
"""Query.select_whereclause(whereclause=None, params=None, **kwargs)
@@ -382,6 +434,8 @@ class QueryAlternativesTest(ORMTest):
users = session.query(User).select_whereclause("name='ed'")
"""
+ session = create_session()
+
users = session.query(User).filter(User.name=='ed').all()
assert len(users) == 1 and users[0].name == 'ed'
@@ -389,6 +443,5 @@ class QueryAlternativesTest(ORMTest):
assert len(users) == 1 and users[0].name == 'ed'
-
if __name__ == '__main__':
testenv.main()
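(Condensing the migration pattern recorded above: each deprecated 0.4 Query call and its 0.5 replacement, assuming the User/Address mappers, users_table, and session set up by the test class; the deprecated forms appear only as comments.)

    from sqlalchemy import func

    # session.query(Address).apply_max(Address.bounces)
    max_bounces = session.query(func.max(Address.bounces)).one()[0]

    # session.query(Address).count_by(purpose='Personal')
    personal = session.query(Address).filter_by(purpose='Personal').count()

    # session.query(User).get_by(name='ed')
    ed = session.query(User).filter_by(name='ed').first()

    # session.query(User).select_statement(users_table.select())
    users = session.query(User).from_statement(users_table.select()).all()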
diff --git a/test/orm/dynamic.py b/test/orm/dynamic.py
index 0c3f1a95d..bcacf4389 100644
--- a/test/orm/dynamic.py
+++ b/test/orm/dynamic.py
@@ -1,16 +1,16 @@
import testenv; testenv.configure_for_tests()
import operator
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-from testlib.fixtures import *
+from sqlalchemy.orm import dynamic_loader, backref
+from testlib import testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey, desc
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from testlib.compat import _function_named
+from orm import _base, _fixtures
-from query import QueryTest
-
-class DynamicTest(FixtureTest):
- keep_mappers = False
- refresh_data = True
+class DynamicTest(_fixtures.FixtureTest):
+ @testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
@@ -18,36 +18,40 @@ class DynamicTest(FixtureTest):
sess = create_session()
q = sess.query(User)
- print q.filter(User.id==7).all()
u = q.filter(User.id==7).first()
- print list(u.addresses)
- assert [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])] == q.filter(User.id==7).all()
- assert fixtures.user_address_result == q.all()
-
+ eq_([User(id=7,
+ addresses=[Address(id=1, email_address='jack@bean.com')])],
+ q.filter(User.id==7).all())
+ eq_(self.static.user_address_result, q.all())
+
+ @testing.resolve_artifact_names
def test_order_by(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u = sess.query(User).get(8)
- self.assertEquals(list(u.addresses.order_by(desc(Address.email_address))), [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'), Address(email_address=u'ed@bettyboop.com')])
+ eq_(list(u.addresses.order_by(desc(Address.email_address))), [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'), Address(email_address=u'ed@bettyboop.com')])
+ @testing.resolve_artifact_names
def test_configured_order_by(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=desc(Address.email_address))
})
sess = create_session()
u = sess.query(User).get(8)
- self.assertEquals(list(u.addresses), [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'), Address(email_address=u'ed@bettyboop.com')])
-
+ eq_(list(u.addresses), [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'), Address(email_address=u'ed@bettyboop.com')])
+
+ @testing.resolve_artifact_names
def test_count(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u = sess.query(User).first()
- assert u.addresses.count() == 1, u.addresses.count()
+ eq_(u.addresses.count(), 1)
+ @testing.resolve_artifact_names
def test_backref(self):
mapper(Address, addresses, properties={
'user':relation(User, backref=backref('addresses', lazy='dynamic'))
@@ -63,6 +67,7 @@ class DynamicTest(FixtureTest):
u = sess.query(User).get(7)
assert ad not in u.addresses
+ @testing.resolve_artifact_names
def test_no_count(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
@@ -70,15 +75,21 @@ class DynamicTest(FixtureTest):
sess = create_session()
q = sess.query(User)
- # dynamic collection cannot implement __len__() (at least one that returns a live database
- # result), else additional count() queries are issued when evaluating in a list context
+ # dynamic collection cannot implement __len__() (at least one that
+ # returns a live database result), else additional count() queries are
+ # issued when evaluating in a list context
def go():
- assert [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])] == q.filter(User.id==7).all()
+ eq_([User(id=7,
+ addresses=[Address(id=1,
+ email_address='jack@bean.com')])],
+ q.filter(User.id==7).all())
self.assert_sql_count(testing.db, go, 2)
+ @testing.resolve_artifact_names
def test_m2m(self):
mapper(Order, orders, properties={
- 'items':relation(Item, secondary=order_items, lazy="dynamic", backref=backref('orders', lazy="dynamic"))
+ 'items':relation(Item, secondary=order_items, lazy="dynamic",
+ backref=backref('orders', lazy="dynamic"))
})
mapper(Item, items)
@@ -91,7 +102,8 @@ class DynamicTest(FixtureTest):
assert o1 in i1.orders.all()
assert i1 in o1.items.all()
-
+
+ @testing.resolve_artifact_names
def test_transient_detached(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
@@ -101,12 +113,12 @@ class DynamicTest(FixtureTest):
u1.addresses.append(Address())
assert u1.addresses.count() == 1
assert u1.addresses[0] == Address()
-
-class FlushTest(FixtureTest):
- def test_basic(self):
- class Fixture(Base):
- pass
+class FlushTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @testing.resolve_artifact_names
+ def test_basic(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
@@ -129,11 +141,9 @@ class FlushTest(FixtureTest):
User(name='jack', addresses=[Address(email_address='lala@hoho.com')]),
User(name='ed', addresses=[Address(email_address='foo@bar.com')])
] == sess.query(User).all()
-
- def test_rollback(self):
- class Fixture(Base):
- pass
+ @testing.resolve_artifact_names
+ def test_rollback(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
@@ -144,11 +154,12 @@ class FlushTest(FixtureTest):
sess.flush()
sess.commit()
u1.addresses.append(Address(email_address='foo@bar.com'))
- self.assertEquals(u1.addresses.all(), [Address(email_address='lala@hoho.com'), Address(email_address='foo@bar.com')])
+ eq_(u1.addresses.all(), [Address(email_address='lala@hoho.com'), Address(email_address='foo@bar.com')])
sess.rollback()
- self.assertEquals(u1.addresses.all(), [Address(email_address='lala@hoho.com')])
-
+ eq_(u1.addresses.all(), [Address(email_address='lala@hoho.com')])
+
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_delete_nocascade(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), backref='user')
@@ -182,6 +193,7 @@ class FlushTest(FixtureTest):
assert testing.db.scalar(addresses.count(addresses.c.user_id != None)) ==0
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_delete_cascade(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), backref='user', cascade="all, delete-orphan")
@@ -201,10 +213,10 @@ class FlushTest(FixtureTest):
sess.delete(u.addresses[4])
sess.delete(u.addresses[3])
assert [Address(email_address='a'), Address(email_address='b'), Address(email_address='d')] == list(u.addresses)
-
+
sess.clear()
u = sess.query(User).get(u.id)
-
+
sess.delete(u)
# u.addresses relation will have to force the load
@@ -215,6 +227,7 @@ class FlushTest(FixtureTest):
assert testing.db.scalar(addresses.count()) ==0
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_remove_orphans(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), cascade="all, delete-orphan", backref='user')
@@ -252,6 +265,8 @@ class FlushTest(FixtureTest):
def create_backref_test(autoflush, saveuser):
+
+ @testing.resolve_artifact_names
def test_backref(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), backref='user')
@@ -293,27 +308,34 @@ for autoflush in (False, True):
for saveuser in (False, True):
create_backref_test(autoflush, saveuser)
-class DontDereferenceTest(ORMTest):
+class DontDereferenceTest(_base.MappedTest):
def define_tables(self, metadata):
- global users_table, addresses_table
-
- users_table = Table('users', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(40)),
- Column('fullname', String(100)),
- Column('password', String(15)))
-
- addresses_table = Table('addresses', metadata,
- Column('id', Integer, primary_key=True),
- Column('email_address', String(100), nullable=False),
- Column('user_id', Integer, ForeignKey('users.id')))
- def test_no_deref(self):
- mapper(User, users_table, properties={
+ Table('users', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(40)),
+ Column('fullname', String(100)),
+ Column('password', String(15)))
+
+ Table('addresses', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('email_address', String(100), nullable=False),
+ Column('user_id', Integer, ForeignKey('users.id')))
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ class User(_base.ComparableEntity):
+ pass
+
+ class Address(_base.ComparableEntity):
+ pass
+
+ mapper(User, users, properties={
'addresses': relation(Address, backref='user', lazy='dynamic')
})
+ mapper(Address, addresses)
- mapper(Address, addresses_table)
-
+ @testing.resolve_artifact_names
+ def test_no_deref(self):
session = create_session()
user = User()
user.name = 'joe'
@@ -340,9 +362,9 @@ class DontDereferenceTest(ORMTest):
user = session.query(User).first()
return session.query(User).first().addresses.all()
- self.assertEquals(query1(), [Address(email_address='joe@joesdomain.example')] )
- self.assertEquals(query2(), [Address(email_address='joe@joesdomain.example')] )
- self.assertEquals(query3(), [Address(email_address='joe@joesdomain.example')] )
+ eq_(query1(), [Address(email_address='joe@joesdomain.example')])
+ eq_(query2(), [Address(email_address='joe@joesdomain.example')])
+ eq_(query3(), [Address(email_address='joe@joesdomain.example')])
if __name__ == '__main__':
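(For context on the 'dynamic' collections tested above: the relation attribute is a Query, so it can be filtered, ordered, and counted without loading the rows. A sketch assuming the users/addresses fixture tables and User/Address classes used throughout these tests.)

    from sqlalchemy import desc
    from sqlalchemy.orm import mapper, dynamic_loader, create_session

    mapper(User, users, properties={
        'addresses': dynamic_loader(mapper(Address, addresses)),
    })
    sess = create_session()
    u = sess.query(User).get(8)

    # criteria apply before any rows are loaded
    newest_first = u.addresses.order_by(desc(Address.email_address)).all()
    howmany = u.addresses.count()

    # the collection deliberately has no __len__(): len() would issue an
    # extra COUNT each time the attribute is evaluated in a list context.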
diff --git a/test/orm/eager_relations.py b/test/orm/eager_relations.py
index 544e505f4..b78ba12e9 100644
--- a/test/orm/eager_relations.py
+++ b/test/orm/eager_relations.py
@@ -1,17 +1,18 @@
"""basic tests of eager loaded attributes"""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-from testlib.fixtures import *
-from query import QueryTest
-from sqlalchemy.orm import attributes
-
-class EagerTest(FixtureTest):
- keep_mappers = False
- keep_data = True
-
+from testlib import sa, testing
+from sqlalchemy.orm import eagerload, deferred, undefer
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from orm import _base, _fixtures
+
+class EagerTest(_fixtures.FixtureTest):
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), lazy=False)
@@ -20,11 +21,11 @@ class EagerTest(FixtureTest):
q = sess.query(User)
assert [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])] == q.filter(User.id==7).all()
- assert fixtures.user_address_result == q.all()
+ assert self.static.user_address_result == q.all()
+ @testing.resolve_artifact_names
def test_no_orphan(self):
- """test that an eagerly loaded child object is not marked as an orphan"""
-
+ """An eagerly loaded child object is not marked as an orphan"""
mapper(User, users, properties={
'addresses':relation(Address, cascade="all,delete-orphan", lazy=False)
})
@@ -32,9 +33,10 @@ class EagerTest(FixtureTest):
sess = create_session()
user = sess.query(User).get(7)
- assert getattr(User, 'addresses').hasparent(attributes.instance_state(user.addresses[0]), optimistic=True)
- assert not class_mapper(Address)._is_orphan(attributes.instance_state(user.addresses[0]))
+ assert getattr(User, 'addresses').hasparent(sa.orm.attributes.instance_state(user.addresses[0]), optimistic=True)
+ assert not sa.orm.class_mapper(Address)._is_orphan(sa.orm.attributes.instance_state(user.addresses[0]))
+ @testing.resolve_artifact_names
def test_orderby(self):
mapper(User, users, properties = {
'addresses':relation(mapper(Address, addresses), lazy=False, order_by=addresses.c.email_address),
@@ -55,6 +57,7 @@ class EagerTest(FixtureTest):
User(id=10, addresses=[])
] == q.all()
+ @testing.resolve_artifact_names
def test_orderby_multi(self):
mapper(User, users, properties = {
'addresses':relation(mapper(Address, addresses), lazy=False, order_by=[addresses.c.email_address, addresses.c.id]),
@@ -75,11 +78,10 @@ class EagerTest(FixtureTest):
User(id=10, addresses=[])
] == q.all()
+ @testing.resolve_artifact_names
def test_orderby_related(self):
- """tests that a regular mapper select on a single table can order by a relation to a second table"""
-
+ """A regular mapper select on a single table can order by a relation to a second table"""
mapper(Address, addresses)
-
mapper(User, users, properties = dict(
addresses = relation(Address, lazy=False),
))
@@ -101,11 +103,12 @@ class EagerTest(FixtureTest):
]),
] == l
+ @testing.resolve_artifact_names
def test_orderby_desc(self):
mapper(Address, addresses)
-
mapper(User, users, properties = dict(
- addresses = relation(Address, lazy=False, order_by=[desc(addresses.c.email_address)]),
+ addresses = relation(Address, lazy=False,
+ order_by=[sa.desc(addresses.c.email_address)]),
))
sess = create_session()
assert [
@@ -123,7 +126,13 @@ class EagerTest(FixtureTest):
User(id=10, addresses=[])
] == sess.query(User).all()
+ @testing.resolve_artifact_names
def test_deferred_fk_col(self):
+ User, Address, Dingaling = self.classes.get_all(
+ 'User', 'Address', 'Dingaling')
+ users, addresses, dingalings = self.tables.get_all(
+ 'users', 'addresses', 'dingalings')
+
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
'user':relation(User, lazy=False)
@@ -131,39 +140,43 @@ class EagerTest(FixtureTest):
mapper(User, users)
sess = create_session()
-
+
for q in [
sess.query(Address).filter(Address.id.in_([1, 4, 5])),
sess.query(Address).filter(Address.id.in_([1, 4, 5])).limit(3)
]:
sess.clear()
- self.assertEquals(q.all(),
- [Address(id=1, user=User(id=7)), Address(id=4, user=User(id=8)), Address(id=5, user=User(id=9))]
+ eq_(q.all(),
+ [Address(id=1, user=User(id=7)),
+ Address(id=4, user=User(id=8)),
+ Address(id=5, user=User(id=9))]
)
a = sess.query(Address).filter(Address.id==1).first()
def go():
- assert a.user_id==7
- # assert that the eager loader added 'user_id' to the row
- # and deferred loading of that col was disabled
+ eq_(a.user_id, 7)
+ # assert that the eager loader added 'user_id' to the row and deferred
+ # loading of that col was disabled
self.assert_sql_count(testing.db, go, 0)
# do the mapping in reverse
- # (we would have just used an "addresses" backref but the test fixtures then require the whole
- # backref to be set up, lazy loaders trigger, etc.)
- clear_mappers()
+ # (we would have just used an "addresses" backref but the test
+ # fixtures then require the whole backref to be set up, lazy loaders
+ # trigger, etc.)
+ sa.orm.clear_mappers()
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
})
- mapper(User, users, properties={'addresses':relation(Address, lazy=False)})
-
+ mapper(User, users, properties={
+ 'addresses':relation(Address, lazy=False)})
+
for q in [
sess.query(User).filter(User.id==7),
sess.query(User).filter(User.id==7).limit(1)
]:
sess.clear()
- self.assertEquals(q.all(),
+ eq_(q.all(),
[User(id=7, addresses=[Address(id=1)])]
)
@@ -175,81 +188,115 @@ class EagerTest(FixtureTest):
# and that its still deferred
self.assert_sql_count(testing.db, go, 1)
- clear_mappers()
+ sa.orm.clear_mappers()
- mapper(User, users, properties={'addresses':relation(Address, lazy=False)})
+ mapper(User, users, properties={
+ 'addresses':relation(Address, lazy=False)})
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
- 'dingalings':relation(Dingaling, lazy=False)
- })
+ 'dingalings':relation(Dingaling, lazy=False)})
mapper(Dingaling, dingalings, properties={
- 'address_id':deferred(dingalings.c.address_id)
- })
+ 'address_id':deferred(dingalings.c.address_id)})
sess.clear()
def go():
u = sess.query(User).get(8)
- assert User(id=8, addresses=[Address(id=2, dingalings=[Dingaling(id=1)]), Address(id=3), Address(id=4)]) == u
+ eq_(User(id=8,
+ addresses=[Address(id=2, dingalings=[Dingaling(id=1)]),
+ Address(id=3),
+ Address(id=4)]),
+ u)
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_many_to_many(self):
+ Keyword, Item = self.Keyword, self.Item
+ keywords, item_keywords, items = self.tables.get_all(
+ 'keywords', 'item_keywords', 'items')
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
- keywords = relation(Keyword, secondary=item_keywords, lazy=False, order_by=keywords.c.id),
- ))
+ keywords = relation(Keyword, secondary=item_keywords,
+ lazy=False, order_by=keywords.c.id)))
q = create_session().query(Item)
def go():
- assert fixtures.item_keyword_result == q.all()
+ assert self.static.item_keyword_result == q.all()
self.assert_sql_count(testing.db, go, 1)
def go():
- assert fixtures.item_keyword_result[0:2] == q.join('keywords').filter(Keyword.name == 'red').all()
+ eq_(self.static.item_keyword_result[0:2],
+ q.join('keywords').filter(Keyword.name == 'red').all())
self.assert_sql_count(testing.db, go, 1)
def go():
- assert fixtures.item_keyword_result[0:2] == q.join('keywords', aliased=True).filter(Keyword.name == 'red').all()
+ eq_(self.static.item_keyword_result[0:2],
+ (q.join('keywords', aliased=True).
+ filter(Keyword.name == 'red')).all())
self.assert_sql_count(testing.db, go, 1)
-
+ @testing.resolve_artifact_names
def test_eager_option(self):
+ Keyword, Item = self.Keyword, self.Item
+ keywords, item_keywords, items = self.tables.get_all(
+ 'keywords', 'item_keywords', 'items')
+
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
- keywords = relation(Keyword, secondary=item_keywords, lazy=True, order_by=keywords.c.id),
- ))
+ keywords = relation(Keyword, secondary=item_keywords, lazy=True,
+ order_by=keywords.c.id)))
q = create_session().query(Item)
def go():
- assert fixtures.item_keyword_result[0:2] == q.options(eagerload('keywords')).join('keywords').filter(keywords.c.name == 'red').all()
+ eq_(self.static.item_keyword_result[0:2],
+ (q.options(eagerload('keywords')).
+ join('keywords').filter(keywords.c.name == 'red')).all())
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_cyclical(self):
- """test that a circular eager relationship breaks the cycle with a lazy loader"""
+ """A circular eager relationship breaks the cycle with a lazy loader"""
+ User, Address = self.User, self.Address
+ users, addresses = self.tables.get_all('users', 'addresses')
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relation(Address, lazy=False, backref=backref('user', lazy=False))
+ addresses = relation(Address, lazy=False,
+ backref=sa.orm.backref('user', lazy=False))
))
- assert class_mapper(User).get_property('addresses').lazy is False
- assert class_mapper(Address).get_property('user').lazy is False
+ assert sa.orm.class_mapper(User).get_property('addresses').lazy is False
+ assert sa.orm.class_mapper(Address).get_property('user').lazy is False
sess = create_session()
- assert fixtures.user_address_result == sess.query(User).all()
+ assert self.static.user_address_result == sess.query(User).all()
+ @testing.resolve_artifact_names
def test_double(self):
- """tests eager loading with two relations simulatneously, from the same table, using aliases. """
- openorders = alias(orders, 'openorders')
- closedorders = alias(orders, 'closedorders')
+ """Eager loading with two relations simultaneously, from the same table, using aliases."""
+ User, Address, Order = self.classes.get_all(
+ 'User', 'Address', 'Order')
+ users, addresses, orders = self.tables.get_all(
+ 'users', 'addresses', 'orders')
+
+ openorders = sa.alias(orders, 'openorders')
+ closedorders = sa.alias(orders, 'closedorders')
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relation(Address, lazy=False),
- open_orders = relation(mapper(Order, openorders, entity_name='open'), primaryjoin = and_(openorders.c.isopen == 1, users.c.id==openorders.c.user_id), lazy=False),
- closed_orders = relation(mapper(Order, closedorders,entity_name='closed'), primaryjoin = and_(closedorders.c.isopen == 0, users.c.id==closedorders.c.user_id), lazy=False)
- ))
+ open_orders = relation(
+ mapper(Order, openorders, entity_name='open'),
+ primaryjoin=sa.and_(openorders.c.isopen == 1,
+ users.c.id==openorders.c.user_id),
+ lazy=False),
+ closed_orders = relation(
+ mapper(Order, closedorders,entity_name='closed'),
+ primaryjoin=sa.and_(closedorders.c.isopen == 0,
+ users.c.id==closedorders.c.user_id),
+ lazy=False)))
+
q = create_session().query(User)
def go():
@@ -277,48 +324,83 @@ class EagerTest(FixtureTest):
] == q.all()
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_double_same_mappers(self):
- """tests eager loading with two relations simulatneously, from the same table, using aliases. """
+ """Eager loading with two relations simulatneously, from the same table, using aliases."""
+ User, Address, Order = self.classes.get_all(
+ 'User', 'Address', 'Order')
+ users, addresses, orders = self.tables.get_all(
+ 'users', 'addresses', 'orders')
mapper(Address, addresses)
mapper(Order, orders, properties={
- 'items':relation(Item, secondary=order_items, lazy=False, order_by=items.c.id),
- })
+ 'items': relation(Item, secondary=order_items, lazy=False,
+ order_by=items.c.id)})
mapper(Item, items)
- mapper(User, users, properties = dict(
- addresses = relation(Address, lazy=False),
- open_orders = relation(Order, primaryjoin = and_(orders.c.isopen == 1, users.c.id==orders.c.user_id), lazy=False),
- closed_orders = relation(Order, primaryjoin = and_(orders.c.isopen == 0, users.c.id==orders.c.user_id), lazy=False)
- ))
+ mapper(User, users, properties=dict(
+ addresses=relation(Address, lazy=False),
+ open_orders=relation(
+ Order,
+ primaryjoin=sa.and_(orders.c.isopen == 1,
+ users.c.id==orders.c.user_id),
+ lazy=False),
+ closed_orders=relation(
+ Order,
+ primaryjoin=sa.and_(orders.c.isopen == 0,
+ users.c.id==orders.c.user_id),
+ lazy=False)))
q = create_session().query(User)
def go():
assert [
- User(
- id=7,
- addresses=[Address(id=1)],
- open_orders = [Order(id=3, items=[Item(id=3), Item(id=4), Item(id=5)])],
- closed_orders = [Order(id=1, items=[Item(id=1), Item(id=2), Item(id=3)]), Order(id=5, items=[Item(id=5)])]
- ),
- User(
- id=8,
- addresses=[Address(id=2), Address(id=3), Address(id=4)],
- open_orders = [],
- closed_orders = []
- ),
- User(
- id=9,
- addresses=[Address(id=5)],
- open_orders = [Order(id=4, items=[Item(id=1), Item(id=5)])],
- closed_orders = [Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])]
- ),
+ User(id=7,
+ addresses=[
+ Address(id=1)],
+ open_orders=[Order(id=3,
+ items=[
+ Item(id=3),
+ Item(id=4),
+ Item(id=5)])],
+ closed_orders=[Order(id=1,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)]),
+ Order(id=5,
+ items=[
+ Item(id=5)])]),
+ User(id=8,
+ addresses=[
+ Address(id=2),
+ Address(id=3),
+ Address(id=4)],
+ open_orders = [],
+ closed_orders = []),
+ User(id=9,
+ addresses=[
+ Address(id=5)],
+ open_orders=[
+ Order(id=4,
+ items=[
+ Item(id=1),
+ Item(id=5)])],
+ closed_orders=[
+ Order(id=2,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)])]),
User(id=10)
-
] == q.all()
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_no_false_hits(self):
- """test that eager loaders don't interpret main table columns as part of their eager load."""
+ """Eager loaders don't interpret main table columns as part of their eager load."""
+ User, Address, Order = self.classes.get_all(
+ 'User', 'Address', 'Order')
+ users, addresses, orders = self.tables.get_all(
+ 'users', 'addresses', 'orders')
mapper(User, users, properties={
'addresses':relation(Address, lazy=False),
@@ -329,16 +411,22 @@ class EagerTest(FixtureTest):
allusers = create_session().query(User).all()
- # using a textual select, the columns will be 'id' and 'name'.
- # the eager loaders have aliases which should not hit on those columns, they should
- # be required to locate only their aliased/fully table qualified column name.
+ # using a textual select, the columns will be 'id' and 'name'. the
+ # eager loaders have aliases which should not hit on those columns,
+ # they should be required to locate only their aliased/fully table
+ # qualified column name.
noeagers = create_session().query(User).from_statement("select * from users").all()
assert 'orders' not in noeagers[0].__dict__
assert 'addresses' not in noeagers[0].__dict__
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_limit(self):
- """test limit operations combined with lazy-load relationships."""
+ """Limit operations combined with lazy-load relationships."""
+ User, Item, Address, Order = self.classes.get_all(
+ 'User', 'Item', 'Address', 'Order')
+ users, items, order_items, orders, addresses = self.tables.get_all(
+ 'users', 'items', 'order_items', 'orders', 'addresses')
mapper(Item, items)
mapper(Order, orders, properties={
@@ -354,19 +442,20 @@ class EagerTest(FixtureTest):
if testing.against('mysql'):
l = q.limit(2).all()
- assert fixtures.user_all_result[:2] == l
+ assert self.static.user_all_result[:2] == l
else:
l = q.limit(2).offset(1).order_by(User.id).all()
- print fixtures.user_all_result[1:3]
+ print self.static.user_all_result[1:3]
print l
- assert fixtures.user_all_result[1:3] == l
+ assert self.static.user_all_result[1:3] == l
+ @testing.resolve_artifact_names
def test_distinct(self):
# this is an involved 3x union of the users table to get a lot of rows.
# then see if the "distinct" works its way out. you actually get the same
# result with or without the distinct, just via less or more rows.
u2 = users.alias('u2')
- s = union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
+ s = sa.union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), lazy=False),
@@ -377,10 +466,11 @@ class EagerTest(FixtureTest):
def go():
l = q.filter(s.c.u2_id==User.id).distinct().all()
- assert fixtures.user_address_result == l
+ assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_limit_2(self):
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
@@ -392,9 +482,10 @@ class EagerTest(FixtureTest):
l = q.filter((Item.description=='item 2') | (Item.description=='item 5') | (Item.description=='item 3')).\
order_by(Item.id).limit(2).all()
- assert fixtures.item_keyword_result[1:3] == l
+ assert self.static.item_keyword_result[1:3] == l
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_limit_3(self):
"""test that the ORDER BY is propigated from the inner select to the outer select, when using the
'wrapped' select statement resulting from the combination of eager loading and limit/offset clauses."""
@@ -434,16 +525,17 @@ class EagerTest(FixtureTest):
)
] == l.all()
+ @testing.resolve_artifact_names
def test_limit_4(self):
# tests the LIMIT/OFFSET aliasing on a mapper against a select. original issue from ticket #904
- sel = select([users, addresses.c.email_address], users.c.id==addresses.c.user_id).alias('useralias')
+ sel = sa.select([users, addresses.c.email_address], users.c.id==addresses.c.user_id).alias('useralias')
mapper(User, sel, properties={
'orders':relation(Order, primaryjoin=sel.c.id==orders.c.user_id, lazy=False)
})
mapper(Order, orders)
sess = create_session()
- self.assertEquals(sess.query(User).first(),
+ eq_(sess.query(User).first(),
User(name=u'jack',orders=[
Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
@@ -451,6 +543,7 @@ class EagerTest(FixtureTest):
email_address=u'jack@bean.com',id=7)
)
+ @testing.resolve_artifact_names
def test_one_to_many_scalar(self):
mapper(User, users, properties = dict(
address = relation(mapper(Address, addresses), lazy=False, uselist=False)
@@ -463,6 +556,7 @@ class EagerTest(FixtureTest):
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_many_to_one(self):
mapper(Address, addresses, properties = dict(
user = relation(mapper(User, users), lazy=False)
@@ -477,7 +571,7 @@ class EagerTest(FixtureTest):
assert a.user is u1
self.assert_sql_count(testing.db, go, 1)
-
+ @testing.resolve_artifact_names
def test_one_and_many(self):
"""tests eager load for a parent object with a child object that
contains a many-to-many relationship to a third object."""
@@ -495,12 +589,12 @@ class EagerTest(FixtureTest):
l = q.filter("users.id in (7, 8, 9)")
def go():
- assert fixtures.user_order_result[0:3] == l.all()
+ assert self.static.user_order_result[0:3] == l.all()
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_double_with_aggregate(self):
-
- max_orders_by_user = select([func.max(orders.c.id).label('order_id')], group_by=[orders.c.user_id]).alias('max_orders_by_user')
+ max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')], group_by=[orders.c.user_id]).alias('max_orders_by_user')
max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).alias('max_orders')
@@ -528,6 +622,7 @@ class EagerTest(FixtureTest):
] == q.all()
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_wide(self):
mapper(Order, orders, properties={'items':relation(Item, secondary=order_items, lazy=False, order_by=items.c.id)})
mapper(Item, items)
@@ -537,12 +632,13 @@ class EagerTest(FixtureTest):
))
q = create_session().query(User)
l = q.all()
- assert fixtures.user_all_result == q.all()
+ assert self.static.user_all_result == q.all()
+ @testing.resolve_artifact_names
def test_against_select(self):
"""test eager loading of a mapper which is against a select"""
- s = select([orders], orders.c.isopen==1).alias('openorders')
+ s = sa.select([orders], orders.c.isopen==1).alias('openorders')
mapper(Order, s, properties={
'user':relation(User, lazy=False)
@@ -561,6 +657,7 @@ class EagerTest(FixtureTest):
Order(id=3, user=User(id=7)),
] == q.all()
+ @testing.resolve_artifact_names
def test_aliasing(self):
"""test that eager loading uses aliases to insulate the eager load from regular criterion against those tables."""
@@ -569,12 +666,13 @@ class EagerTest(FixtureTest):
))
q = create_session().query(User)
l = q.filter(addresses.c.email_address == 'ed@lala.com').filter(Address.user_id==User.id)
- assert fixtures.user_address_result[1:2] == l.all()
+ assert self.static.user_address_result[1:2] == l.all()
-class AddEntityTest(FixtureTest):
- keep_mappers = False
- keep_data = True
+class AddEntityTest(_fixtures.FixtureTest):
+ run_inserts = 'once'
+ run_deletes = None
+ @testing.resolve_artifact_names
def _assert_result(self):
return [
(
@@ -619,6 +717,7 @@ class AddEntityTest(FixtureTest):
)
]
+ @testing.resolve_artifact_names
def test_mapper_configured(self):
mapper(User, users, properties={
'addresses':relation(Address, lazy=False),
@@ -632,12 +731,13 @@ class AddEntityTest(FixtureTest):
sess = create_session()
- oalias = aliased(Order)
+ oalias = sa.orm.aliased(Order)
def go():
ret = sess.query(User, oalias).join(('orders', oalias)).order_by(User.id, oalias.id).all()
- self.assertEquals(ret, self._assert_result())
+ eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_options(self):
mapper(User, users, properties={
'addresses':relation(Address),
@@ -651,82 +751,78 @@ class AddEntityTest(FixtureTest):
sess = create_session()
- oalias = aliased(Order)
+ oalias = sa.orm.aliased(Order)
def go():
ret = sess.query(User, oalias).options(eagerload('addresses')).join(('orders', oalias)).order_by(User.id, oalias.id).all()
- self.assertEquals(ret, self._assert_result())
+ eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 6)
sess.clear()
def go():
ret = sess.query(User, oalias).options(eagerload('addresses'), eagerload(oalias.items)).join(('orders', oalias)).order_by(User.id, oalias.id).all()
- self.assertEquals(ret, self._assert_result())
+ eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 1)
-class OrderBySecondaryTest(ORMTest):
+class OrderBySecondaryTest(_base.MappedTest):
def define_tables(self, metadata):
- global a, b, m2m
- m2m = Table('mtom', metadata,
- Column('id', Integer, primary_key=True),
- Column('aid', Integer, ForeignKey('a.id')),
- Column('bid', Integer, ForeignKey('b.id')),
- )
-
- a = Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- )
- b = Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- )
-
- def insert_data(self):
- a.insert().execute([
- {'id':1, 'data':'a1'},
- {'id':2, 'data':'a2'}
- ])
-
- b.insert().execute([
- {'id':1, 'data':'b1'},
- {'id':2, 'data':'b2'},
- {'id':3, 'data':'b3'},
- {'id':4, 'data':'b4'},
- ])
-
- m2m.insert().execute([
- {'id':2, 'aid':1, 'bid':1},
- {'id':4, 'aid':2, 'bid':4},
- {'id':1, 'aid':1, 'bid':3},
- {'id':6, 'aid':2, 'bid':2},
- {'id':3, 'aid':1, 'bid':2},
- {'id':5, 'aid':2, 'bid':3},
- ])
-
+ Table('m2m', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('aid', Integer, ForeignKey('a.id')),
+ Column('bid', Integer, ForeignKey('b.id')))
+
+ Table('a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
+ Table('b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
+
+ def fixtures(self):
+ return dict(
+ a=(('id', 'data'),
+ (1, 'a1'),
+ (2, 'a2')),
+
+ b=(('id', 'data'),
+ (1, 'b1'),
+ (2, 'b2'),
+ (3, 'b3'),
+ (4, 'b4')),
+
+ m2m=(('id', 'aid', 'bid'),
+ (2, 1, 1),
+ (4, 2, 4),
+ (1, 1, 3),
+ (6, 2, 2),
+ (3, 1, 2),
+ (5, 2, 3)))
+
+ @testing.resolve_artifact_names
def test_ordering(self):
- class A(Base):pass
- class B(Base):pass
-
+ class A(_base.ComparableEntity):pass
+ class B(_base.ComparableEntity):pass
+
mapper(A, a, properties={
'bs':relation(B, secondary=m2m, lazy=False, order_by=m2m.c.id)
})
mapper(B, b)
-
+
sess = create_session()
- self.assertEquals(sess.query(A).all(), [A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]), A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])])
-
-
-class SelfReferentialEagerTest(ORMTest):
+ eq_(sess.query(A).all(), [A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]), A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])])
+
+
+class SelfReferentialEagerTest(_base.MappedTest):
def define_tables(self, metadata):
- global nodes
- nodes = Table('nodes', metadata,
- Column('id', Integer, Sequence('node_id_seq', optional=True), primary_key=True),
+ Table('nodes', metadata,
+ Column('id', Integer, sa.Sequence('node_id_seq', optional=True),
+ primary_key=True),
Column('parent_id', Integer, ForeignKey('nodes.id')),
Column('data', String(30)))
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_basic(self):
- class Node(Base):
+ class Node(_base.ComparableEntity):
def append(self, node):
self.children.append(node)
@@ -758,8 +854,9 @@ class SelfReferentialEagerTest(ORMTest):
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_lazy_fallback_doesnt_affect_eager(self):
- class Node(Base):
+ class Node(_base.ComparableEntity):
def append(self, node):
self.children.append(node)
@@ -778,14 +875,14 @@ class SelfReferentialEagerTest(ORMTest):
sess.flush()
sess.clear()
- # eager load with join depth 1. when eager load of 'n1'
- # hits the children of 'n12', no columns are present, eager loader
- # degrades to lazy loader; fine. but then, 'n12' is *also* in the
- # first level of columns since we're loading the whole table.
- # when those rows arrive, now we *can* eager load its children and an
- # eager collection should be initialized. essentially the 'n12' instance
- # is present in not just two different rows but two distinct sets of columns
- # in this result set.
+ # eager load with join depth 1. when eager load of 'n1' hits the
+ # children of 'n12', no columns are present, eager loader degrades to
+ # lazy loader; fine. but then, 'n12' is *also* in the first level of
+ # columns since we're loading the whole table. when those rows
+ # arrive, now we *can* eager load its children and an eager collection
+ # should be initialized. essentially the 'n12' instance is present in
+ # not just two different rows but two distinct sets of columns in this
+ # result set.
def go():
allnodes = sess.query(Node).order_by(Node.data).all()
n12 = allnodes[2]
@@ -799,8 +896,9 @@ class SelfReferentialEagerTest(ORMTest):
] == list(n12.children)
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_with_deferred(self):
- class Node(Base):
+ class Node(_base.ComparableEntity):
def append(self, node):
self.children.append(node)
@@ -833,9 +931,9 @@ class SelfReferentialEagerTest(ORMTest):
self.assert_sql_count(testing.db, go, 1)
-
+ @testing.resolve_artifact_names
def test_options(self):
- class Node(Base):
+ class Node(_base.ComparableEntity):
def append(self, node):
self.children.append(node)
@@ -881,8 +979,9 @@ class SelfReferentialEagerTest(ORMTest):
])
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_no_depth(self):
- class Node(Base):
+ class Node(_base.ComparableEntity):
def append(self, node):
self.children.append(node)
@@ -913,22 +1012,22 @@ class SelfReferentialEagerTest(ORMTest):
]) == d
self.assert_sql_count(testing.db, go, 3)
-class SelfReferentialM2MEagerTest(ORMTest):
+class SelfReferentialM2MEagerTest(_base.MappedTest):
def define_tables(self, metadata):
- global widget, widget_rel
-
- widget = Table('widget', metadata,
+ Table('widget', metadata,
Column('id', Integer, primary_key=True),
- Column('name', Unicode(40), nullable=False, unique=True),
+ Column('name', sa.Unicode(40), nullable=False, unique=True),
)
- widget_rel = Table('widget_rel', metadata,
+ Table('widget_rel', metadata,
Column('parent_id', Integer, ForeignKey('widget.id')),
Column('child_id', Integer, ForeignKey('widget.id')),
- UniqueConstraint('parent_id', 'child_id'),
+ sa.UniqueConstraint('parent_id', 'child_id'),
)
+
+ @testing.resolve_artifact_names
def test_basic(self):
- class Widget(Base):
+ class Widget(_base.ComparableEntity):
pass
mapper(Widget, widget, properties={
@@ -949,10 +1048,12 @@ class SelfReferentialM2MEagerTest(ORMTest):
assert [Widget(name='w1', children=[Widget(name='w2')])] == sess.query(Widget).filter(Widget.name==u'w1').all()
-class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
- keep_mappers = True
- keep_data = True
-
+class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
+ run_setup_mappers = 'once'
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
def setup_mappers(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
@@ -966,13 +1067,14 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
'keywords':relation(Keyword, secondary=item_keywords) #m2m
})
mapper(Keyword, keywords)
-
+
+ @testing.resolve_artifact_names
def test_two_entities(self):
sess = create_session()
# two FROM clauses
def go():
- self.assertEquals(
+ eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
@@ -985,7 +1087,7 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
# one FROM clause
def go():
- self.assertEquals(
+ eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
@@ -994,15 +1096,16 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
order_by(User.id, Order.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
-
+
+ @testing.resolve_artifact_names
def test_aliased_entity(self):
sess = create_session()
-
- oalias = aliased(Order)
-
+
+ oalias = sa.orm.aliased(Order)
+
# two FROM clauses
def go():
- self.assertEquals(
+ eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
@@ -1015,7 +1118,7 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
# one FROM clause
def go():
- self.assertEquals(
+ eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
@@ -1024,12 +1127,13 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
order_by(User.id, oalias.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
-
+
from sqlalchemy.engine.default import DefaultDialect
-
- # improper setup: oalias in the columns clause but join to usual orders alias.
- # this should create two FROM clauses even though the query has a from_clause set up via the join
- self.assert_compile(sess.query(User, oalias).join(User.orders).options(eagerload(oalias.items)).with_labels().statement,
+
+ # improper setup: oalias in the columns clause but join to usual
+ # orders alias. this should create two FROM clauses even though the
+ # query has a from_clause set up via the join
+ self.assert_compile(sess.query(User, oalias).join(User.orders).options(eagerload(oalias.items)).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, "\
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "\
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen, items_1.id AS items_1_id, "\
@@ -1038,23 +1142,23 @@ class MixedEntitiesTest(FixtureTest, AssertsCompiledSQL):
"LEFT OUTER JOIN items AS items_1 ON items_1.id = order_items_1.item_id ORDER BY users.id, items_1.id",
dialect=DefaultDialect()
)
-
-class CyclicalInheritingEagerTest(ORMTest):
+
+class CyclicalInheritingEagerTest(_base.MappedTest):
def define_tables(self, metadata):
- global t1, t2
- t1 = Table('t1', metadata,
+ Table('t1', metadata,
Column('c1', Integer, primary_key=True),
Column('c2', String(30)),
Column('type', String(30))
)
- t2 = Table('t2', metadata,
+ Table('t2', metadata,
Column('c1', Integer, primary_key=True),
Column('c2', String(30)),
Column('type', String(30)),
Column('t1.id', Integer, ForeignKey('t1.c1')))
+ @testing.resolve_artifact_names
def test_basic(self):
class T(object):
pass
@@ -1070,7 +1174,7 @@ class CyclicalInheritingEagerTest(ORMTest):
mapper(T, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1')
mapper(SubT, None, inherits=T, polymorphic_identity='subt1', properties={
- 't2s':relation(SubT2, lazy=False, backref=backref('subt', lazy=False))
+ 't2s':relation(SubT2, lazy=False, backref=sa.orm.backref('subt', lazy=False))
})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(SubT2, None, inherits=T2, polymorphic_identity='subt2')
@@ -1078,65 +1182,66 @@ class CyclicalInheritingEagerTest(ORMTest):
# testing a particular endless loop condition in eager join setup
create_session().query(SubT).all()
-class SubqueryTest(ORMTest):
+class SubqueryTest(_base.MappedTest):
def define_tables(self, metadata):
- global users_table, tags_table
-
- users_table = Table('users', metadata,
+ Table('users_table', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(16))
)
- tags_table = Table('tags', metadata,
+ Table('tags_table', metadata,
Column('id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey("users.id")),
- Column('score1', Float),
- Column('score2', Float),
+ Column('user_id', Integer, ForeignKey("users_table.id")),
+ Column('score1', sa.Float),
+ Column('score2', sa.Float),
)
+ @testing.resolve_artifact_names
def test_label_anonymizing(self):
- """test that eager loading works with subqueries with labels,
- even if an explicit labelname which conflicts with a label on the parent.
-
- There's not much reason a column_property() would ever need to have a label
- of a specific name (and they don't even need labels these days),
- unless you'd like the name to line up with a name
- that you may be using for a straight textual statement used for loading
- instances of that type.
-
+ """Eager loading works with subqueries with labels,
+
+ Even if an explicit labelname is used which conflicts with a label on
+ the parent.
+
+ There's not much reason a column_property() would ever need to have a
+ label of a specific name (and they don't even need labels these days),
+ unless you'd like the name to line up with a name that you may be
+ using for a straight textual statement used for loading instances of
+ that type.
+
"""
- class User(Base):
+ class User(_base.ComparableEntity):
@property
def prop_score(self):
return sum([tag.prop_score for tag in self.tags])
- class Tag(Base):
+ class Tag(_base.ComparableEntity):
@property
def prop_score(self):
return self.score1 * self.score2
-
+
for labeled, labelname in [(True, 'score'), (True, None), (False, None)]:
- clear_mappers()
-
+ sa.orm.clear_mappers()
+
tag_score = (tags_table.c.score1 * tags_table.c.score2)
- user_score = select([func.sum(tags_table.c.score1 *
- tags_table.c.score2)],
- tags_table.c.user_id == users_table.c.id)
-
+ user_score = sa.select([sa.func.sum(tags_table.c.score1 *
+ tags_table.c.score2)],
+ tags_table.c.user_id == users_table.c.id)
+
if labeled:
tag_score = tag_score.label(labelname)
user_score = user_score.label(labelname)
else:
user_score = user_score.as_scalar()
-
+
mapper(Tag, tags_table, properties={
- 'query_score': column_property(tag_score),
+ 'query_score': sa.orm.column_property(tag_score),
})
mapper(User, users_table, properties={
- 'tags': relation(Tag, backref='user', lazy=False),
- 'query_score': column_property(user_score),
+ 'tags': relation(Tag, backref='user', lazy=False),
+ 'query_score': sa.orm.column_property(user_score),
})
session = create_session()
@@ -1144,17 +1249,18 @@ class SubqueryTest(ORMTest):
session.save(User(name='bar', tags=[Tag(score1=5.0, score2=4.0), Tag(score1=50.0, score2=1.0), Tag(score1=15.0, score2=2.0)]))
session.flush()
session.clear()
-
+
for user in session.query(User).all():
- self.assertEquals(user.query_score, user.prop_score)
+ eq_(user.query_score, user.prop_score)
def go():
u = session.query(User).filter_by(name='joe').one()
- self.assertEquals(u.query_score, u.prop_score)
+ eq_(u.query_score, u.prop_score)
self.assert_sql_count(testing.db, go, 1)
-
+
for t in (tags_table, users_table):
t.delete().execute()
-
+
+
if __name__ == '__main__':
testenv.main()
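
The conversions in this file all take the same shape: define_tables() creates tables against the supplied metadata with no 'global' statements, and @testing.resolve_artifact_names injects those tables (and any classes from setup_classes()) into the test's namespace by name. Below is a minimal sketch of that pattern using hypothetical WidgetTest/widgets names; it assumes only the _base.MappedTest and testlib helpers that appear in this diff.

    from testlib import testing
    from testlib.sa import Table, Column, Integer, String
    from testlib.sa.orm import mapper, create_session
    from orm import _base


    class WidgetTest(_base.MappedTest):
        def define_tables(self, metadata):
            # no 'global' and no module-level variable; the table is picked
            # up from the metadata under its own name
            Table('widgets', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(30)))

        @testing.resolve_artifact_names
        def test_roundtrip(self):
            # the decorator injects 'widgets' into this namespace
            class Widget(_base.ComparableEntity):
                pass
            mapper(Widget, widgets)

            sess = create_session()
            sess.save(Widget(name='w1'))
            sess.flush()
            sess.clear()
            assert sess.query(Widget).filter_by(name='w1').one().name == 'w1'

The run_setup_mappers / run_inserts / run_deletes class attributes seen on MixedEntitiesTest take over the role of the old keep_mappers / keep_data switches on a per-class basis.
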
diff --git a/test/orm/entity.py b/test/orm/entity.py
index d9c9e4002..e62643138 100644
--- a/test/orm/entity.py
+++ b/test/orm/entity.py
@@ -4,8 +4,10 @@ from sqlalchemy.orm import *
from testlib import *
from testlib.tables import *
from testlib import fixtures
+from orm import _base
-class EntityTest(TestBase, AssertsExecutionResults):
+
+class EntityTest(_base.ORMTest):
"""tests mappers that are constructed based on "entity names", which allows the same class
to have multiple primary mappers """
@@ -224,7 +226,7 @@ class EntityTest(TestBase, AssertsExecutionResults):
assert u1list[0].name == 'this is user 1'
assert u2list[0].name == 'this is user 2'
-class SelfReferentialTest(ORMTest):
+class SelfReferentialTest(_base.MappedTest):
def define_tables(self, metadata):
global nodes
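
EntityTest's subject, "entity name" mappers, lets one class carry several primary mappers; the same facility shows up later in this diff where Order is mapped twice against aliased selectables. A rough sketch of the idea, with hypothetical users1/users2 tables and no claims beyond the keyword usage visible in these tests:

    from testlib.sa import MetaData, Table, Column, Integer, String
    from testlib.sa.orm import mapper, create_session

    metadata = MetaData()
    users1 = Table('users1', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('name', String(30)))
    users2 = Table('users2', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('name', String(30)))

    class User(object):
        pass

    # two primary mappers for one class, distinguished by entity_name
    mapper(User, users1)
    mapper(User, users2, entity_name='alt')

    sess = create_session()
    # the entity_name argument selects which mapper (and table) is queried
    default_q = sess.query(User)
    alt_q = sess.query(User, entity_name='alt')
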
diff --git a/test/orm/expire.py b/test/orm/expire.py
index e99607866..35364b429 100644
--- a/test/orm/expire.py
+++ b/test/orm/expire.py
@@ -1,18 +1,15 @@
"""test attribute/instance expiration, deferral of attributes, etc."""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes, exc as orm_exc
-from testlib import *
-from testlib.fixtures import *
import gc
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session, attributes
+from orm import _base, _fixtures
-class ExpireTest(FixtureTest):
- keep_mappers = False
- refresh_data = True
+class ExpireTest(_fixtures.FixtureTest):
+ @testing.resolve_artifact_names
def test_expire(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
@@ -51,14 +48,16 @@ class ExpireTest(FixtureTest):
assert u.name == 'jack'
self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_persistence_check(self):
mapper(User, users)
s = create_session()
u = s.get(User, 7)
s.clear()
- self.assertRaisesMessage(sa_exc.InvalidRequestError, r"is not persistent within this Session", s.expire, u)
-
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, r"is not persistent within this Session", s.expire, u)
+
+ @testing.resolve_artifact_names
def test_get_refreshes(self):
mapper(User, users)
s = create_session()
@@ -77,29 +76,31 @@ class ExpireTest(FixtureTest):
s.expire_all()
users.delete().where(User.id==10).execute()
-
+
# object is gone, get() returns None
assert u in s
assert s.get(User, 10) is None
assert u not in s # and expunges
-
+
# add it back
s.add(u)
# nope, raises ObjectDeletedError
- self.assertRaises(orm_exc.ObjectDeletedError, getattr, u, 'name')
-
+ self.assertRaises(sa.orm.exc.ObjectDeletedError, getattr, u, 'name')
+
+ @testing.resolve_artifact_names
def test_refresh_cancels_expire(self):
mapper(User, users)
s = create_session()
u = s.get(User, 7)
s.expire(u)
s.refresh(u)
-
+
def go():
u = s.get(User, 7)
self.assertEquals(u.name, 'jack')
self.assert_sql_count(testing.db, go, 0)
-
+
+ @testing.resolve_artifact_names
def test_expire_doesntload_on_set(self):
mapper(User, users)
@@ -114,6 +115,7 @@ class ExpireTest(FixtureTest):
sess.clear()
assert sess.query(User).get(7).name == 'somenewname'
+ @testing.resolve_artifact_names
def test_no_session(self):
mapper(User, users)
sess = create_session()
@@ -121,8 +123,9 @@ class ExpireTest(FixtureTest):
sess.expire(u, attribute_names=['name'])
sess.expunge(u)
- self.assertRaises(sa_exc.UnboundExecutionError, getattr, u, 'name')
-
+ self.assertRaises(sa.exc.UnboundExecutionError, getattr, u, 'name')
+
+ @testing.resolve_artifact_names
def test_pending_raises(self):
# this was the opposite in 0.4, but the reasoning there seemed off.
# expiring a pending instance makes no sense, so should raise
@@ -130,12 +133,13 @@ class ExpireTest(FixtureTest):
sess = create_session()
u = User(id=15)
sess.save(u)
- self.assertRaises(sa_exc.InvalidRequestError, sess.expire, u, ['name'])
-
+ self.assertRaises(sa.exc.InvalidRequestError, sess.expire, u, ['name'])
+
+ @testing.resolve_artifact_names
def test_no_instance_key(self):
- # this tests an artificial condition such that
+ # this tests an artificial condition such that
# an instance is pending, but has expired attributes. this
- # is actually part of a larger behavior when postfetch needs to
+ # is actually part of a larger behavior when postfetch needs to
# occur during a flush() on an instance that was just inserted
mapper(User, users)
sess = create_session()
@@ -148,6 +152,7 @@ class ExpireTest(FixtureTest):
sess.save(u)
assert u.name == 'jack'
+ @testing.resolve_artifact_names
def test_expire_preserves_changes(self):
"""test that the expire load operation doesn't revert post-expire changes"""
@@ -191,8 +196,8 @@ class ExpireTest(FixtureTest):
sess.query(Order).all()
assert o.isopen == 1
assert o.description == 'another new description'
-
-
+
+ @testing.resolve_artifact_names
def test_expire_committed(self):
"""test that the committed state of the attribute receives the most recent DB data"""
mapper(Order, orders)
@@ -208,6 +213,7 @@ class ExpireTest(FixtureTest):
sess.flush()
self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_expire_cascade(self):
mapper(User, users, properties={
'addresses':relation(Address, cascade="all, refresh-expire")
@@ -223,6 +229,7 @@ class ExpireTest(FixtureTest):
print attributes.instance_state(u).dict
assert u.addresses[0].email_address == 'ed@wood.com'
+ @testing.resolve_artifact_names
def test_expired_lazy(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
@@ -244,6 +251,7 @@ class ExpireTest(FixtureTest):
assert 'name' in u.__dict__
assert 'addresses' in u.__dict__
+ @testing.resolve_artifact_names
def test_expired_eager(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
@@ -269,7 +277,7 @@ class ExpireTest(FixtureTest):
sess.expire(u, ['name', 'addresses'])
assert 'name' not in u.__dict__
assert 'addresses' not in u.__dict__
-
+
def go():
sess.query(User).filter_by(id=7).one()
assert u.addresses[0].email_address == 'jack@bean.com'
@@ -277,7 +285,8 @@ class ExpireTest(FixtureTest):
# one load, since relation() + scalar are
# together when eager load used with Query
self.assert_sql_count(testing.db, go, 1)
-
+
+ @testing.resolve_artifact_names
def test_relation_changes_preserved(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
@@ -293,15 +302,18 @@ class ExpireTest(FixtureTest):
assert 'name' in u.__dict__
assert len(u.addresses) == 2
+ @testing.resolve_artifact_names
def test_eagerload_props_dontload(self):
- # relations currently have to load separately from scalar instances. the use case is:
- # expire "addresses". then access it. lazy load fires off to load "addresses", but needs
- # foreign key or primary key attributes in order to lazy load; hits those attributes,
- # such as below it hits "u.id". "u.id" triggers full unexpire operation, eagerloads
- # addresses since lazy=False. this is all wihtin lazy load which fires unconditionally;
- # so an unnecessary eagerload (or lazyload) was issued. would prefer not to complicate
- # lazyloading to "figure out" that the operation should be aborted right now.
-
+ # relations currently have to load separately from scalar instances.
+ # the use case is: expire "addresses". then access it. lazy load
+ # fires off to load "addresses", but needs foreign key or primary key
+ # attributes in order to lazy load; hits those attributes, such as
+ # below it hits "u.id". "u.id" triggers full unexpire operation,
+ # eagerloads addresses since lazy=False. this is all within lazy load
+ # which fires unconditionally; so an unnecessary eagerload (or
+ # lazyload) was issued. would prefer not to complicate lazyloading to
+ # "figure out" that the operation should be aborted right now.
+
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
})
@@ -313,31 +325,33 @@ class ExpireTest(FixtureTest):
assert 'addresses' not in u.__dict__
u.addresses
assert 'addresses' in u.__dict__
-
+
+ @testing.resolve_artifact_names
def test_expire_synonym(self):
mapper(User, users, properties={
- 'uname':synonym('name')
+ 'uname': sa.orm.synonym('name')
})
-
+
sess = create_session()
u = sess.query(User).get(7)
assert 'name' in u.__dict__
assert u.uname == u.name
-
+
sess.expire(u)
assert 'name' not in u.__dict__
-
+
users.update(users.c.id==7).execute(name='jack2')
assert u.name == 'jack2'
assert u.uname == 'jack2'
assert 'name' in u.__dict__
-
- # this wont work unless we add API hooks through the attr. system
- # to provide "expire" behavior on a synonym
- #sess.expire(u, ['uname'])
- #users.update(users.c.id==7).execute(name='jack3')
- #assert u.uname == 'jack3'
-
+
+ # this wont work unless we add API hooks through the attr. system to
+ # provide "expire" behavior on a synonym
+ # sess.expire(u, ['uname'])
+ # users.update(users.c.id==7).execute(name='jack3')
+ # assert u.uname == 'jack3'
+
+ @testing.resolve_artifact_names
def test_partial_expire(self):
mapper(Order, orders)
@@ -382,6 +396,7 @@ class ExpireTest(FixtureTest):
assert o.isopen == 5
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_partial_expire_lazy(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
@@ -426,6 +441,7 @@ class ExpireTest(FixtureTest):
assert u.name == 'ed'
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_partial_expire_eager(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
@@ -466,6 +482,7 @@ class ExpireTest(FixtureTest):
# doing it that way right now
#self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_relations_load_on_query(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
@@ -481,13 +498,15 @@ class ExpireTest(FixtureTest):
sess.expire(u, ['name', 'addresses'])
assert 'name' not in u.__dict__
assert 'addresses' not in u.__dict__
- sess.query(User).options(eagerload('addresses')).filter_by(id=8).all()
+ (sess.query(User).options(sa.orm.eagerload('addresses')).
+ filter_by(id=8).all())
assert 'name' in u.__dict__
assert 'addresses' in u.__dict__
-
+
+ @testing.resolve_artifact_names
def test_partial_expire_deferred(self):
mapper(Order, orders, properties={
- 'description':deferred(orders.c.description)
+ 'description': sa.orm.deferred(orders.c.description)
})
sess = create_session()
@@ -513,13 +532,13 @@ class ExpireTest(FixtureTest):
assert o.isopen == 1
self.assert_sql_count(testing.db, go, 1)
- clear_mappers()
+ sa.orm.clear_mappers()
mapper(Order, orders)
sess.clear()
# same tests, using deferred at the options level
- o = sess.query(Order).options(defer('description')).get(3)
+ o = sess.query(Order).options(sa.orm.defer('description')).get(3)
assert 'description' not in o.__dict__
@@ -550,7 +569,8 @@ class ExpireTest(FixtureTest):
assert o.description == 'order 3'
assert o.isopen == 1
self.assert_sql_count(testing.db, go, 1)
-
+
+ @testing.resolve_artifact_names
def test_eagerload_query_refreshes(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
@@ -566,7 +586,8 @@ class ExpireTest(FixtureTest):
sess.query(User).filter_by(id=8).all()
assert 'addresses' in u.__dict__
assert len(u.addresses) == 3
-
+
+ @testing.resolve_artifact_names
def test_expire_all(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user', lazy=False),
@@ -575,38 +596,43 @@ class ExpireTest(FixtureTest):
sess = create_session()
userlist = sess.query(User).all()
- assert fixtures.user_address_result == userlist
+ assert self.static.user_address_result == userlist
assert len(list(sess)) == 9
sess.expire_all()
gc.collect()
assert len(list(sess)) == 4 # since addresses were gc'ed
-
+
userlist = sess.query(User).all()
u = userlist[1]
- assert fixtures.user_address_result == userlist
+ assert self.static.user_address_result == userlist
assert len(list(sess)) == 9
-
-class PolymorphicExpireTest(ORMTest):
- keep_data = True
-
+
+class PolymorphicExpireTest(_base.MappedTest):
+ run_inserts = 'once'
+ run_deletes = None
+
def define_tables(self, metadata):
global people, engineers, Person, Engineer
people = Table('people', metadata,
- Column('person_id', Integer, Sequence('person_id_seq', optional=True), primary_key=True),
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
Column('status', String(30)),
)
-
- class Person(Base):
+
+ def setup_classes(self):
+ class Person(_base.ComparableEntity):
pass
class Engineer(Person):
pass
-
+
+ @testing.resolve_artifact_names
def insert_data(self):
people.insert().execute(
{'person_id':1, 'name':'person1', 'type':'person'},
@@ -618,6 +644,7 @@ class PolymorphicExpireTest(ORMTest):
{'person_id':3, 'status':'old engineer'},
)
+ @testing.resolve_artifact_names
def test_poly_deferred(self):
mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer')
@@ -641,7 +668,7 @@ class PolymorphicExpireTest(ORMTest):
def go():
sess.query(Person).all()
self.assert_sql_count(testing.db, go, 1)
-
+
for p in [p1, e1, e2]:
assert 'name' in p.__dict__
@@ -655,12 +682,11 @@ class PolymorphicExpireTest(ORMTest):
assert e2.status == 'old engineer'
self.assert_sql_count(testing.db, go, 2)
self.assertEquals(Engineer.name.get_history(e1), (['new engineer name'], [], ['engineer1']))
-
-
-class RefreshTest(FixtureTest):
- keep_mappers = False
- refresh_data = True
+
+class RefreshTest(_fixtures.FixtureTest):
+
+ @testing.resolve_artifact_names
def test_refresh(self):
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), backref='user')
@@ -669,7 +695,7 @@ class RefreshTest(FixtureTest):
u = s.get(User, 7)
u.name = 'foo'
a = Address()
- assert object_session(a) is None
+ assert sa.orm.object_session(a) is None
u.addresses.append(a)
assert a.email_address is None
assert id(a) in [id(x) for x in u.addresses]
@@ -698,14 +724,16 @@ class RefreshTest(FixtureTest):
# print u._state.callables
assert u.name == 'jack'
assert id(a) not in [id(x) for x in u.addresses]
-
+
+ @testing.resolve_artifact_names
def test_persistence_check(self):
mapper(User, users)
s = create_session()
u = s.get(User, 7)
s.clear()
- self.assertRaisesMessage(sa_exc.InvalidRequestError, r"is not persistent within this Session", lambda: s.refresh(u))
-
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, r"is not persistent within this Session", lambda: s.refresh(u))
+
+ @testing.resolve_artifact_names
def test_refresh_expired(self):
mapper(User, users)
s = create_session()
@@ -715,6 +743,7 @@ class RefreshTest(FixtureTest):
s.refresh(u)
assert u.name == 'jack'
+ @testing.resolve_artifact_names
def test_refresh_with_lazy(self):
"""test that when a lazy loader is set as a trigger on an object's attribute
(at the attribute level, not the class level), a refresh() operation doesnt
@@ -722,13 +751,13 @@ class RefreshTest(FixtureTest):
s = create_session()
mapper(User, users, properties={'addresses':relation(mapper(Address, addresses))})
- q = s.query(User).options(lazyload('addresses'))
+ q = s.query(User).options(sa.orm.lazyload('addresses'))
u = q.filter(users.c.id==8).first()
def go():
s.refresh(u)
self.assert_sql_count(testing.db, go, 1)
-
+ @testing.resolve_artifact_names
def test_refresh_with_eager(self):
"""test that a refresh/expire operation loads rows properly and sends correct "isnew" state to eager loaders"""
@@ -749,6 +778,7 @@ class RefreshTest(FixtureTest):
assert len(u.addresses) == 3
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_refresh2(self):
"""test a hang condition that was occuring on expire/refresh"""
@@ -757,7 +787,7 @@ class RefreshTest(FixtureTest):
mapper(User, users, properties = dict(addresses=relation(Address,cascade="all, delete-orphan",lazy=False)) )
- u=User()
+ u = User()
u.name='Justin'
a = Address(id=10, email_address='lala')
u.addresses.append(a)
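
ExpireTest repeatedly exercises the same cycle: expire() wipes loaded attribute state, and the next attribute access re-selects from the database. A condensed sketch of that cycle, assuming the User class and users table provided by the fixture base above:

    from testlib.sa.orm import mapper, create_session

    mapper(User, users)
    sess = create_session()
    u = sess.query(User).get(7)
    assert u.name == 'jack'

    # expire() discards the loaded state; no SQL is emitted yet
    sess.expire(u)
    assert 'name' not in u.__dict__

    # change the row behind the ORM's back, then touch the attribute: the
    # access triggers a fresh SELECT and sees the new value
    users.update(users.c.id == 7).execute(name='jack2')
    assert u.name == 'jack2'
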
diff --git a/test/orm/extendedattr.py b/test/orm/extendedattr.py
index a5c2c4ace..4d0c41f31 100644
--- a/test/orm/extendedattr.py
+++ b/test/orm/extendedattr.py
@@ -6,8 +6,8 @@ from sqlalchemy.orm.collections import collection
from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute, is_instrumented
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import InstrumentationManager
-
from testlib import *
+from orm import _base
class MyTypesManager(InstrumentationManager):
@@ -96,7 +96,7 @@ class MyClass(object):
else:
del self._goofy_dict[key]
-class UserDefinedExtensionTest(TestBase):
+class UserDefinedExtensionTest(_base.ORMTest):
def tearDownAll(self):
clear_mappers()
attributes._install_lookup_strategy(util.symbol('native'))
diff --git a/test/orm/generative.py b/test/orm/generative.py
index 88793f743..3fcbf2918 100644
--- a/test/orm/generative.py
+++ b/test/orm/generative.py
@@ -1,46 +1,45 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-import testlib.tables as tables
-
-# TODO: these are more tests that should be updated to be part of test/orm/query.py
-
-class Foo(object):
- def __init__(self, **kwargs):
- for k in kwargs:
- setattr(self, k, kwargs[k])
-
-class GenerativeQueryTest(TestBase):
- def setUpAll(self):
- global foo, metadata
- metadata = MetaData(testing.db)
- foo = Table('foo', metadata,
- Column('id', Integer, Sequence('foo_id_seq'), primary_key=True),
- Column('bar', Integer),
- Column('range', Integer))
+from testlib import testing, sa
+from testlib.sa import Table, Column, Integer, String, ForeignKey, MetaData
+from sqlalchemy.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from testlib.compat import set
+from orm import _base, _fixtures
+
+
+class GenerativeQueryTest(_base.MappedTest):
+ run_inserts = 'once'
+ run_deletes = None
+
+ def define_tables(self, metadata):
+ Table('foo', metadata,
+ Column('id', Integer, sa.Sequence('foo_id_seq'), primary_key=True),
+ Column('bar', Integer),
+ Column('range', Integer))
+
+ def fixtures(self):
+ rows = tuple([(i, i % 10) for i in range(100)])
+ foo_data = (('bar', 'range'),) + rows
+ return dict(foo=foo_data)
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ class Foo(_base.BasicEntity):
+ pass
mapper(Foo, foo)
- metadata.create_all()
-
- sess = create_session(bind=testing.db)
- for i in range(100):
- sess.save(Foo(bar=i, range=i%10))
- sess.flush()
-
- def tearDownAll(self):
- metadata.drop_all()
- clear_mappers()
+ @testing.resolve_artifact_names
def test_selectby(self):
- res = create_session(bind=testing.db).query(Foo).filter_by(range=5)
+ res = create_session().query(Foo).filter_by(range=5)
assert res.order_by([Foo.bar])[0].bar == 5
- assert res.order_by([desc(Foo.bar)])[0].bar == 95
+ assert res.order_by([sa.desc(Foo.bar)])[0].bar == 95
- @testing.unsupported('mssql')
+ @testing.unsupported('mssql', 'FIXME: verify not fails_on()')
@testing.fails_on('maxdb')
+ @testing.resolve_artifact_names
def test_slice(self):
- sess = create_session(bind=testing.db)
+ sess = create_session()
query = sess.query(Foo)
orig = query.all()
assert query[1] == orig[1]
@@ -53,189 +52,221 @@ class GenerativeQueryTest(TestBase):
assert query[10:20][5] == orig[10:20][5]
@testing.uses_deprecated('Call to deprecated function apply_max')
+ @testing.resolve_artifact_names
def test_aggregate(self):
- sess = create_session(bind=testing.db)
+ sess = create_session()
query = sess.query(Foo)
assert query.count() == 100
assert query.filter(foo.c.bar<30).min(foo.c.bar) == 0
assert query.filter(foo.c.bar<30).max(foo.c.bar) == 29
- assert query.filter(foo.c.bar<30).values(func.max(foo.c.bar)).next()[0] == 29
- assert query.filter(foo.c.bar<30).values(func.max(foo.c.bar)).next()[0] == 29
+ assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
+ assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
+ @testing.resolve_artifact_names
def test_aggregate_1(self):
if (testing.against('mysql') and
testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma')):
return
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
assert query.filter(foo.c.bar<30).sum(foo.c.bar) == 435
@testing.fails_on('firebird', 'mssql')
+ @testing.resolve_artifact_names
def test_aggregate_2(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
avg = query.filter(foo.c.bar < 30).avg(foo.c.bar)
- assert round(avg, 1) == 14.5
+ eq_(round(avg, 1), 14.5)
+ @testing.resolve_artifact_names
def test_aggregate_3(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
- avg_f = query.filter(foo.c.bar<30).values(func.avg(foo.c.bar)).next()[0]
+ avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
assert round(avg_f, 1) == 14.5
- avg_o = query.filter(foo.c.bar<30).values(func.avg(foo.c.bar)).next()[0]
+ avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
assert round(avg_o, 1) == 14.5
+ @testing.resolve_artifact_names
def test_filter(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
assert query.count() == 100
assert query.filter(Foo.bar < 30).count() == 30
res2 = query.filter(Foo.bar < 30).filter(Foo.bar > 10)
assert res2.count() == 19
+ @testing.resolve_artifact_names
def test_options(self):
- query = create_session(bind=testing.db).query(Foo)
- class ext1(MapperExtension):
+ query = create_session().query(Foo)
+ class ext1(sa.orm.MapperExtension):
def populate_instance(self, mapper, selectcontext, row, instance, **flags):
instance.TEST = "hello world"
- return EXT_CONTINUE
- assert query.options(extension(ext1()))[0].TEST == "hello world"
+ return sa.orm.EXT_CONTINUE
+ assert query.options(sa.orm.extension(ext1()))[0].TEST == "hello world"
+ @testing.resolve_artifact_names
def test_order_by(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
assert query.order_by([Foo.bar])[0].bar == 0
- assert query.order_by([desc(Foo.bar)])[0].bar == 99
+ assert query.order_by([sa.desc(Foo.bar)])[0].bar == 99
+ @testing.resolve_artifact_names
def test_offset(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
assert list(query.order_by([Foo.bar]).offset(10))[0].bar == 10
+ @testing.resolve_artifact_names
def test_offset(self):
- query = create_session(bind=testing.db).query(Foo)
+ query = create_session().query(Foo)
assert len(list(query.limit(10))) == 10
-class Obj1(object):
- pass
-class Obj2(object):
- pass
-
-class GenerativeTest2(TestBase):
- def setUpAll(self):
- global metadata, table1, table2
- metadata = MetaData()
- table1 = Table('Table1', metadata,
- Column('id', Integer, primary_key=True),
- )
- table2 = Table('Table2', metadata,
- Column('t1id', Integer, ForeignKey("Table1.id"), primary_key=True),
- Column('num', Integer, primary_key=True),
- )
- mapper(Obj1, table1)
- mapper(Obj2, table2)
- metadata.create_all(bind=testing.db)
- testing.db.execute(table1.insert(), {'id':1},{'id':2},{'id':3},{'id':4})
- testing.db.execute(table2.insert(), {'num':1,'t1id':1},{'num':2,'t1id':1},{'num':3,'t1id':1},\
-{'num':4,'t1id':2},{'num':5,'t1id':2},{'num':6,'t1id':3})
-
- def tearDownAll(self):
- metadata.drop_all(bind=testing.db)
- clear_mappers()
-
- def test_distinctcount(self):
- query = create_session(bind=testing.db).query(Obj1)
- assert query.count() == 4
- res = query.filter(and_(table1.c.id==table2.c.t1id,table2.c.t1id==1))
- assert res.count() == 3
- res = query.filter(and_(table1.c.id==table2.c.t1id,table2.c.t1id==1)).distinct()
- self.assertEqual(res.count(), 1)
-class RelationsTest(TestBase, AssertsExecutionResults):
- def setUpAll(self):
- tables.create()
- tables.data()
- def tearDownAll(self):
- tables.drop()
- def tearDown(self):
- clear_mappers()
- def test_jointo(self):
- """test the join and outerjoin functions on Query"""
- mapper(tables.User, tables.users, properties={
- 'orders':relation(mapper(tables.Order, tables.orders, properties={
- 'items':relation(mapper(tables.Item, tables.orderitems))
- }))
- })
- session = create_session(bind=testing.db)
- query = session.query(tables.User)
- x = query.join(['orders', 'items']).filter(tables.Item.item_id==2)
- print x.compile()
- self.assert_result(list(x), tables.User, tables.user_result[2])
- def test_outerjointo(self):
+class GenerativeTest2(_base.MappedTest):
+
+ def define_tables(self, metadata):
+ Table('Table1', metadata,
+ Column('id', Integer, primary_key=True))
+ Table('Table2', metadata,
+ Column('t1id', Integer, ForeignKey("Table1.id"),
+ primary_key=True),
+ Column('num', Integer, primary_key=True))
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ class Obj1(_base.BasicEntity):
+ pass
+ class Obj2(_base.BasicEntity):
+ pass
+
+ mapper(Obj1, Table1)
+ mapper(Obj2, Table2)
+
+ def fixtures(self):
+ return dict(
+ Table1=(('id',),
+ (1,),
+ (2,),
+ (3,),
+ (4,)),
+ Table2=(('num', 't1id'),
+ (1, 1),
+ (2, 1),
+ (3, 1),
+ (4, 2),
+ (5, 2),
+ (6, 3)))
+
+ @testing.resolve_artifact_names
+ def test_distinct_count(self):
+ query = create_session().query(Obj1)
+ eq_(query.count(), 4)
+
+ res = query.filter(sa.and_(Table1.c.id == Table2.c.t1id,
+ Table2.c.t1id == 1))
+ eq_(res.count(), 3)
+ res = query.filter(sa.and_(Table1.c.id == Table2.c.t1id,
+ Table2.c.t1id == 1)).distinct()
+ eq_(res.count(), 1)
+
+
+class RelationsTest(_fixtures.FixtureTest):
+ run_setup_mappers = 'once'
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(User, users, properties={
+ 'orders':relation(mapper(Order, orders, properties={
+ 'addresses':relation(mapper(Address, addresses))}))})
+
+
+ @testing.resolve_artifact_names
+ def test_join(self):
+ """Query.join"""
+
+ session = create_session()
+ q = (session.query(User).join(['orders', 'addresses']).
+ filter(Address.id == 1))
+ eq_([User(id=7)], q.all())
+
+ @testing.resolve_artifact_names
+ def test_outer_join(self):
+ """Query.outerjoin"""
+
+ session = create_session()
+ q = (session.query(User).outerjoin(['orders', 'addresses']).
+ filter(sa.or_(Order.id == None, Address.id == 1)))
+ eq_(set([User(id=7), User(id=8), User(id=10)]),
+ set(q.all()))
+
+ @testing.resolve_artifact_names
+ def test_outer_join_count(self):
"""test the join and outerjoin functions on Query"""
- mapper(tables.User, tables.users, properties={
- 'orders':relation(mapper(tables.Order, tables.orders, properties={
- 'items':relation(mapper(tables.Item, tables.orderitems))
- }))
- })
- session = create_session(bind=testing.db)
- query = session.query(tables.User)
- x = query.outerjoin(['orders', 'items']).filter(or_(tables.Order.order_id==None,tables.Item.item_id==2))
- print x.compile()
- self.assert_result(list(x), tables.User, *tables.user_result[1:3])
- def test_outerjointo_count(self):
- """test the join and outerjoin functions on Query"""
- mapper(tables.User, tables.users, properties={
- 'orders':relation(mapper(tables.Order, tables.orders, properties={
- 'items':relation(mapper(tables.Item, tables.orderitems))
- }))
- })
- session = create_session(bind=testing.db)
- query = session.query(tables.User)
- x = query.outerjoin(['orders', 'items']).filter(or_(tables.Order.order_id==None,tables.Item.item_id==2)).count()
- assert x==2
+
+ session = create_session()
+
+ q = (session.query(User).outerjoin(['orders', 'addresses']).
+ filter(sa.or_(Order.id == None, Address.id == 1)))
+ eq_(q.count(), 4)
+
+ @testing.resolve_artifact_names
def test_from(self):
- mapper(tables.User, tables.users, properties={
- 'orders':relation(mapper(tables.Order, tables.orders, properties={
- 'items':relation(mapper(tables.Item, tables.orderitems))
- }))
- })
- session = create_session(bind=testing.db)
- query = session.query(tables.User)
- x = query.select_from(tables.users.outerjoin(tables.orders).outerjoin(tables.orderitems)).\
- filter(or_(tables.Order.order_id==None,tables.Item.item_id==2))
- print x.compile()
- self.assert_result(list(x), tables.User, *tables.user_result[1:3])
-
-
-class CaseSensitiveTest(TestBase):
- def setUpAll(self):
- global metadata, table1, table2
- metadata = MetaData(testing.db)
- table1 = Table('Table1', metadata,
- Column('ID', Integer, primary_key=True),
- )
- table2 = Table('Table2', metadata,
- Column('T1ID', Integer, ForeignKey("Table1.ID"), primary_key=True),
- Column('NUM', Integer, primary_key=True),
- )
- mapper(Obj1, table1)
- mapper(Obj2, table2)
- metadata.create_all()
- table1.insert().execute({'ID':1},{'ID':2},{'ID':3},{'ID':4})
- table2.insert().execute({'NUM':1,'T1ID':1},{'NUM':2,'T1ID':1},{'NUM':3,'T1ID':1},\
-{'NUM':4,'T1ID':2},{'NUM':5,'T1ID':2},{'NUM':6,'T1ID':3})
-
- def tearDownAll(self):
- metadata.drop_all()
- clear_mappers()
-
- def test_distinctcount(self):
+ session = create_session()
+
+ sel = users.outerjoin(orders).outerjoin(
+ addresses, orders.c.address_id == addresses.c.id)
+ q = (session.query(User).select_from(sel).
+ filter(sa.or_(Order.id == None, Address.id == 1)))
+ eq_(set([User(id=7), User(id=8), User(id=10)]),
+ set(q.all()))
+
+
+class CaseSensitiveTest(_base.MappedTest):
+
+ def define_tables(self, metadata):
+ Table('Table1', metadata,
+ Column('ID', Integer, primary_key=True))
+ Table('Table2', metadata,
+ Column('T1ID', Integer, ForeignKey("Table1.ID"),
+ primary_key=True),
+ Column('NUM', Integer, primary_key=True))
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ class Obj1(_base.BasicEntity):
+ pass
+ class Obj2(_base.BasicEntity):
+ pass
+
+ mapper(Obj1, Table1)
+ mapper(Obj2, Table2)
+
+ def fixtures(self):
+ return dict(
+ Table1=(('ID',),
+ (1,),
+ (2,),
+ (3,),
+ (4,)),
+ Table2=(('NUM', 'T1ID'),
+ (1, 1),
+ (2, 1),
+ (3, 1),
+ (4, 2),
+ (5, 2),
+ (6, 3)))
+
+ @testing.resolve_artifact_names
+ def test_distinct_count(self):
q = create_session(bind=testing.db).query(Obj1)
assert q.count() == 4
- res = q.filter(and_(table1.c.ID==table2.c.T1ID,table2.c.T1ID==1))
+ res = q.filter(sa.and_(Table1.c.ID==Table2.c.T1ID,Table2.c.T1ID==1))
assert res.count() == 3
- res = q.filter(and_(table1.c.ID==table2.c.T1ID,table2.c.T1ID==1)).distinct()
+ res = q.filter(sa.and_(Table1.c.ID==Table2.c.T1ID,Table2.c.T1ID==1)).distinct()
self.assertEqual(res.count(), 1)
-
if __name__ == "__main__":
testenv.main()
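
The fixtures() methods added above all use one convention: for each table, the first tuple names the columns and every following tuple is one row of values. A hypothetical helper (not part of testlib) showing how that shape maps onto insert parameters:

    def rows_from_fixture(fixture):
        """First tuple is the column names; the rest are rows of values."""
        header, rows = fixture[0], fixture[1:]
        return [dict(zip(header, row)) for row in rows]

    # the same data GenerativeQueryTest.fixtures() builds for 'foo'
    foo_data = (('bar', 'range'),) + tuple((i, i % 10) for i in range(100))

    params = rows_from_fixture(foo_data)
    assert params[0] == {'bar': 0, 'range': 0}
    assert params[11] == {'bar': 11, 'range': 1}
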
diff --git a/test/orm/instrumentation.py b/test/orm/instrumentation.py
index 5cb3a5c59..a5579b548 100644
--- a/test/orm/instrumentation.py
+++ b/test/orm/instrumentation.py
@@ -9,7 +9,7 @@ from sqlalchemy.orm import relation
from testlib.testing import eq_, ne_
from testlib.compat import _function_named
-from testlib import TestBase
+from orm import _base
def modifies_instrumentation_finders(fn):
@@ -35,7 +35,7 @@ def with_lookup_strategy(strategy):
return decorate
-class InitTest(TestBase):
+class InitTest(_base.ORMTest):
def fixture(self):
return Table('t', MetaData(),
Column('id', Integer, primary_key=True),
@@ -408,7 +408,7 @@ class InitTest(TestBase):
eq_(inits, [(C, 'on_init', C)])
-class MapperInitTest(TestBase):
+class MapperInitTest(_base.ORMTest):
def fixture(self):
return Table('t', MetaData(),
@@ -443,7 +443,7 @@ class MapperInitTest(TestBase):
self.assertRaises((AttributeError, TypeError),
attributes.instance_state, cobj)
-class InstrumentationCollisionTest(TestBase):
+class InstrumentationCollisionTest(_base.ORMTest):
def test_none(self):
class A(object): pass
attributes.register_class(A)
@@ -513,7 +513,7 @@ class InstrumentationCollisionTest(TestBase):
self.assertRaises(TypeError, attributes.register_class, B1)
-class OnLoadTest(TestBase):
+class OnLoadTest(_base.ORMTest):
"""Check that Events.on_load is not hit in regular attributes operations."""
def test_basic(self):
@@ -537,7 +537,7 @@ class OnLoadTest(TestBase):
del A
-class ExtendedEventsTest(TestBase):
+class ExtendedEventsTest(_base.ORMTest):
"""Allow custom Events implementations."""
@modifies_instrumentation_finders
@@ -556,7 +556,7 @@ class ExtendedEventsTest(TestBase):
assert isinstance(manager.events, MyEvents)
-class NativeInstrumentationTest(TestBase):
+class NativeInstrumentationTest(_base.ORMTest):
@with_lookup_strategy(util.symbol('native'))
def test_register_reserved_attribute(self):
class T(object): pass
@@ -595,7 +595,7 @@ class NativeInstrumentationTest(TestBase):
self.assertRaises(KeyError, mapper, T, t)
-class MiscTest(TestBase):
+class MiscTest(_base.ORMTest):
"""Seems basic, but not directly covered elsewhere!"""
def test_compileonattr(self):
@@ -696,7 +696,7 @@ class MiscTest(TestBase):
a = A()
assert not a.bs
-class FinderTest(TestBase):
+class FinderTest(_base.ORMTest):
def test_standard(self):
class A(object): pass
diff --git a/test/orm/lazy_relations.py b/test/orm/lazy_relations.py
index 1dd5d5e94..d236d1a0d 100644
--- a/test/orm/lazy_relations.py
+++ b/test/orm/lazy_relations.py
@@ -1,19 +1,21 @@
"""basic tests of lazy loaded attributes"""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from testlib import *
-from testlib.fixtures import *
-from query import QueryTest
import datetime
+from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import attributes
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from orm import _base, _fixtures
+
-class LazyTest(FixtureTest):
- keep_mappers = False
- keep_data = True
+class LazyTest(_fixtures.FixtureTest):
+ run_inserts = 'once'
+ run_deletes = None
+ @testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), lazy=True)
@@ -22,6 +24,7 @@ class LazyTest(FixtureTest):
q = sess.query(User)
assert [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])] == q.filter(users.c.id == 7).all()
+ @testing.resolve_artifact_names
def test_needs_parent(self):
"""test the error raised when parent object is not bound."""
@@ -34,6 +37,7 @@ class LazyTest(FixtureTest):
sess.expunge(u)
self.assertRaises(sa_exc.InvalidRequestError, getattr, u, 'addresses')
+ @testing.resolve_artifact_names
def test_orderby(self):
mapper(User, users, properties = {
'addresses':relation(mapper(Address, addresses), lazy=True, order_by=addresses.c.email_address),
@@ -54,6 +58,7 @@ class LazyTest(FixtureTest):
User(id=10, addresses=[])
] == q.all()
+ @testing.resolve_artifact_names
def test_orderby_secondary(self):
"""tests that a regular mapper select on a single table can order by a relation to a second table"""
@@ -78,11 +83,12 @@ class LazyTest(FixtureTest):
]),
] == l
+ @testing.resolve_artifact_names
def test_orderby_desc(self):
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relation(Address, lazy=True, order_by=[desc(addresses.c.email_address)]),
+ addresses = relation(Address, lazy=True, order_by=[sa.desc(addresses.c.email_address)]),
))
sess = create_session()
assert [
@@ -100,6 +106,7 @@ class LazyTest(FixtureTest):
User(id=10, addresses=[])
] == sess.query(User).all()
+ @testing.resolve_artifact_names
def test_no_orphan(self):
"""test that a lazily loaded child object is not marked as an orphan"""
@@ -111,9 +118,9 @@ class LazyTest(FixtureTest):
sess = create_session()
user = sess.query(User).get(7)
assert getattr(User, 'addresses').hasparent(attributes.instance_state(user.addresses[0]), optimistic=True)
- assert not class_mapper(Address)._is_orphan(attributes.instance_state(user.addresses[0]))
-
+ assert not sa.orm.class_mapper(Address)._is_orphan(attributes.instance_state(user.addresses[0]))
+ @testing.resolve_artifact_names
def test_limit(self):
"""test limit operations combined with lazy-load relationships."""
@@ -131,11 +138,12 @@ class LazyTest(FixtureTest):
if testing.against('maxdb', 'mssql'):
l = q.limit(2).all()
- assert fixtures.user_all_result[:2] == l
+ assert self.static.user_all_result[:2] == l
else:
l = q.limit(2).offset(1).all()
- assert fixtures.user_all_result[1:3] == l
+ assert self.static.user_all_result[1:3] == l
+ @testing.resolve_artifact_names
def test_distinct(self):
mapper(Item, items)
mapper(Order, orders, properties={
@@ -151,11 +159,12 @@ class LazyTest(FixtureTest):
# use a union all to get a lot of rows to join against
u2 = users.alias('u2')
- s = union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
+ s = sa.union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
print [key for key in s.c.keys()]
l = q.filter(s.c.u2_id==User.id).distinct().all()
- assert fixtures.user_all_result == l
+ assert self.static.user_all_result == l
+ @testing.resolve_artifact_names
def test_one_to_many_scalar(self):
mapper(User, users, properties = dict(
address = relation(mapper(Address, addresses), lazy=True, uselist=False)
@@ -164,17 +173,18 @@ class LazyTest(FixtureTest):
l = q.filter(users.c.id == 7).all()
assert [User(id=7, address=Address(id=1))] == l
+ @testing.resolve_artifact_names
def test_double(self):
"""tests lazy loading with two relations simulatneously, from the same table, using aliases. """
- openorders = alias(orders, 'openorders')
- closedorders = alias(orders, 'closedorders')
+ openorders = sa.alias(orders, 'openorders')
+ closedorders = sa.alias(orders, 'closedorders')
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relation(Address, lazy = True),
- open_orders = relation(mapper(Order, openorders, entity_name='open'), primaryjoin = and_(openorders.c.isopen == 1, users.c.id==openorders.c.user_id), lazy=True),
- closed_orders = relation(mapper(Order, closedorders,entity_name='closed'), primaryjoin = and_(closedorders.c.isopen == 0, users.c.id==closedorders.c.user_id), lazy=True)
+ open_orders = relation(mapper(Order, openorders, entity_name='open'), primaryjoin = sa.and_(openorders.c.isopen == 1, users.c.id==openorders.c.user_id), lazy=True),
+ closed_orders = relation(mapper(Order, closedorders,entity_name='closed'), primaryjoin = sa.and_(closedorders.c.isopen == 0, users.c.id==closedorders.c.user_id), lazy=True)
))
q = create_session().query(User)
@@ -206,6 +216,7 @@ class LazyTest(FixtureTest):
assert [Order(id=1), Order(id=5)] == create_session().query(Order, entity_name='closed').with_parent(user, property='closed_orders').all()
assert [Order(id=3)] == create_session().query(Order, entity_name='open').with_parent(user, property='open_orders').all()
+ @testing.resolve_artifact_names
def test_many_to_many(self):
mapper(Keyword, keywords)
@@ -214,10 +225,11 @@ class LazyTest(FixtureTest):
))
q = create_session().query(Item)
- assert fixtures.item_keyword_result == q.all()
+ assert self.static.item_keyword_result == q.all()
- assert fixtures.item_keyword_result[0:2] == q.join('keywords').filter(keywords.c.name == 'red').all()
+ assert self.static.item_keyword_result[0:2] == q.join('keywords').filter(keywords.c.name == 'red').all()
+ @testing.resolve_artifact_names
def test_uses_get(self):
"""test that a simple many-to-one lazyload optimizes to use query.get()."""
@@ -242,8 +254,9 @@ class LazyTest(FixtureTest):
# lazy load of a1.user should get it from the session
assert a1.user is u1
self.assert_sql_count(testing.db, go, 0)
- clear_mappers()
+ sa.orm.clear_mappers()
+ @testing.resolve_artifact_names
def test_many_to_one(self):
mapper(Address, addresses, properties = dict(
user = relation(mapper(User, users), lazy=True)
@@ -258,6 +271,7 @@ class LazyTest(FixtureTest):
assert a.user is u1
+ @testing.resolve_artifact_names
def test_backrefs_dont_lazyload(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user')
@@ -292,10 +306,12 @@ class LazyTest(FixtureTest):
assert ad2 in u1.addresses
self.assert_sql_count(testing.db, go, 1)
-class M2OGetTest(FixtureTest):
- keep_mappers = False
- keep_data = True
+class M2OGetTest(_fixtures.FixtureTest):
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
def test_m2o_noload(self):
"""test that a NULL foreign key doesn't trigger a lazy load"""
mapper(User, users)
@@ -319,60 +335,56 @@ class M2OGetTest(FixtureTest):
assert ad3.user is None
self.assert_sql_count(testing.db, go, 1)
-class CorrelatedTest(ORMTest):
- keep_mappers = False
- keep_data = False
-
+class CorrelatedTest(_base.MappedTest):
+
def define_tables(self, meta):
- global user_t, stuff
-
- user_t = Table('users', meta,
- Column('id', Integer, primary_key=True),
- Column('name', String(50))
- )
-
- stuff = Table('stuff', meta,
- Column('id', Integer, primary_key=True),
- Column('date', Date),
- Column('user_id', Integer, ForeignKey('users.id')))
-
+ Table('user_t', meta,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)))
+
+ Table('stuff', meta,
+ Column('id', Integer, primary_key=True),
+ Column('date', sa.Date),
+ Column('user_id', Integer, ForeignKey('user_t.id')))
+
+ @testing.resolve_artifact_names
def insert_data(self):
user_t.insert().execute(
{'id':1, 'name':'user1'},
{'id':2, 'name':'user2'},
- {'id':3, 'name':'user3'},
- )
+ {'id':3, 'name':'user3'})
stuff.insert().execute(
{'id':1, 'user_id':1, 'date':datetime.date(2007, 10, 15)},
{'id':2, 'user_id':1, 'date':datetime.date(2007, 12, 15)},
{'id':3, 'user_id':1, 'date':datetime.date(2007, 11, 15)},
{'id':4, 'user_id':2, 'date':datetime.date(2008, 1, 15)},
- {'id':5, 'user_id':3, 'date':datetime.date(2007, 6, 15)},
- )
-
+ {'id':5, 'user_id':3, 'date':datetime.date(2007, 6, 15)})
+
+ @testing.resolve_artifact_names
def test_correlated_lazyload(self):
- class User(Base):
+ class User(_base.ComparableEntity):
pass
- class Stuff(Base):
+ class Stuff(_base.ComparableEntity):
pass
-
+
mapper(Stuff, stuff)
- stuff_view = select([stuff.c.id]).where(stuff.c.user_id==user_t.c.id).correlate(user_t).order_by(desc(stuff.c.date)).limit(1)
+ stuff_view = sa.select([stuff.c.id]).where(stuff.c.user_id==user_t.c.id).correlate(user_t).order_by(sa.desc(stuff.c.date)).limit(1)
mapper(User, user_t, properties={
- 'stuff':relation(Stuff, primaryjoin=and_(user_t.c.id==stuff.c.user_id, stuff.c.id==(stuff_view.as_scalar())))
+ 'stuff':relation(Stuff, primaryjoin=sa.and_(user_t.c.id==stuff.c.user_id, stuff.c.id==(stuff_view.as_scalar())))
})
sess = create_session()
- self.assertEquals(sess.query(User).all(), [
- User(name='user1', stuff=[Stuff(date=datetime.date(2007, 12, 15), id=2)]),
- User(name='user2', stuff=[Stuff(id=4, date=datetime.date(2008, 1 , 15))]),
+ eq_(sess.query(User).all(), [
+ User(name='user1', stuff=[Stuff(date=datetime.date(2007, 12, 15), id=2)]),
+ User(name='user2', stuff=[Stuff(id=4, date=datetime.date(2008, 1 , 15))]),
User(name='user3', stuff=[Stuff(id=5, date=datetime.date(2007, 6, 15))])
])
+
if __name__ == '__main__':
testenv.main()
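
test_uses_get and test_many_to_one above both rely on the same property: a plain many-to-one lazy load resolves through the identity map, so the parent comes back without another SELECT when it is already in the session. A condensed sketch, assuming the User/Address classes and users/addresses tables from the fixture base:

    from testlib.sa.orm import mapper, relation, create_session

    mapper(User, users)
    mapper(Address, addresses, properties={
        'user': relation(User, lazy=True)
    })

    sess = create_session()
    u8 = sess.query(User).get(8)      # User 8 now sits in the identity map
    a = sess.query(Address).filter_by(user_id=8).first()

    # the lazy many-to-one load is satisfied from the identity map, so the
    # parent is the very same instance that was loaded above
    assert a.user is u8
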
diff --git a/test/orm/lazytest1.py b/test/orm/lazytest1.py
index 90cbbe208..5ebb8feeb 100644
--- a/test/orm/lazytest1.py
+++ b/test/orm/lazytest1.py
@@ -1,46 +1,47 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-
-class LazyTest(TestBase, AssertsExecutionResults):
- def setUpAll(self):
- global info_table, data_table, rel_table, metadata
- metadata = MetaData(testing.db)
- info_table = Table('infos', metadata,
- Column('pk', Integer, primary_key=True),
- Column('info', String(128)))
-
- data_table = Table('data', metadata,
- Column('data_pk', Integer, primary_key=True),
- Column('info_pk', Integer,
- ForeignKey(info_table.c.pk)),
- Column('timeval', Integer),
- Column('data_val', String(128)))
-
- rel_table = Table('rels', metadata,
- Column('rel_pk', Integer, primary_key=True),
- Column('info_pk', Integer,
- ForeignKey(info_table.c.pk)),
- Column('start', Integer),
- Column('finish', Integer))
-
- metadata.create_all()
- info_table.insert().execute(
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from orm import _base
+
+
+class LazyTest(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table('infos', metadata,
+ Column('pk', Integer, primary_key=True),
+ Column('info', String(128)))
+
+ Table('data', metadata,
+ Column('data_pk', Integer, primary_key=True),
+ Column('info_pk', Integer,
+ ForeignKey('infos.pk')),
+ Column('timeval', Integer),
+ Column('data_val', String(128)))
+
+ Table('rels', metadata,
+ Column('rel_pk', Integer, primary_key=True),
+ Column('info_pk', Integer,
+ ForeignKey('infos.pk')),
+ Column('start', Integer),
+ Column('finish', Integer))
+
+ @testing.resolve_artifact_names
+ def insert_data(self):
+ infos.insert().execute(
{'pk':1, 'info':'pk_1_info'},
{'pk':2, 'info':'pk_2_info'},
{'pk':3, 'info':'pk_3_info'},
{'pk':4, 'info':'pk_4_info'},
{'pk':5, 'info':'pk_5_info'})
- rel_table.insert().execute(
+ rels.insert().execute(
{'rel_pk':1, 'info_pk':1, 'start':10, 'finish':19},
{'rel_pk':2, 'info_pk':1, 'start':100, 'finish':199},
{'rel_pk':3, 'info_pk':2, 'start':20, 'finish':29},
{'rel_pk':4, 'info_pk':3, 'start':13, 'finish':23},
{'rel_pk':5, 'info_pk':5, 'start':15, 'finish':25})
- data_table.insert().execute(
+ data.insert().execute(
{'data_pk':1, 'info_pk':1, 'timeval':11, 'data_val':'11_data'},
{'data_pk':2, 'info_pk':1, 'timeval':9, 'data_val':'9_data'},
{'data_pk':3, 'info_pk':1, 'timeval':13, 'data_val':'13_data'},
@@ -48,16 +49,13 @@ class LazyTest(TestBase, AssertsExecutionResults):
{'data_pk':5, 'info_pk':2, 'timeval':13, 'data_val':'13_data'},
{'data_pk':6, 'info_pk':1, 'timeval':15, 'data_val':'15_data'})
-
- def tearDownAll(self):
- metadata.drop_all()
-
+ @testing.resolve_artifact_names
def testone(self):
- """Tests a lazy load which has multiple join conditions.
+ """A lazy load which has multiple join conditions.
- ...including two that are against the same column in the child table.
- """
+ Including two that are against the same column in the child table.
+ """
class Information(object):
pass
@@ -69,16 +67,16 @@ class LazyTest(TestBase, AssertsExecutionResults):
session = create_session()
- mapper(Data, data_table)
- mapper(Relation, rel_table, properties={
+ mapper(Data, data)
+ mapper(Relation, rels, properties={
'datas': relation(Data,
- primaryjoin=and_(
- rel_table.c.info_pk ==
- data_table.c.info_pk,
- data_table.c.timeval >= rel_table.c.start,
- data_table.c.timeval <= rel_table.c.finish),
- foreign_keys=[data_table.c.info_pk])})
- mapper(Information, info_table, properties={
+ primaryjoin=sa.and_(
+ rels.c.info_pk ==
+ data.c.info_pk,
+ data.c.timeval >= rels.c.start,
+ data.c.timeval <= rels.c.finish),
+ foreign_keys=[data.c.info_pk])})
+ mapper(Information, infos, properties={
'rels': relation(Relation)
})
@@ -87,5 +85,6 @@ class LazyTest(TestBase, AssertsExecutionResults):
assert len(info.rels) == 2
assert len(info.rels[0].datas) == 3
+
if __name__ == "__main__":
testenv.main()
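
The mapping under test in lazytest1 is worth spelling out: the lazy loader is driven by a composite primaryjoin rather than a simple foreign-key equality. A restatement of that configuration with the reasoning in comments, assuming the infos/rels/data tables and the Information/Relation/Data classes defined above:

    from testlib import sa
    from testlib.sa.orm import mapper, relation

    mapper(Data, data)
    mapper(Relation, rels, properties={
        'datas': relation(
            Data,
            # match the linking column AND require the child's timeval to
            # fall inside the parent's [start, finish] range
            primaryjoin=sa.and_(rels.c.info_pk == data.c.info_pk,
                                data.c.timeval >= rels.c.start,
                                data.c.timeval <= rels.c.finish),
            # only info_pk is a real dependency; the range columns are
            # filter criteria, so the foreign side must be named explicitly
            foreign_keys=[data.c.info_pk])})
    mapper(Information, infos, properties={
        'rels': relation(Relation)})
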
diff --git a/test/orm/manytomany.py b/test/orm/manytomany.py
index e8580af4a..5a470d78e 100644
--- a/test/orm/manytomany.py
+++ b/test/orm/manytomany.py
@@ -1,73 +1,68 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from testlib import *
-
-class Place(object):
- '''represents a place'''
- def __init__(self, name=None):
- self.name = name
- def __str__(self):
- return "(Place '%s')" % self.name
- def __repr__(self):
- return str(self)
-
-class PlaceThingy(object):
- '''represents a thingy attached to a Place'''
- def __init__(self, name=None):
- self.name = name
-
-class Transition(object):
- '''represents a transition'''
- def __init__(self, name=None):
- self.name = name
- self.inputs = []
- self.outputs = []
- def __repr__(self):
- return object.__repr__(self)+ " " + repr(self.inputs) + " " + repr(self.outputs)
-
-class M2MTest(ORMTest):
- def define_tables(self, metadata):
- global place
- place = Table('place', metadata,
- Column('place_id', Integer, Sequence('pid_seq', optional=True), primary_key=True),
- Column('name', String(30), nullable=False),
- )
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from orm import _base
- global transition
- transition = Table('transition', metadata,
- Column('transition_id', Integer, Sequence('tid_seq', optional=True), primary_key=True),
- Column('name', String(30), nullable=False),
- )
- global place_thingy
- place_thingy = Table('place_thingy', metadata,
- Column('thingy_id', Integer, Sequence('thid_seq', optional=True), primary_key=True),
- Column('place_id', Integer, ForeignKey('place.place_id'), nullable=False),
- Column('name', String(30), nullable=False)
- )
+class M2MTest(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table('place', metadata,
+ Column('place_id', Integer, sa.Sequence('pid_seq', optional=True),
+ primary_key=True),
+ Column('name', String(30), nullable=False))
+
+ Table('transition', metadata,
+ Column('transition_id', Integer,
+ sa.Sequence('tid_seq', optional=True), primary_key=True),
+ Column('name', String(30), nullable=False))
+
+ Table('place_thingy', metadata,
+ Column('thingy_id', Integer, sa.Sequence('thid_seq', optional=True),
+ primary_key=True),
+ Column('place_id', Integer, ForeignKey('place.place_id'),
+ nullable=False),
+ Column('name', String(30), nullable=False))
# association table #1
- global place_input
- place_input = Table('place_input', metadata,
+ Table('place_input', metadata,
Column('place_id', Integer, ForeignKey('place.place_id')),
- Column('transition_id', Integer, ForeignKey('transition.transition_id')),
- )
+ Column('transition_id', Integer,
+ ForeignKey('transition.transition_id')))
# association table #2
- global place_output
- place_output = Table('place_output', metadata,
+ Table('place_output', metadata,
Column('place_id', Integer, ForeignKey('place.place_id')),
- Column('transition_id', Integer, ForeignKey('transition.transition_id')),
- )
+ Column('transition_id', Integer,
+ ForeignKey('transition.transition_id')))
- global place_place
- place_place = Table('place_place', metadata,
- Column('pl1_id', Integer, ForeignKey('place.place_id')),
- Column('pl2_id', Integer, ForeignKey('place.place_id')),
- )
+ Table('place_place', metadata,
+ Column('pl1_id', Integer, ForeignKey('place.place_id')),
+ Column('pl2_id', Integer, ForeignKey('place.place_id')))
+ def setup_classes(self):
+ class Place(_base.BasicEntity):
+ def __init__(self, name=None):
+ self.name = name
+ def __str__(self):
+ return "(Place '%s')" % self.name
+ __repr__ = __str__
+
+ class PlaceThingy(_base.BasicEntity):
+ def __init__(self, name=None):
+ self.name = name
+
+ class Transition(_base.BasicEntity):
+ def __init__(self, name=None):
+ self.name = name
+ self.inputs = []
+ self.outputs = []
+ def __repr__(self):
+ return ' '.join((object.__repr__(self),
+ repr(self.inputs),
+ repr(self.outputs)))
+
+ @testing.resolve_artifact_names
def testerror(self):
mapper(Place, place, properties={
'transitions':relation(Transition, secondary=place_input, backref='places')
@@ -75,9 +70,10 @@ class M2MTest(ORMTest):
mapper(Transition, transition, properties={
'places':relation(Place, secondary=place_input, backref='transitions')
})
- self.assertRaisesMessage(sa_exc.ArgumentError, "Error creating backref", compile_mappers)
-
+ self.assertRaisesMessage(sa.exc.ArgumentError, "Error creating backref",
+ sa.orm.compile_mappers)
+ @testing.resolve_artifact_names
def testcircular(self):
"""tests a many-to-many relationship from a table to itself."""
@@ -127,6 +123,7 @@ class M2MTest(ORMTest):
[sess.delete(p) for p in p1,p2,p3,p4,p5,p6,p7]
sess.flush()
+ @testing.resolve_artifact_names
def testdouble(self):
"""tests that a mapper can have two eager relations to the same table, via
two different association tables. aliases are required."""
@@ -157,6 +154,7 @@ class M2MTest(ORMTest):
'outputs': (Place, [{'name':'place2'}, {'name':'place3'}])
})
+ @testing.resolve_artifact_names
def testbidirectional(self):
"""tests a many-to-many backrefs"""
Place.mapper = mapper(Place, place)
@@ -187,17 +185,22 @@ class M2MTest(ORMTest):
self.assert_result([t1], Transition, {'outputs': (Place, [{'name':'place3'}, {'name':'place1'}])})
self.assert_result([p2], Place, {'inputs': (Transition, [{'name':'transition1'},{'name':'transition2'}])})
-class M2MTest2(ORMTest):
+
+class M2MTest2(_base.MappedTest):
def define_tables(self, metadata):
- global studentTbl
- studentTbl = Table('student', metadata, Column('name', String(20), primary_key=True))
- global courseTbl
- courseTbl = Table('course', metadata, Column('name', String(20), primary_key=True))
- global enrolTbl
- enrolTbl = Table('enrol', metadata,
- Column('student_id', String(20), ForeignKey('student.name'),primary_key=True),
- Column('course_id', String(20), ForeignKey('course.name'), primary_key=True))
+ Table('student', metadata,
+ Column('name', String(20), primary_key=True))
+
+ Table('course', metadata,
+ Column('name', String(20), primary_key=True))
+ Table('enroll', metadata,
+ Column('student_id', String(20), ForeignKey('student.name'),
+ primary_key=True),
+ Column('course_id', String(20), ForeignKey('course.name'),
+ primary_key=True))
+
+ @testing.resolve_artifact_names
def testcircular(self):
class Student(object):
def __init__(self, name=''):
@@ -205,10 +208,11 @@ class M2MTest2(ORMTest):
class Course(object):
def __init__(self, name=''):
self.name = name
- Student.mapper = mapper(Student, studentTbl)
- Course.mapper = mapper(Course, courseTbl, properties = {
- 'students': relation(Student.mapper, enrolTbl, lazy=True, backref='courses')
- })
+
+ mapper(Student, student)
+ mapper(Course, course, properties={
+ 'students': relation(Student, enroll, lazy=True, backref='courses')})
+
sess = create_session()
s1 = Student('Student1')
c1 = Course('Course1')
@@ -228,18 +232,21 @@ class M2MTest2(ORMTest):
del s.courses[1]
self.assert_(len(s.courses) == 2)
+ @testing.resolve_artifact_names
def test_delete(self):
- """test that many-to-many table gets cleared out with deletion from the backref side"""
+ """A many-to-many table gets cleared out with deletion from the backref side"""
class Student(object):
def __init__(self, name=''):
self.name = name
class Course(object):
def __init__(self, name=''):
self.name = name
- Student.mapper = mapper(Student, studentTbl)
- Course.mapper = mapper(Course, courseTbl, properties = {
- 'students': relation(Student.mapper, enrolTbl, lazy=True, backref='courses')
- })
+
+ mapper(Student, student)
+ mapper(Course, course, properties = {
+ 'students': relation(Student, enroll, lazy=True,
+ backref='courses')})
+
sess = create_session()
s1 = Student('Student1')
c1 = Course('Course1')
@@ -252,37 +259,33 @@ class M2MTest2(ORMTest):
sess.flush()
sess.delete(s1)
sess.flush()
- assert enrolTbl.count().scalar() == 0
+ assert enroll.count().scalar() == 0
-class M2MTest3(ORMTest):
+class M2MTest3(_base.MappedTest):
def define_tables(self, metadata):
- global c, c2a1, c2a2, b, a
- c = Table('c', metadata,
+ Table('c', metadata,
Column('c1', Integer, primary_key = True),
- Column('c2', String(20)),
- )
+ Column('c2', String(20)))
- a = Table('a', metadata,
+ Table('a', metadata,
Column('a1', Integer, primary_key=True),
Column('a2', String(20)),
- Column('c1', Integer, ForeignKey('c.c1'))
- )
+ Column('c1', Integer, ForeignKey('c.c1')))
- c2a1 = Table('ctoaone', metadata,
+ Table('c2a1', metadata,
Column('c1', Integer, ForeignKey('c.c1')),
- Column('a1', Integer, ForeignKey('a.a1'))
- )
- c2a2 = Table('ctoatwo', metadata,
+ Column('a1', Integer, ForeignKey('a.a1')))
+
+ Table('c2a2', metadata,
Column('c1', Integer, ForeignKey('c.c1')),
- Column('a1', Integer, ForeignKey('a.a1'))
- )
+ Column('a1', Integer, ForeignKey('a.a1')))
- b = Table('b', metadata,
+ Table('b', metadata,
Column('b1', Integer, primary_key=True),
Column('a1', Integer, ForeignKey('a.a1')),
- Column('b2', Boolean)
- )
+ Column('b2', sa.Boolean))
+ @testing.resolve_artifact_names
def testbasic(self):
class C(object):pass
class A(object):pass
@@ -290,20 +293,16 @@ class M2MTest3(ORMTest):
mapper(B, b)
- mapper(A, a,
- properties = {
- 'tbs' : relation(B, primaryjoin=and_(b.c.a1==a.c.a1, b.c.b2 == True), lazy=False),
- }
- )
+ mapper(A, a, properties={
+ 'tbs': relation(B, primaryjoin=sa.and_(b.c.a1 == a.c.a1,
+ b.c.b2 == True),
+ lazy=False)})
- mapper(C, c,
- properties = {
- 'a1s' : relation(A, secondary=c2a1, lazy=False),
- 'a2s' : relation(A, secondary=c2a2, lazy=False)
- }
- )
+ mapper(C, c, properties={
+ 'a1s': relation(A, secondary=c2a1, lazy=False),
+ 'a2s': relation(A, secondary=c2a2, lazy=False)})
- o1 = create_session().query(C).get(1)
+ assert create_session().query(C).compile()
if __name__ == "__main__":
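
A minimal sketch of the _base.MappedTest layout the converted files above follow, assuming @testing.resolve_artifact_names injects the tables and classes registered in define_tables() and setup_classes() into the test method's namespace (which is how bare names such as place and Transition stay reachable). ExampleTest and Thing are made-up names, and the snippet presumes the suite's own testlib environment rather than a standalone install:

    from testlib import sa, testing
    from testlib.sa import Table, Column, Integer, String
    from testlib.sa.orm import mapper, create_session
    from orm import _base

    class ExampleTest(_base.MappedTest):
        def define_tables(self, metadata):
            # tables go on the shared metadata; no module-level globals needed
            Table('thing', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(30)))

        def setup_classes(self):
            class Thing(_base.BasicEntity):
                pass

        @testing.resolve_artifact_names
        def test_roundtrip(self):
            # 'thing' and 'Thing' are assumed to be resolved into locals here
            mapper(Thing, thing)
            sess = create_session()
            t = Thing()
            t.name = 't1'
            sess.save(t)
            sess.flush()
            assert sess.query(Thing).count() == 1
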
diff --git a/test/orm/mapper.py b/test/orm/mapper.py
index 017b2534c..2192e6857 100644
--- a/test/orm/mapper.py
+++ b/test/orm/mapper.py
@@ -503,7 +503,7 @@ class MapperTest(MapperSuperTest):
#l = create_session().query(User).select(order_by=None)
- @testing.unsupported('firebird')
+ @testing.unsupported('firebird', 'FIXME: verify not fails_on')
def test_function(self):
"""Test mapping to a SELECT statement that has functions in it."""
@@ -521,7 +521,7 @@ class MapperTest(MapperSuperTest):
assert l[0].concat == l[0].user_id * 2 == 14
assert l[1].concat == l[1].user_id * 2 == 16
- @testing.unsupported('firebird')
+ @testing.unsupported('firebird', 'FIXME: verify not fails_on')
def test_count(self):
"""test the count function on Query.
diff --git a/test/orm/memusage.py b/test/orm/memusage.py
index 1851639ed..af22d0b12 100644
--- a/test/orm/memusage.py
+++ b/test/orm/memusage.py
@@ -1,14 +1,18 @@
import testenv; testenv.configure_for_tests()
import gc
-from sqlalchemy import MetaData, Integer, String, ForeignKey
-from sqlalchemy.orm import mapper, relation, clear_mappers, create_session
-from sqlalchemy.orm.mapper import Mapper, _mapper_registry
-from sqlalchemy.orm.session import _sessions
-from testlib import *
-from testlib.fixtures import Base
+from sqlalchemy.orm import mapper, relation, create_session, clear_mappers
+from sqlalchemy.orm.mapper import _mapper_registry
+from sqlalchemy.orm.session import _sessions
-class A(Base):pass
-class B(Base):pass
+from testlib import testing
+from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey
+from orm import _base
+
+
+class A(_base.ComparableEntity):
+ pass
+class B(_base.ComparableEntity):
+ pass
def profile_memory(func):
# run the test 50 times. if length of gc.get_objects()
@@ -40,11 +44,11 @@ def assert_no_mappers():
gc.collect()
assert len(_mapper_registry) == 0
-class EnsureZeroed(TestBase, AssertsExecutionResults):
+class EnsureZeroed(_base.ORMTest):
def setUp(self):
_sessions.clear()
_mapper_registry.clear()
-
+
class MemUsageTest(EnsureZeroed):
def test_session(self):
@@ -52,20 +56,18 @@ class MemUsageTest(EnsureZeroed):
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
- Column('col2', String(30))
- )
+ Column('col2', String(30)))
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30)),
- Column('col3', Integer, ForeignKey("mytable.col1"))
- )
+ Column('col3', Integer, ForeignKey("mytable.col1")))
metadata.create_all()
m1 = mapper(A, table1, properties={
- "bs":relation(B, cascade="all, delete")
- })
+ "bs":relation(B, cascade="all, delete")})
+
m2 = mapper(B, table2)
m3 = mapper(A, table1, non_primary=True)
@@ -107,14 +109,12 @@ class MemUsageTest(EnsureZeroed):
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
- Column('col2', String(30))
- )
+ Column('col2', String(30)))
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30)),
- Column('col3', Integer, ForeignKey("mytable.col1"))
- )
+ Column('col3', Integer, ForeignKey("mytable.col1")))
@profile_memory
def go():
@@ -168,19 +168,24 @@ class MemUsageTest(EnsureZeroed):
)
table2 = Table("mytable2", metadata,
- Column('col1', Integer, ForeignKey('mytable.col1'), primary_key=True),
+ Column('col1', Integer, ForeignKey('mytable.col1'),
+ primary_key=True),
Column('col3', String(30)),
)
@profile_memory
def go():
- class A(Base):
+ class A(_base.ComparableEntity):
pass
class B(A):
pass
- mapper(A, table1, polymorphic_on=table1.c.col2, polymorphic_identity='a')
- mapper(B, table2, inherits=A, polymorphic_identity='b')
+ mapper(A, table1,
+ polymorphic_on=table1.c.col2,
+ polymorphic_identity='a')
+ mapper(B, table2,
+ inherits=A,
+ polymorphic_identity='b')
sess = create_session()
a1 = A()
@@ -234,9 +239,9 @@ class MemUsageTest(EnsureZeroed):
@profile_memory
def go():
- class A(Base):
+ class A(_base.ComparableEntity):
pass
- class B(Base):
+ class B(_base.ComparableEntity):
pass
mapper(A, table1, properties={
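
profile_memory is only partially visible in the hunk above; the following is a rough sketch of the idea it encodes, assuming the check amounts to gc.get_objects() levelling off after repeated runs. The real helper in testlib may differ in detail:

    import gc

    def profile_memory(func):
        # sketch: run the test body many times and require the tracked-object
        # count to level off, i.e. no per-iteration leak of mappers or sessions
        def profile(*args, **kw):
            samples = []
            for _ in range(50):
                func(*args, **kw)
                gc.collect()
                samples.append(len(gc.get_objects()))
            # after warm-up, the last few samples should be identical
            assert len(set(samples[-3:])) == 1, samples[-5:]
        return profile
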
diff --git a/test/orm/merge.py b/test/orm/merge.py
index 6ca42d53d..a9386786f 100644
--- a/test/orm/merge.py
+++ b/test/orm/merge.py
@@ -1,25 +1,15 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from sqlalchemy.orm import mapperlib, attributes
-from sqlalchemy.util import OrderedSet
-from testlib import *
-from testlib import fixtures
-from testlib.tables import *
-import testlib.tables as tables
-
-class MergeTest(TestBase, AssertsExecutionResults):
- """tests session.merge() functionality"""
- def setUpAll(self):
- tables.create()
-
- def tearDownAll(self):
- tables.drop()
-
- def tearDown(self):
- clear_mappers()
- tables.delete()
+from testlib import sa, testing
+from testlib.sa.util import OrderedSet
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from orm import _base, _fixtures
+
+
+class MergeTest(_fixtures.FixtureTest):
+ """Session..merge() functionality"""
+
+ run_inserts = None
def on_load_tracker(self, cls, canary=None):
if canary is None:
@@ -27,41 +17,37 @@ class MergeTest(TestBase, AssertsExecutionResults):
canary.called += 1
canary.called = 0
- manager = attributes.manager_of_class(cls)
+ manager = sa.orm.attributes.manager_of_class(cls)
manager.events.add_listener('on_load', canary)
return canary
+ @testing.resolve_artifact_names
def test_transient_to_pending(self):
- class User(fixtures.Base):
- pass
mapper(User, users)
sess = create_session()
on_load = self.on_load_tracker(User)
- u = User(user_id=7, user_name='fred')
+ u = User(id=7, name='fred')
assert on_load.called == 0
u2 = sess.merge(u)
assert on_load.called == 1
assert u2 in sess
- self.assertEquals(u2, User(user_id=7, user_name='fred'))
+ self.assertEquals(u2, User(id=7, name='fred'))
sess.flush()
sess.clear()
- self.assertEquals(sess.query(User).first(), User(user_id=7, user_name='fred'))
+ self.assertEquals(sess.query(User).first(), User(id=7, name='fred'))
+ @testing.resolve_artifact_names
def test_transient_to_pending_collection(self):
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
mapper(User, users, properties={'addresses':relation(Address, backref='user', collection_class=OrderedSet)})
mapper(Address, addresses)
on_load = self.on_load_tracker(User)
self.on_load_tracker(Address, on_load)
- u = User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=1, email_address='fred1'),
- Address(address_id=2, email_address='fred2'),
+ u = User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=1, email_address='fred1'),
+ Address(id=2, email_address='fred2'),
]))
assert on_load.called == 0
@@ -77,54 +63,50 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess.clear()
self.assertEquals(sess.query(User).one(),
- User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=1, email_address='fred1'),
- Address(address_id=2, email_address='fred2'),
+ User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=1, email_address='fred1'),
+ Address(id=2, email_address='fred2'),
]))
)
+ @testing.resolve_artifact_names
def test_transient_to_persistent(self):
- class User(fixtures.Base):
- pass
mapper(User, users)
on_load = self.on_load_tracker(User)
sess = create_session()
- u = User(user_id=7, user_name='fred')
+ u = User(id=7, name='fred')
sess.save(u)
sess.flush()
sess.clear()
assert on_load.called == 0
- _u2 = u2 = User(user_id=7, user_name='fred jones')
+ _u2 = u2 = User(id=7, name='fred jones')
assert on_load.called == 0
u2 = sess.merge(u2)
assert u2 is not _u2
assert on_load.called == 1
sess.flush()
sess.clear()
- self.assertEquals(sess.query(User).first(), User(user_id=7, user_name='fred jones'))
+ self.assertEquals(sess.query(User).first(), User(id=7, name='fred jones'))
assert on_load.called == 2
+ @testing.resolve_artifact_names
def test_transient_to_persistent_collection(self):
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
mapper(User, users, properties={
- 'addresses':relation(Address,
- backref='user',
+ 'addresses':relation(Address,
+ backref='user',
collection_class=OrderedSet, cascade="all, delete-orphan")
})
mapper(Address, addresses)
-
+
on_load = self.on_load_tracker(User)
self.on_load_tracker(Address, on_load)
- u = User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=1, email_address='fred1'),
- Address(address_id=2, email_address='fred2'),
+ u = User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=1, email_address='fred1'),
+ Address(id=2, email_address='fred2'),
]))
sess = create_session()
sess.save(u)
@@ -133,54 +115,51 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert on_load.called == 0
- u = User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=3, email_address='fred3'),
- Address(address_id=4, email_address='fred4'),
+ u = User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=3, email_address='fred3'),
+ Address(id=4, email_address='fred4'),
]))
u = sess.merge(u)
-
+
assert on_load.called == 5, on_load.called # 1. merges User object. updates into session.
# 2.,3. merges Address ids 3 & 4, saves into session.
- # 4.,5. loads pre-existing elements in "addresses" collection,
+ # 4.,5. loads pre-existing elements in "addresses" collection,
# marks as deleted, Address ids 1 and 2.
self.assertEquals(u,
- User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=3, email_address='fred3'),
- Address(address_id=4, email_address='fred4'),
+ User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=3, email_address='fred3'),
+ Address(id=4, email_address='fred4'),
]))
)
sess.flush()
sess.clear()
self.assertEquals(sess.query(User).one(),
- User(user_id=7, user_name='fred', addresses=OrderedSet([
- Address(address_id=3, email_address='fred3'),
- Address(address_id=4, email_address='fred4'),
+ User(id=7, name='fred', addresses=OrderedSet([
+ Address(id=3, email_address='fred3'),
+ Address(id=4, email_address='fred4'),
]))
)
+ @testing.resolve_artifact_names
def test_detached_to_persistent_collection(self):
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
mapper(User, users, properties={'addresses':relation(Address, backref='user', collection_class=OrderedSet)})
mapper(Address, addresses)
on_load = self.on_load_tracker(User)
self.on_load_tracker(Address, on_load)
- a = Address(address_id=1, email_address='fred1')
- u = User(user_id=7, user_name='fred', addresses=OrderedSet([
+ a = Address(id=1, email_address='fred1')
+ u = User(id=7, name='fred', addresses=OrderedSet([
a,
- Address(address_id=2, email_address='fred2'),
+ Address(id=2, email_address='fred2'),
]))
sess = create_session()
sess.save(u)
sess.flush()
sess.clear()
- u.user_name='fred jones'
- u.addresses.add(Address(address_id=3, email_address='fred3'))
+ u.name='fred jones'
+ u.addresses.add(Address(id=3, email_address='fred3'))
u.addresses.remove(a)
assert on_load.called == 0
@@ -190,20 +169,16 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess.clear()
self.assertEquals(sess.query(User).first(),
- User(user_id=7, user_name='fred jones', addresses=OrderedSet([
- Address(address_id=2, email_address='fred2'),
- Address(address_id=3, email_address='fred3'),
+ User(id=7, name='fred jones', addresses=OrderedSet([
+ Address(id=2, email_address='fred2'),
+ Address(id=3, email_address='fred3'),
]))
)
+ @testing.resolve_artifact_names
def test_unsaved_cascade(self):
"""test merge of a transient entity with two child transient entities, with a bidirectional relation."""
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
-
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), cascade="all", backref="user")
})
@@ -211,7 +186,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
self.on_load_tracker(Address, on_load)
sess = create_session()
- u = User(user_id=7, user_name='fred')
+ u = User(id=7, name='fred')
a1 = Address(email_address='foo@bar.com')
a2 = Address(email_address='hoho@bar.com')
u.addresses.append(a1)
@@ -220,23 +195,19 @@ class MergeTest(TestBase, AssertsExecutionResults):
u2 = sess.merge(u)
assert on_load.called == 3
- self.assertEquals(u, User(user_id=7, user_name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
- self.assertEquals(u2, User(user_id=7, user_name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
+ self.assertEquals(u, User(id=7, name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
+ self.assertEquals(u2, User(id=7, name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
sess.flush()
sess.clear()
u2 = sess.query(User).get(7)
- self.assertEquals(u2, User(user_id=7, user_name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
+ self.assertEquals(u2, User(id=7, name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@bar.com')]))
assert on_load.called == 6
+ @testing.resolve_artifact_names
def test_attribute_cascade(self):
"""test merge of a persistent entity with two child persistent entities."""
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
-
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), backref='user')
})
@@ -246,7 +217,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess = create_session()
# set up data and save
- u = User(user_id=7, user_name='fred', addresses=[
+ u = User(id=7, name='fred', addresses=[
Address(email_address='foo@bar.com'),
Address(email_address = 'hoho@la.com')
])
@@ -256,10 +227,10 @@ class MergeTest(TestBase, AssertsExecutionResults):
# assert data was saved
sess2 = create_session()
u2 = sess2.query(User).get(7)
- self.assertEquals(u2, User(user_id=7, user_name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@la.com')]))
+ self.assertEquals(u2, User(id=7, name='fred', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@la.com')]))
# make local changes to data
- u.user_name = 'fred2'
+ u.name = 'fred2'
u.addresses[1].email_address = 'hoho@lalala.com'
assert on_load.called == 3
@@ -270,15 +241,15 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert on_load.called == 6
# ensure local changes are pending
- self.assertEquals(u3, User(user_id=7, user_name='fred2', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@lalala.com')]))
-
+ self.assertEquals(u3, User(id=7, name='fred2', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@lalala.com')]))
+
# save merged data
sess3.flush()
# assert modified/merged data was saved
sess.clear()
u = sess.query(User).get(7)
- self.assertEquals(u, User(user_id=7, user_name='fred2', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@lalala.com')]))
+ self.assertEquals(u, User(id=7, name='fred2', addresses=[Address(email_address='foo@bar.com'), Address(email_address='hoho@lalala.com')]))
assert on_load.called == 9
# merge persistent object into another session
@@ -318,61 +289,95 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert on_load.called == 18
sess5 = create_session()
- u2 = sess5.query(User).get(u.user_id)
- assert u2.user_name == 'fred2'
+ u2 = sess5.query(User).get(u.id)
+ assert u2.name == 'fred2'
assert u2.addresses[1].email_address == 'afafds'
assert on_load.called == 21
+ @testing.resolve_artifact_names
def test_one_to_many_cascade(self):
- mapper(Order, orders, properties={
- 'items':relation(mapper(Item, orderitems))
- })
-
mapper(User, users, properties={
- 'addresses':relation(mapper(Address, addresses)),
- 'orders':relation(Order, backref='customer')
- })
+ 'addresses':relation(mapper(Address, addresses))})
+
on_load = self.on_load_tracker(User)
self.on_load_tracker(Address, on_load)
- self.on_load_tracker(Order, on_load)
sess = create_session()
- u = User()
- u.user_name='fred'
- o = Order()
+ u = User(name='fred')
+ a1 = Address(email_address='foo@bar')
+ a2 = Address(email_address='foo@quux')
+ u.addresses.extend([a1, a2])
+
+ sess.save(u)
+ sess.flush()
+
+ eq_(on_load.called, 0)
+
+ sess2 = create_session()
+ u2 = sess2.query(User).get(u.id)
+ eq_(on_load.called, 1)
+
+ u.addresses[1].email_address = 'addr 2 modified'
+ sess2.merge(u)
+ eq_(u2.addresses[1].email_address, 'addr 2 modified')
+ eq_(on_load.called, 3)
+
+ sess3 = create_session()
+ u3 = sess3.query(User).get(u.id)
+ eq_(on_load.called, 4)
+
+ u.name = 'also fred'
+ sess3.merge(u)
+ eq_(on_load.called, 6)
+ eq_(u3.name, 'also fred')
+
+ @testing.resolve_artifact_names
+ def test_many_to_many_cascade(self):
+
+ mapper(Order, orders, properties={
+ 'items':relation(mapper(Item, items), secondary=order_items)})
+
+ on_load = self.on_load_tracker(Order)
+ self.on_load_tracker(Item, on_load)
+
+ sess = create_session()
+
i1 = Item()
- i1.item_name='item 1'
+ i1.description='item 1'
+
i2 = Item()
- i2.item_name = 'item 2'
+ i2.description = 'item 2'
+
+ o = Order()
o.description = 'order description'
o.items.append(i1)
o.items.append(i2)
- u.orders.append(o)
- sess.save(u)
+ sess.save(o)
sess.flush()
- assert on_load.called == 0
+ eq_(on_load.called, 0)
sess2 = create_session()
- u2 = sess2.query(User).get(u.user_id)
- assert on_load.called == 1
+ o2 = sess2.query(Order).get(o.id)
+ eq_(on_load.called, 1)
- u.orders[0].items[1].item_name = 'item 2 modified'
- sess2.merge(u)
- assert u2.orders[0].items[1].item_name == 'item 2 modified'
- assert on_load.called == 2
+ o.items[1].description = 'item 2 modified'
+ sess2.merge(o)
+ eq_(o2.items[1].description, 'item 2 modified')
+ eq_(on_load.called, 3)
sess3 = create_session()
- o2 = sess3.query(Order).get(o.order_id)
- assert on_load.called == 3
+ o3 = sess3.query(Order).get(o.id)
+ eq_(on_load.called, 4)
- o.customer.user_name = 'also fred'
+ o.description = 'desc modified'
sess3.merge(o)
- assert on_load.called == 4
- assert o2.customer.user_name == 'also fred'
+ eq_(on_load.called, 6)
+ eq_(o3.description, 'desc modified')
+ @testing.resolve_artifact_names
def test_one_to_one_cascade(self):
mapper(User, users, properties={
@@ -383,8 +388,8 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
a1 = Address()
a1.email_address='foo@bar.com'
u.address = a1
@@ -397,7 +402,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess2 = create_session()
u2 = sess2.query(User).get(7)
assert on_load.called == 1
- u2.user_name = 'fred2'
+ u2.name = 'fred2'
u2.address.email_address = 'hoho@lalala.com'
assert on_load.called == 2
@@ -405,37 +410,35 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert on_load.called == 2
assert u3 is u
+ @testing.resolve_artifact_names
def test_transient_dontload(self):
mapper(User, users)
sess = create_session()
u = User()
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "dont_load=True option does not support", sess.merge, u, dont_load=True)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "dont_load=True option does not support", sess.merge, u, dont_load=True)
+ @testing.resolve_artifact_names
def test_dontload_with_backrefs(self):
"""test that dontload populates relations in both directions without requiring a load"""
-
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
+
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), backref='user')
})
-
- u = User(user_id=7, user_name='fred', addresses=[Address(email_address='ad1'), Address(email_address='ad2')])
+
+ u = User(id=7, name='fred', addresses=[Address(email_address='ad1'), Address(email_address='ad2')])
sess = create_session()
sess.save(u)
sess.flush()
sess.close()
assert 'user' in u.addresses[1].__dict__
-
+
sess = create_session()
u2 = sess.merge(u, dont_load=True)
assert 'user' in u2.addresses[1].__dict__
- self.assertEquals(u2.addresses[1].user, User(user_id=7, user_name='fred'))
-
+ self.assertEquals(u2.addresses[1].user, User(id=7, name='fred'))
+
sess.expire(u2.addresses[1], ['user'])
assert 'user' not in u2.addresses[1].__dict__
sess.close()
@@ -443,9 +446,10 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess = create_session()
u = sess.merge(u2, dont_load=True)
assert 'user' not in u.addresses[1].__dict__
- self.assertEquals(u.addresses[1].user, User(user_id=7, user_name='fred'))
-
-
+ self.assertEquals(u.addresses[1].user, User(id=7, name='fred'))
+
+
+ @testing.resolve_artifact_names
def test_dontload_with_eager(self):
"""this test illustrates that with dont_load=True, we can't just
copy the committed_state of the merged instance over; since it references collection objects
@@ -460,8 +464,8 @@ class MergeTest(TestBase, AssertsExecutionResults):
})
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
a1 = Address()
a1.email_address='foo@bar.com'
u.addresses.append(a1)
@@ -470,7 +474,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess.flush()
sess2 = create_session()
- u2 = sess2.query(User).options(eagerload('addresses')).get(7)
+ u2 = sess2.query(User).options(sa.orm.eagerload('addresses')).get(7)
sess3 = create_session()
u3 = sess3.merge(u2, dont_load=True)
@@ -478,6 +482,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess3.flush()
self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_dont_load_disallows_dirty(self):
"""dont_load doesnt support 'dirty' objects right now (see test_dont_load_with_eager()).
Therefore lets assert it."""
@@ -485,17 +490,17 @@ class MergeTest(TestBase, AssertsExecutionResults):
mapper(User, users)
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
sess.save(u)
sess.flush()
- u.user_name = 'ed'
+ u.name = 'ed'
sess2 = create_session()
try:
sess2.merge(u, dont_load=True)
assert False
- except sa_exc.InvalidRequestError, e:
+ except sa.exc.InvalidRequestError, e:
assert "merge() with dont_load=True option does not support objects marked as 'dirty'. flush() all changes on mapped instances before merging with dont_load=True." in str(e)
u2 = sess2.query(User).get(7)
@@ -507,6 +512,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
sess3.flush()
self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_dont_load_sets_entityname(self):
"""test that a dont_load-merged entity has entity_name set, has_mapper() passes, and lazyloads work"""
mapper(User, users, properties={
@@ -514,8 +520,8 @@ class MergeTest(TestBase, AssertsExecutionResults):
})
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
a1 = Address()
a1.email_address='foo@bar.com'
u.addresses.append(a1)
@@ -531,22 +537,23 @@ class MergeTest(TestBase, AssertsExecutionResults):
u2 = sess2.merge(u, dont_load=True)
assert not sess2.dirty
# assert merged instance has a mapper and lazy load proceeds
- state = attributes.instance_state(u2)
- assert state.entity_name is not attributes.NO_ENTITY_NAME
- assert mapperlib.has_mapper(u2)
+ state = sa.orm.attributes.instance_state(u2)
+ assert state.entity_name is not sa.orm.attributes.NO_ENTITY_NAME
+ assert sa.orm.mapperlib.has_mapper(u2)
def go():
assert u2.addresses != []
assert len(u2.addresses) == 1
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
def test_dont_load_sets_backrefs(self):
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses),backref='user')
})
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
a1 = Address()
a1.email_address='foo@bar.com'
u.addresses.append(a1)
@@ -563,6 +570,7 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert u2.addresses[0].user is u2
self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
def test_dont_load_preserves_parents(self):
"""test that merge with dont_load does not trigger a 'delete-orphan' operation.
@@ -579,8 +587,8 @@ class MergeTest(TestBase, AssertsExecutionResults):
})
sess = create_session()
u = User()
- u.user_id = 7
- u.user_name = "fred"
+ u.id = 7
+ u.name = "fred"
a1 = Address()
a1.email_address='foo@bar.com'
u.addresses.append(a1)
@@ -594,10 +602,11 @@ class MergeTest(TestBase, AssertsExecutionResults):
assert not sess2.dirty
a2 = u2.addresses[0]
a2.email_address='somenewaddress'
- assert not object_mapper(a2)._is_orphan(attributes.instance_state(a2))
+ assert not sa.orm.object_mapper(a2)._is_orphan(
+ sa.orm.attributes.instance_state(a2))
sess2.flush()
sess2.clear()
- assert sess2.query(User).get(u2.user_id).addresses[0].email_address == 'somenewaddress'
+ assert sess2.query(User).get(u2.id).addresses[0].email_address == 'somenewaddress'
# this use case is not supported; this is with a pending Address on the pre-merged
 # object, and we currently don't support 'dirty' objects being merged with dont_load=True.
@@ -615,11 +624,12 @@ class MergeTest(TestBase, AssertsExecutionResults):
# if dont_load is changed to support dirty objects, this code needs to pass
a2 = u2.addresses[0]
a2.email_address='somenewaddress'
- assert not object_mapper(a2)._is_orphan(attributes.instance_state(a2))
+ assert not sa.orm.object_mapper(a2)._is_orphan(
+ sa.orm.attributes.instance_state(a2))
sess2.flush()
sess2.clear()
- assert sess2.query(User).get(u2.user_id).addresses[0].email_address == 'somenewaddress'
- except sa_exc.InvalidRequestError, e:
+ assert sess2.query(User).get(u2.id).addresses[0].email_address == 'somenewaddress'
+ except sa.exc.InvalidRequestError, e:
assert "dont_load=True option does not support" in str(e)
diff --git a/test/orm/naturalpks.py b/test/orm/naturalpks.py
index 67cf5e9ad..7d4d6f5c5 100644
--- a/test/orm/naturalpks.py
+++ b/test/orm/naturalpks.py
@@ -1,16 +1,18 @@
+"""
+Primary key changing capabilities and passive/non-passive cascading updates.
+
+"""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes, exc as orm_exc
-from testlib.fixtures import *
-from testlib import *
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_
+from testlib.compat import sorted
+from orm import _base
-"""test primary key changing capabilities and passive/non-passive cascading updates."""
+class NaturalPKTest(_base.MappedTest):
-class NaturalPKTest(ORMTest):
def define_tables(self, metadata):
- global users, addresses, items, users_to_items
-
users = Table('users', metadata,
Column('username', String(50), primary_key=True),
Column('fullname', String(100)))
@@ -23,11 +25,20 @@ class NaturalPKTest(ORMTest):
Column('itemname', String(50), primary_key=True),
Column('description', String(100)))
- users_to_items = Table('userstoitems', metadata,
+ users_to_items = Table('users_to_items', metadata,
Column('username', String(50), ForeignKey('users.username', onupdate='cascade'), primary_key=True),
Column('itemname', String(50), ForeignKey('items.itemname', onupdate='cascade'), primary_key=True),
)
+ def setup_classes(self):
+ class User(_base.ComparableEntity):
+ pass
+ class Address(_base.ComparableEntity):
+ pass
+ class Item(_base.ComparableEntity):
+ pass
+
+ @testing.resolve_artifact_names
def test_entity(self):
mapper(User, users)
@@ -51,6 +62,7 @@ class NaturalPKTest(ORMTest):
u1 = sess.query(User).get('ed')
self.assertEquals(User(username='ed', fullname='jack'), u1)
+ @testing.resolve_artifact_names
def test_expiry(self):
mapper(User, users)
@@ -67,19 +79,21 @@ class NaturalPKTest(ORMTest):
 # in this case so there's no way to look it up. criterion-
# based session invalidation could solve this [ticket:911]
sess.expire(u1)
- self.assertRaises(orm_exc.ObjectDeletedError, getattr, u1, 'username')
+ self.assertRaises(sa.orm.exc.ObjectDeletedError, getattr, u1, 'username')
sess.clear()
assert sess.get(User, 'jack') is None
assert sess.get(User, 'ed').fullname == 'jack'
- @testing.unsupported('sqlite','mysql')
+ @testing.unsupported('mysql', 'FIXME: verify not fails_on')
+ @testing.fails_on('sqlite')
def test_onetomany_passive(self):
self._test_onetomany(True)
def test_onetomany_nonpassive(self):
self._test_onetomany(False)
+ @testing.resolve_artifact_names
def _test_onetomany(self, passive_updates):
mapper(User, users, properties={
'addresses':relation(Address, passive_updates=passive_updates)
@@ -129,6 +143,7 @@ class NaturalPKTest(ORMTest):
def test_manytoone_nonpassive(self):
self._test_manytoone(False)
+ @testing.resolve_artifact_names
def _test_manytoone(self, passive_updates):
mapper(User, users)
mapper(Address, addresses, properties={
@@ -149,7 +164,7 @@ class NaturalPKTest(ORMTest):
u1.username = 'ed'
print id(a1), id(a2), id(u1)
- print attributes.instance_state(u1).parents
+ print sa.orm.attributes.instance_state(u1).parents
def go():
sess.flush()
if passive_updates:
@@ -172,11 +187,12 @@ class NaturalPKTest(ORMTest):
def test_bidirectional_nonpassive(self):
self._test_bidirectional(False)
+ @testing.resolve_artifact_names
def _test_bidirectional(self, passive_updates):
mapper(User, users)
mapper(Address, addresses, properties={
- 'user':relation(User, passive_updates=passive_updates, backref='addresses')
- })
+ 'user':relation(User, passive_updates=passive_updates,
+ backref='addresses')})
sess = create_session()
a1 = Address(email='jack1')
@@ -224,10 +240,11 @@ class NaturalPKTest(ORMTest):
def test_manytomany_nonpassive(self):
self._test_manytomany(False)
+ @testing.resolve_artifact_names
def _test_manytomany(self, passive_updates):
mapper(User, users, properties={
- 'items':relation(Item, secondary=users_to_items, backref='users', passive_updates=passive_updates)
- })
+ 'items':relation(Item, secondary=users_to_items, backref='users',
+ passive_updates=passive_updates)})
mapper(Item, items)
sess = create_session()
@@ -244,7 +261,8 @@ class NaturalPKTest(ORMTest):
sess.flush()
r = sess.query(Item).all()
- # fixtures.Base can't handle a comparison with the backrefs involved....
+ # ComparableEntity can't handle a comparison with the backrefs
+ # involved....
self.assertEquals(Item(itemname='item1'), r[0])
self.assertEquals(['jack'], [u.username for u in r[0].users])
self.assertEquals(Item(itemname='item2'), r[1])
@@ -265,22 +283,25 @@ class NaturalPKTest(ORMTest):
self.assertEquals(Item(itemname='item2'), r[1])
self.assertEquals(['ed', 'jack'], sorted([u.username for u in r[1].users]))
-class SelfRefTest(ORMTest):
+class SelfRefTest(_base.MappedTest):
def define_tables(self, metadata):
- global nodes, Node
+ Table('nodes', metadata,
+ Column('name', String(50), primary_key=True),
+ Column('parent', String(50),
+ ForeignKey('nodes.name', onupdate='cascade')))
- nodes = Table('nodes', metadata,
- Column('name', String(50), primary_key=True),
- Column('parent', String(50), ForeignKey('nodes.name', onupdate='cascade'))
- )
-
- class Node(Base):
+ def setup_classes(self):
+ class Node(_base.ComparableEntity):
pass
+ @testing.resolve_artifact_names
def test_onetomany(self):
mapper(Node, nodes, properties={
- 'children':relation(Node, backref=backref('parentnode', remote_side=nodes.c.name, passive_updates=False), passive_updates=False)
- })
+ 'children': relation(Node,
+ backref=sa.orm.backref('parentnode',
+ remote_side=nodes.c.name,
+ passive_updates=False),
+ passive_updates=False)})
sess = create_session()
n1 = Node(name='n1')
@@ -292,23 +313,31 @@ class SelfRefTest(ORMTest):
n1.name = 'new n1'
sess.flush()
- self.assertEquals(n1.children[1].parent, 'new n1')
- self.assertEquals(['new n1', 'new n1', 'new n1'], [n.parent for n in sess.query(Node).filter(Node.name.in_(['n11', 'n12', 'n13']))])
+ eq_(n1.children[1].parent, 'new n1')
+ eq_(['new n1', 'new n1', 'new n1'],
+ [n.parent
+ for n in sess.query(Node).filter(
+ Node.name.in_(['n11', 'n12', 'n13']))])
-class NonPKCascadeTest(ORMTest):
+class NonPKCascadeTest(_base.MappedTest):
def define_tables(self, metadata):
- global users, addresses
-
- users = Table('users', metadata,
+ Table('users', metadata,
Column('id', Integer, primary_key=True),
Column('username', String(50), unique=True),
Column('fullname', String(100)))
- addresses = Table('addresses', metadata,
- Column('id', Integer, primary_key=True),
- Column('email', String(50)),
- Column('username', String(50), ForeignKey('users.username', onupdate="cascade")))
+ Table('addresses', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('email', String(50)),
+ Column('username', String(50),
+ ForeignKey('users.username', onupdate="cascade")))
+
+ def setup_classes(self):
+ class User(_base.ComparableEntity):
+ pass
+ class Address(_base.ComparableEntity):
+ pass
@testing.unsupported('sqlite','mysql')
def test_onetomany_passive(self):
@@ -317,10 +346,10 @@ class NonPKCascadeTest(ORMTest):
def test_onetomany_nonpassive(self):
self._test_onetomany(False)
+ @testing.resolve_artifact_names
def _test_onetomany(self, passive_updates):
mapper(User, users, properties={
- 'addresses':relation(Address, passive_updates=passive_updates)
- })
+ 'addresses':relation(Address, passive_updates=passive_updates)})
mapper(Address, addresses)
sess = create_session()
@@ -331,14 +360,14 @@ class NonPKCascadeTest(ORMTest):
sess.flush()
a1 = u1.addresses[0]
- self.assertEquals(select([addresses.c.username]).execute().fetchall(), [('jack',), ('jack',)])
+ self.assertEquals(sa.select([addresses.c.username]).execute().fetchall(), [('jack',), ('jack',)])
assert sess.get(Address, a1.id) is u1.addresses[0]
u1.username = 'ed'
sess.flush()
assert u1.addresses[0].username == 'ed'
- self.assertEquals(select([addresses.c.username]).execute().fetchall(), [('ed',), ('ed',)])
+ self.assertEquals(sa.select([addresses.c.username]).execute().fetchall(), [('ed',), ('ed',)])
sess.clear()
self.assertEquals([Address(username='ed'), Address(username='ed')], sess.query(Address).all())
@@ -363,7 +392,7 @@ class NonPKCascadeTest(ORMTest):
a1 = sess.get(Address, a1.id)
self.assertEquals(a1.username, None)
- self.assertEquals(select([addresses.c.username]).execute().fetchall(), [(None,), (None,)])
+ self.assertEquals(sa.select([addresses.c.username]).execute().fetchall(), [(None,), (None,)])
u1 = sess.get(User, u1.id)
self.assertEquals(User(username='fred', fullname='jack'), u1)
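
The NaturalPKTest and NonPKCascadeTest changes above all mutate a key that referencing rows depend on; a minimal sketch of the passive_updates distinction, mirroring the users/addresses layout in the hunk. The DSN is a placeholder, and the passive form assumes a backend that honours ON UPDATE CASCADE, which is why sqlite and mysql are excluded above:

    from sqlalchemy import MetaData, Table, Column, String, ForeignKey, create_engine
    from sqlalchemy.orm import mapper, relation, create_session

    engine = create_engine('postgres://scott:tiger@localhost/test')  # placeholder DSN
    metadata = MetaData(engine)
    users = Table('users', metadata,
                  Column('username', String(50), primary_key=True))
    addresses = Table('addresses', metadata,
                      Column('email', String(50), primary_key=True),
                      Column('username', String(50),
                             ForeignKey('users.username', onupdate='cascade')))
    metadata.create_all()

    class User(object): pass
    class Address(object): pass

    mapper(User, users, properties={
        # passive_updates=True: rely on the FK's ON UPDATE CASCADE instead of
        # the ORM issuing per-row UPDATEs against addresses
        'addresses': relation(Address, passive_updates=True)})
    mapper(Address, addresses)

    sess = create_session()
    u = User(); u.username = 'jack'
    a = Address(); a.email = 'jack@bean'
    u.addresses.append(a)
    sess.save(u)
    sess.flush()

    u.username = 'ed'   # change the natural primary key
    sess.flush()        # one UPDATE on users; the database renames the child row
    assert u.addresses[0].username == 'ed'
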
diff --git a/test/orm/onetoone.py b/test/orm/onetoone.py
index eb425c577..d159d20f4 100644
--- a/test/orm/onetoone.py
+++ b/test/orm/onetoone.py
@@ -1,66 +1,44 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation
+from orm import _base
-class Jack(object):
- def __repr__(self):
- return "{Jack %s - %s}" % (self.id, self.number)
- def __init__(self, room=None, subroom=None, number=None, status=None):
- self.id = None
- self.room = room
- self.subroom = subroom
- self.number = number
- self.status = status
+class O2OTest(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table('jack', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('number', String(50)),
+ Column('status', String(20)),
+ Column('subroom', String(5)))
-class Port(object):
- def __repr__(self):
- return "{Port %s - %s}" % (self.id, self.name)
+ Table('port', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30)),
+ Column('description', String(100)),
+ Column('jack_id', Integer, ForeignKey("jack.id")))
- def __init__(self, name=None, description=None):
- self.id=None
- self.name=name
- self.description = description
+ def setup_mappers(self):
+ class Jack(_base.BasicEntity):
+ pass
+ class Port(_base.BasicEntity):
+ pass
-class O2OTest(TestBase, AssertsExecutionResults):
- @testing.uses_deprecated('SessionContext')
- def setUpAll(self):
- global jack, port, metadata, ctx
- metadata = MetaData(testing.db)
- ctx = scoped_session(create_session)
- jack = Table('jack', metadata,
- Column('id', Integer, primary_key=True),
- #Column('room_id', Integer, ForeignKey("room.id")),
- Column('number', String(50)),
- Column('status', String(20)),
- Column('subroom', String(5)),
- )
+ @testing.resolve_artifact_names
+ def test_1(self):
+ ctx = sa.orm.scoped_session(sa.orm.create_session)
-
- port = Table('port', metadata,
- Column('id', Integer, primary_key=True),
- #Column('device_id', Integer, ForeignKey("device.id")),
- Column('name', String(30)),
- Column('description', String(100)),
- Column('jack_id', Integer, ForeignKey("jack.id")),
- )
- metadata.create_all()
- def setUp(self):
- pass
- def tearDown(self):
- clear_mappers()
- def tearDownAll(self):
- metadata.drop_all()
-
- def test1(self):
mapper(Port, port, extension=ctx.extension)
- mapper(Jack, jack, order_by=[jack.c.number],properties = {
- 'port': relation(Port, backref='jack', uselist=False, lazy=True),
- }, extension=ctx.extension)
+ mapper(Jack, jack,
+ order_by=[jack.c.number],
+ properties=dict(
+ port=relation(Port, backref='jack',
+ uselist=False, lazy=True)),
+ extension=ctx.extension)
- j=Jack(number='101')
- p=Port(name='fa0/1')
+ j = Jack(number='101')
+ p = Port(name='fa0/1')
j.port=p
ctx.flush()
jid = j.id
@@ -68,17 +46,16 @@ class O2OTest(TestBase, AssertsExecutionResults):
j=ctx.query(Jack).get(jid)
p=ctx.query(Port).get(pid)
- print p.jack
assert p.jack is not None
assert p.jack is j
assert j.port is not None
- p.jack=None
- assert j.port is None #works
+ p.jack = None
+ assert j.port is None
ctx.clear()
- j=ctx.query(Jack).get(jid)
- p=ctx.query(Port).get(pid)
+ j = ctx.query(Jack).get(jid)
+ p = ctx.query(Port).get(pid)
j.port=None
self.assert_(p.jack is None)
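
A condensed, database-free sketch of the uselist=False wiring O2OTest exercises: the scalar backref keeps both sides in sync in memory before any flush. Table and class names mirror the hunk; compile_mappers() is called up front only so that no engine is needed:

    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relation, compile_mappers

    metadata = MetaData()
    jack = Table('jack', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('number', String(50)))
    port = Table('port', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('name', String(30)),
                 Column('jack_id', Integer, ForeignKey('jack.id')))

    class Jack(object): pass
    class Port(object): pass

    mapper(Port, port)
    mapper(Jack, jack, properties={
        # uselist=False makes Jack.port a scalar; the backref maintains Port.jack
        'port': relation(Port, backref='jack', uselist=False)})
    compile_mappers()

    j, p = Jack(), Port()
    j.port = p
    assert p.jack is j      # backref populated immediately, no session involved
    p.jack = None
    assert j.port is None   # and cleared from the other side as well
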
diff --git a/test/orm/pickled.py b/test/orm/pickled.py
index 6bb455d41..1b472b53b 100644
--- a/test/orm/pickled.py
+++ b/test/orm/pickled.py
@@ -1,17 +1,16 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from testlib import *
-from testlib.fixtures import *
import pickle
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from orm import _base, _fixtures
-class EmailUser(User):
- pass
-class PickleTest(FixtureTest):
- keep_mappers = False
- keep_data = False
+User, EmailUser = None, None
+class PickleTest(_fixtures.FixtureTest):
+
+ @testing.resolve_artifact_names
def test_transient(self):
mapper(User, users, properties={
'addresses':relation(Address, backref="user")
@@ -30,13 +29,14 @@ class PickleTest(FixtureTest):
self.assertEquals(u1, sess.query(User).get(u2.id))
+ @testing.resolve_artifact_names
def test_class_deferred_cols(self):
mapper(User, users, properties={
- 'name':deferred(users.c.name),
+ 'name':sa.orm.deferred(users.c.name),
'addresses':relation(Address, backref="user")
})
mapper(Address, addresses, properties={
- 'email_address':deferred(addresses.c.email_address)
+ 'email_address':sa.orm.deferred(addresses.c.email_address)
})
sess = create_session()
u1 = User(name='ed')
@@ -60,6 +60,7 @@ class PickleTest(FixtureTest):
self.assertEquals(u2.name, 'ed')
self.assertEquals(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
+ @testing.resolve_artifact_names
def test_instance_deferred_cols(self):
mapper(User, users, properties={
'addresses':relation(Address, backref="user")
@@ -73,7 +74,7 @@ class PickleTest(FixtureTest):
sess.flush()
sess.clear()
- u1 = sess.query(User).options(defer('name'), defer('addresses.email_address')).get(u1.id)
+ u1 = sess.query(User).options(sa.orm.defer('name'), sa.orm.defer('addresses.email_address')).get(u1.id)
assert 'name' not in u1.__dict__
assert 'addresses' not in u1.__dict__
@@ -98,19 +99,30 @@ class PickleTest(FixtureTest):
self.assertEquals(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
-class PolymorphicDeferredTest(ORMTest):
+class PolymorphicDeferredTest(_base.MappedTest):
def define_tables(self, metadata):
- global users, email_users
- users = Table('users', metadata,
+ Table('users', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(30)),
- Column('type', String(30)),
- )
- email_users = Table('email_users', metadata,
+ Column('type', String(30)))
+ Table('email_users', metadata,
Column('id', Integer, ForeignKey('users.id'), primary_key=True),
- Column('email_address', String(30))
- )
+ Column('email_address', String(30)))
+
+ def setup_classes(self):
+ global User, EmailUser
+ class User(_base.BasicEntity):
+ pass
+
+ class EmailUser(User):
+ pass
+
+ def tearDownAll(self):
+ global User, EmailUser
+ User, EmailUser = None, None
+ _base.MappedTest.tearDownAll(self)
+ @testing.resolve_artifact_names
def test_polymorphic_deferred(self):
mapper(User, users, polymorphic_identity='user', polymorphic_on=users.c.type)
mapper(EmailUser, email_users, inherits=User, polymorphic_identity='emailuser')
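
PickleTest pickles mapped instances directly; a compressed sketch of the transient round trip it relies on, with stand-in table and class names and an assumed in-memory sqlite URL:

    import pickle
    from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine
    from sqlalchemy.orm import mapper, create_session

    engine = create_engine('sqlite://')
    metadata = MetaData(engine)
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(30)))
    metadata.create_all()

    class User(object):
        pass

    mapper(User, users)

    u1 = User()
    u1.name = 'ed'
    u2 = pickle.loads(pickle.dumps(u1))   # transient instance survives the round trip

    sess = create_session()
    sess.save(u2)
    sess.flush()
    sess.clear()
    assert sess.query(User).get(u2.id).name == 'ed'
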
diff --git a/test/orm/query.py b/test/orm/query.py
index cbf313bca..88cc309d0 100644
--- a/test/orm/query.py
+++ b/test/orm/query.py
@@ -116,7 +116,7 @@ class GetTest(QueryTest):
assert u2.name =='jack'
assert a not in u2.addresses
- @testing.exclude('mysql', '<', (4, 1))
+ @testing.requires.unicode_connections
def test_unicode(self):
"""test that Query.get properly sets up the type for the bind parameter. using unicode would normally fail
on postgres, mysql and oracle unless it is converted to an encoded string"""
@@ -406,7 +406,7 @@ class FilterTest(QueryTest):
assert [User(id=10)] == sess.query(User).outerjoin("addresses", aliased=True).filter(~User.addresses.any()).all()
- @testing.unsupported('maxdb') # can core
+ @testing.unsupported('maxdb', 'can dump core')
def test_has(self):
sess = create_session()
assert [Address(id=5)] == sess.query(Address).filter(Address.user.has(name='fred')).all()
diff --git a/test/orm/relationships.py b/test/orm/relationships.py
index b33684e2f..f16473766 100644
--- a/test/orm/relationships.py
+++ b/test/orm/relationships.py
@@ -1,14 +1,14 @@
import testenv; testenv.configure_for_tests()
import datetime
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc, types
-from sqlalchemy.orm import *
-from sqlalchemy.orm import collections, attributes, exc as orm_exc
-from sqlalchemy.orm.collections import collection
-from testlib import *
-from testlib import fixtures
-
-class RelationTest(TestBase):
+from testlib import sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, backref, create_session
+from testlib.testing import eq_, startswith_
+from testlib.compat import set
+from orm import _base
+
+
+class RelationTest(_base.MappedTest):
"""An extended topological sort test
This is essentially an extension of the "dependency.py" topological sort
@@ -20,142 +20,130 @@ class RelationTest(TestBase):
While the straight topological sort tests should expose this, since the
sorting can be different due to subtle differences in program execution,
this test case was exposing the bug whereas the simpler tests were not.
+
"""
- def setUpAll(self):
- global metadata, tbl_a, tbl_b, tbl_c, tbl_d
+ run_setup_mappers = 'once'
+ run_inserts = 'once'
+ run_deletes = None
- metadata = MetaData()
- tbl_a = Table("tbl_a", metadata,
+ def define_tables(self, metadata):
+ Table("tbl_a", metadata,
Column("id", Integer, primary_key=True),
- Column("name", String(128)),
- )
- tbl_b = Table("tbl_b", metadata,
+ Column("name", String(128)))
+ Table("tbl_b", metadata,
Column("id", Integer, primary_key=True),
- Column("name", String(128)),
- )
- tbl_c = Table("tbl_c", metadata,
+ Column("name", String(128)))
+ Table("tbl_c", metadata,
Column("id", Integer, primary_key=True),
Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), nullable=False),
- Column("name", String(128)),
- )
- tbl_d = Table("tbl_d", metadata,
+ Column("name", String(128)))
+ Table("tbl_d", metadata,
Column("id", Integer, primary_key=True),
Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), nullable=False),
Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
- Column("name", String(128)),
- )
- def setUp(self):
- global session
- session = create_session(bind=testing.db)
- conn = testing.db.connect()
- conn.create(tbl_a)
- conn.create(tbl_b)
- conn.create(tbl_c)
- conn.create(tbl_d)
-
- class A(object):
+ Column("name", String(128)))
+
+ def setup_classes(self):
+ class A(_base.Entity):
pass
- class B(object):
+ class B(_base.Entity):
pass
- class C(object):
+ class C(_base.Entity):
pass
- class D(object):
+ class D(_base.Entity):
pass
- D.mapper = mapper(D, tbl_d)
- C.mapper = mapper(C, tbl_c, properties=dict(
- d_rows=relation(D, cascade="all, delete-orphan", backref="c_row"),
- ))
- B.mapper = mapper(B, tbl_b)
- A.mapper = mapper(A, tbl_a, properties=dict(
- c_rows=relation(C, cascade="all, delete-orphan", backref="a_row"),
- ))
- D.mapper.add_property("b_row", relation(B))
-
- global a
- global c
- a = A(); a.name = "a1"
- b = B(); b.name = "b1"
- c = C(); c.name = "c1"; c.a_row = a
- # we must have more than one d row or it won't fail
- d1 = D(); d1.name = "d1"; d1.b_row = b; d1.c_row = c
- d2 = D(); d2.name = "d2"; d2.b_row = b; d2.c_row = c
- d3 = D(); d3.name = "d3"; d3.b_row = b; d3.c_row = c
- session.save_or_update(a)
- session.save_or_update(b)
-
- def tearDown(self):
- conn = testing.db.connect()
- conn.drop(tbl_d)
- conn.drop(tbl_c)
- conn.drop(tbl_b)
- conn.drop(tbl_a)
-
- def tearDownAll(self):
- metadata.drop_all(testing.db)
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(A, tbl_a, properties=dict(
+ c_rows=relation(C, cascade="all, delete-orphan", backref="a_row")))
+ mapper(B, tbl_b)
+ mapper(C, tbl_c, properties=dict(
+ d_rows=relation(D, cascade="all, delete-orphan", backref="c_row")))
+ mapper(D, tbl_d, properties=dict(
+ b_row=relation(B)))
+
+ @testing.resolve_artifact_names
+ def insert_data(self):
+ session = create_session()
+ a = A(name='a1')
+ b = B(name='b1')
+ c = C(name='c1', a_row=a)
+
+ d1 = D(name='d1', b_row=b, c_row=c)
+ d2 = D(name='d2', b_row=b, c_row=c)
+ d3 = D(name='d3', b_row=b, c_row=c)
+ session.save(a)
+ session.save(b)
+ session.flush()
+ @testing.resolve_artifact_names
def testDeleteRootTable(self):
- session.flush()
- session.delete(a) # works as expected
+ session = create_session()
+ a = session.query(A).filter_by(name='a1').one()
+
+ session.delete(a)
session.flush()
+ @testing.resolve_artifact_names
def testDeleteMiddleTable(self):
+ session = create_session()
+ c = session.query(C).filter_by(name='c1').one()
+
+ session.delete(c)
session.flush()
- session.delete(c) # fails
- session.flush()
-class RelationTest2(TestBase):
+
+class RelationTest2(_base.MappedTest):
"""Tests a relationship on a column included in multiple foreign keys.
This test tests a relationship on a column that is included in multiple
foreign keys, as well as a self-referential relationship on a composite
key where one column in the foreign key is 'joined to itself'.
- """
-
- def setUpAll(self):
- global metadata, company_tbl, employee_tbl
- metadata = MetaData(testing.db)
-
- company_tbl = Table('company', metadata,
- Column('company_id', Integer, primary_key=True),
- Column('name', Unicode(30)))
-
- employee_tbl = Table('employee', metadata,
- Column('company_id', Integer, primary_key=True),
- Column('emp_id', Integer, primary_key=True),
- Column('name', Unicode(30)),
- Column('reports_to_id', Integer),
- ForeignKeyConstraint(['company_id'], ['company.company_id']),
- ForeignKeyConstraint(['company_id', 'reports_to_id'],
- ['employee.company_id', 'employee.emp_id']))
- metadata.create_all()
-
- def tearDownAll(self):
- metadata.drop_all()
+ """
+ def define_tables(self, metadata):
+ Table('company_t', metadata,
+ Column('company_id', Integer, primary_key=True),
+ Column('name', sa.Unicode(30)))
+
+ Table('employee_t', metadata,
+ Column('company_id', Integer, primary_key=True),
+ Column('emp_id', Integer, primary_key=True),
+ Column('name', sa.Unicode(30)),
+ Column('reports_to_id', Integer),
+ sa.ForeignKeyConstraint(
+ ['company_id'],
+ ['company_t.company_id']),
+ sa.ForeignKeyConstraint(
+ ['company_id', 'reports_to_id'],
+ ['employee_t.company_id', 'employee_t.emp_id']))
+
+ @testing.resolve_artifact_names
def test_explicit(self):
"""test with mappers that have fairly explicit join conditions"""
- class Company(object):
+ class Company(_base.Entity):
pass
- class Employee(object):
+
+ class Employee(_base.Entity):
def __init__(self, name, company, emp_id, reports_to=None):
self.name = name
self.company = company
self.emp_id = emp_id
self.reports_to = reports_to
- mapper(Company, company_tbl)
- mapper(Employee, employee_tbl, properties= {
- 'company':relation(Company, primaryjoin=employee_tbl.c.company_id==company_tbl.c.company_id, backref='employees'),
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties= {
+ 'company':relation(Company, primaryjoin=employee_t.c.company_id==company_t.c.company_id, backref='employees'),
'reports_to':relation(Employee, primaryjoin=
- and_(
- employee_tbl.c.emp_id==employee_tbl.c.reports_to_id,
- employee_tbl.c.company_id==employee_tbl.c.company_id
+ sa.and_(
+ employee_t.c.emp_id==employee_t.c.reports_to_id,
+ employee_t.c.company_id==employee_t.c.company_id
),
- remote_side=[employee_tbl.c.emp_id, employee_tbl.c.company_id],
- foreign_keys=[employee_tbl.c.reports_to_id],
+ remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
+ foreign_keys=[employee_t.c.reports_to_id],
backref='employees')
})
@@ -171,7 +159,7 @@ class RelationTest2(TestBase):
e6 = Employee(u'emp6', c2, 2, e5)
e7 = Employee(u'emp7', c2, 3, e5)
- [sess.save(x) for x in [c1,c2]]
+ sess.add_all((c1, c2))
sess.flush()
sess.clear()
@@ -181,26 +169,27 @@ class RelationTest2(TestBase):
test_e5 = sess.query(Employee).get([c2.company_id, e5.emp_id])
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
- assert sess.query(Employee).get([c1.company_id, 3]).reports_to.name == 'emp1'
- assert sess.query(Employee).get([c2.company_id, 3]).reports_to.name == 'emp5'
+ eq_(sess.query(Employee).get([c1.company_id, 3]).reports_to.name, 'emp1')
+ eq_(sess.query(Employee).get([c2.company_id, 3]).reports_to.name, 'emp5')
+ @testing.resolve_artifact_names
def test_implicit(self):
"""test with mappers that have the most minimal arguments"""
- class Company(object):
+ class Company(_base.Entity):
pass
- class Employee(object):
+ class Employee(_base.Entity):
def __init__(self, name, company, emp_id, reports_to=None):
self.name = name
self.company = company
self.emp_id = emp_id
self.reports_to = reports_to
- mapper(Company, company_tbl)
- mapper(Employee, employee_tbl, properties= {
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties= {
'company':relation(Company, backref='employees'),
'reports_to':relation(Employee,
- remote_side=[employee_tbl.c.emp_id, employee_tbl.c.company_id],
- foreign_keys=[employee_tbl.c.reports_to_id],
+ remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
+ foreign_keys=[employee_t.c.reports_to_id],
backref='employees')
})
@@ -229,64 +218,63 @@ class RelationTest2(TestBase):
assert sess.query(Employee).get([c1.company_id, 3]).reports_to.name == 'emp1'
assert sess.query(Employee).get([c2.company_id, 3]).reports_to.name == 'emp5'
-class RelationTest3(TestBase):
- def setUpAll(self):
- global jobs, pageversions, pages, metadata, Job, Page, PageVersion, PageComment
- import datetime
- metadata = MetaData(testing.db)
- jobs = Table("jobs", metadata,
- Column("jobno", Unicode(15), primary_key=True),
- Column("created", DateTime, nullable=False, default=datetime.datetime.now),
- Column("deleted", Boolean, nullable=False, default=False))
- pageversions = Table("pageversions", metadata,
- Column("jobno", Unicode(15), primary_key=True),
- Column("pagename", Unicode(30), primary_key=True),
- Column("version", Integer, primary_key=True, default=1),
- Column("created", DateTime, nullable=False, default=datetime.datetime.now),
- Column("md5sum", String(32)),
- Column("width", Integer, nullable=False, default=0),
- Column("height", Integer, nullable=False, default=0),
- ForeignKeyConstraint(["jobno", "pagename"], ["pages.jobno", "pages.pagename"])
- )
- pages = Table("pages", metadata,
- Column("jobno", Unicode(15), ForeignKey("jobs.jobno"), primary_key=True),
- Column("pagename", Unicode(30), primary_key=True),
- Column("created", DateTime, nullable=False, default=datetime.datetime.now),
- Column("deleted", Boolean, nullable=False, default=False),
- Column("current_version", Integer))
- pagecomments = Table("pagecomments", metadata,
- Column("jobno", Unicode(15), primary_key=True),
- Column("pagename", Unicode(30), primary_key=True),
- Column("comment_id", Integer, primary_key=True, autoincrement=False),
- Column("content", UnicodeText),
- ForeignKeyConstraint(["jobno", "pagename"], ["pages.jobno", "pages.pagename"])
- )
-
- metadata.create_all()
- class Job(object):
- def __init__(self, jobno=None):
- self.jobno = jobno
- def create_page(self, pagename, *args, **kwargs):
- return Page(job=self, pagename=pagename, *args, **kwargs)
- class PageVersion(object):
+class RelationTest3(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table("jobs", metadata,
+ Column("jobno", sa.Unicode(15), primary_key=True),
+ Column("created", sa.DateTime, nullable=False,
+ default=datetime.datetime.now),
+ Column("deleted", sa.Boolean, nullable=False, default=False))
+
+ Table("pageversions", metadata,
+ Column("jobno", sa.Unicode(15), primary_key=True),
+ Column("pagename", sa.Unicode(30), primary_key=True),
+ Column("version", Integer, primary_key=True, default=1),
+ Column("created", sa.DateTime, nullable=False,
+ default=datetime.datetime.now),
+ Column("md5sum", String(32)),
+ Column("width", Integer, nullable=False, default=0),
+ Column("height", Integer, nullable=False, default=0),
+ sa.ForeignKeyConstraint(
+ ["jobno", "pagename"],
+ ["pages.jobno", "pages.pagename"]))
+
+ Table("pages", metadata,
+ Column("jobno", sa.Unicode(15), ForeignKey("jobs.jobno"),
+ primary_key=True),
+ Column("pagename", sa.Unicode(30), primary_key=True),
+ Column("created", sa.DateTime, nullable=False,
+ default=datetime.datetime.now),
+ Column("deleted", sa.Boolean, nullable=False, default=False),
+ Column("current_version", Integer))
+
+ Table("pagecomments", metadata,
+ Column("jobno", sa.Unicode(15), primary_key=True),
+ Column("pagename", sa.Unicode(30), primary_key=True),
+ Column("comment_id", Integer, primary_key=True,
+ autoincrement=False),
+ Column("content", sa.UnicodeText),
+ sa.ForeignKeyConstraint(
+ ["jobno", "pagename"],
+ ["pages.jobno", "pages.pagename"]))
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ class Job(_base.Entity):
+ def create_page(self, pagename):
+ return Page(job=self, pagename=pagename)
+ class PageVersion(_base.Entity):
def __init__(self, page=None, version=None):
self.page = page
self.version = version
- class Page(object):
+ class Page(_base.Entity):
def __init__(self, job=None, pagename=None):
self.job = job
self.pagename = pagename
self.currentversion = PageVersion(self, 1)
- def __repr__(self):
- try:
- state = attributes.instance_state(self)
- key = state.key
- except (KeyError, AttributeError):
- key = None
- return ("Page jobno:%s pagename:%s %s" %
- (self.jobno, self.pagename, key))
def add_version(self):
- self.currentversion = PageVersion(self, self.currentversion.version+1)
+ self.currentversion = PageVersion(
+ page=self, version=self.currentversion.version+1)
comment = self.add_comment()
comment.closeable = False
comment.content = u'some content'
@@ -298,48 +286,58 @@ class RelationTest3(TestBase):
self.comments.append(newcomment)
newcomment.created_version = self.currentversion.version
return newcomment
- class PageComment(object):
+ class PageComment(_base.Entity):
pass
+
mapper(Job, jobs)
mapper(PageVersion, pageversions)
mapper(Page, pages, properties={
- 'job': relation(Job, backref=backref('pages', cascade="all, delete-orphan", order_by=pages.c.pagename)),
- 'currentversion': relation(PageVersion,
- foreign_keys=[pages.c.current_version],
- primaryjoin=and_(pages.c.jobno==pageversions.c.jobno,
- pages.c.pagename==pageversions.c.pagename,
- pages.c.current_version==pageversions.c.version),
- post_update=True),
- 'versions': relation(PageVersion, cascade="all, delete-orphan",
- primaryjoin=and_(pages.c.jobno==pageversions.c.jobno,
- pages.c.pagename==pageversions.c.pagename),
- order_by=pageversions.c.version,
- backref=backref('page', lazy=False,
- primaryjoin=and_(pages.c.jobno==pageversions.c.jobno,
- pages.c.pagename==pageversions.c.pagename)))
- })
+ 'job': relation(
+ Job,
+ backref=backref('pages',
+ cascade="all, delete-orphan",
+ order_by=pages.c.pagename)),
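+        # post_update=True defers setting pages.current_version to a second
+        # UPDATE issued after both rows exist, breaking the circular insert
+        # dependency between Page and its current PageVersion.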
+ 'currentversion': relation(
+ PageVersion,
+ foreign_keys=[pages.c.current_version],
+ primaryjoin=sa.and_(
+ pages.c.jobno==pageversions.c.jobno,
+ pages.c.pagename==pageversions.c.pagename,
+ pages.c.current_version==pageversions.c.version),
+ post_update=True),
+ 'versions': relation(
+ PageVersion,
+ cascade="all, delete-orphan",
+ primaryjoin=sa.and_(pages.c.jobno==pageversions.c.jobno,
+ pages.c.pagename==pageversions.c.pagename),
+ order_by=pageversions.c.version,
+ backref=backref('page',
+ lazy=False,
+ primaryjoin=sa.and_(
+ pages.c.jobno==pageversions.c.jobno,
+ pages.c.pagename==pageversions.c.pagename)))})
mapper(PageComment, pagecomments, properties={
- 'page': relation(Page, primaryjoin=and_(pages.c.jobno==pagecomments.c.jobno,
- pages.c.pagename==pagecomments.c.pagename),
- backref=backref("comments", cascade="all, delete-orphan",
- primaryjoin=and_(pages.c.jobno==pagecomments.c.jobno,
- pages.c.pagename==pagecomments.c.pagename),
- order_by=pagecomments.c.comment_id))
- })
-
-
- def tearDownAll(self):
- clear_mappers()
- metadata.drop_all()
-
+ 'page': relation(
+ Page,
+ primaryjoin=sa.and_(pages.c.jobno==pagecomments.c.jobno,
+ pages.c.pagename==pagecomments.c.pagename),
+ backref=backref("comments",
+ cascade="all, delete-orphan",
+ primaryjoin=sa.and_(
+ pages.c.jobno==pagecomments.c.jobno,
+ pages.c.pagename==pagecomments.c.pagename),
+ order_by=pagecomments.c.comment_id))})
+
+ @testing.resolve_artifact_names
def testbasic(self):
- """test the combination of complicated join conditions with post_update"""
- j1 = Job(u'somejob')
+ """A combination of complicated join conditions with post_update."""
+
+ j1 = Job(jobno=u'somejob')
j1.create_page(u'page1')
j1.create_page(u'page2')
j1.create_page(u'page3')
- j2 = Job(u'somejob2')
+ j2 = Job(jobno=u'somejob2')
j2.create_page(u'page1')
j2.create_page(u'page2')
j2.create_page(u'page3')
@@ -347,13 +345,9 @@ class RelationTest3(TestBase):
j2.pages[0].add_version()
j2.pages[0].add_version()
j2.pages[1].add_version()
- print j2.pages
- print j2.pages[0].versions
- print j2.pages[1].versions
- s = create_session()
- s.save(j1)
- s.save(j2)
+ s = create_session()
+ s.add_all((j1, j2))
s.flush()
@@ -370,25 +364,32 @@ class RelationTest3(TestBase):
s.delete(j)
s.flush()
-class RelationTest4(ORMTest):
- """test syncrules on foreign keys that are also primary"""
+class RelationTest4(_base.MappedTest):
+ """Syncrules on foreign keys that are also primary"""
+
def define_tables(self, metadata):
- global tableA, tableB
- tableA = Table("A", metadata,
- Column("id",Integer,primary_key=True),
- Column("foo",Integer,),
- test_needs_fk=True)
- tableB = Table("B",metadata,
- Column("id",Integer,ForeignKey("A.id"),primary_key=True),
- test_needs_fk=True)
+ Table("tableA", metadata,
+ Column("id",Integer,primary_key=True),
+ Column("foo",Integer,),
+ test_needs_fk=True)
+ Table("tableB",metadata,
+ Column("id",Integer,ForeignKey("tableA.id"),primary_key=True),
+ test_needs_fk=True)
+
+ def setup_classes(self):
+ class A(_base.Entity):
+ pass
+
+ class B(_base.Entity):
+ pass
+
+ @testing.resolve_artifact_names
def test_no_delete_PK_AtoB(self):
- """test that A cant be deleted without B because B would have no PK value"""
- class A(object):pass
- class B(object):pass
+ """A cant be deleted without B because B would have no PK value."""
mapper(A, tableA, properties={
- 'bs':relation(B, cascade="save-update")
- })
+ 'bs':relation(B, cascade="save-update")})
mapper(B, tableB)
+
a1 = A()
a1.bs.append(B())
sess = create_session()
@@ -400,15 +401,16 @@ class RelationTest4(ORMTest):
sess.flush()
assert False
except AssertionError, e:
- assert str(e).startswith("Dependency rule tried to blank-out primary key column 'B.id' on instance ")
+ startswith_(str(e),
+ "Dependency rule tried to blank-out "
+ "primary key column 'tableB.id' on instance ")
+ @testing.resolve_artifact_names
def test_no_delete_PK_BtoA(self):
- class A(object):pass
- class B(object):pass
mapper(B, tableB, properties={
- 'a':relation(A, cascade="save-update")
- })
+ 'a':relation(A, cascade="save-update")})
mapper(A, tableA)
+
b1 = B()
a1 = A()
b1.a = a1
@@ -420,22 +422,27 @@ class RelationTest4(ORMTest):
sess.flush()
assert False
except AssertionError, e:
- assert str(e).startswith("Dependency rule tried to blank-out primary key column 'B.id' on instance ")
+ startswith_(str(e),
+ "Dependency rule tried to blank-out "
+ "primary key column 'tableB.id' on instance ")
@testing.fails_on_everything_except('sqlite', 'mysql')
+ @testing.resolve_artifact_names
def test_nullPKsOK_BtoA(self):
# postgres cant handle a nullable PK column...?
tableC = Table('tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
- Column('a_id', Integer, ForeignKey('A.id'), primary_key=True, autoincrement=False, nullable=True))
+ Column('a_id', Integer, ForeignKey('tableA.id'),
+ primary_key=True, autoincrement=False, nullable=True))
tableC.create()
- class A(object):pass
- class C(object):pass
+ class C(_base.Entity):
+ pass
mapper(C, tableC, properties={
'a':relation(A, cascade="save-update")
}, allow_null_pks=True)
mapper(A, tableA)
+
c1 = C()
c1.id = 5
c1.a = None
@@ -444,20 +451,18 @@ class RelationTest4(ORMTest):
# test that no error is raised.
sess.flush()
+ @testing.resolve_artifact_names
def test_delete_cascade_BtoA(self):
- """test that the 'blank the PK' error doesnt get raised when the child is to be deleted as part of a
- cascade"""
- class A(object):pass
- class B(object):pass
- for cascade in (
- "save-update, delete",
- #"save-update, delete-orphan",
- "save-update, delete, delete-orphan"):
+ """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ for cascade in ("save-update, delete",
+ #"save-update, delete-orphan",
+ "save-update, delete, delete-orphan"):
mapper(B, tableB, properties={
'a':relation(A, cascade=cascade)
})
mapper(A, tableA)
+
b1 = B()
a1 = A()
b1.a = a1
@@ -469,21 +474,19 @@ class RelationTest4(ORMTest):
assert a1 not in sess
assert b1 not in sess
sess.clear()
- clear_mappers()
+ sa.orm.clear_mappers()
+ @testing.resolve_artifact_names
def test_delete_cascade_AtoB(self):
- """test that the 'blank the PK' error doesnt get raised when the child is to be deleted as part of a
- cascade"""
- class A(object):pass
- class B(object):pass
- for cascade in (
- "save-update, delete",
- #"save-update, delete-orphan",
- "save-update, delete, delete-orphan"):
+ """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ for cascade in ("save-update, delete",
+ #"save-update, delete-orphan",
+ "save-update, delete, delete-orphan"):
mapper(A, tableA, properties={
'bs':relation(B, cascade=cascade)
})
mapper(B, tableB)
+
a1 = A()
b1 = B()
a1.bs.append(b1)
@@ -496,15 +499,14 @@ class RelationTest4(ORMTest):
assert a1 not in sess
assert b1 not in sess
sess.clear()
- clear_mappers()
+ sa.orm.clear_mappers()
+ @testing.resolve_artifact_names
def test_delete_manual_AtoB(self):
- class A(object):pass
- class B(object):pass
mapper(A, tableA, properties={
- 'bs':relation(B, cascade="none")
- })
+ 'bs':relation(B, cascade="none")})
mapper(B, tableB)
+
a1 = A()
b1 = B()
a1.bs.append(b1)
@@ -520,13 +522,12 @@ class RelationTest4(ORMTest):
assert b1 not in sess
sess.clear()
+ @testing.resolve_artifact_names
def test_delete_manual_BtoA(self):
- class A(object):pass
- class B(object):pass
mapper(B, tableB, properties={
- 'a':relation(A, cascade="none")
- })
+ 'a':relation(A, cascade="none")})
mapper(A, tableA)
+
b1 = B()
a1 = A()
b1.a = a1
@@ -540,43 +541,51 @@ class RelationTest4(ORMTest):
assert a1 not in sess
assert b1 not in sess
-class RelationTest5(ORMTest):
+class RelationTest5(_base.MappedTest):
"""Test a map to a select that relates to a map to the table."""
def define_tables(self, metadata):
- global items
- items = Table('items', metadata,
- Column('item_policy_num', String(10), primary_key=True, key='policyNum'),
- Column('item_policy_eff_date', Date, primary_key=True, key='policyEffDate'),
- Column('item_type', String(20), primary_key=True, key='type'),
- Column('item_id', Integer, primary_key=True, key='id', autoincrement=False),
- )
-
+ Table('items', metadata,
+ Column('item_policy_num', String(10), primary_key=True,
+ key='policyNum'),
+ Column('item_policy_eff_date', sa.Date, primary_key=True,
+ key='policyEffDate'),
+ Column('item_type', String(20), primary_key=True,
+ key='type'),
+ Column('item_id', Integer, primary_key=True,
+ key='id', autoincrement=False))
+
+ @testing.resolve_artifact_names
def test_basic(self):
- class Container(object):pass
- class LineItem(object):pass
+ class Container(_base.Entity):
+ pass
+ class LineItem(_base.Entity):
+ pass
- container_select = select(
+ container_select = sa.select(
[items.c.policyNum, items.c.policyEffDate, items.c.type],
distinct=True,
).alias('container_select')
mapper(LineItem, items)
- mapper(Container, container_select, order_by=asc(container_select.c.type), properties=dict(
- lineItems = relation(LineItem, lazy=True, cascade='all, delete-orphan', order_by=asc(items.c.type),
- primaryjoin=and_(
- container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==items.c.policyEffDate,
- container_select.c.type==items.c.type
- ),
- foreign_keys=[
- items.c.policyNum,
- items.c.policyEffDate,
- items.c.type,
- ],
- )
- ))
+ mapper(Container,
+ container_select,
+ order_by=sa.asc(container_select.c.type),
+ properties=dict(
+ lineItems=relation(LineItem,
+ lazy=True,
+ cascade='all, delete-orphan',
+ order_by=sa.asc(items.c.type),
+ primaryjoin=sa.and_(
+ container_select.c.policyNum==items.c.policyNum,
+ container_select.c.policyEffDate==items.c.policyEffDate,
+ container_select.c.type==items.c.type),
+ foreign_keys=[
+ items.c.policyNum,
+ items.c.policyEffDate,
+ items.c.type])))
+
session = create_session()
con = Container()
con.policyNum = "99"
@@ -597,29 +606,31 @@ class RelationTest5(ORMTest):
assert old.id == new.id
-class TypeMatchTest(ORMTest):
+class TypeMatchTest(_base.MappedTest):
"""test errors raised when trying to add items whose type is not handled by a relation"""
+
def define_tables(self, metadata):
- global a, b, c, d
- a = Table("a", metadata,
- Column('aid', Integer, primary_key=True),
- Column('data', String(30)))
- b = Table("b", metadata,
- Column('bid', Integer, primary_key=True),
- Column("a_id", Integer, ForeignKey("a.aid")),
- Column('data', String(30)))
- c = Table("c", metadata,
- Column('cid', Integer, primary_key=True),
- Column("b_id", Integer, ForeignKey("b.bid")),
- Column('data', String(30)))
- d = Table("d", metadata,
- Column('did', Integer, primary_key=True),
- Column("a_id", Integer, ForeignKey("a.aid")),
- Column('data', String(30)))
+ Table("a", metadata,
+ Column('aid', Integer, primary_key=True),
+ Column('data', String(30)))
+ Table("b", metadata,
+ Column('bid', Integer, primary_key=True),
+ Column("a_id", Integer, ForeignKey("a.aid")),
+ Column('data', String(30)))
+ Table("c", metadata,
+ Column('cid', Integer, primary_key=True),
+ Column("b_id", Integer, ForeignKey("b.bid")),
+ Column('data', String(30)))
+ Table("d", metadata,
+ Column('did', Integer, primary_key=True),
+ Column("a_id", Integer, ForeignKey("a.aid")),
+ Column('data', String(30)))
+
+ @testing.resolve_artifact_names
def test_o2m_oncascade(self):
- class A(object):pass
- class B(object):pass
- class C(object):pass
+ class A(_base.Entity): pass
+ class B(_base.Entity): pass
+ class C(_base.Entity): pass
mapper(A, a, properties={'bs':relation(B)})
mapper(B, b)
mapper(C, c)
@@ -634,11 +645,15 @@ class TypeMatchTest(ORMTest):
sess.save(a1)
assert False
except AssertionError, err:
- assert str(err) == "Attribute 'bs' on class '%s' doesn't handle objects of type '%s'" % (A, C)
+ eq_(str(err),
+ "Attribute 'bs' on class '%s' doesn't handle "
+ "objects of type '%s'" % (A, C))
+
+ @testing.resolve_artifact_names
def test_o2m_onflush(self):
- class A(object):pass
- class B(object):pass
- class C(object):pass
+ class A(_base.Entity): pass
+ class B(_base.Entity): pass
+ class C(_base.Entity): pass
mapper(A, a, properties={'bs':relation(B, cascade="none")})
mapper(B, b)
mapper(C, c)
@@ -652,12 +667,14 @@ class TypeMatchTest(ORMTest):
sess.save(a1)
sess.save(b1)
sess.save(c1)
- self.assertRaisesMessage(orm_exc.FlushError, "Attempting to flush an item", sess.flush)
+ self.assertRaisesMessage(sa.orm.exc.FlushError,
+ "Attempting to flush an item", sess.flush)
+ @testing.resolve_artifact_names
def test_o2m_nopoly_onflush(self):
- class A(object):pass
- class B(object):pass
- class C(B):pass
+ class A(_base.Entity): pass
+ class B(_base.Entity): pass
+ class C(B): pass
mapper(A, a, properties={'bs':relation(B, cascade="none")})
mapper(B, b)
mapper(C, c, inherits=B)
@@ -671,12 +688,14 @@ class TypeMatchTest(ORMTest):
sess.save(a1)
sess.save(b1)
sess.save(c1)
- self.assertRaisesMessage(orm_exc.FlushError, "Attempting to flush an item", sess.flush)
+ self.assertRaisesMessage(sa.orm.exc.FlushError,
+ "Attempting to flush an item", sess.flush)
+ @testing.resolve_artifact_names
def test_m2o_nopoly_onflush(self):
- class A(object):pass
- class B(A):pass
- class D(object):pass
+ class A(_base.Entity): pass
+ class B(A): pass
+ class D(_base.Entity): pass
mapper(A, a)
mapper(B, b, inherits=A)
mapper(D, d, properties={"a":relation(A, cascade="none")})
@@ -686,12 +705,14 @@ class TypeMatchTest(ORMTest):
sess = create_session()
sess.save(b1)
sess.save(d1)
- self.assertRaisesMessage(orm_exc.FlushError, "Attempting to flush an item", sess.flush)
+ self.assertRaisesMessage(sa.orm.exc.FlushError,
+ "Attempting to flush an item", sess.flush)
+ @testing.resolve_artifact_names
def test_m2o_oncascade(self):
- class A(object):pass
- class B(object):pass
- class D(object):pass
+ class A(_base.Entity): pass
+ class B(_base.Entity): pass
+ class D(_base.Entity): pass
mapper(A, a)
mapper(B, b)
mapper(D, d, properties={"a":relation(A)})
@@ -699,38 +720,39 @@ class TypeMatchTest(ORMTest):
d1 = D()
d1.a = b1
sess = create_session()
- self.assertRaisesMessage(AssertionError, "doesn't handle objects of type", sess.save, d1)
+ self.assertRaisesMessage(AssertionError,
+ "doesn't handle objects of type", sess.save, d1)
-class TypedAssociationTable(ORMTest):
- def define_tables(self, metadata):
- global t1, t2, t3
+class TypedAssociationTable(_base.MappedTest):
- class MySpecialType(types.TypeDecorator):
+ def define_tables(self, metadata):
+ class MySpecialType(sa.types.TypeDecorator):
impl = String
def convert_bind_param(self, value, dialect):
return "lala" + value
def convert_result_value(self, value, dialect):
return value[4:]
- t1 = Table('t1', metadata,
- Column('col1', MySpecialType(30), primary_key=True),
- Column('col2', String(30)))
- t2 = Table('t2', metadata,
- Column('col1', MySpecialType(30), primary_key=True),
- Column('col2', String(30)))
- t3 = Table('t3', metadata,
- Column('t1c1', MySpecialType(30), ForeignKey('t1.col1')),
- Column('t2c1', MySpecialType(30), ForeignKey('t2.col1')),
- )
+ Table('t1', metadata,
+ Column('col1', MySpecialType(30), primary_key=True),
+ Column('col2', String(30)))
+ Table('t2', metadata,
+ Column('col1', MySpecialType(30), primary_key=True),
+ Column('col2', String(30)))
+ Table('t3', metadata,
+ Column('t1c1', MySpecialType(30), ForeignKey('t1.col1')),
+ Column('t2c1', MySpecialType(30), ForeignKey('t2.col1')))
+
+ @testing.resolve_artifact_names
def testm2m(self):
- """test many-to-many tables with special types for candidate keys"""
+ """Many-to-many tables with special types for candidate keys."""
- class T1(object):pass
- class T2(object):pass
+ class T1(_base.Entity): pass
+ class T2(_base.Entity): pass
mapper(T2, t2)
mapper(T1, t1, properties={
- 't2s':relation(T2, secondary=t3, backref='t1s')
- })
+ 't2s':relation(T2, secondary=t3, backref='t1s')})
+
a = T1()
a.col1 = "aid"
b = T2()
@@ -740,7 +762,7 @@ class TypedAssociationTable(ORMTest):
a.t2s.append(b)
a.t2s.append(c)
sess = create_session()
- sess.save(a)
+ sess.add(a)
sess.flush()
assert t3.count().scalar() == 2
@@ -750,40 +772,46 @@ class TypedAssociationTable(ORMTest):
assert t3.count().scalar() == 1
-
-
-class ViewOnlyTest(ORMTest):
- """test a view_only mapping where a third table is pulled into the primary join condition,
- using overlapping PK column names (should not produce "conflicting column" error)"""
+class ViewOnlyOverlappingNames(_base.MappedTest):
+ """'viewonly' mappings with overlapping PK column names."""
+
def define_tables(self, metadata):
- global t1, t2, t3
- t1 = Table("t1", metadata,
+ Table("t1", metadata,
Column('id', Integer, primary_key=True),
Column('data', String(40)))
- t2 = Table("t2", metadata,
+ Table("t2", metadata,
Column('id', Integer, primary_key=True),
Column('data', String(40)),
Column('t1id', Integer, ForeignKey('t1.id')))
- t3 = Table("t3", metadata,
+ Table("t3", metadata,
Column('id', Integer, primary_key=True),
Column('data', String(40)),
- Column('t2id', Integer, ForeignKey('t2.id'))
- )
+ Column('t2id', Integer, ForeignKey('t2.id')))
- def test_basic(self):
- class C1(object):pass
- class C2(object):pass
- class C3(object):pass
+ @testing.resolve_artifact_names
+ def test_three_table_view(self):
+ """A three table join with overlapping PK names.
+
+ A third table is pulled into the primary join condition using
+ overlapping PK column names and should not produce 'conflicting column'
+ error.
+
+ """
+ class C1(_base.Entity): pass
+ class C2(_base.Entity): pass
+ class C3(_base.Entity): pass
mapper(C1, t1, properties={
't2s':relation(C2),
- 't2_view':relation(C2, viewonly=True, primaryjoin=and_(t1.c.id==t2.c.t1id, t3.c.t2id==t2.c.id, t3.c.data==t1.c.data))
- })
+ 't2_view':relation(C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
+ t3.c.t2id==t2.c.id,
+ t3.c.data==t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relation(C2)
- })
+ 't2':relation(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -804,36 +832,44 @@ class ViewOnlyTest(ORMTest):
assert set([x.id for x in c1.t2s]) == set([c2a.id, c2b.id])
assert set([x.id for x in c1.t2_view]) == set([c2b.id])
-class ViewOnlyTest2(ORMTest):
- """test a view_only mapping where a third table is pulled into the primary join condition,
- using non-overlapping PK column names (should not produce "mapper has no column X" error)"""
+class ViewOnlyUniqueNames(_base.MappedTest):
+ """'viewonly' mappings with unique PK column names."""
+
def define_tables(self, metadata):
- global t1, t2, t3
- t1 = Table("t1", metadata,
+ Table("t1", metadata,
Column('t1id', Integer, primary_key=True),
Column('data', String(40)))
- t2 = Table("t2", metadata,
+ Table("t2", metadata,
Column('t2id', Integer, primary_key=True),
Column('data', String(40)),
Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
- t3 = Table("t3", metadata,
+ Table("t3", metadata,
Column('t3id', Integer, primary_key=True),
Column('data', String(40)),
- Column('t2id_ref', Integer, ForeignKey('t2.t2id'))
- )
- def test_basic(self):
- class C1(object):pass
- class C2(object):pass
- class C3(object):pass
+ Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
+
+ @testing.resolve_artifact_names
+ def test_three_table_view(self):
+ """A three table join with overlapping PK names.
+
+ A third table is pulled into the primary join condition using unique
+ PK column names and should not produce 'mapper has no columnX' error.
+
+ """
+ class C1(_base.Entity): pass
+ class C2(_base.Entity): pass
+ class C3(_base.Entity): pass
mapper(C1, t1, properties={
't2s':relation(C2),
- 't2_view':relation(C2, viewonly=True, primaryjoin=and_(t1.c.t1id==t2.c.t1id_ref, t3.c.t2id_ref==t2.c.t2id, t3.c.data==t1.c.data))
- })
+ 't2_view':relation(C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.t1id==t2.c.t1id_ref,
+ t3.c.t2id_ref==t2.c.t2id,
+ t3.c.data==t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relation(C2)
- })
+ 't2':relation(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -845,8 +881,8 @@ class ViewOnlyTest2(ORMTest):
c3.data='c1data'
c3.t2 = c2b
sess = create_session()
- sess.save(c1)
- sess.save(c3)
+
+ sess.add_all((c1, c3))
sess.flush()
sess.clear()
@@ -854,22 +890,29 @@ class ViewOnlyTest2(ORMTest):
assert set([x.t2id for x in c1.t2s]) == set([c2a.t2id, c2b.t2id])
assert set([x.t2id for x in c1.t2_view]) == set([c2b.t2id])
-class ViewOnlyTest3(ORMTest):
- """test relating on a join that has no equated columns"""
+
+class ViewOnlyNonEquijoin(_base.MappedTest):
+ """'viewonly' mappings based on non-equijoins."""
+
def define_tables(self, metadata):
- global foos, bars
- foos = Table('foos', metadata, Column('id', Integer, primary_key=True))
- bars = Table('bars', metadata, Column('id', Integer, primary_key=True), Column('fid', Integer))
+ Table('foos', metadata,
+ Column('id', Integer, primary_key=True))
+ Table('bars', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
+ @testing.resolve_artifact_names
def test_viewonly_join(self):
- class Foo(fixtures.Base):
+ class Foo(_base.ComparableEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=foos.c.id>bars.c.fid, foreign_keys=[bars.c.fid], viewonly=True)
- })
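+            # foos.id > bars.fid is not an equijoin, so the relation can only
+            # be loaded, never persisted; hence viewonly=True.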
+ 'bars':relation(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ viewonly=True)})
mapper(Bar, bars)
@@ -883,73 +926,86 @@ class ViewOnlyTest3(ORMTest):
sess.flush()
sess = create_session()
- self.assertEquals(sess.query(Foo).filter_by(id=4).one(), Foo(id=4, bars=[Bar(fid=2), Bar(fid=3)]))
- self.assertEquals(sess.query(Foo).filter_by(id=9).one(), Foo(id=9, bars=[Bar(fid=2), Bar(fid=3), Bar(fid=6), Bar(fid=7)]))
+ eq_(sess.query(Foo).filter_by(id=4).one(),
+ Foo(id=4, bars=[Bar(fid=2), Bar(fid=3)]))
+ eq_(sess.query(Foo).filter_by(id=9).one(),
+ Foo(id=9, bars=[Bar(fid=2), Bar(fid=3), Bar(fid=6), Bar(fid=7)]))
+
+
+class ViewOnlyRepeatedRemoteColumn(_base.MappedTest):
+ """'viewonly' mappings that contain the same 'remote' column twice"""
-class ViewOnlyTest4(ORMTest):
- """test relating on a join that contains the same 'remote' column twice"""
def define_tables(self, metadata):
- global foos, bars
- foos = Table('foos', metadata, Column('id', Integer, primary_key=True),
- Column('bid1', Integer,ForeignKey('bars.id')),
- Column('bid2', Integer,ForeignKey('bars.id')))
-
- bars = Table('bars', metadata, Column('id', Integer, primary_key=True), Column('data', String(50)))
-
+ Table('foos', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid1', Integer,ForeignKey('bars.id')),
+ Column('bid2', Integer,ForeignKey('bars.id')))
+
+ Table('bars', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
+
+ @testing.resolve_artifact_names
def test_relation_on_or(self):
- class Foo(fixtures.Base):
+ class Foo(_base.ComparableEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=or_(bars.c.id==foos.c.bid1, bars.c.id==foos.c.bid2), uselist=True, viewonly=True)
- })
-
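+            # the same remote column (bars.id) appears in both branches of
+            # the OR, so the relation is mapped viewonly and uselist=True is
+            # given explicitly to keep it a collection.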
+ 'bars':relation(Bar,
+ primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
+ bars.c.id == foos.c.bid2),
+ uselist=True,
+ viewonly=True)})
mapper(Bar, bars)
+
sess = create_session()
b1 = Bar(id=1, data='b1')
b2 = Bar(id=2, data='b2')
b3 = Bar(id=3, data='b3')
f1 = Foo(bid1=1, bid2=2)
f2 = Foo(bid1=3, bid2=None)
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
+
+ sess.add_all((b1, b2, b3))
sess.flush()
- sess.save(f1)
- sess.save(f2)
+
+ sess.add_all((f1, f2))
sess.flush()
-
+
sess.clear()
- self.assertEquals(sess.query(Foo).filter_by(id=f1.id).one(), Foo(bars=[Bar(data='b1'), Bar(data='b2')]))
- self.assertEquals(sess.query(Foo).filter_by(id=f2.id).one(), Foo(bars=[Bar(data='b3')]))
+ eq_(sess.query(Foo).filter_by(id=f1.id).one(),
+ Foo(bars=[Bar(data='b1'), Bar(data='b2')]))
+ eq_(sess.query(Foo).filter_by(id=f2.id).one(),
+ Foo(bars=[Bar(data='b3')]))
+
+class ViewOnlyRepeatedLocalColumn(_base.MappedTest):
+ """'viewonly' mappings that contain the same 'local' column twice"""
-class ViewOnlyTest5(ORMTest):
- """test relating on a join that contains the same 'local' column twice"""
def define_tables(self, metadata):
- global foos, bars
- foos = Table('foos', metadata, Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Table('foos', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
- bars = Table('bars', metadata, Column('id', Integer, primary_key=True),
- Column('fid1', Integer, ForeignKey('foos.id')),
- Column('fid2', Integer, ForeignKey('foos.id')),
- Column('data', String(50))
- )
+ Table('bars', metadata, Column('id', Integer, primary_key=True),
+ Column('fid1', Integer, ForeignKey('foos.id')),
+ Column('fid2', Integer, ForeignKey('foos.id')),
+ Column('data', String(50)))
+ @testing.resolve_artifact_names
def test_relation_on_or(self):
- class Foo(fixtures.Base):
+ class Foo(_base.ComparableEntity):
pass
- class Bar(fixtures.Base):
+ class Bar(_base.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=or_(bars.c.fid1==foos.c.id, bars.c.fid2==foos.c.id), viewonly=True)
- })
-
+ 'bars':relation(Bar,
+ primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
+ bars.c.fid2 == foos.c.id),
+ viewonly=True)})
mapper(Bar, bars)
+
sess = create_session()
f1 = Foo(id=1, data='f1')
f2 = Foo(id=2, data='f2')
@@ -957,56 +1013,53 @@ class ViewOnlyTest5(ORMTest):
b2 = Bar(fid2=1, data='b2')
b3 = Bar(fid1=2, data='b3')
b4 = Bar(fid1=1, fid2=2, data='b4')
- sess.save(f1)
- sess.save(f2)
+
+ sess.add_all((f1, f2))
sess.flush()
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
- sess.save(b4)
+
+ sess.add_all((b1, b2, b3, b4))
sess.flush()
sess.clear()
- self.assertEquals(sess.query(Foo).filter_by(id=f1.id).one(), Foo(bars=[Bar(data='b1'), Bar(data='b2'), Bar(data='b4')]))
- self.assertEquals(sess.query(Foo).filter_by(id=f2.id).one(), Foo(bars=[Bar(data='b3'), Bar(data='b4')]))
+ eq_(sess.query(Foo).filter_by(id=f1.id).one(),
+ Foo(bars=[Bar(data='b1'), Bar(data='b2'), Bar(data='b4')]))
+ eq_(sess.query(Foo).filter_by(id=f2.id).one(),
+ Foo(bars=[Bar(data='b3'), Bar(data='b4')]))
+
+class ViewOnlyComplexJoin(_base.MappedTest):
+ """'viewonly' mappings with a complex join condition."""
-class ViewOnlyTest6(ORMTest):
- """test a long primaryjoin condition"""
def define_tables(self, metadata):
- global t1, t2, t3, t2tot3
- t1 = Table('t1', metadata,
+ Table('t1', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
- t2 = Table('t2', metadata,
+ Column('data', String(50)))
+ Table('t2', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(50)),
- Column('t1id', Integer, ForeignKey('t1.id')),
- )
- t3 = Table('t3', metadata,
+ Column('t1id', Integer, ForeignKey('t1.id')))
+ Table('t3', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
- t2tot3 = Table('t2tot3', metadata,
+ Column('data', String(50)))
+ Table('t2tot3', metadata,
Column('t2id', Integer, ForeignKey('t2.id')),
- Column('t3id', Integer, ForeignKey('t3.id')),
- )
-
- def test_basic(self):
- class T1(fixtures.Base):
+ Column('t3id', Integer, ForeignKey('t3.id')))
+
+ def setup_classes(self):
+ class T1(_base.ComparableEntity):
pass
- class T2(fixtures.Base):
+ class T2(_base.ComparableEntity):
pass
- class T3(fixtures.Base):
+ class T3(_base.ComparableEntity):
pass
-
+
+ @testing.resolve_artifact_names
+ def test_basic(self):
mapper(T1, t1, properties={
- 't3s':relation(T3, primaryjoin=and_(
+ 't3s':relation(T3, primaryjoin=sa.and_(
t1.c.id==t2.c.t1id,
t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id
- ),
- viewonly=True,
+ t3.c.id==t2tot3.c.t3id),
+ viewonly=True,
foreign_keys=t3.c.id, remote_side=t2.c.t1id)
})
mapper(T2, t2, properties={
@@ -1014,159 +1067,135 @@ class ViewOnlyTest6(ORMTest):
't3s':relation(T3, secondary=t2tot3)
})
mapper(T3, t3)
-
+
sess = create_session()
- sess.save(T2(data='t2', t1=T1(data='t1'), t3s=[T3(data='t3')]))
+ sess.add(T2(data='t2', t1=T1(data='t1'), t3s=[T3(data='t3')]))
sess.flush()
sess.clear()
-
+
a = sess.query(T1).first()
- self.assertEquals(a.t3s, [T3(data='t3')])
+ eq_(a.t3s, [T3(data='t3')])
-
- def test_remote_side_escalation(self):
- class T1(fixtures.Base):
- pass
- class T2(fixtures.Base):
- pass
- class T3(fixtures.Base):
- pass
+ @testing.resolve_artifact_names
+ def test_remote_side_escalation(self):
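+        # without a remote_side, the direction of this three-table
+        # primaryjoin is ambiguous and compilation should raise ArgumentError.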
mapper(T1, t1, properties={
- 't3s':relation(T3, primaryjoin=and_(
- t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id
- ),viewonly=True, foreign_keys=t3.c.id)
- })
+ 't3s':relation(T3,
+ primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
+ t2.c.id==t2tot3.c.t2id,
+ t3.c.id==t2tot3.c.t3id
+ ),
+ viewonly=True,
+ foreign_keys=t3.c.id)})
mapper(T2, t2, properties={
't1':relation(T1),
- 't3s':relation(T3, secondary=t2tot3)
- })
+ 't3s':relation(T3, secondary=t2tot3)})
mapper(T3, t3)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Specify remote_side argument", compile_mappers)
+ self.assertRaisesMessage(sa.exc.ArgumentError,
+ "Specify remote_side argument",
+ sa.orm.compile_mappers)
+
+
+class ExplicitLocalRemoteTest(_base.MappedTest):
-class ExplicitLocalRemoteTest(ORMTest):
def define_tables(self, metadata):
- global t1, t2
- t1 = Table('t1', metadata,
+ Table('t1', metadata,
Column('id', String(50), primary_key=True),
- Column('data', String(50))
- )
- t2 = Table('t2', metadata,
+ Column('data', String(50)))
+ Table('t2', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(50)),
- Column('t1id', String(50)),
- )
+ Column('t1id', String(50)))
- def test_onetomany_funcfk(self):
- class T1(fixtures.Base):
+ @testing.resolve_artifact_names
+ def setup_classes(self):
+ class T1(_base.ComparableEntity):
pass
- class T2(fixtures.Base):
+ class T2(_base.ComparableEntity):
pass
-
+
+ @testing.resolve_artifact_names
+ def test_onetomany_funcfk(self):
# use a function within join condition. but specifying
# local_remote_pairs overrides all parsing of the join condition.
mapper(T1, t1, properties={
- 't2s':relation(T2, primaryjoin=t1.c.id==func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id]
- )
- })
+ 't2s':relation(T2,
+ primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id])})
mapper(T2, t2)
-
+
sess = create_session()
a1 = T1(id='number1', data='a1')
a2 = T1(id='number2', data='a2')
b1 = T2(data='b1', t1id='NuMbEr1')
b2 = T2(data='b2', t1id='Number1')
b3 = T2(data='b3', t1id='Number2')
- sess.save(a1)
- sess.save(a2)
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
+ sess.add_all((a1, a2, b1, b2, b3))
sess.flush()
sess.clear()
-
- self.assertEquals(sess.query(T1).first(), T1(id='number1', data='a1', t2s=[T2(data='b1', t1id='NuMbEr1'), T2(data='b2', t1id='Number1')]))
-
+
+ eq_(sess.query(T1).first(),
+ T1(id='number1', data='a1', t2s=[
+ T2(data='b1', t1id='NuMbEr1'),
+ T2(data='b2', t1id='Number1')]))
+
+ @testing.resolve_artifact_names
def test_manytoone_funcfk(self):
- class T1(fixtures.Base):
- pass
- class T2(fixtures.Base):
- pass
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relation(T1, primaryjoin=t1.c.id==func.lower(t2.c.t1id),
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id],
- uselist=True
- )
- })
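+        # lower() in the primaryjoin prevents the column pairing from being
+        # inferred, so _local_remote_pairs declares it explicitly.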
+ 't1':relation(T1,
+ primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id],
+ uselist=True)})
+
sess = create_session()
a1 = T1(id='number1', data='a1')
a2 = T1(id='number2', data='a2')
b1 = T2(data='b1', t1id='NuMbEr1')
b2 = T2(data='b2', t1id='Number1')
b3 = T2(data='b3', t1id='Number2')
- sess.save(a1)
- sess.save(a2)
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
+ sess.add_all((a1, a2, b1, b2, b3))
sess.flush()
sess.clear()
- self.assertEquals(sess.query(T2).filter(T2.data.in_(['b1', 'b2'])).all(),
- [
- T2(data='b1', t1=[T1(id='number1', data='a1')]),
- T2(data='b2', t1=[T1(id='number1', data='a1')])
- ]
- )
-
+
+ eq_(sess.query(T2).filter(T2.data.in_(['b1', 'b2'])).all(),
+ [T2(data='b1', t1=[T1(id='number1', data='a1')]),
+ T2(data='b2', t1=[T1(id='number1', data='a1')])])
+
+ @testing.resolve_artifact_names
def test_onetomany_func_referent(self):
- class T1(fixtures.Base):
- pass
- class T2(fixtures.Base):
- pass
-
mapper(T1, t1, properties={
- 't2s':relation(T2, primaryjoin=func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id]
- )
- })
+ 't2s':relation(T2,
+ primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id])})
mapper(T2, t2)
-
+
sess = create_session()
a1 = T1(id='NuMbeR1', data='a1')
a2 = T1(id='NuMbeR2', data='a2')
b1 = T2(data='b1', t1id='number1')
b2 = T2(data='b2', t1id='number1')
b3 = T2(data='b2', t1id='number2')
- sess.save(a1)
- sess.save(a2)
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
+ sess.add_all((a1, a2, b1, b2, b3))
sess.flush()
sess.clear()
-
- self.assertEquals(sess.query(T1).first(), T1(id='NuMbeR1', data='a1', t2s=[T2(data='b1', t1id='number1'), T2(data='b2', t1id='number1')]))
- def test_manytoone_func_referent(self):
- class T1(fixtures.Base):
- pass
- class T2(fixtures.Base):
- pass
+ eq_(sess.query(T1).first(),
+ T1(id='NuMbeR1', data='a1', t2s=[
+ T2(data='b1', t1id='number1'),
+ T2(data='b2', t1id='number1')]))
+ @testing.resolve_artifact_names
+ def test_manytoone_func_referent(self):
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relation(T1, primaryjoin=func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id], uselist=True
- )
- })
+ 't1':relation(T1,
+ primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id], uselist=True)})
sess = create_session()
a1 = T1(id='NuMbeR1', data='a1')
@@ -1174,199 +1203,281 @@ class ExplicitLocalRemoteTest(ORMTest):
b1 = T2(data='b1', t1id='number1')
b2 = T2(data='b2', t1id='number1')
b3 = T2(data='b3', t1id='number2')
- sess.save(a1)
- sess.save(a2)
- sess.save(b1)
- sess.save(b2)
- sess.save(b3)
+ sess.add_all((a1, a2, b1, b2, b3))
sess.flush()
sess.clear()
- self.assertEquals(sess.query(T2).filter(T2.data.in_(['b1', 'b2'])).all(),
- [
- T2(data='b1', t1=[T1(id='NuMbeR1', data='a1')]),
- T2(data='b2', t1=[T1(id='NuMbeR1', data='a1')])
- ]
- )
-
- def test_escalation(self):
- class T1(fixtures.Base):
- pass
- class T2(fixtures.Base):
- pass
-
+ eq_(sess.query(T2).filter(T2.data.in_(['b1', 'b2'])).all(),
+ [T2(data='b1', t1=[T1(id='NuMbeR1', data='a1')]),
+ T2(data='b2', t1=[T1(id='NuMbeR1', data='a1')])])
+
+ @testing.resolve_artifact_names
+ def test_escalation_1(self):
mapper(T1, t1, properties={
- 't2s':relation(T2, primaryjoin=t1.c.id==func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id],
- remote_side=[t2.c.t1id]
- )
- })
+ 't2s':relation(T2,
+ primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id],
+ remote_side=[t2.c.t1id])})
mapper(T2, t2)
- self.assertRaises(sa_exc.ArgumentError, compile_mappers)
-
- clear_mappers()
+ self.assertRaises(sa.exc.ArgumentError, sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
+ def test_escalation_2(self):
mapper(T1, t1, properties={
- 't2s':relation(T2, primaryjoin=t1.c.id==func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- )
- })
+ 't2s':relation(T2,
+ primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
mapper(T2, t2)
- self.assertRaises(sa_exc.ArgumentError, compile_mappers)
-
-class InvalidRelationEscalationTest(ORMTest):
+ self.assertRaises(sa.exc.ArgumentError, sa.orm.compile_mappers)
+
+
+class InvalidRelationEscalationTest(_base.MappedTest):
+
def define_tables(self, metadata):
- global foos, bars, Foo, Bar
- foos = Table('foos', metadata, Column('id', Integer, primary_key=True), Column('fid', Integer))
- bars = Table('bars', metadata, Column('id', Integer, primary_key=True), Column('fid', Integer))
- class Foo(object):
+ Table('foos', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
+ Table('bars', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
+
+ def setup_classes(self):
+ class Foo(_base.Entity):
pass
- class Bar(object):
+ class Bar(_base.Entity):
pass
-
+
+ @testing.resolve_artifact_names
def test_no_join(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar)
- })
-
+ 'bars':relation(Bar)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine join condition between parent/child tables on relation", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine join condition between parent/child "
+ "tables on relation", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_join_self_ref(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo)
- })
-
+ 'foos':relation(Foo)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine join condition between parent/child tables on relation", compile_mappers)
-
+
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine join condition between parent/child "
+ "tables on relation", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=foos.c.id>bars.c.fid)
- })
-
+ 'bars':relation(Bar,
+ primaryjoin=foos.c.id>bars.c.fid)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_fks(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=foos.c.id>bars.c.fid, foreign_keys=bars.c.fid)
- })
-
+ 'bars':relation(Bar,
+ primaryjoin=foos.c.id>bars.c.fid,
+ foreign_keys=bars.c.fid)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not locate any equated, locally mapped column pairs for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not locate any equated, locally mapped column pairs "
+ "for primaryjoin condition", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_self_ref(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id>foos.c.fid)
- })
-
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id>foos.c.fid)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_self_ref(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id>foos.c.fid, foreign_keys=[foos.c.fid])
- })
-
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id>foos.c.fid,
+ foreign_keys=[foos.c.fid])})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not locate any equated, locally mapped column pairs for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not locate any equated, locally mapped column pairs "
+ "for primaryjoin condition", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_viewonly(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=foos.c.id>bars.c.fid, viewonly=True)
- })
-
+ 'bars':relation(Bar,
+ primaryjoin=foos.c.id>bars.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_self_ref_viewonly(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id>foos.c.fid, viewonly=True)
- })
-
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id>foos.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Specify the foreign_keys argument to indicate which columns on the relation are foreign.", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Specify the foreign_keys argument to indicate which columns "
+ "on the relation are foreign.", sa.orm.compile_mappers)
+ @testing.resolve_artifact_names
def test_no_equated_self_ref_viewonly_fks(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id>foos.c.fid, viewonly=True, foreign_keys=[foos.c.fid])
- })
- compile_mappers()
- self.assertEquals(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)])
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id>foos.c.fid,
+ viewonly=True,
+ foreign_keys=[foos.c.fid])})
+
+ sa.orm.compile_mappers()
+ eq_(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)])
+ @testing.resolve_artifact_names
def test_equated(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, primaryjoin=foos.c.id==bars.c.fid)
- })
+ 'bars':relation(Bar,
+ primaryjoin=foos.c.id==bars.c.fid)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
-
+
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_equated_self_ref(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id==foos.c.fid)
- })
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id==foos.c.fid)})
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+ @testing.resolve_artifact_names
def test_equated_self_ref_wrong_fks(self):
mapper(Foo, foos, properties={
- 'foos':relation(Foo, primaryjoin=foos.c.id==foos.c.fid, foreign_keys=[bars.c.id])
- })
+ 'foos':relation(Foo,
+ primaryjoin=foos.c.id==foos.c.fid,
+ foreign_keys=[bars.c.id])})
+
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
-class InvalidRelationEscalationTestM2M(ORMTest):
+class InvalidRelationEscalationTestM2M(_base.MappedTest):
+
def define_tables(self, metadata):
- global foos, bars, Foo, Bar, foobars
- foos = Table('foos', metadata, Column('id', Integer, primary_key=True))
- foobars = Table('foobars', metadata, Column('fid', Integer), Column('bid', Integer))
- bars = Table('bars', metadata, Column('id', Integer, primary_key=True))
- class Foo(object):
+ Table('foos', metadata,
+ Column('id', Integer, primary_key=True))
+ Table('foobars', metadata,
+ Column('fid', Integer), Column('bid', Integer))
+ Table('bars', metadata,
+ Column('id', Integer, primary_key=True))
+
+ @testing.resolve_artifact_names
+ def setup_classes(self):
+ class Foo(_base.Entity):
pass
- class Bar(object):
+ class Bar(_base.Entity):
pass
+ @testing.resolve_artifact_names
def test_no_join(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, secondary=foobars)
- })
-
+ 'bars': relation(Bar, secondary=foobars)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine join condition between parent/child tables on relation", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine join condition between parent/child tables "
+ "on relation", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_secondaryjoin(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, secondary=foobars, primaryjoin=foos.c.id>foobars.c.fid)
- })
-
+ 'bars': relation(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine join condition between parent/child tables on relation", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine join condition between parent/child tables "
+ "on relation",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_bad_primaryjoin(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, secondary=foobars, primaryjoin=foos.c.id>foobars.c.fid, secondaryjoin=foobars.c.bid<=bars.c.id)
- })
-
+ 'bars': relation(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid,
+ secondaryjoin=foobars.c.bid<=bars.c.id)})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for primaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for primaryjoin condition",
+ sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_bad_secondaryjoin(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, secondary=foobars, primaryjoin=foos.c.id==foobars.c.fid, secondaryjoin=foobars.c.bid<=bars.c.id, foreign_keys=[foobars.c.fid])
- })
-
+ 'bars':relation(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid])})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not determine relation direction for secondaryjoin condition", compile_mappers)
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not determine relation direction for secondaryjoin "
+ "condition", sa.orm.compile_mappers)
+
+ @testing.resolve_artifact_names
def test_no_equated_secondaryjoin(self):
mapper(Foo, foos, properties={
- 'bars':relation(Bar, secondary=foobars, primaryjoin=foos.c.id==foobars.c.fid, secondaryjoin=foobars.c.bid<=bars.c.id, foreign_keys=[foobars.c.fid, foobars.c.bid])
- })
-
+ 'bars':relation(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid, foobars.c.bid])})
mapper(Bar, bars)
- self.assertRaisesMessage(sa_exc.ArgumentError, "Could not locate any equated, locally mapped column pairs for secondaryjoin condition", compile_mappers)
+
+ self.assertRaisesMessage(
+ sa.exc.ArgumentError,
+ "Could not locate any equated, locally mapped column pairs for "
+ "secondaryjoin condition", sa.orm.compile_mappers)
if __name__ == "__main__":
diff --git a/test/orm/scoping.py b/test/orm/scoping.py
index d885bae3d..cdc0c16b4 100644
--- a/test/orm/scoping.py
+++ b/test/orm/scoping.py
@@ -1,34 +1,50 @@
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from testlib import *
-from testlib import fixtures
+from testlib import sa, testing
+from sqlalchemy.orm import scoped_session
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation
+from testlib.testing import eq_
+from orm import _base
-class ScopedSessionTest(ORMTest):
+class _ScopedTest(_base.MappedTest):
+ """Adds another lookup bucket to emulate Session globals."""
+
+ run_setup_mappers = 'once'
+
+ _artifact_registries = (
+ _base.MappedTest._artifact_registries + ('scoping',))
+
+ def setUpAll(self):
+ type(self).scoping = _base.adict()
+ _base.MappedTest.setUpAll(self)
+
+ def tearDownAll(self):
+ self.scoping.clear()
+ _base.MappedTest.tearDownAll(self)
- def define_tables(self, metadata):
- global table, table2
- table = Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)))
- table2 = Table('someothertable', metadata,
- Column('id', Integer, primary_key=True),
- Column('someid', None, ForeignKey('sometable.id'))
- )
+class ScopedSessionTest(_base.MappedTest):
+
+ def define_tables(self, metadata):
+ Table('table1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)))
+ Table('table2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('someid', None, ForeignKey('table1.id')))
+
+ @testing.resolve_artifact_names
def test_basic(self):
- Session = scoped_session(sessionmaker())
+ Session = scoped_session(sa.orm.sessionmaker())
- class SomeObject(fixtures.Base):
+ class SomeObject(_base.ComparableEntity):
query = Session.query_property()
- class SomeOtherObject(fixtures.Base):
+ class SomeOtherObject(_base.ComparableEntity):
query = Session.query_property()
- mapper(SomeObject, table, properties={
- 'options':relation(SomeOtherObject)
- })
+ mapper(SomeObject, table1, properties={
+ 'options':relation(SomeOtherObject)})
mapper(SomeOtherObject, table2)
s = SomeObject(id=1, data="hello")
@@ -39,37 +55,43 @@ class ScopedSessionTest(ORMTest):
Session.refresh(sso)
Session.remove()
- self.assertEquals(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]), Session.query(SomeObject).one())
- self.assertEquals(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]), SomeObject.query.one())
- self.assertEquals(SomeOtherObject(someid=1), SomeOtherObject.query.filter(SomeOtherObject.someid==sso.someid).one())
+ eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
+ Session.query(SomeObject).one())
+ eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
+ SomeObject.query.one())
+ eq_(SomeOtherObject(someid=1),
+ SomeOtherObject.query.filter(
+ SomeOtherObject.someid == sso.someid).one())
-class ScopedMapperTest(TestBase):
- def setUpAll(self):
- global metadata, table, table2
- metadata = MetaData(testing.db)
- table = Table('sometable', metadata,
+class ScopedMapperTest(_ScopedTest):
+
+ def define_tables(self, metadata):
+ Table('table1', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30)))
- table2 = Table('someothertable', metadata,
+ Table('table2', metadata,
Column('id', Integer, primary_key=True),
- Column('someid', None, ForeignKey('sometable.id'))
- )
- metadata.create_all()
-
- def setUp(self):
- global SomeObject, SomeOtherObject
- class SomeObject(fixtures.Base):pass
- class SomeOtherObject(fixtures.Base):pass
+ Column('someid', None, ForeignKey('table1.id')))
- global Session
+ def setup_classes(self):
+ class SomeObject(_base.ComparableEntity):
+ pass
+ class SomeOtherObject(_base.ComparableEntity):
+ pass
- Session = scoped_session(create_session)
- Session.mapper(SomeObject, table, properties={
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ Session = scoped_session(sa.orm.create_session)
+ Session.mapper(SomeObject, table1, properties={
'options':relation(SomeOtherObject)
})
Session.mapper(SomeOtherObject, table2)
+ self.scoping['Session'] = Session
+
+ @testing.resolve_artifact_names
+ def insert_data(self):
s = SomeObject()
s.id = 1
s.data = 'hello'
@@ -78,25 +100,19 @@ class ScopedMapperTest(TestBase):
Session.flush()
Session.clear()
- def tearDownAll(self):
- metadata.drop_all()
-
- def tearDown(self):
- for table in metadata.table_iterator(reverse=True):
- table.delete().execute()
- clear_mappers()
-
+ @testing.resolve_artifact_names
def test_query(self):
sso = SomeOtherObject.query().first()
assert SomeObject.query.filter_by(id=1).one().options[0].id == sso.id
+ @testing.resolve_artifact_names
def test_query_compiles(self):
class Foo(object):
pass
Session.mapper(Foo, table2)
assert hasattr(Foo, 'query')
- ext = MapperExtension()
+ ext = sa.orm.MapperExtension()
class Bar(object):
pass
@@ -108,6 +124,7 @@ class ScopedMapperTest(TestBase):
Session.mapper(Baz, table2, extension=ext)
assert hasattr(Baz, 'query')
+ @testing.resolve_artifact_names
def test_validating_constructor(self):
s2 = SomeObject(someid=12)
s3 = SomeOtherObject(someid=123, bogus=345)
@@ -116,8 +133,10 @@ class ScopedMapperTest(TestBase):
Session.mapper(ValidatedOtherObject, table2, validate=True)
v1 = ValidatedOtherObject(someid=12)
- self.assertRaises(sa_exc.ArgumentError, ValidatedOtherObject, someid=12, bogus=345)
+ self.assertRaises(sa.exc.ArgumentError, ValidatedOtherObject,
+ someid=12, bogus=345)
+ @testing.resolve_artifact_names
def test_dont_clobber_methods(self):
class MyClass(object):
def expunge(self):
@@ -127,44 +146,55 @@ class ScopedMapperTest(TestBase):
assert MyClass().expunge() == "an expunge !"
-class ScopedMapperTest2(ORMTest):
+
+class ScopedMapperTest2(_ScopedTest):
+
def define_tables(self, metadata):
- global table, table2
- table = Table('sometable', metadata,
+ Table('table1', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30)),
- Column('type', String(30))
-
- )
- table2 = Table('someothertable', metadata,
+ Column('type', String(30)))
+ Table('table2', metadata,
Column('id', Integer, primary_key=True),
- Column('someid', None, ForeignKey('sometable.id')),
- Column('somedata', String(30)),
- )
+ Column('someid', None, ForeignKey('table1.id')),
+ Column('somedata', String(30)))
+
+ def setup_classes(self):
+ class BaseClass(_base.ComparableEntity):
+ pass
+ class SubClass(BaseClass):
+ pass
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ Session = scoped_session(sa.orm.sessionmaker())
+
+ Session.mapper(BaseClass, table1,
+ polymorphic_identity='base',
+ polymorphic_on=table1.c.type)
+ Session.mapper(SubClass, table2,
+ polymorphic_identity='sub',
+ inherits=BaseClass)
+ self.scoping['Session'] = Session
+
+ @testing.resolve_artifact_names
def test_inheritance(self):
def expunge_list(l):
for x in l:
Session.expunge(x)
return l
- class BaseClass(fixtures.Base):
- pass
- class SubClass(BaseClass):
- pass
-
- Session = scoped_session(sessionmaker())
- Session.mapper(BaseClass, table, polymorphic_identity='base', polymorphic_on=table.c.type)
- Session.mapper(SubClass, table2, polymorphic_identity='sub', inherits=BaseClass)
-
b = BaseClass(data='b1')
s = SubClass(data='s1', somedata='somedata')
Session.commit()
Session.clear()
- assert expunge_list([BaseClass(data='b1'), SubClass(data='s1', somedata='somedata')]) == BaseClass.query.all()
- assert expunge_list([SubClass(data='s1', somedata='somedata')]) == SubClass.query.all()
-
+ eq_(expunge_list([BaseClass(data='b1'),
+ SubClass(data='s1', somedata='somedata')]),
+ BaseClass.query.all())
+ eq_(expunge_list([SubClass(data='s1', somedata='somedata')]),
+ SubClass.query.all())
if __name__ == "__main__":
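For reference, the scoped-session pattern exercised by these tests looks roughly like the following when used outside the test harness. This is a minimal sketch against the 0.5-era API; the engine URL, table, and class names are illustrative only, not part of the suite:

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import scoped_session, sessionmaker, mapper

    engine = create_engine('sqlite://')
    metadata = MetaData()
    things = Table('things', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(30)))
    metadata.create_all(engine)

    # One registry shared by the application; each thread transparently
    # gets its own Session instance.
    Session = scoped_session(sessionmaker(bind=engine))

    class Thing(object):
        # query_property() hangs a pre-bound Query off the class itself
        query = Session.query_property()
        def __init__(self, data=None):
            self.data = data

    mapper(Thing, things)

    Session.add(Thing(data='hello'))     # proxies to the thread-local Session
    Session.commit()
    assert Thing.query.filter_by(data='hello').count() == 1
    Session.remove()                     # discard the thread-local Session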
diff --git a/test/orm/selectable.py b/test/orm/selectable.py
index a16c24fc1..1e4527e6b 100644
--- a/test/orm/selectable.py
+++ b/test/orm/selectable.py
@@ -1,52 +1,52 @@
-"""all tests involving generic mapping to Select statements"""
-
+"""Generic mapping to Select statements"""
import testenv; testenv.configure_for_tests()
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import *
-from testlib import *
-from testlib.fixtures import *
-from query import QueryTest
-
-class SelectableNoFromsTest(ORMTest):
+from testlib import sa, testing
+from testlib.sa import Table, Column, String, Integer, select
+from testlib.sa.orm import mapper, create_session
+from testlib.testing import eq_
+from orm import _base
+
+
+# TODO: more tests mapping to selects
+
+class SelectableNoFromsTest(_base.MappedTest):
def define_tables(self, metadata):
- global common_table
- common_table = Table('common', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', Integer),
- Column('extra', String(45)),
- )
+ Table('common', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', Integer),
+ Column('extra', String(45)))
- def test_no_tables(self):
- class Subset(object):
+ def setup_classes(self):
+ class Subset(_base.ComparableEntity):
pass
+
+ @testing.resolve_artifact_names
+ def test_no_tables(self):
+
selectable = select(["x", "y", "z"])
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "Could not find any Table objects", mapper, Subset, selectable)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError,
+ "Could not find any Table objects",
+ mapper, Subset, selectable)
@testing.emits_warning('.*creating an Alias.*')
+ @testing.resolve_artifact_names
def test_basic(self):
- class Subset(Base):
- pass
-
- subset_select = select([common_table.c.id, common_table.c.data])
+ subset_select = select([common.c.id, common.c.data])
subset_mapper = mapper(Subset, subset_select)
sess = create_session(bind=testing.db)
- l = Subset()
- l.data = 1
- sess.save(l)
+ sess.add(Subset(data=1))
sess.flush()
sess.clear()
- self.assertEquals(sess.query(Subset).all(), [Subset(data=1)])
- self.assertEquals(sess.query(Subset).filter(Subset.data==1).one(), Subset(data=1))
- self.assertEquals(sess.query(Subset).filter(Subset.data!=1).first(), None)
-
- subset_select = class_mapper(Subset).mapped_table
- self.assertEquals(sess.query(Subset).filter(subset_select.c.data==1).one(), Subset(data=1))
+ eq_(sess.query(Subset).all(), [Subset(data=1)])
+ eq_(sess.query(Subset).filter(Subset.data==1).one(), Subset(data=1))
+ eq_(sess.query(Subset).filter(Subset.data!=1).first(), None)
+
+ subset_select = sa.orm.class_mapper(Subset).mapped_table
+ eq_(sess.query(Subset).filter(subset_select.c.data==1).one(),
+ Subset(data=1))
-
- # TODO: more tests mapping to selects
if __name__ == '__main__':
 testenv.main()
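As background for the selectable tests above: mapping a class against a plain select() rather than a Table follows roughly this shape (a sketch only; the in-memory engine and the Subset/common names mirror the fixtures but are assumptions, not part of the suite):

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, select
    from sqlalchemy.orm import mapper, create_session

    engine = create_engine('sqlite://')
    metadata = MetaData()
    common = Table('common', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', Integer),
                   Column('extra', String(45)))
    metadata.create_all(engine)

    class Subset(object):
        pass

    # Mapping against a select() of a subset of columns; the statement is
    # wrapped in an anonymous alias, which is what triggers the
    # "creating an Alias" warning the test expects.
    subset_select = select([common.c.id, common.c.data])
    mapper(Subset, subset_select)

    sess = create_session(bind=engine)
    s = Subset()
    s.data = 1
    sess.save(s)          # sess.add(s) with 0.5-style sessions
    sess.flush()
    sess.clear()
    assert sess.query(Subset).filter(Subset.data == 1).one().data == 1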
diff --git a/test/orm/session.py b/test/orm/session.py
index 719ecccf9..e3b9ec021 100644
--- a/test/orm/session.py
+++ b/test/orm/session.py
@@ -1,83 +1,80 @@
import testenv; testenv.configure_for_tests()
import gc
import pickle
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc, util
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes
-from sqlalchemy.orm.session import SessionExtension
-from sqlalchemy.orm.session import Session as SessionCls
-from testlib import *
-from testlib.tables import *
-from testlib import fixtures, tables
-
-
-class SessionTest(TestBase, AssertsExecutionResults):
- def setUpAll(self):
- tables.create()
-
- def tearDownAll(self):
- tables.drop()
-
- def tearDown(self):
- SessionCls.close_all()
- tables.delete()
- clear_mappers()
+from sqlalchemy.orm import create_session, sessionmaker
+from testlib import engines, sa, testing
+from testlib.sa import Table, Column, Integer, String
+from testlib.sa.orm import mapper, relation, backref
+from testlib.testing import eq_
+from testlib.compat import set
+from engine import _base as engine_base
+from orm import _base, _fixtures
+
+
+class SessionTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @testing.resolve_artifact_names
+ def test_no_close_on_flush(self):
+ """Flush() doesn't close a connection the session didn't open"""
+ c = testing.db.connect()
+ c.execute("select * from users")
- def setUp(self):
- pass
+ mapper(User, users)
+ s = create_session(bind=c)
+ s.save(User(name='first'))
+ s.flush()
+ c.execute("select * from users")
+ @testing.resolve_artifact_names
def test_close(self):
- """test that flush() doesn't close a connection the session didn't open"""
-
+ """close() doesn't close a connection the session didn't open"""
c = testing.db.connect()
- class User(object):pass
+ c.execute("select * from users")
+
mapper(User, users)
s = create_session(bind=c)
- s.save(User())
+ s.save(User(name='first'))
s.flush()
c.execute("select * from users")
- u = User()
- s.save(u)
- s.user_name = 'some user'
- s.flush()
- u = User()
- s.save(u)
- s.user_name = 'some other user'
- s.flush()
+ s.close()
+ c.execute("select * from users")
- def test_close_two(self):
+ @testing.resolve_artifact_names
+    def test_no_close_transaction_on_flush(self):
c = testing.db.connect()
try:
- class User(object):pass
mapper(User, users)
s = create_session(bind=c)
s.begin()
tran = s.transaction
- s.save(User())
+ s.save(User(name='first'))
s.flush()
c.execute("select * from users")
- u = User()
+ u = User(name='two')
s.save(u)
- s.user_name = 'some user'
s.flush()
- u = User()
+ u = User(name='third')
s.save(u)
- s.user_name = 'some other user'
s.flush()
assert s.transaction is tran
tran.close()
finally:
c.close()
+ @testing.resolve_artifact_names
def test_expunge_cascade(self):
- tables.data()
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relation(Address, backref=backref("user", cascade="all"), cascade="all")
- })
+ 'addresses':relation(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ _fixtures.run_inserts_for(users)
+ _fixtures.run_inserts_for(addresses)
+
session = create_session()
- u = session.query(User).filter_by(user_id=7).one()
+ u = session.query(User).filter_by(id=7).one()
# get everything to load in both directions
print [a.user for a in u.addresses]
@@ -85,53 +82,86 @@ class SessionTest(TestBase, AssertsExecutionResults):
# then see if expunge fails
session.expunge(u)
- assert object_session(u) is attributes.instance_state(u).session_id is None
+ assert sa.orm.object_session(u) is None
+ assert sa.orm.attributes.instance_state(u).session_id is None
for a in u.addresses:
- assert object_session(a) is attributes.instance_state(a).session_id is None
+ assert sa.orm.object_session(a) is None
+ assert sa.orm.attributes.instance_state(a).session_id is None
@engines.close_open_connections
- def test_binds_from_expression(self):
- """test that Session can extract Table objects from ClauseElements and match them to tables."""
+ @testing.resolve_artifact_names
+ def test_table_binds_from_expression(self):
+ """Session can extract Table objects from ClauseElements and match them to tables."""
- Session = sessionmaker(binds={users:testing.db, addresses:testing.db})
+ mapper(Address, addresses)
+ mapper(User, users, properties={
+ 'addresses':relation(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ Session = sessionmaker(binds={users: self.metadata.bind,
+ addresses: self.metadata.bind})
sess = Session()
- sess.execute(users.insert(), params=dict(user_id=1, user_name='ed'))
- assert sess.execute(users.select()).fetchall() == [(1, 'ed')]
+
+ sess.execute(users.insert(), params=dict(id=1, name='ed'))
+ eq_(sess.execute(users.select(users.c.id == 1)).fetchall(),
+ [(1, 'ed')])
+
+ eq_(sess.execute(users.select(User.id == 1)).fetchall(),
+ [(1, 'ed')])
+
+ sess.close()
+
+ @engines.close_open_connections
+ @testing.resolve_artifact_names
+ def test_mapped_binds_from_expression(self):
+        """Session can extract mapped classes from ClauseElements and match them to class-level binds."""
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relation(Address, backref=backref("user", cascade="all"), cascade="all")
- })
- Session = sessionmaker(binds={User:testing.db, Address:testing.db})
- sess.execute(users.insert(), params=dict(user_id=2, user_name='fred'))
- assert sess.execute(users.select()).fetchall() == [(1, 'ed'), (2, 'fred')]
+ 'addresses':relation(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ Session = sessionmaker(binds={User: self.metadata.bind,
+ Address: self.metadata.bind})
+ sess = Session()
+
+ sess.execute(users.insert(), params=dict(id=1, name='ed'))
+ eq_(sess.execute(users.select(users.c.id == 1)).fetchall(),
+ [(1, 'ed')])
+
+ eq_(sess.execute(users.select(User.id == 1)).fetchall(),
+ [(1, 'ed')])
+
sess.close()
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_bind_from_metadata(self):
- Session = sessionmaker()
- sess = Session()
mapper(User, users)
- sess.execute(users.insert(), dict(user_name='Johnny'))
+ session = create_session()
+ session.execute(users.insert(), dict(name='Johnny'))
- assert len(sess.query(User).all()) == 1
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 1
- sess.execute(users.delete())
+ session.execute(users.delete())
- assert len(sess.query(User).all()) == 0
- sess.close()
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 0
+ session.close()
- @testing.unsupported('sqlite', 'mssql') # TEMP: test causes mssql to hang
+ @testing.unsupported('mssql', 'test causes mssql to hang')
+ @testing.unsupported('sqlite', 'needs true independent connections')
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_transaction(self):
- class User(object):pass
mapper(User, users)
conn1 = testing.db.connect()
conn2 = testing.db.connect()
sess = create_session(autocommit=False, bind=conn1)
- u = User()
+ u = User(name='x')
sess.save(u)
sess.flush()
assert conn1.execute("select count(1) from users").scalar() == 1
@@ -141,54 +171,54 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.close()
- @testing.unsupported('sqlite', 'mssql') # TEMP: test causes mssql to hang
+ @testing.unsupported('mssql', 'test causes mssql to hang')
+ @testing.unsupported('sqlite', 'needs true independent connections')
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_autoflush(self):
- class User(object):pass
+ bind = self.metadata.bind
mapper(User, users)
- conn1 = testing.db.connect()
- conn2 = testing.db.connect()
+ conn1 = bind.connect()
+ conn2 = bind.connect()
sess = create_session(bind=conn1, autocommit=False, autoflush=True)
u = User()
- u.user_name='ed'
+ u.name='ed'
sess.save(u)
- u2 = sess.query(User).filter_by(user_name='ed').one()
+ u2 = sess.query(User).filter_by(name='ed').one()
assert u2 is u
- assert conn1.execute("select count(1) from users").scalar() == 1
- assert conn2.execute("select count(1) from users").scalar() == 0
+ eq_(conn1.execute("select count(1) from users").scalar(), 1)
+ eq_(conn2.execute("select count(1) from users").scalar(), 0)
sess.commit()
- assert conn1.execute("select count(1) from users").scalar() == 1
- assert testing.db.connect().execute("select count(1) from users").scalar() == 1
+ eq_(conn1.execute("select count(1) from users").scalar(), 1)
+ eq_(bind.connect().execute("select count(1) from users").scalar(), 1)
sess.close()
+ @testing.resolve_artifact_names
def test_autoflush_expressions(self):
- class User(fixtures.Base):
- pass
- class Address(fixtures.Base):
- pass
mapper(User, users, properties={
- 'addresses':relation(Address, backref="user")
- })
+ 'addresses':relation(Address, backref="user")})
mapper(Address, addresses)
sess = create_session(autoflush=True, autocommit=False)
- u = User(user_name='ed', addresses=[Address(email_address='foo')])
+ u = User(name='ed', addresses=[Address(email_address='foo')])
sess.save(u)
- self.assertEquals(sess.query(Address).filter(Address.user==u).one(), Address(email_address='foo'))
+ eq_(sess.query(Address).filter(Address.user==u).one(),
+ Address(email_address='foo'))
- @testing.unsupported('sqlite', 'mssql') # TEMP: test causes mssql to hang
+ @testing.unsupported('mssql', 'test causes mssql to hang')
+ @testing.unsupported('sqlite', 'needs true independent connections')
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_autoflush_unbound(self):
- class User(object):pass
mapper(User, users)
try:
sess = create_session(autocommit=False, autoflush=True)
u = User()
- u.user_name='ed'
+ u.name='ed'
sess.save(u)
- u2 = sess.query(User).filter_by(user_name='ed').one()
+ u2 = sess.query(User).filter_by(name='ed').one()
assert u2 is u
assert sess.execute("select count(1) from users", mapper=User).scalar() == 1
assert testing.db.connect().execute("select count(1) from users").scalar() == 0
@@ -201,62 +231,67 @@ class SessionTest(TestBase, AssertsExecutionResults):
raise
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_autoflush_2(self):
- class User(object):pass
mapper(User, users)
conn1 = testing.db.connect()
conn2 = testing.db.connect()
sess = create_session(bind=conn1, autocommit=False, autoflush=True)
u = User()
- u.user_name='ed'
+ u.name='ed'
sess.save(u)
sess.commit()
assert conn1.execute("select count(1) from users").scalar() == 1
assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.commit()
+ @testing.resolve_artifact_names
def test_autoflush_rollback(self):
- tables.data()
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relation(Address)
- })
+ 'addresses':relation(Address)})
+
+ _fixtures.run_inserts_for(users)
+ _fixtures.run_inserts_for(addresses)
sess = create_session(autocommit=False, autoflush=True)
u = sess.query(User).get(8)
- newad = Address()
- newad.email_address = 'something new'
+ newad = Address(email_address='a new address')
u.addresses.append(newad)
- u.user_name = 'some new name'
- assert u.user_name == 'some new name'
+ u.name = 'some new name'
+ assert u.name == 'some new name'
assert len(u.addresses) == 4
assert newad in u.addresses
sess.rollback()
- assert u.user_name == 'ed'
+ assert u.name == 'ed'
assert len(u.addresses) == 3
+
assert newad not in u.addresses
-
# pending objects dont get expired
- assert newad.email_address == 'something new'
-
+ assert newad.email_address == 'a new address'
+
+ @testing.resolve_artifact_names
def test_textual_execute(self):
"""test that Session.execute() converts to text()"""
-
- tables.data()
- sess = create_session(bind=testing.db)
+
+ sess = create_session(bind=self.metadata.bind)
+ users.insert().execute(id=7, name='jack')
+
# use :bindparam style
- self.assertEquals(sess.execute("select * from users where user_id=:id", {'id':7}).fetchall(), [(7, u'jack')])
+ eq_(sess.execute("select * from users where id=:id",
+ {'id':7}).fetchall(),
+ [(7, u'jack')])
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_subtransaction_on_external(self):
- class User(object):pass
mapper(User, users)
conn = testing.db.connect()
trans = conn.begin()
sess = create_session(bind=conn, autocommit=False, autoflush=True)
sess.begin(subtransactions=True)
- u = User()
+ u = User(name='ed')
sess.save(u)
sess.flush()
sess.commit() # commit does nothing
@@ -264,22 +299,21 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert len(sess.query(User).all()) == 0
sess.close()
- @testing.unsupported('sqlite', 'mssql', 'firebird', 'sybase', 'access',
- 'oracle', 'maxdb')
+ @testing.requires.savepoints
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_external_nested_transaction(self):
- class User(object):pass
mapper(User, users)
try:
conn = testing.db.connect()
trans = conn.begin()
sess = create_session(bind=conn, autocommit=False, autoflush=True)
- u1 = User()
+ u1 = User(name='u1')
sess.save(u1)
sess.flush()
sess.begin_nested()
- u2 = User()
+ u2 = User(name='u2')
sess.save(u2)
sess.flush()
sess.rollback()
@@ -291,57 +325,56 @@ class SessionTest(TestBase, AssertsExecutionResults):
raise
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_heavy_nesting(self):
session = create_session(bind=testing.db)
session.begin()
- session.connection().execute("insert into users (user_name) values ('user1')")
+ session.connection().execute("insert into users (name) values ('user1')")
session.begin(subtransactions=True)
session.begin_nested()
- session.connection().execute("insert into users (user_name) values ('user2')")
+ session.connection().execute("insert into users (name) values ('user2')")
assert session.connection().execute("select count(1) from users").scalar() == 2
session.rollback()
assert session.connection().execute("select count(1) from users").scalar() == 1
- session.connection().execute("insert into users (user_name) values ('user3')")
+ session.connection().execute("insert into users (name) values ('user3')")
session.commit()
assert session.connection().execute("select count(1) from users").scalar() == 2
@testing.requires.two_phase_transactions
+ @testing.resolve_artifact_names
def test_twophase(self):
# TODO: mock up a failure condition here
# to ensure a rollback succeeds
- class User(object):pass
- class Address(object):pass
mapper(User, users)
mapper(Address, addresses)
- engine2 = create_engine(testing.db.url)
+ engine2 = engines.testing_engine()
sess = create_session(autocommit=True, autoflush=False, twophase=True)
sess.bind_mapper(User, testing.db)
sess.bind_mapper(Address, engine2)
sess.begin()
- u1 = User()
- a1 = Address()
- sess.save(u1)
- sess.save(a1)
+ u1 = User(name='u1')
+ a1 = Address(email_address='u1@e')
+ sess.add_all((u1, a1))
sess.commit()
sess.close()
engine2.dispose()
assert users.count().scalar() == 1
assert addresses.count().scalar() == 1
+ @testing.resolve_artifact_names
def test_subtransaction_on_noautocommit(self):
- class User(object):pass
mapper(User, users)
sess = create_session(autocommit=False, autoflush=True)
sess.begin(subtransactions=True)
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
sess.commit() # commit does nothing
@@ -350,19 +383,19 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.close()
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_nested_transaction(self):
- class User(object):pass
mapper(User, users)
sess = create_session()
sess.begin()
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
sess.begin_nested() # nested transaction
- u2 = User()
+ u2 = User(name='u2')
sess.save(u2)
sess.flush()
@@ -373,17 +406,17 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.close()
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_nested_autotrans(self):
- class User(object):pass
mapper(User, users)
sess = create_session(autocommit=False)
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
sess.begin_nested() # nested transaction
- u2 = User()
+ u2 = User(name='u2')
sess.save(u2)
sess.flush()
@@ -394,8 +427,8 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.close()
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_nested_transaction_connection_add(self):
- class User(object): pass
mapper(User, users)
sess = create_session(autocommit=True)
@@ -403,34 +436,34 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.begin()
sess.begin_nested()
- u1 = User()
+ u1 = User(name='u1')
sess.save(u1)
sess.flush()
sess.rollback()
- u2 = User()
+ u2 = User(name='u2')
sess.save(u2)
sess.commit()
- self.assertEquals(util.Set(sess.query(User).all()), util.Set([u2]))
+ self.assertEquals(set(sess.query(User).all()), set([u2]))
sess.begin()
sess.begin_nested()
- u3 = User()
+ u3 = User(name='u3')
sess.save(u3)
sess.commit() # commit the nested transaction
sess.rollback()
- self.assertEquals(util.Set(sess.query(User).all()), util.Set([u2]))
+ self.assertEquals(set(sess.query(User).all()), set([u2]))
sess.close()
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_mixed_transaction_control(self):
- class User(object): pass
mapper(User, users)
sess = create_session(autocommit=True)
@@ -439,7 +472,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.begin_nested()
transaction = sess.begin(subtransactions=True)
- sess.save(User())
+ sess.save(User(name='u1'))
transaction.commit()
sess.commit()
@@ -452,7 +485,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
t1 = sess.begin()
t2 = sess.begin_nested()
- sess.save(User())
+ sess.save(User(name='u2'))
t2.commit()
assert sess.transaction is t1
@@ -460,28 +493,28 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.close()
@testing.requires.savepoints
+ @testing.resolve_artifact_names
def test_mixed_transaction_close(self):
- class User(object): pass
mapper(User, users)
sess = create_session(autocommit=False)
sess.begin_nested()
- sess.save(User())
+ sess.save(User(name='u1'))
sess.flush()
sess.close()
- sess.save(User())
+ sess.save(User(name='u2'))
sess.commit()
sess.close()
self.assertEquals(len(sess.query(User).all()), 1)
+ @testing.resolve_artifact_names
def test_error_on_using_inactive_session(self):
- class User(object): pass
mapper(User, users)
sess = create_session(autocommit=True)
@@ -489,39 +522,39 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.begin()
sess.begin(subtransactions=True)
- sess.save(User())
+ sess.save(User(name='u1'))
sess.flush()
sess.rollback()
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "inactive due to a rollback in a subtransaction", sess.begin, subtransactions=True)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "inactive due to a rollback in a subtransaction", sess.begin, subtransactions=True)
sess.close()
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_bound_connection(self):
- class User(object):pass
mapper(User, users)
c = testing.db.connect()
sess = create_session(bind=c)
sess.begin()
transaction = sess.transaction
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
assert transaction._connection_for_bind(testing.db) is transaction._connection_for_bind(c) is c
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "Session already has a Connection associated", transaction._connection_for_bind, testing.db.connect())
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "Session already has a Connection associated", transaction._connection_for_bind, testing.db.connect())
transaction.rollback()
assert len(sess.query(User).all()) == 0
sess.close()
+ @testing.resolve_artifact_names
def test_bound_connection_transactional(self):
- class User(object):pass
mapper(User, users)
c = testing.db.connect()
sess = create_session(bind=c, autocommit=False)
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
sess.close()
@@ -529,7 +562,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert c.scalar("select count(1) from users") == 0
sess = create_session(bind=c, autocommit=False)
- u = User()
+ u = User(name='u2')
sess.save(u)
sess.flush()
sess.commit()
@@ -542,7 +575,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
trans = c.begin()
sess = create_session(bind=c, autocommit=True)
- u = User()
+ u = User(name='u3')
sess.save(u)
sess.flush()
assert c.in_transaction()
@@ -552,17 +585,16 @@ class SessionTest(TestBase, AssertsExecutionResults):
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_save_update_delete(self):
s = create_session()
- class User(object):
- pass
mapper(User, users)
- user = User()
+ user = User(name='u1')
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "is not persisted", s.update, user)
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "is not persisted", s.delete, user)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "is not persisted", s.update, user)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "is not persisted", s.delete, user)
s.save(user)
s.flush()
@@ -571,7 +603,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert user not in s
# modify outside of session, assert changes remain/get saved
- user.user_name = "fred"
+ user.name = "fred"
s.update(user)
assert user in s
assert user in s.dirty
@@ -579,7 +611,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
s.clear()
assert s.query(User).count() == 1
user = s.query(User).one()
- assert user.user_name == 'fred'
+ assert user.name == 'fred'
# ensure its not dirty if no changes occur
s.clear()
@@ -588,30 +620,28 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert user in s
assert user not in s.dirty
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "is already persistent", s.save, user)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "is already persistent", s.save, user)
s2 = create_session()
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "is already attached to session", s2.delete, user)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "is already attached to session", s2.delete, user)
- u2 = s2.query(User).get(user.user_id)
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "already persisted with a different identity", s.delete, u2)
+ u2 = s2.query(User).get(user.id)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "already persisted with a different identity", s.delete, u2)
s.delete(user)
s.flush()
assert user not in s
assert s.query(User).count() == 0
+ @testing.resolve_artifact_names
def test_is_modified(self):
s = create_session()
- class User(object):pass
- class Address(object):pass
mapper(User, users, properties={'addresses':relation(Address)})
mapper(Address, addresses)
# save user
- u = User()
- u.user_name = 'fred'
+ u = User(name='fred')
s.save(u)
s.flush()
s.clear()
@@ -619,10 +649,10 @@ class SessionTest(TestBase, AssertsExecutionResults):
user = s.query(User).one()
assert user not in s.dirty
assert not s.is_modified(user)
- user.user_name = 'fred'
+ user.name = 'fred'
assert user in s.dirty
assert not s.is_modified(user)
- user.user_name = 'ed'
+ user.name = 'ed'
assert user in s.dirty
assert s.is_modified(user)
s.flush()
@@ -636,14 +666,14 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert not s.is_modified(user, include_collections=False)
+ @testing.resolve_artifact_names
def test_weak_ref(self):
"""test the weak-referencing identity map, which strongly-references modified items."""
s = create_session()
- class User(fixtures.Base):pass
mapper(User, users)
- s.save(User(user_name='ed'))
+ s.save(User(name='ed'))
s.flush()
assert not s.dirty
@@ -653,7 +683,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert len(s.identity_map) == 0
user = s.query(User).one()
- user.user_name = 'fred'
+ user.name = 'fred'
del user
gc.collect()
assert len(s.identity_map) == 1
@@ -665,16 +695,16 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert not s.identity_map
user = s.query(User).one()
- assert user.user_name == 'fred'
+ assert user.name == 'fred'
assert s.identity_map
+ @testing.resolve_artifact_names
def test_strong_ref(self):
s = create_session(weak_identity_map=False)
- class User(object):pass
mapper(User, users)
# save user
- s.save(User())
+ s.save(User(name='u1'))
s.flush()
user = s.query(User).one()
user = None
@@ -683,12 +713,12 @@ class SessionTest(TestBase, AssertsExecutionResults):
gc.collect()
assert len(s.identity_map) == 1
+ @testing.resolve_artifact_names
def test_prune(self):
s = create_session(weak_identity_map=False)
- class User(object):pass
mapper(User, users)
- for o in [User() for x in xrange(10)]:
+ for o in [User(name='u%s' % x) for x in xrange(10)]:
s.save(o)
# o is still live after this loop...
@@ -700,15 +730,15 @@ class SessionTest(TestBase, AssertsExecutionResults):
self.assert_(s.prune() == 9)
self.assert_(len(s.identity_map) == 1)
- user_id = o.user_id
+ id = o.id
del o
self.assert_(s.prune() == 1)
self.assert_(len(s.identity_map) == 0)
- u = s.query(User).get(user_id)
+ u = s.query(User).get(id)
self.assert_(s.prune() == 0)
self.assert_(len(s.identity_map) == 1)
- u.user_name = 'squiznart'
+ u.name = 'squiznart'
del u
self.assert_(s.prune() == 0)
self.assert_(len(s.identity_map) == 1)
@@ -716,7 +746,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
self.assert_(s.prune() == 1)
self.assert_(len(s.identity_map) == 0)
- s.save(User())
+ s.save(User(name='x'))
self.assert_(s.prune() == 0)
self.assert_(len(s.identity_map) == 0)
s.flush()
@@ -724,7 +754,7 @@ class SessionTest(TestBase, AssertsExecutionResults):
self.assert_(s.prune() == 1)
self.assert_(len(s.identity_map) == 0)
- u = s.query(User).get(user_id)
+ u = s.query(User).get(id)
s.delete(u)
del u
self.assert_(s.prune() == 0)
@@ -733,83 +763,85 @@ class SessionTest(TestBase, AssertsExecutionResults):
self.assert_(s.prune() == 0)
self.assert_(len(s.identity_map) == 0)
- def test_no_save_cascade(self):
+ @testing.resolve_artifact_names
+ def test_no_save_cascade_1(self):
mapper(Address, addresses)
mapper(User, users, properties=dict(
- addresses=relation(Address, cascade="none", backref="user")
- ))
+ addresses=relation(Address, cascade="none", backref="user")))
s = create_session()
- u = User()
+
+ u = User(name='u1')
s.save(u)
- a = Address()
+ a = Address(email_address='u1@e')
u.addresses.append(a)
assert u in s
assert a not in s
s.flush()
print "\n".join([repr(x.__dict__) for x in s])
s.clear()
- assert s.query(User).one().user_id == u.user_id
+ assert s.query(User).one().id == u.id
assert s.query(Address).first() is None
- clear_mappers()
-
- tables.delete()
+ @testing.resolve_artifact_names
+ def test_no_save_cascade_2(self):
mapper(Address, addresses)
mapper(User, users, properties=dict(
- addresses=relation(Address, cascade="all", backref=backref("user", cascade="none"))
- ))
+ addresses=relation(Address,
+ cascade="all",
+ backref=backref("user", cascade="none"))))
s = create_session()
- u = User()
- a = Address()
+ u = User(name='u1')
+ a = Address(email_address='u1@e')
a.user = u
s.save(a)
assert u not in s
assert a in s
s.flush()
s.clear()
- assert s.query(Address).one().address_id == a.address_id
+ assert s.query(Address).one().id == a.id
assert s.query(User).first() is None
- def _assert_key(self, got, expect):
- assert got == expect, "expected %r got %r" % (expect, got)
-
+ @testing.resolve_artifact_names
def test_identity_key_1(self):
mapper(User, users)
mapper(User, users, entity_name="en")
s = create_session()
key = s.identity_key(User, 1)
- self._assert_key(key, (User, (1,), None))
+ eq_(key, (User, (1,), None))
key = s.identity_key(User, 1, "en")
- self._assert_key(key, (User, (1,), "en"))
+ eq_(key, (User, (1,), "en"))
key = s.identity_key(User, 1, entity_name="en")
- self._assert_key(key, (User, (1,), "en"))
+ eq_(key, (User, (1,), "en"))
key = s.identity_key(User, ident=1, entity_name="en")
- self._assert_key(key, (User, (1,), "en"))
+ eq_(key, (User, (1,), "en"))
+ @testing.resolve_artifact_names
def test_identity_key_2(self):
mapper(User, users)
s = create_session()
- u = User()
+ u = User(name='u1')
s.save(u)
s.flush()
key = s.identity_key(instance=u)
- self._assert_key(key, (User, (u.user_id,), None))
+ eq_(key, (User, (u.id,), None))
+ @testing.resolve_artifact_names
def test_identity_key_3(self):
mapper(User, users)
mapper(User, users, entity_name="en")
s = create_session()
- row = {users.c.user_id: 1, users.c.user_name: "Frank"}
+ row = {users.c.id: 1, users.c.name: "Frank"}
key = s.identity_key(User, row=row)
- self._assert_key(key, (User, (1,), None))
+ eq_(key, (User, (1,), None))
key = s.identity_key(User, row=row, entity_name="en")
- self._assert_key(key, (User, (1,), "en"))
+ eq_(key, (User, (1,), "en"))
+ @testing.resolve_artifact_names
def test_extension(self):
mapper(User, users)
log = []
- class MyExt(SessionExtension):
+ class MyExt(sa.orm.session.SessionExtension):
def before_commit(self, session):
log.append('before_commit')
def after_commit(self, session):
@@ -825,20 +857,20 @@ class SessionTest(TestBase, AssertsExecutionResults):
def after_begin(self, session, transaction, connection):
log.append('after_begin')
sess = create_session(extension = MyExt())
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
assert log == ['before_flush', 'after_begin', 'after_flush', 'before_commit', 'after_commit', 'after_flush_postexec']
log = []
sess = create_session(autocommit=False, extension=MyExt())
- u = User()
+ u = User(name='u1')
sess.save(u)
sess.flush()
assert log == ['before_flush', 'after_begin', 'after_flush', 'after_flush_postexec']
log = []
- u.user_name = 'ed'
+ u.name = 'ed'
sess.commit()
assert log == ['before_commit', 'before_flush', 'after_flush', 'after_flush_postexec', 'after_commit']
@@ -851,36 +883,38 @@ class SessionTest(TestBase, AssertsExecutionResults):
conn = sess.connection()
assert log == ['after_begin']
+ @testing.resolve_artifact_names
def test_pickled_update(self):
mapper(User, users)
sess1 = create_session()
sess2 = create_session()
- u1 = User()
+ u1 = User(name='u1')
sess1.save(u1)
- self.assertRaisesMessage(sa_exc.InvalidRequestError, "already attached to session", sess2.save, u1)
+ self.assertRaisesMessage(sa.exc.InvalidRequestError, "already attached to session", sess2.save, u1)
u2 = pickle.loads(pickle.dumps(u1))
sess2.save(u2)
+ @testing.resolve_artifact_names
def test_duplicate_update(self):
mapper(User, users)
Session = sessionmaker()
sess = Session()
- u1 = User()
+ u1 = User(name='u1')
sess.save(u1)
sess.flush()
- assert u1.user_id is not None
+ assert u1.id is not None
sess.expunge(u1)
assert u1 not in sess
assert Session.object_session(u1) is None
- u2 = sess.query(User).get(u1.user_id)
+ u2 = sess.query(User).get(u1.id)
assert u2 is not None and u2 is not u1
assert u2 in sess
@@ -890,8 +924,8 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert u2 not in sess
assert Session.object_session(u2) is None
- u1.user_name = "John"
- u2.user_name = "Doe"
+ u1.name = "John"
+ u2.name = "Doe"
sess.update(u1)
assert u1 in sess
@@ -901,9 +935,10 @@ class SessionTest(TestBase, AssertsExecutionResults):
sess.clear()
- u3 = sess.query(User).get(u1.user_id)
- assert u3 is not u1 and u3 is not u2 and u3.user_name == u1.user_name
+ u3 = sess.query(User).get(u1.id)
+ assert u3 is not u1 and u3 is not u2 and u3.name == u1.name
+ @testing.resolve_artifact_names
def test_no_double_save(self):
sess = create_session()
class Foo(object):
@@ -921,39 +956,42 @@ class SessionTest(TestBase, AssertsExecutionResults):
assert len(list(sess)) == 1
-class TLTransactionTest(TestBase):
- def setUpAll(self):
- global users, metadata, tlengine
- tlengine = create_engine(testing.db.url, strategy='threadlocal')
- metadata = MetaData()
- users = Table('query_users', metadata,
- Column('user_id', INT, Sequence('query_users_id_seq', optional=True), primary_key=True),
- Column('user_name', VARCHAR(20)),
- test_needs_acid=True,
- )
- users.create(tlengine)
- def tearDown(self):
- tlengine.execute(users.delete())
+class TLTransactionTest(engine_base.AltEngineTest, _base.MappedTest):
+ def create_engine(self):
+ return engines.testing_engine(options=dict(strategy='threadlocal'))
- def tearDownAll(self):
- users.drop(tlengine)
- tlengine.dispose()
+ def define_tables(self, metadata):
+ Table('users', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(20)),
+ test_needs_acid=True)
- @testing.exclude('mysql', '<', (5, 0, 3))
- def testsessionnesting(self):
- class User(object):
+ def setup_classes(self):
+ class User(_base.BasicEntity):
pass
- try:
- mapper(User, users)
- sess = create_session(bind=tlengine)
- tlengine.begin()
- u = User()
- sess.save(u)
- sess.flush()
- tlengine.commit()
- finally:
- clear_mappers()
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(User, users)
+
+ def setUpAll(self):
+ engine_base.AltEngineTest.setUpAll(self)
+ _base.MappedTest.setUpAll(self)
+
+
+ def tearDownAll(self):
+ _base.MappedTest.tearDownAll(self)
+ engine_base.AltEngineTest.tearDownAll(self)
+
+ @testing.exclude('mysql', '<', (5, 0, 3), 'FIXME: unknown')
+ @testing.resolve_artifact_names
+ def test_session_nesting(self):
+ sess = create_session(bind=self.engine)
+ self.engine.begin()
+ u = User(name='ed')
+ sess.save(u)
+ sess.flush()
+ self.engine.commit()
if __name__ == "__main__":
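For context on the savepoint-gated tests above (@testing.requires.savepoints), the nested-transaction behavior they verify reduces to the following pattern. This is a rough sketch: it assumes a backend with SAVEPOINT support, and the connection URL is a placeholder:

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import mapper, create_session

    engine = create_engine('postgres://scott:tiger@localhost/test')  # placeholder URL
    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(20)))
    metadata.create_all(engine)

    class User(object):
        def __init__(self, name):
            self.name = name

    mapper(User, users)

    sess = create_session(bind=engine, autocommit=False)
    sess.save(User(name='u1'))
    sess.flush()

    sess.begin_nested()               # emits SAVEPOINT
    sess.save(User(name='u2'))
    sess.flush()
    sess.rollback()                   # ROLLBACK TO SAVEPOINT; only 'u2' is discarded

    sess.commit()                     # the outer transaction still commits 'u1'
    assert sess.query(User).count() == 1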
diff --git a/test/orm/unitofwork.py b/test/orm/unitofwork.py
index 4c6f6f4cf..c14882979 100644
--- a/test/orm/unitofwork.py
+++ b/test/orm/unitofwork.py
@@ -3,121 +3,128 @@
"""Tests unitofwork operations."""
import testenv; testenv.configure_for_tests()
+import datetime
+import operator
+from sqlalchemy.orm import mapper as orm_mapper
+
+from testlib import engines, sa, testing
+from testlib.sa import Table, Column, Integer, String, ForeignKey
+from testlib.sa.orm import mapper, relation, create_session
+from testlib.testing import eq_, ne_
+from testlib.compat import set
+from orm import _base, _fixtures
+from engine import _base as engine_base
import pickleable
-from sqlalchemy import *
-from sqlalchemy import exc as sa_exc, sql
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes, exc as orm_exc
-from testlib import *
-from testlib.tables import *
-from testlib import engines, tables, fixtures
-
-
-# TODO: convert suite to not use Session.mapper, use fixtures.Base
-# with explicit session.save()
-Session = scoped_session(sessionmaker(autoflush=True, autocommit=False, autoexpire=False))
-orm_mapper = mapper
-mapper = Session.mapper
class UnitOfWorkTest(object):
pass
-class HistoryTest(ORMTest):
- metadata = tables.metadata
- def define_tables(self, metadata):
- pass
+class HistoryTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ def setup_classes(self):
+ class User(_base.ComparableEntity):
+ pass
+ class Address(_base.ComparableEntity):
+ pass
+ @testing.resolve_artifact_names
def test_backref(self):
- s = Session()
- class User(object):
- def __init__(self, **kw): pass
- class Address(object):
- def __init__(self, _sa_session=None): pass
am = mapper(Address, addresses)
- m = mapper(User, users, properties = dict(
- addresses = relation(am, backref='user', lazy=False))
- )
+ m = mapper(User, users, properties=dict(
+ addresses = relation(am, backref='user', lazy=False)))
- u = User(_sa_session=s)
- a = Address(_sa_session=s)
+ session = create_session(autocommit=False)
+
+ u = User(name='u1')
+ a = Address(email_address='u1@e')
a.user = u
+ session.add(u)
self.assert_(u.addresses == [a])
- s.commit()
+ session.commit()
+ session.clear()
+
+ u = session.query(m).one()
+ assert u.addresses[0].user == u
+ session.close()
- s.close()
- u = s.query(m).all()[0]
- print u.addresses[0].user
-class VersioningTest(ORMTest):
+class VersioningTest(_base.MappedTest):
def define_tables(self, metadata):
- global version_table
- version_table = Table('version_test', metadata,
- Column('id', Integer, Sequence('version_test_seq', optional=True),
- primary_key=True ),
- Column('version_id', Integer, nullable=False),
- Column('value', String(40), nullable=False)
- )
+ Table('version_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer, nullable=False),
+ Column('value', String(40), nullable=False))
+
+ def setup_classes(self):
+ class Foo(_base.ComparableEntity):
+ pass
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_basic(self):
- s = Session(scope=None)
- class Foo(object):
- def __init__(self, value, _sa_session=None):
- self.value = value
mapper(Foo, version_table, version_id_col=version_table.c.version_id)
- f1 = Foo(value='f1', _sa_session=s)
- f2 = Foo(value='f2', _sa_session=s)
- s.commit()
+
+ s1 = create_session(autocommit=False)
+ f1 = Foo(value='f1')
+ f2 = Foo(value='f2')
+ s1.add_all((f1, f2))
+ s1.commit()
f1.value='f1rev2'
- s.commit()
+ s1.commit()
- s2 = Session()
+ s2 = create_session(autocommit=False)
f1_s = s2.query(Foo).get(f1.id)
f1_s.value='f1rev3'
s2.commit()
f1.value='f1rev3mine'
- # Only dialects with a sane rowcount can detect the ConcurrentModificationError
+ # Only dialects with a sane rowcount can detect the
+ # ConcurrentModificationError
if testing.db.dialect.supports_sane_rowcount:
- self.assertRaises(orm_exc.ConcurrentModificationError, s.commit)
- s.rollback()
+ self.assertRaises(sa.orm.exc.ConcurrentModificationError, s1.commit)
+ s1.rollback()
else:
- s.commit()
-
+ s1.commit()
+
# new in 0.5 ! dont need to close the session
- f1 = s.query(Foo).get(f1.id)
- f2 = s.query(Foo).get(f2.id)
+ f1 = s1.query(Foo).get(f1.id)
+ f2 = s1.query(Foo).get(f2.id)
f1_s.value='f1rev4'
s2.commit()
- s.delete(f1)
- s.delete(f2)
+ s1.delete(f1)
+ s1.delete(f2)
if testing.db.dialect.supports_sane_multi_rowcount:
- self.assertRaises(orm_exc.ConcurrentModificationError, s.commit)
+ self.assertRaises(sa.orm.exc.ConcurrentModificationError, s1.commit)
else:
- s.commit()
+ s1.commit()
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_versioncheck(self):
- """test that query.with_lockmode performs a 'version check' on an already loaded instance"""
- s1 = Session(scope=None)
- class Foo(object):
- def __init__(self, _sa_session=None): pass
+ """query.with_lockmode performs a 'version check' on an already loaded instance"""
+
+ s1 = create_session(autocommit=False)
+
mapper(Foo, version_table, version_id_col=version_table.c.version_id)
- f1s1 = Foo(_sa_session=s1)
- f1s1.value = 'f1 value'
+ f1s1 = Foo(value='f1 value')
+ s1.add(f1s1)
s1.commit()
- s2 = Session()
+
+ s2 = create_session(autocommit=False)
f1s2 = s2.query(Foo).get(f1s1.id)
f1s2.value='f1 new value'
s2.commit()
+
# load, version is wrong
- self.assertRaises(orm_exc.ConcurrentModificationError, s1.query(Foo).with_lockmode('read').get, f1s1.id)
+ self.assertRaises(sa.orm.exc.ConcurrentModificationError, s1.query(Foo).with_lockmode('read').get, f1s1.id)
# reload it
s1.query(Foo).load(f1s1.id)
@@ -129,584 +136,689 @@ class VersioningTest(ORMTest):
s1.query(Foo).with_lockmode('read').get(f1s1.id)
@engines.close_open_connections
+ @testing.resolve_artifact_names
def test_noversioncheck(self):
- """test that query.with_lockmode works OK when the mapper has no version id col"""
- s1 = Session()
- class Foo(object):
- def __init__(self, _sa_session=None): pass
+        """query.with_lockmode() works when the mapper has no version id col."""
+ s1 = create_session(autocommit=False)
mapper(Foo, version_table)
- f1s1 =Foo(_sa_session=s1)
- f1s1.value = 'foo'
- f1s1.version_id=0
+ f1s1 = Foo(value="foo", version_id=0)
+ s1.add(f1s1)
s1.commit()
- s2 = Session()
+
+ s2 = create_session(autocommit=False)
f1s2 = s2.query(Foo).with_lockmode('read').get(f1s1.id)
assert f1s2.id == f1s1.id
assert f1s2.value == f1s1.value
-class UnicodeTest(ORMTest):
+class UnicodeTest(_base.MappedTest):
+ __requires__ = ('unicode_connections',)
+
def define_tables(self, metadata):
- global uni_table, uni_table2
- uni_table = Table('uni_test', metadata,
- Column('id', Integer, Sequence("uni_test_id_seq", optional=True), primary_key=True),
- Column('txt', Unicode(50), unique=True))
- uni_table2 = Table('uni2', metadata,
- Column('id', Integer, Sequence("uni2_test_id_seq", optional=True), primary_key=True),
- Column('txt', Unicode(50), ForeignKey(uni_table.c.txt)))
+ Table('uni_t1', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('txt', sa.Unicode(50), unique=True))
+ Table('uni_t2', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+              Column('txt', sa.Unicode(50), ForeignKey('uni_t1.txt')))
+
+ def setup_classes(self):
+ class Test(_base.BasicEntity):
+ pass
+ class Test2(_base.BasicEntity):
+ pass
+ @testing.resolve_artifact_names
def test_basic(self):
- class Test(object):
- def __init__(self, id, txt):
- self.id = id
- self.txt = txt
- mapper(Test, uni_table)
+ mapper(Test, uni_t1)
txt = u"\u0160\u0110\u0106\u010c\u017d"
- t1 = Test(id=1, txt = txt)
+ t1 = Test(id=1, txt=txt)
self.assert_(t1.txt == txt)
- Session.commit()
+
+ session = create_session(autocommit=False)
+ session.add(t1)
+ session.commit()
+
self.assert_(t1.txt == txt)
+ @testing.resolve_artifact_names
def test_relation(self):
- class Test(object):
- def __init__(self, txt):
- self.txt = txt
- class Test2(object):pass
-
- mapper(Test, uni_table, properties={
- 't2s':relation(Test2)
- })
- mapper(Test2, uni_table2)
+ mapper(Test, uni_t1, properties={
+ 't2s': relation(Test2)})
+ mapper(Test2, uni_t2)
txt = u"\u0160\u0110\u0106\u010c\u017d"
t1 = Test(txt=txt)
t1.t2s.append(Test2())
t1.t2s.append(Test2())
- Session.commit()
- Session.close()
- t1 = Session.query(Test).filter_by(id=t1.id).one()
+ session = create_session(autocommit=False)
+ session.add(t1)
+ session.commit()
+ session.close()
+
+ session = create_session()
+ t1 = session.query(Test).filter_by(id=t1.id).one()
assert len(t1.t2s) == 2
-class UnicodeSchemaTest(ORMTest):
- __unsupported_on__ = ('oracle', 'mssql', 'firebird', 'sybase',
- 'access', 'maxdb')
- __excluded_on__ = (('mysql', '<', (4, 1, 1)),)
+class UnicodeSchemaTest(engine_base.AltEngineTest, _base.MappedTest):
+ __requires__ = ('unicode_connections', 'unicode_ddl',)
- metadata = MetaData(engines.utf8_engine())
+ def create_engine(self):
+ return engines.utf8_engine()
def define_tables(self, metadata):
- global t1, t2
-
t1 = Table('unitable1', metadata,
- Column(u'méil', Integer, primary_key=True, key='a'),
- Column(u'\u6e2c\u8a66', Integer, key='b'),
- Column('type', String(20)),
- test_needs_fk=True,
- )
+ Column(u'méil', Integer, primary_key=True, key='a'),
+ Column(u'\u6e2c\u8a66', Integer, key='b'),
+ Column('type', String(20)),
+ test_needs_fk=True,
+ test_needs_autoincrement=True)
t2 = Table(u'Unitéble2', metadata,
- Column(u'méil', Integer, primary_key=True, key="cc"),
- Column(u'\u6e2c\u8a66', Integer, ForeignKey(u'unitable1.a'), key="d"),
- Column(u'\u6e2c\u8a66_2', Integer, key="e"),
- test_needs_fk=True,
- )
+ Column(u'méil', Integer, primary_key=True, key="cc"),
+ Column(u'\u6e2c\u8a66', Integer,
+ ForeignKey(u'unitable1.a'), key="d"),
+ Column(u'\u6e2c\u8a66_2', Integer, key="e"),
+ test_needs_fk=True,
+ test_needs_autoincrement=True)
+
+ self.tables['t1'] = t1
+ self.tables['t2'] = t2
+
+ def setUpAll(self):
+ engine_base.AltEngineTest.setUpAll(self)
+ _base.MappedTest.setUpAll(self)
+ def tearDownAll(self):
+ _base.MappedTest.tearDownAll(self)
+ engine_base.AltEngineTest.tearDownAll(self)
+
+ @testing.resolve_artifact_names
def test_mapping(self):
- class A(fixtures.Base):pass
- class B(fixtures.Base):pass
+ class A(_base.ComparableEntity):
+ pass
+ class B(_base.ComparableEntity):
+ pass
mapper(A, t1, properties={
- 't2s':relation(B),
- })
+ 't2s':relation(B)})
mapper(B, t2)
+
a1 = A()
b1 = B()
a1.t2s.append(b1)
- Session.flush()
- Session.clear()
- new_a1 = Session.query(A).filter(t1.c.a == a1.a).one()
+
+ session = create_session()
+ session.add(a1)
+ session.flush()
+ session.clear()
+
+ new_a1 = session.query(A).filter(t1.c.a == a1.a).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
- Session.clear()
+ session.clear()
- new_a1 = Session.query(A).options(eagerload('t2s')).filter(t1.c.a == a1.a).one()
+ new_a1 = (session.query(A).options(sa.orm.eagerload('t2s')).
+ filter(t1.c.a == a1.a)).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
- Session.clear()
+ session.clear()
- new_a1 = Session.query(A).filter(A.a == a1.a).one()
+ new_a1 = session.query(A).filter(A.a == a1.a).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
- Session.clear()
+ session.clear()
+ @testing.resolve_artifact_names
def test_inheritance_mapping(self):
- class A(fixtures.Base):pass
- class B(A):pass
- mapper(A, t1, polymorphic_on=t1.c.type, polymorphic_identity='a')
- mapper(B, t2, inherits=A, polymorphic_identity='b')
+ class A(_base.ComparableEntity):
+ pass
+ class B(A):
+ pass
+
+ mapper(A, t1,
+ polymorphic_on=t1.c.type,
+ polymorphic_identity='a')
+ mapper(B, t2,
+ inherits=A,
+ polymorphic_identity='b')
a1 = A(b=5)
b1 = B(e=7)
- Session.flush()
- Session.clear()
- # TODO: somehow, not assigning to "l" first
- # breaks the comparison ?????
- l = Session.query(A).all()
- assert [A(b=5), B(e=7)] == l
+ session = create_session()
+ session.add_all((a1, b1))
+ session.flush()
+ session.clear()
+
+ eq_([A(b=5), B(e=7)], session.query(A).all())
+
+
+class MutableTypesTest(_base.MappedTest):
-class MutableTypesTest(ORMTest):
def define_tables(self, metadata):
- global table
- table = Table('mutabletest', metadata,
- Column('id', Integer, Sequence('mutableidseq', optional=True), primary_key=True),
- Column('data', PickleType),
- Column('val', Unicode(30)))
+ Table('mutable_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', sa.PickleType),
+ Column('val', sa.Unicode(30)))
+
+ def setup_classes(self):
+ class Foo(_base.BasicEntity):
+ pass
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(Foo, mutable_t)
+
+ @testing.resolve_artifact_names
def test_basic(self):
- """test that types marked as MutableType get changes detected on them"""
- class Foo(object):pass
- mapper(Foo, table)
+ """Changes are detected for types marked as MutableType."""
+
f1 = Foo()
f1.data = pickleable.Bar(4,5)
- Session.commit()
- Session.close()
- f2 = Session.query(Foo).filter_by(id=f1.id).one()
- assert 'data' in attributes.instance_state(f2).unmodified
- assert f2.data == f1.data
+
+ session = create_session()
+ session.add(f1)
+ session.flush()
+ session.clear()
+
+ f2 = session.query(Foo).filter_by(id=f1.id).one()
+ assert 'data' in sa.orm.attributes.instance_state(f2).unmodified
+ eq_(f2.data, f1.data)
+
f2.data.y = 19
- assert f2 in Session.dirty
- assert 'data' not in attributes.instance_state(f2).unmodified
- Session.commit()
- Session.close()
- f3 = Session.query(Foo).filter_by(id=f1.id).one()
- print f2.data, f3.data
- assert f3.data != f1.data
- assert f3.data == pickleable.Bar(4, 19)
-
- def test_mutablechanges(self):
- """test that mutable changes are detected or not detected correctly"""
- class Foo(object):pass
- mapper(Foo, table)
+ assert f2 in session.dirty
+ assert 'data' not in sa.orm.attributes.instance_state(f2).unmodified
+ session.flush()
+ session.clear()
+
+ f3 = session.query(Foo).filter_by(id=f1.id).one()
+        ne_(f3.data, f1.data)
+ eq_(f3.data, pickleable.Bar(4, 19))
+
+ @testing.resolve_artifact_names
+ def test_mutable_changes(self):
+        """Mutable changes are detected; unchanged values are not flagged."""
+
f1 = Foo()
f1.data = pickleable.Bar(4,5)
- f1.val = unicode('hi')
- Session.commit()
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
- f1.val = unicode('someothervalue')
- self.assert_sql(testing.db, lambda: Session.commit(), [
- (
- "UPDATE mutabletest SET val=:val WHERE mutabletest.id = :mutabletest_id",
- {'mutabletest_id': f1.id, 'val': u'someothervalue'}
- ),
- ])
- f1.val = unicode('hi')
+ f1.val = u'hi'
+
+ session = create_session(autocommit=False)
+ session.add(f1)
+ session.commit()
+
+ bind = self.metadata.bind
+
+ self.sql_count_(0, session.commit)
+ f1.val = u'someothervalue'
+ self.assert_sql(bind, session.commit, [
+ ("UPDATE mutable_t SET val=:val "
+ "WHERE mutable_t.id = :mutable_t_id",
+ {'mutable_t_id': f1.id, 'val': u'someothervalue'})])
+
+ f1.val = u'hi'
f1.data.x = 9
- self.assert_sql(testing.db, lambda: Session.commit(), [
- (
- "UPDATE mutabletest SET data=:data, val=:val WHERE mutabletest.id = :mutabletest_id",
- {'mutabletest_id': f1.id, 'val': u'hi', 'data':f1.data}
- ),
- ])
+ self.assert_sql(bind, session.commit, [
+ ("UPDATE mutable_t SET data=:data, val=:val "
+ "WHERE mutable_t.id = :mutable_t_id",
+ {'mutable_t_id': f1.id, 'val': u'hi', 'data':f1.data})])
+ @testing.resolve_artifact_names
def test_nocomparison(self):
- """test that types marked as MutableType get changes detected on them when the type has no __eq__ method"""
- class Foo(object):pass
- mapper(Foo, table)
+ """Changes are detected on MutableTypes lacking an __eq__ method."""
+
f1 = Foo()
f1.data = pickleable.BarWithoutCompare(4,5)
- Session.commit()
+ session = create_session(autocommit=False)
+ session.add(f1)
+ session.commit()
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
-
- Session.close()
+ self.sql_count_(0, session.commit)
+ session.close()
- f2 = Session.query(Foo).filter_by(id=f1.id).one()
-
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
+ session = create_session(autocommit=False)
+ f2 = session.query(Foo).filter_by(id=f1.id).one()
+ self.sql_count_(0, session.commit)
f2.data.y = 19
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 1)
+ self.sql_count_(1, session.commit)
+ session.close()
- Session.close()
- f3 = Session.query(Foo).filter_by(id=f1.id).one()
- print f2.data, f3.data
- assert (f3.data.x, f3.data.y) == (4,19)
-
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
+ session = create_session(autocommit=False)
+ f3 = session.query(Foo).filter_by(id=f1.id).one()
+ eq_((f3.data.x, f3.data.y), (4,19))
+ self.sql_count_(0, session.commit)
+ session.close()
+ @testing.resolve_artifact_names
def test_unicode(self):
- """test that two equivalent unicode values dont get flagged as changed.
+ """Equivalent Unicode values are not flagged as changed."""
- apparently two equal unicode objects dont compare via "is" in all cases, so this
- tests the compare_values() call on types.String and its usage via types.Unicode."""
- class Foo(object):pass
- mapper(Foo, table)
- f1 = Foo()
- f1.val = u'hi'
- Session.commit()
- Session.close()
- f1 = Session.get(Foo, f1.id)
+ f1 = Foo(val=u'hi')
+
+ session = create_session(autocommit=False)
+ session.add(f1)
+ session.commit()
+ session.clear()
+
+ f1 = session.get(Foo, f1.id)
f1.val = u'hi'
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
+ self.sql_count_(0, session.commit)
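# Editor's note: an illustrative sketch, not part of this patch, of the
# behaviour test_unicode guards against. Two equal unicode objects are not
# guaranteed to be the same object, so an identity ("is") check alone could
# flag a spurious change; the type-level compare_values() hook (assumed to
# fall back to == on types.String / types.Unicode) is what keeps the flush
# from issuing a needless UPDATE.
a = u''.join([u'h', u'i'])   # builds a new unicode object
b = u'hi'
assert a == b                # equal by value
identical = a is b           # may well be False
# With compare_values() using ==, the attribute is considered unchanged and
# no UPDATE statement is emitted on commit.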
-class MutableTypesTest2(ORMTest):
- def define_tables(self, metadata):
- global table
- import operator
- table = Table('mutabletest', metadata,
- Column('id', Integer, Sequence('mutableidseq', optional=True), primary_key=True),
- Column('data', PickleType(comparator=operator.eq)),
- )
- def test_dicts(self):
- """dictionaries dont pickle the same way twice, sigh."""
+class PickledDicts(_base.MappedTest):
- class Foo(object):pass
- mapper(Foo, table)
- f1 = Foo()
- f1.data = [{'personne': {'nom': u'Smith', 'pers_id': 1, 'prenom': u'john', 'civilite': u'Mr', \
- 'int_3': False, 'int_2': False, 'int_1': u'23', 'VenSoir': True, 'str_1': u'Test', \
- 'SamMidi': False, 'str_2': u'chien', 'DimMidi': False, 'SamSoir': True, 'SamAcc': False}}]
+ def define_tables(self, metadata):
+ Table('mutable_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', sa.PickleType(comparator=operator.eq)))
- Session.commit()
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
+ def setup_classes(self):
+ class Foo(_base.BasicEntity):
+ pass
- f1.data = [{'personne': {'nom': u'Smith', 'pers_id': 1, 'prenom': u'john', 'civilite': u'Mr', \
- 'int_3': False, 'int_2': False, 'int_1': u'23', 'VenSoir': True, 'str_1': u'Test', \
- 'SamMidi': False, 'str_2': u'chien', 'DimMidi': False, 'SamSoir': True, 'SamAcc': False}}]
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(Foo, mutable_t)
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
+ @testing.resolve_artifact_names
+ def test_dicts(self):
+ """Dictionaries may not pickle the same way twice."""
- f1.data[0]['personne']['VenSoir']= False
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 1)
+ f1 = Foo()
+ f1.data = [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': True,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ]
+
+ session = create_session(autocommit=False)
+ session.add(f1)
+ session.commit()
+
+ self.sql_count_(0, session.commit)
+
+ f1.data = [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': True,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ]
+
+ self.sql_count_(0, session.commit)
- Session.clear()
- f = Session.query(Foo).get(f1.id)
- assert f.data == [{'personne': {'nom': u'Smith', 'pers_id': 1, 'prenom': u'john', 'civilite': u'Mr', \
- 'int_3': False, 'int_2': False, 'int_1': u'23', 'VenSoir': False, 'str_1': u'Test', \
- 'SamMidi': False, 'str_2': u'chien', 'DimMidi': False, 'SamSoir': True, 'SamAcc': False}}]
+        f1.data[0]['personne']['VenSoir'] = False
+ self.sql_count_(1, session.commit)
+
+ session.clear()
+ f = session.query(Foo).get(f1.id)
+ eq_(f.data,
+ [ {
+ 'personne': {'nom': u'Smith',
+ 'pers_id': 1,
+ 'prenom': u'john',
+ 'civilite': u'Mr',
+ 'int_3': False,
+ 'int_2': False,
+ 'int_1': u'23',
+ 'VenSoir': False,
+ 'str_1': u'Test',
+ 'SamMidi': False,
+ 'str_2': u'chien',
+ 'DimMidi': False,
+ 'SamSoir': True,
+ 'SamAcc': False} } ])
+
+
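# Editor's note: a minimal sketch, not part of this patch, of why PickledDicts
# passes comparator=operator.eq to PickleType above. Equal dictionaries are
# not guaranteed to pickle to identical byte strings (ordering and memo
# details can differ), so comparing raw pickles can report phantom changes;
# comparing the unpickled values with == is reliable.
import operator
import pickle

d1 = dict(a=1, b=2, c=3)
d2 = dict(c=3, b=2, a=1)          # same value, built in a different order
bytes_equal = pickle.dumps(d1) == pickle.dumps(d2)   # not guaranteed True
value_equal = operator.eq(d1, d2)                    # True
assert value_equal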
+class PKTest(_base.MappedTest):
-class PKTest(ORMTest):
def define_tables(self, metadata):
- global table, table2, table3
-
- table = Table(
- 'multipk', metadata,
- Column('multi_id', Integer, Sequence("multi_id_seq", optional=True), primary_key=True),
- Column('multi_rev', Integer, primary_key=True),
- Column('name', String(50), nullable=False),
- Column('value', String(100))
- )
-
- table2 = Table('multipk2', metadata,
- Column('pk_col_1', String(30), primary_key=True),
- Column('pk_col_2', String(30), primary_key=True),
- Column('data', String(30), )
- )
- table3 = Table('multipk3', metadata,
- Column('pri_code', String(30), key='primary', primary_key=True),
- Column('sec_code', String(30), key='secondary', primary_key=True),
- Column('date_assigned', Date, key='assigned', primary_key=True),
- Column('data', String(30), )
- )
+ Table('multipk1', metadata,
+ Column('multi_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('multi_rev', Integer, primary_key=True),
+ Column('name', String(50), nullable=False),
+ Column('value', String(100)))
+
+ Table('multipk2', metadata,
+ Column('pk_col_1', String(30), primary_key=True),
+ Column('pk_col_2', String(30), primary_key=True),
+ Column('data', String(30)))
+ Table('multipk3', metadata,
+ Column('pri_code', String(30), key='primary', primary_key=True),
+ Column('sec_code', String(30), key='secondary', primary_key=True),
+ Column('date_assigned', sa.Date, key='assigned', primary_key=True),
+ Column('data', String(30)))
+
+ def setup_classes(self):
+ class Entry(_base.BasicEntity):
+ pass
# not supported on sqlite since sqlite's auto-pk generation only works with
# single column primary keys
@testing.fails_on('sqlite')
- def test_primarykey(self):
- class Entry(object):
- pass
- Entry.mapper = mapper(Entry, table)
- e = Entry()
- e.name = 'entry1'
- e.value = 'this is entry 1'
- e.multi_rev = 2
- Session.commit()
- Session.close()
- e2 = Session.query(Entry).get((e.multi_id, 2))
+ @testing.resolve_artifact_names
+ def test_primary_key(self):
+ mapper(Entry, multipk1)
+
+ e = Entry(name='entry1', value='this is entry 1', multi_rev=2)
+
+ session = create_session()
+ session.add(e)
+ session.flush()
+ session.clear()
+
+ e2 = session.query(Entry).get((e.multi_id, 2))
self.assert_(e is not e2)
- state = attributes.instance_state(e)
- state2 = attributes.instance_state(e2)
- self.assert_(state.key == state2.key)
+ state = sa.orm.attributes.instance_state(e)
+ state2 = sa.orm.attributes.instance_state(e2)
+ eq_(state.key, state2.key)
# this one works with sqlite since we are manually setting up pk values
- def test_manualpk(self):
- class Entry(object):
- pass
- Entry.mapper = mapper(Entry, table2)
- e = Entry()
- e.pk_col_1 = 'pk1'
- e.pk_col_2 = 'pk1_related'
- e.data = 'im the data'
- Session.commit()
-
- def test_keypks(self):
- import datetime
- class Entity(object):
- pass
- Entity.mapper = mapper(Entity, table3)
- e = Entity()
- e.primary = 'pk1'
- e.secondary = 'pk2'
- e.assigned = datetime.date.today()
- e.data = 'some more data'
- Session.commit()
-
-class ForeignPKTest(ORMTest):
- """tests mapper detection of the relationship direction when parent/child tables are joined on their
- primary keys"""
+ @testing.resolve_artifact_names
+ def test_manual_pk(self):
+ mapper(Entry, multipk2)
- def define_tables(self, metadata):
- global people, peoplesites
+ e = Entry(pk_col_1='pk1', pk_col_2='pk1_related', data='im the data')
+
+ session = create_session()
+ session.add(e)
+ session.flush()
+
+ @testing.resolve_artifact_names
+ def test_key_pks(self):
+ mapper(Entry, multipk3)
+
+        e = Entry(primary='pk1', secondary='pk2',
+ assigned=datetime.date.today(), data='some more data')
+
+ session = create_session()
+ session.add(e)
+ session.flush()
- people = Table("people", metadata,
- Column('person', String(10), primary_key=True),
- Column('firstname', String(10)),
- Column('lastname', String(10)),
- )
- peoplesites = Table("peoplesites", metadata,
- Column('person', String(10), ForeignKey("people.person"),
- primary_key=True),
- Column('site', String(10)),
- )
+class ForeignPKTest(_base.MappedTest):
+ """Detection of the relationship direction on PK joins."""
+ def define_tables(self, metadata):
+ Table("people", metadata,
+ Column('person', String(10), primary_key=True),
+ Column('firstname', String(10)),
+ Column('lastname', String(10)))
+
+ Table("peoplesites", metadata,
+ Column('person', String(10), ForeignKey("people.person"),
+ primary_key=True),
+ Column('site', String(10)))
+
+ def setup_classes(self):
+ class Person(_base.BasicEntity):
+ pass
+ class PersonSite(_base.BasicEntity):
+ pass
+
+ @testing.resolve_artifact_names
def test_basic(self):
- class PersonSite(object):pass
- class Person(object):pass
m1 = mapper(PersonSite, peoplesites)
+ m2 = mapper(Person, people, properties={
+ 'sites' : relation(PersonSite)})
+
+ sa.orm.compile_mappers()
+ eq_(list(m2.get_property('sites').foreign_keys),
+ [peoplesites.c.person])
- m2 = mapper(Person, people,
- properties = {
- 'sites' : relation(PersonSite),
- },
- )
- compile_mappers()
- assert list(m2.get_property('sites').foreign_keys) == [peoplesites.c.person]
- p = Person()
- p.person = 'im the key'
- p.firstname = 'asdf'
- ps = PersonSite()
- ps.site = 'asdf'
+ p = Person(person='im the key', firstname='asdf')
+ ps = PersonSite(site='asdf')
p.sites.append(ps)
- Session.commit()
- assert people.count(people.c.person=='im the key').scalar() == peoplesites.count(peoplesites.c.person=='im the key').scalar() == 1
-class ClauseAttributesTest(ORMTest):
+ session = create_session()
+ session.add(p)
+ session.flush()
+
+ p_count = people.count(people.c.person=='im the key').scalar()
+ eq_(p_count, 1)
+ eq_(peoplesites.count(peoplesites.c.person=='im the key').scalar(), 1)
+
+
+class ClauseAttributesTest(_base.MappedTest):
+
def define_tables(self, metadata):
- global users_table
- users_table = Table('users', metadata,
- Column('id', Integer, Sequence('users_id_seq', optional=True), primary_key=True),
+ Table('users_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30)),
Column('counter', Integer, default=1))
+ def setup_classes(self):
+ class User(_base.ComparableEntity):
+ pass
+
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
+ mapper(User, users_t)
+
+ @testing.resolve_artifact_names
def test_update(self):
- class User(fixtures.Base): pass
- mapper(User, users_table)
u = User(name='test')
- sess = Session()
- sess.save(u)
- sess.flush()
- assert u.counter == 1
+
+ session = create_session()
+ session.add(u)
+ session.flush()
+
+ eq_(u.counter, 1)
u.counter = User.counter + 1
- sess.flush()
+ session.flush()
def go():
assert (u.counter == 2) is True # ensure its not a ClauseElement
- self.assert_sql_count(testing.db, go, 1)
+ self.sql_count_(1, go)
+ @testing.resolve_artifact_names
def test_multi_update(self):
- class User(fixtures.Base): pass
- mapper(User, users_table)
u = User(name='test')
- sess = Session()
- sess.save(u)
- sess.flush()
- assert u.counter == 1
+
+ session = create_session()
+ session.add(u)
+ session.flush()
+
+ eq_(u.counter, 1)
u.name = 'test2'
u.counter = User.counter + 1
- sess.flush()
+ session.flush()
+
def go():
- assert u.name == 'test2'
+ eq_(u.name, 'test2')
assert (u.counter == 2) is True
- self.assert_sql_count(testing.db, go, 1)
+ self.sql_count_(1, go)
- sess.clear()
- u = sess.query(User).get(u.id)
- assert u.name == 'test2'
- assert u.counter == 2
+ session.clear()
+ u = session.query(User).get(u.id)
+ eq_(u.name, 'test2')
+ eq_(u.counter, 2)
- @testing.unsupported('mssql')
+ @testing.unsupported('mssql', 'FIXME: unknown, verify not fails_on()')
+ @testing.resolve_artifact_names
def test_insert(self):
- class User(fixtures.Base): pass
- mapper(User, users_table)
- u = User(name='test', counter=select([5]))
- sess = Session()
- sess.save(u)
- sess.flush()
- assert (u.counter == 5) is True
+ u = User(name='test', counter=sa.select([5]))
+ session = create_session()
+ session.add(u)
+ session.flush()
-class PassiveDeletesTest(ORMTest):
- def define_tables(self, metadata):
- global mytable,myothertable
+ assert (u.counter == 5) is True
- mytable = Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)),
- test_needs_fk=True,
- )
- myothertable = Table('myothertable', metadata,
- Column('id', Integer, primary_key=True),
- Column('parent_id', Integer),
- Column('data', String(30)),
- ForeignKeyConstraint(['parent_id'],['mytable.id'], ondelete="CASCADE"),
- test_needs_fk=True,
- )
+class PassiveDeletesTest(_base.MappedTest):
+ __requires__ = ('foreign_keys',)
- @testing.unsupported('sqlite')
- def test_basic(self):
- class MyClass(object):
+ def define_tables(self, metadata):
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)),
+ test_needs_fk=True)
+
+ Table('myothertable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', Integer),
+ Column('data', String(30)),
+ sa.ForeignKeyConstraint(['parent_id'],
+ ['mytable.id'],
+ ondelete="CASCADE"),
+ test_needs_fk=True)
+
+ def setup_classes(self):
+ class MyClass(_base.BasicEntity):
pass
- class MyOtherClass(object):
+ class MyOtherClass(_base.BasicEntity):
pass
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
mapper(MyOtherClass, myothertable)
-
mapper(MyClass, mytable, properties={
- 'children':relation(MyOtherClass, passive_deletes=True, cascade="all")
- })
+ 'children':relation(MyOtherClass,
+ passive_deletes=True,
+ cascade="all")})
- sess = Session
+ @testing.resolve_artifact_names
+ def test_basic(self):
+ session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
- sess.save(mc)
- sess.commit()
- sess.close()
+
+ session.add(mc)
+ session.flush()
+ session.clear()
+
assert myothertable.count().scalar() == 4
- mc = sess.query(MyClass).get(mc.id)
- sess.delete(mc)
- sess.commit()
+ mc = session.query(MyClass).get(mc.id)
+ session.delete(mc)
+ session.flush()
+
assert mytable.count().scalar() == 0
assert myothertable.count().scalar() == 0
-class ExtraPassiveDeletesTest(ORMTest):
- def define_tables(self, metadata):
- global mytable,myothertable
-
- mytable = Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)),
- test_needs_fk=True,
- )
-
- myothertable = Table('myothertable', metadata,
- Column('id', Integer, primary_key=True),
- Column('parent_id', Integer),
- Column('data', String(30)),
- ForeignKeyConstraint(['parent_id'],['mytable.id']), # no CASCADE, the same as ON DELETE RESTRICT
- test_needs_fk=True,
- )
+class ExtraPassiveDeletesTest(_base.MappedTest):
+ __requires__ = ('foreign_keys',)
- def test_assertions(self):
- class MyClass(object):
+ def define_tables(self, metadata):
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)),
+ test_needs_fk=True)
+
+ Table('myothertable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', Integer),
+ Column('data', String(30)),
+ # no CASCADE, the same as ON DELETE RESTRICT
+ sa.ForeignKeyConstraint(['parent_id'],
+ ['mytable.id']),
+ test_needs_fk=True)
+
+ def setup_classes(self):
+ class MyClass(_base.BasicEntity):
pass
- class MyOtherClass(object):
+ class MyOtherClass(_base.BasicEntity):
pass
+ @testing.resolve_artifact_names
+ def test_assertions(self):
mapper(MyOtherClass, myothertable)
-
try:
mapper(MyClass, mytable, properties={
- 'children':relation(MyOtherClass, passive_deletes='all', cascade="all")
- })
+ 'children':relation(MyOtherClass,
+ passive_deletes='all',
+ cascade="all")})
assert False
- except sa_exc.ArgumentError, e:
- assert str(e) == "Can't set passive_deletes='all' in conjunction with 'delete' or 'delete-orphan' cascade"
+ except sa.exc.ArgumentError, e:
+ eq_(str(e),
+ "Can't set passive_deletes='all' in conjunction with 'delete' "
+ "or 'delete-orphan' cascade")
- @testing.unsupported('sqlite')
+ @testing.resolve_artifact_names
def test_extra_passive(self):
- class MyClass(object):
- pass
- class MyOtherClass(object):
- pass
-
mapper(MyOtherClass, myothertable)
-
mapper(MyClass, mytable, properties={
- 'children':relation(MyOtherClass, passive_deletes='all', cascade="save-update")
- })
+ 'children': relation(MyOtherClass,
+ passive_deletes='all',
+ cascade="save-update")})
- sess = Session
+ session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
- sess.save(mc)
- sess.commit()
+ session.add(mc)
+ session.flush()
+ session.clear()
assert myothertable.count().scalar() == 4
- mc = sess.query(MyClass).get(mc.id)
- sess.delete(mc)
- self.assertRaises(sa_exc.DBAPIError, sess.commit)
+ mc = session.query(MyClass).get(mc.id)
+ session.delete(mc)
+ self.assertRaises(sa.exc.DBAPIError, session.flush)
- @testing.unsupported('sqlite')
+ @testing.resolve_artifact_names
def test_extra_passive_2(self):
- class MyClass(object):
- pass
- class MyOtherClass(object):
- pass
-
mapper(MyOtherClass, myothertable)
-
mapper(MyClass, mytable, properties={
- 'children':relation(MyOtherClass, passive_deletes='all', cascade="save-update")
- })
+ 'children': relation(MyOtherClass,
+ passive_deletes='all',
+ cascade="save-update")})
- sess = Session
+ session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
- sess.save(mc)
- sess.commit()
+ session.add(mc)
+ session.flush()
+ session.clear()
assert myothertable.count().scalar() == 1
- mc = sess.query(MyClass).get(mc.id)
- sess.delete(mc)
+
+ mc = session.query(MyClass).get(mc.id)
+ session.delete(mc)
mc.children[0].data = 'some new data'
- self.assertRaises(sa_exc.DBAPIError, sess.commit)
+ self.assertRaises(sa.exc.DBAPIError, session.flush)
-class DefaultTest(ORMTest):
- """tests that when saving objects whose table contains DefaultGenerators, either python-side, preexec or database-side,
- the newly saved instances receive all the default values either through a post-fetch or getting the pre-exec'ed
- defaults back from the engine."""
+class DefaultTest(_base.MappedTest):
+ """Exercise mappings on columns with DefaultGenerators.
+
+    Tests that when saving objects whose table contains DefaultGenerators,
+    whether python-side, pre-exec or database-side, the newly saved instances
+    receive all the default values, either through a post-fetch or by getting
+    the pre-exec'ed defaults back from the engine.
+
+ """
def define_tables(self, metadata):
- db = testing.db
- use_string_defaults = testing.against('postgres', 'oracle', 'sqlite')
- global hohoval, althohoval
+ use_string_defaults = testing.against('postgres', 'oracle', 'sqlite')
if use_string_defaults:
hohotype = String(30)
@@ -717,437 +829,473 @@ class DefaultTest(ORMTest):
hohoval = 9
althohoval = 15
- global default_table, secondary_table
- default_table = Table('default_test', metadata,
- Column('id', Integer, Sequence("dt_seq", optional=True), primary_key=True),
- Column('hoho', hohotype, PassiveDefault(str(hohoval))),
- Column('counter', Integer, default=func.length("1234567")),
- Column('foober', String(30), default="im foober", onupdate="im the update"),
- )
-
- secondary_table = Table('secondary_table', metadata,
+ self.other_artifacts['hohoval'] = hohoval
+ self.other_artifacts['althohoval'] = althohoval
+
+ dt = Table('default_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('hoho', hohotype, sa.PassiveDefault(str(hohoval))),
+ Column('counter', Integer, default=sa.func.length("1234567")),
+ Column('foober', String(30), default="im foober",
+ onupdate="im the update"))
+
+ st = Table('secondary_table', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
-
+ Column('data', String(50)))
+
if testing.against('postgres', 'oracle'):
- default_table.append_column(Column('secondary_id', Integer, Sequence('sec_id_seq'), unique=True))
- secondary_table.append_column(Column('fk_val', Integer, ForeignKey('default_test.secondary_id')))
+ dt.append_column(
+ Column('secondary_id', Integer, sa.Sequence('sec_id_seq'),
+ unique=True))
+ st.append_column(
+ Column('fk_val', Integer,
+ ForeignKey('default_t.secondary_id')))
else:
- secondary_table.append_column(Column('hoho', hohotype, ForeignKey('default_test.hoho')))
+ st.append_column(
+ Column('hoho', hohotype, ForeignKey('default_t.hoho')))
+ def setup_classes(self):
+ class Hoho(_base.ComparableEntity):
+ pass
+ class Secondary(_base.ComparableEntity):
+ pass
+
+ @testing.resolve_artifact_names
def test_insert(self):
- class Hoho(fixtures.Base): pass
- mapper(Hoho, default_table)
+ mapper(Hoho, default_t)
h1 = Hoho(hoho=althohoval)
h2 = Hoho(counter=12)
h3 = Hoho(hoho=althohoval, counter=12)
h4 = Hoho()
h5 = Hoho(foober='im the new foober')
- Session.commit()
- self.assert_(h1.hoho==althohoval)
- self.assert_(h3.hoho==althohoval)
+ session = create_session(autocommit=False)
+ session.add_all((h1, h2, h3, h4, h5))
+ session.commit()
+
+ eq_(h1.hoho, althohoval)
+ eq_(h3.hoho, althohoval)
def go():
            # test deferred load of attributes, one select per instance
- self.assert_(h2.hoho==h4.hoho==h5.hoho==hohoval)
- self.assert_sql_count(testing.db, go, 3)
+ self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
+ self.sql_count_(3, go)
def go():
- self.assert_(h1.counter == h4.counter==h5.counter==7)
- self.assert_sql_count(testing.db, go, 1)
+ self.assert_(h1.counter == h4.counter == h5.counter == 7)
+ self.sql_count_(1, go)
def go():
self.assert_(h3.counter == h2.counter == 12)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
- self.assert_(h5.foober=='im the new foober')
- self.assert_sql_count(testing.db, go, 0)
-
- Session.close()
+ self.assert_(h5.foober == 'im the new foober')
+ self.sql_count_(0, go)
- l = Hoho.query.all()
+ session.clear()
- (h1, h2, h3, h4, h5) = l
+ (h1, h2, h3, h4, h5) = session.query(Hoho).order_by(Hoho.id).all()
- self.assert_(h1.hoho==althohoval)
- self.assert_(h3.hoho==althohoval)
- self.assert_(h2.hoho==h4.hoho==h5.hoho==hohoval)
+ eq_(h1.hoho, althohoval)
+ eq_(h3.hoho, althohoval)
+ self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
self.assert_(h3.counter == h2.counter == 12)
- self.assert_(h1.counter == h4.counter==h5.counter==7)
+ self.assert_(h1.counter == h4.counter == h5.counter == 7)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
- self.assert_(h5.foober=='im the new foober')
+ eq_(h5.foober, 'im the new foober')
+ @testing.resolve_artifact_names
def test_eager_defaults(self):
- class Hoho(object):pass
- mapper(Hoho, default_table, eager_defaults=True)
+ mapper(Hoho, default_t, eager_defaults=True)
+
h1 = Hoho()
- Session.commit()
- def go():
- self.assert_(h1.hoho==hohoval)
- self.assert_sql_count(testing.db, go, 0)
+ session = create_session()
+ session.add(h1)
+ session.flush()
+
+ self.sql_count_(0, lambda: eq_(h1.hoho, hohoval))
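# Editor's note: a rough sketch, not part of this patch, of the three default
# styles the DefaultTest docstring distinguishes (the table and column names
# here are invented for illustration).
import sqlalchemy as sa
from sqlalchemy import Table, Column, Integer, String, MetaData

demo = Table('default_demo', MetaData(),
    Column('id', Integer, primary_key=True),
    Column('py_side', Integer, default=7),                       # python-side
    Column('pre_exec', Integer, default=sa.func.length("abc")),  # SQL, pre-exec'ed
    Column('db_side', String(10), sa.PassiveDefault('x')))       # database-side
# After a flush, python-side and pre-exec'ed values are already known to the
# ORM; the PassiveDefault value normally requires a post-fetch SELECT unless
# eager_defaults=True is set on the mapper, which test_eager_defaults above
# verifies by reading the attribute with zero extra statements.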
+ @testing.resolve_artifact_names
def test_insert_nopostfetch(self):
- # populates the PassiveDefaults explicitly so there is no "post-update"
- class Hoho(fixtures.Base): pass
- mapper(Hoho, default_table)
+ # populates the PassiveDefaults explicitly so there is no
+ # "post-update"
+ mapper(Hoho, default_t)
h1 = Hoho(hoho="15", counter="15")
+ session = create_session()
+ session.add(h1)
+ session.flush()
- Session.commit()
def go():
- self.assert_(h1.hoho=="15")
- self.assert_(h1.counter=="15")
- self.assert_(h1.foober=="im foober")
- self.assert_sql_count(testing.db, go, 0)
+ eq_(h1.hoho, "15")
+ eq_(h1.counter, "15")
+ eq_(h1.foober, "im foober")
+ self.sql_count_(0, go)
+ @testing.resolve_artifact_names
def test_update(self):
- class Hoho(fixtures.Base): pass
- mapper(Hoho, default_table)
+ mapper(Hoho, default_t)
+
h1 = Hoho()
- Session.commit()
- self.assertEquals(h1.foober, 'im foober')
+ session = create_session()
+ session.add(h1)
+ session.flush()
+
+ eq_(h1.foober, 'im foober')
h1.counter = 19
- Session.commit()
- self.assertEquals(h1.foober, 'im the update')
-
+ session.flush()
+ eq_(h1.foober, 'im the update')
+
+ @testing.resolve_artifact_names
def test_used_in_relation(self):
- """test that a server-side generated default can be used as the target of a foreign key"""
-
- class Hoho(fixtures.Base):
- pass
- class Secondary(fixtures.Base):
- pass
- mapper(Hoho, default_table, properties={
- 'secondaries':relation(Secondary)
- }, save_on_init=False)
-
- mapper(Secondary, secondary_table, save_on_init=False)
+ """A server-side default can be used as the target of a foreign key"""
+
+ mapper(Hoho, default_t, properties={
+ 'secondaries':relation(Secondary)})
+ mapper(Secondary, secondary_table)
+
h1 = Hoho()
s1 = Secondary(data='s1')
h1.secondaries.append(s1)
- Session.save(h1)
- Session.commit()
- Session.clear()
-
- self.assertEquals(Session.query(Hoho).get(h1.id), Hoho(hoho=hohoval, secondaries=[Secondary(data='s1')]))
-
- h1 = Session.query(Hoho).get(h1.id)
+
+ session = create_session()
+ session.add(h1)
+ session.flush()
+ session.clear()
+
+ eq_(session.query(Hoho).get(h1.id),
+ Hoho(hoho=hohoval,
+ secondaries=[
+ Secondary(data='s1')]))
+
+ h1 = session.query(Hoho).get(h1.id)
h1.secondaries.append(Secondary(data='s2'))
- Session.commit()
- Session.clear()
+ session.flush()
+ session.clear()
- self.assertEquals(Session.query(Hoho).get(h1.id),
- Hoho(hoho=hohoval, secondaries=[Secondary(data='s1'), Secondary(data='s2')])
- )
-
-
-class OneToManyTest(ORMTest):
- metadata = tables.metadata
+ eq_(session.query(Hoho).get(h1.id),
+ Hoho(hoho=hohoval,
+ secondaries=[
+ Secondary(data='s1'),
+ Secondary(data='s2')]))
- def define_tables(self, metadata):
- pass
- def test_onetomany_1(self):
- """test basic save of one to many."""
- m = mapper(User, users, properties = dict(
- addresses = relation(mapper(Address, addresses), lazy = True)
+class OneToManyTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @testing.resolve_artifact_names
+ def test_one_to_many_1(self):
+ """Basic save of one to many."""
+
+ m = mapper(User, users, properties=dict(
+ addresses = relation(mapper(Address, addresses), lazy=True)
))
- u = User()
- u.user_name = 'one2manytester'
- u.addresses = []
- a = Address()
- a.email_address = 'one2many@test.org'
+        u = User(name='one2manytester')
+ a = Address(email_address='one2many@test.org')
u.addresses.append(a)
- a2 = Address()
- a2.email_address = 'lala@test.org'
+
+ a2 = Address(email_address='lala@test.org')
u.addresses.append(a2)
- print repr(u.addresses)
- Session.commit()
- usertable = users.select(users.c.user_id.in_([u.user_id])).execute().fetchall()
- self.assertEqual(usertable[0].values(), [u.user_id, 'one2manytester'])
- addresstable = addresses.select(addresses.c.address_id.in_([a.address_id, a2.address_id]), order_by=[addresses.c.email_address]).execute().fetchall()
- self.assertEqual(addresstable[0].values(), [a2.address_id, u.user_id, 'lala@test.org'])
- self.assertEqual(addresstable[1].values(), [a.address_id, u.user_id, 'one2many@test.org'])
+ session = create_session()
+ session.add(u)
+ session.flush()
- userid = u.user_id
- addressid = a2.address_id
+ user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
+ eq_(user_rows[0].values(), [u.id, 'one2manytester'])
+
+ address_rows = addresses.select(
+ addresses.c.id.in_([a.id, a2.id]),
+ order_by=[addresses.c.email_address]).execute().fetchall()
+ eq_(address_rows[0].values(), [a2.id, u.id, 'lala@test.org'])
+ eq_(address_rows[1].values(), [a.id, u.id, 'one2many@test.org'])
+
+ userid = u.id
+ addressid = a2.id
a2.email_address = 'somethingnew@foo.com'
- Session.commit()
+ session.flush()
- addresstable = addresses.select(addresses.c.address_id == addressid).execute().fetchall()
- self.assertEqual(addresstable[0].values(), [addressid, userid, 'somethingnew@foo.com'])
- self.assert_(u.user_id == userid and a2.address_id == addressid)
+ address_rows = addresses.select(
+ addresses.c.id == addressid).execute().fetchall()
+ eq_(address_rows[0].values(),
+ [addressid, userid, 'somethingnew@foo.com'])
+ self.assert_(u.id == userid and a2.id == addressid)
- def test_onetomany_2(self):
- """digs deeper into modifying the child items of an object to insure the correct
- updates take place"""
- m = mapper(User, users, properties = dict(
- addresses = relation(mapper(Address, addresses), lazy = True)
- ))
- u1 = User()
- u1.user_name = 'user1'
+ @testing.resolve_artifact_names
+ def test_one_to_many_2(self):
+ """Modifying the child items of an object."""
+
+ m = mapper(User, users, properties=dict(
+ addresses = relation(mapper(Address, addresses), lazy=True)))
+
+ u1 = User(name='user1')
u1.addresses = []
- a1 = Address()
- a1.email_address = 'emailaddress1'
+ a1 = Address(email_address='emailaddress1')
u1.addresses.append(a1)
- u2 = User()
- u2.user_name = 'user2'
+
+ u2 = User(name='user2')
u2.addresses = []
- a2 = Address()
- a2.email_address = 'emailaddress2'
+ a2 = Address(email_address='emailaddress2')
u2.addresses.append(a2)
- a3 = Address()
- a3.email_address = 'emailaddress3'
+ a3 = Address(email_address='emailaddress3')
- Session.commit()
+ session = create_session()
+ session.add_all((u1, u2, a3))
+ session.flush()
# modify user2 directly, append an address to user1.
# upon commit, user2 should be updated, user1 should not
# both address1 and address3 should be updated
- u2.user_name = 'user2modified'
+ u2.name = 'user2modified'
u1.addresses.append(a3)
del u1.addresses[0]
- self.assert_sql(testing.db, lambda: Session.commit(),
- [
- (
- "UPDATE users SET user_name=:user_name WHERE users.user_id = :users_user_id",
- {'users_user_id': u2.user_id, 'user_name': 'user2modified'}
- ),
- ("UPDATE email_addresses SET user_id=:user_id WHERE email_addresses.address_id = :email_addresses_address_id",
- {'user_id': None, 'email_addresses_address_id': a1.address_id}
- ),
- (
- "UPDATE email_addresses SET user_id=:user_id WHERE email_addresses.address_id = :email_addresses_address_id",
- {'user_id': u1.user_id, 'email_addresses_address_id': a3.address_id}
- ),
- ])
-
- def test_childmove(self):
- """tests moving a child from one parent to the other, then deleting the first parent, properly
- updates the child with the new parent. this tests the 'trackparent' option in the attributes module."""
- m = mapper(User, users, properties = dict(
- addresses = relation(mapper(Address, addresses), lazy = True)
- ))
- u1 = User()
- u1.user_name = 'user1'
- u2 = User()
- u2.user_name = 'user2'
- a = Address()
- a.email_address = 'address1'
+
+ self.assert_sql(testing.db, session.flush, [
+ ("UPDATE users SET name=:name "
+ "WHERE users.id = :users_id",
+ {'users_id': u2.id, 'name': 'user2modified'}),
+
+ ("UPDATE addresses SET user_id=:user_id "
+ "WHERE addresses.id = :email_addresses_id",
+ {'user_id': None, 'addresses_id': a1.id}),
+
+ ("UPDATE addresses SET user_id=:user_id "
+ "WHERE addresses.id = :addresses_id",
+ {'user_id': u1.id, 'addresses_id': a3.id})])
+
+ @testing.resolve_artifact_names
+ def test_child_move(self):
+ """Moving a child from one parent to another, with a delete.
+
+ Tests that deleting the first parent properly updates the child with
+ the new parent. This tests the 'trackparent' option in the attributes
+ module.
+
+ """
+ m = mapper(User, users, properties=dict(
+ addresses = relation(mapper(Address, addresses), lazy=True)))
+
+ u1 = User(name='user1')
+ u2 = User(name='user2')
+ a = Address(email_address='address1')
u1.addresses.append(a)
- Session.commit()
+
+ session = create_session()
+ session.add_all((u1, u2))
+ session.flush()
+
del u1.addresses[0]
u2.addresses.append(a)
- Session.delete(u1)
- Session.commit()
- Session.close()
- u2 = Session.get(User, u2.user_id)
- assert len(u2.addresses) == 1
-
- def test_childmove_2(self):
- m = mapper(User, users, properties = dict(
- addresses = relation(mapper(Address, addresses), lazy = True)
- ))
- u1 = User()
- u1.user_name = 'user1'
- u2 = User()
- u2.user_name = 'user2'
- a = Address()
- a.email_address = 'address1'
+ session.delete(u1)
+
+ session.flush()
+ session.clear()
+
+ u2 = session.get(User, u2.id)
+ eq_(len(u2.addresses), 1)
+
+ @testing.resolve_artifact_names
+ def test_child_move_2(self):
+ m = mapper(User, users, properties=dict(
+ addresses = relation(mapper(Address, addresses), lazy=True)))
+
+ u1 = User(name='user1')
+ u2 = User(name='user2')
+ a = Address(email_address='address1')
u1.addresses.append(a)
- Session.commit()
+
+ session = create_session()
+ session.add_all((u1, u2))
+ session.flush()
+
del u1.addresses[0]
u2.addresses.append(a)
- Session.commit()
- Session.close()
- u2 = Session.get(User, u2.user_id)
- assert len(u2.addresses) == 1
+ session.flush()
+ session.clear()
+
+ u2 = session.get(User, u2.id)
+ eq_(len(u2.addresses), 1)
+
+ @testing.resolve_artifact_names
def test_o2m_delete_parent(self):
- m = mapper(User, users, properties = dict(
- address = relation(mapper(Address, addresses), lazy=True, uselist=False)
- ))
- u = User()
- a = Address()
- u.user_name = 'one2onetester'
+ m = mapper(User, users, properties=dict(
+ address = relation(mapper(Address, addresses),
+ lazy=True,
+ uselist=False)))
+
+ u = User(name='one2onetester')
+ a = Address(email_address='myonlyaddress@foo.com')
u.address = a
- u.address.email_address = 'myonlyaddress@foo.com'
- Session.commit()
- Session.delete(u)
- Session.commit()
- self.assert_(a.address_id is not None)
- self.assert_(a.user_id is None)
- self.assert_(attributes.instance_state(a).key in Session.identity_map)
- self.assert_(attributes.instance_state(u).key not in Session.identity_map)
-
- def test_onetoone(self):
- m = mapper(User, users, properties = dict(
- address = relation(mapper(Address, addresses), lazy = True, uselist = False)
- ))
- u = User()
- u.user_name = 'one2onetester'
- u.address = Address()
- u.address.email_address = 'myonlyaddress@foo.com'
- Session.commit()
- u.user_name = 'imnew'
- Session.commit()
+
+ session = create_session()
+ session.add(u)
+ session.flush()
+
+ session.delete(u)
+ session.flush()
+
+ assert a.id is not None
+ assert a.user_id is None
+ assert sa.orm.attributes.instance_state(a).key in session.identity_map
+ assert sa.orm.attributes.instance_state(u).key not in session.identity_map
+
+ @testing.resolve_artifact_names
+ def test_one_to_one(self):
+ m = mapper(User, users, properties=dict(
+ address = relation(mapper(Address, addresses),
+ lazy=True,
+ uselist=False)))
+
+ u = User(name='one2onetester')
+ u.address = Address(email_address='myonlyaddress@foo.com')
+
+ session = create_session()
+ session.add(u)
+ session.flush()
+
+ u.name = 'imnew'
+ session.flush()
+
u.address.email_address = 'imnew@foo.com'
- Session.commit()
+ session.flush()
+ @testing.resolve_artifact_names
def test_bidirectional(self):
m1 = mapper(User, users)
-
- m2 = mapper(Address, addresses, properties = dict(
- user = relation(m1, lazy = False, backref='addresses')
- ))
+ m2 = mapper(Address, addresses, properties=dict(
+ user = relation(m1, lazy=False, backref='addresses')))
- u = User()
- print repr(u.addresses)
- u.user_name = 'test'
- a = Address()
- a.email_address = 'testaddress'
- a.user = u
- Session.commit()
- print repr(u.addresses)
- x = False
- try:
- u.addresses.append('hi')
- x = True
- except:
- pass
-
- if x:
- self.assert_(False, "User addresses element should be scalar based")
+ u = User(name='test')
+ a = Address(email_address='testaddress', user=u)
- Session.delete(u)
- Session.commit()
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.delete(u)
+ session.flush()
- def test_doublerelation(self):
+ @testing.resolve_artifact_names
+ def test_double_relation(self):
m2 = mapper(Address, addresses)
m = mapper(User, users, properties={
'boston_addresses' : relation(m2, primaryjoin=
- and_(users.c.user_id==addresses.c.user_id,
- addresses.c.email_address.like('%boston%'))),
+ sa.and_(users.c.id==addresses.c.user_id,
+ addresses.c.email_address.like('%boston%'))),
'newyork_addresses' : relation(m2, primaryjoin=
- and_(users.c.user_id==addresses.c.user_id,
- addresses.c.email_address.like('%newyork%'))),
- })
- u = User()
- a = Address()
- a.email_address = 'foo@boston.com'
- b = Address()
- b.email_address = 'bar@newyork.com'
+ sa.and_(users.c.id==addresses.c.user_id,
+ addresses.c.email_address.like('%newyork%')))})
+ u = User(name='u1')
+ a = Address(email_address='foo@boston.com')
+ b = Address(email_address='bar@newyork.com')
u.boston_addresses.append(a)
u.newyork_addresses.append(b)
- Session.commit()
-class SaveTest(ORMTest):
- metadata = tables.metadata
- def define_tables(self, metadata):
- pass
-
- def setUp(self):
- super(SaveTest, self).setUp()
- keywords.insert().execute(
- dict(name='blue'),
- dict(name='red'),
- dict(name='green'),
- dict(name='big'),
- dict(name='small'),
- dict(name='round'),
- dict(name='square')
- )
+ session = create_session()
+ session.add(u)
+ session.flush()
+
+class SaveTest(_fixtures.FixtureTest):
+ run_inserts = None
+ @testing.resolve_artifact_names
def test_basic(self):
- # save two users
- u = User()
- u.user_name = 'savetester'
m = mapper(User, users)
- u2 = User()
- u2.user_name = 'savetester2'
- Session.save(u)
+ # save two users
+ u = User(name='savetester')
+ u2 = User(name='savetester2')
- Session.flush([u])
- Session.commit()
+ session = create_session()
+ session.add_all((u, u2))
+ session.flush()
        # assert the first one retrieves the same instance from the identity map
- nu = Session.get(m, u.user_id)
- print "U: " + repr(u) + "NU: " + repr(nu)
- self.assert_(u is nu)
+ nu = session.get(m, u.id)
+ assert u is nu
# clear out the identity map, so next get forces a SELECT
- Session.close()
+ session.clear()
# check it again, identity should be different but ids the same
- nu = Session.get(m, u.user_id)
- self.assert_(u is not nu and u.user_id == nu.user_id and nu.user_name == 'savetester')
- Session.close()
+ nu = session.get(m, u.id)
+ assert u is not nu and u.id == nu.id and nu.name == 'savetester'
# change first users name and save
- Session.update(u)
- u.user_name = 'modifiedname'
- assert u in Session.dirty
- Session.commit()
+ session = create_session()
+ session.update(u)
+ u.name = 'modifiedname'
+ assert u in session.dirty
+ session.flush()
# select both
- #Session.close()
- userlist = User.query.filter(users.c.user_id.in_([u.user_id, u2.user_id])).order_by([users.c.user_name]).all()
- print repr(u.user_id), repr(userlist[0].user_id), repr(userlist[0].user_name)
- self.assert_(u.user_id == userlist[0].user_id and userlist[0].user_name == 'modifiedname')
- self.assert_(u2.user_id == userlist[1].user_id and userlist[1].user_name == 'savetester2')
+ userlist = session.query(User).filter(
+ users.c.id.in_([u.id, u2.id])).order_by([users.c.name]).all()
+
+ eq_(u.id, userlist[0].id)
+ eq_(userlist[0].name, 'modifiedname')
+ eq_(u2.id, userlist[1].id)
+ eq_(userlist[1].name, 'savetester2')
+ @testing.resolve_artifact_names
def test_synonym(self):
- class User(object):
+ class SUser(_base.BasicEntity):
def _get_name(self):
- return "User:" + self.user_name
+ return "User:" + self.name
def _set_name(self, name):
- self.user_name = name + ":User"
- name = property(_get_name, _set_name)
+ self.name = name + ":User"
+ syn_name = property(_get_name, _set_name)
- mapper(User, users, properties={
- 'name':synonym('user_name')
+ mapper(SUser, users, properties={
+ 'syn_name': sa.orm.synonym('name')
})
- u = User()
- u.name = "some name"
- assert u.name == 'User:some name:User'
- Session.save(u)
- Session.flush()
- Session.clear()
- u = Session.query(User).first()
- assert u.name == 'User:some name:User'
+ u = SUser(syn_name="some name")
+ eq_(u.syn_name, 'User:some name:User')
+
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.clear()
+
+ u = session.query(SUser).first()
+ eq_(u.syn_name, 'User:some name:User')
+ @testing.resolve_artifact_names
def test_lazyattr_commit(self):
- """tests that when a lazy-loaded list is unloaded, and a commit occurs, that the
- 'passive' call on that list does not blow away its value"""
+ """Lazily loaded relations.
- m1 = mapper(User, users, properties = {
- 'addresses': relation(mapper(Address, addresses))
- })
+        When a lazy-loaded list is unloaded and a commit occurs, the
+        'passive' call on that list does not blow away its value.
+
+ """
+ mapper(User, users, properties = {
+ 'addresses': relation(mapper(Address, addresses))})
- u = User()
- u.addresses.append(Address())
- u.addresses.append(Address())
- u.addresses.append(Address())
- u.addresses.append(Address())
- Session.commit()
- Session.close()
- ulist = Session.query(m1).all()
- u1 = ulist[0]
- u1.user_name = 'newname'
- Session.commit()
- self.assert_(len(u1.addresses) == 4)
+ u = User(name='u1')
+ u.addresses.append(Address(email_address='u1@e1'))
+ u.addresses.append(Address(email_address='u1@e2'))
+ u.addresses.append(Address(email_address='u1@e3'))
+ u.addresses.append(Address(email_address='u1@e4'))
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.clear()
+
+ u = session.query(User).one()
+ u.name = 'newname'
+ session.flush()
+ eq_(len(u.addresses), 4)
+
+ @testing.resolve_artifact_names
def test_inherits(self):
m1 = mapper(User, users)
@@ -1155,683 +1303,691 @@ class SaveTest(ORMTest):
"""a user object that also has the users mailing address."""
pass
- # define a mapper for AddressUser that inherits the User.mapper, and joins on the user_id column
- AddressUser.mapper = mapper(
- AddressUser,
- addresses, inherits=m1
- )
+ # define a mapper for AddressUser that inherits the User.mapper, and
+ # joins on the id column
+ mapper(AddressUser, addresses, inherits=m1)
+
+ au = AddressUser(name='u', email_address='u@e')
- au = AddressUser()
- Session.commit()
- Session.close()
- l = Session.query(AddressUser).one()
- self.assert_(l.user_id == au.user_id and l.address_id == au.address_id)
+ session = create_session()
+ session.add(au)
+ session.flush()
+ session.clear()
+ rt = session.query(AddressUser).one()
+ eq_(au.user_id, rt.user_id)
+        eq_(au.id, rt.id)
+
+ @testing.resolve_artifact_names
def test_deferred(self):
- """test deferred column operations"""
+ """Deferred column operations"""
- mapper(User, users, properties={
- 'user_name':deferred(users.c.user_name)
- })
+ mapper(Order, orders, properties={
+ 'description': sa.orm.deferred(orders.c.description)})
# dont set deferred attribute, commit session
- u = User()
- u.user_id=42
- Session.commit()
+ o = Order(id=42)
+ session = create_session(autocommit=False)
+ session.add(o)
+ session.commit()
- # assert that changes get picked up
- u.user_name = 'some name'
- Session.commit()
- assert list(Session.execute(users.select(), mapper=User)) == [(42, 'some name')]
- Session.clear()
+ # assert that changes get picked up
+ o.description = 'foo'
+ session.commit()
+
+ eq_(list(session.execute(orders.select(), mapper=Order)),
+ [(42, None, None, 'foo', None)])
+ session.clear()
# assert that a set operation doesn't trigger a load operation
- u = Session.query(User).filter(User.user_name=='some name').one()
+ o = session.query(Order).filter(Order.description == 'foo').one()
def go():
- u.user_name = 'some other name'
- self.assert_sql_count(testing.db, go, 0)
- Session.flush()
- assert list(Session.execute(users.select(), mapper=User)) == [(42, 'some other name')]
+ o.description = 'hoho'
+ self.sql_count_(0, go)
+ session.flush()
- Session.clear()
+ eq_(list(session.execute(orders.select(), mapper=Order)),
+ [(42, None, None, 'hoho', None)])
- # test assigning None to an unloaded deferred also works
- u = Session.query(User).filter(User.user_name=='some other name').one()
- u.user_name = None
- Session.flush()
- assert list(Session.execute(users.select(), mapper=User)) == [(42, None)]
+ session.clear()
+ # test assigning None to an unloaded deferred also works
+ o = session.query(Order).filter(Order.description == 'hoho').one()
+ o.description = None
+ session.flush()
+ eq_(list(session.execute(orders.select(), mapper=Order)),
+ [(42, None, None, None, None)])
+ session.close()
# why no support on oracle ? because oracle doesn't save
# "blank" strings; it saves a single space character.
- @testing.unsupported('oracle')
+ @testing.fails_on('oracle')
+ @testing.resolve_artifact_names
def test_dont_update_blanks(self):
mapper(User, users)
- u = User()
- u.user_name = ""
- Session.commit()
- Session.close()
- u = Session.query(User).get(u.user_id)
- u.user_name = ""
- def go():
- Session.commit()
- self.assert_sql_count(testing.db, go, 0)
- def test_multitable(self):
- """tests a save of an object where each instance spans two tables. also tests
- redefinition of the keynames for the column properties."""
- usersaddresses = sql.join(users, addresses, users.c.user_id == addresses.c.user_id)
+ u = User(name='')
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.clear()
+
+ u = session.query(User).get(u.id)
+ u.name = ''
+ self.sql_count_(0, session.flush)
+
+ @testing.resolve_artifact_names
+ def test_multi_table_selectable(self):
+ """Mapped selectables that span tables.
+
+ Also tests redefinition of the keynames for the column properties.
+
+ """
+ usersaddresses = sa.join(users, addresses,
+ users.c.id == addresses.c.user_id)
+
m = mapper(User, usersaddresses,
- properties = dict(
+ properties=dict(
email = addresses.c.email_address,
- foo_id = [users.c.user_id, addresses.c.user_id],
- )
- )
+ foo_id = [users.c.id, addresses.c.user_id]))
- u = User()
- u.user_name = 'multitester'
- u.email = 'multi@test.org'
+ u = User(name='multitester', email='multi@test.org')
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.clear()
- Session.commit()
id = m.primary_key_from_instance(u)
- Session.close()
-
- u = Session.get(User, id)
- assert u.user_name == 'multitester'
+ u = session.get(User, id)
+ assert u.name == 'multitester'
- usertable = users.select(users.c.user_id.in_([u.foo_id])).execute().fetchall()
- self.assertEqual(usertable[0].values(), [u.foo_id, 'multitester'])
- addresstable = addresses.select(addresses.c.address_id.in_([u.address_id])).execute().fetchall()
- self.assertEqual(addresstable[0].values(), [u.address_id, u.foo_id, 'multi@test.org'])
+ user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
+ eq_(user_rows[0].values(), [u.foo_id, 'multitester'])
+ address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
+ eq_(address_rows[0].values(), [u.id, u.foo_id, 'multi@test.org'])
u.email = 'lala@hey.com'
- u.user_name = 'imnew'
- Session.commit()
+ u.name = 'imnew'
+ session.flush()
- usertable = users.select(users.c.user_id.in_([u.foo_id])).execute().fetchall()
- self.assertEqual(usertable[0].values(), [u.foo_id, 'imnew'])
- addresstable = addresses.select(addresses.c.address_id.in_([u.address_id])).execute().fetchall()
- self.assertEqual(addresstable[0].values(), [u.address_id, u.foo_id, 'lala@hey.com'])
+ user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
+ eq_(user_rows[0].values(), [u.foo_id, 'imnew'])
+ address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
+ eq_(address_rows[0].values(), [u.id, u.foo_id, 'lala@hey.com'])
- Session.close()
- u = Session.get(User, id)
- assert u.user_name == 'imnew'
+ session.clear()
+ u = session.get(User, id)
+ assert u.name == 'imnew'
+ @testing.resolve_artifact_names
def test_history_get(self):
- """tests that the history properly lazy-fetches data when it wasnt otherwise loaded"""
+ """The history lazy-fetches data when it wasn't otherwise loaded."""
mapper(User, users, properties={
- 'addresses':relation(Address, cascade="all, delete-orphan")
- })
+ 'addresses':relation(Address, cascade="all, delete-orphan")})
mapper(Address, addresses)
- u = User()
- u.addresses.append(Address())
- u.addresses.append(Address())
- Session.commit()
- Session.close()
- u = Session.query(User).get(u.user_id)
- Session.delete(u)
- Session.commit()
+ u = User(name='u1')
+ u.addresses.append(Address(email_address='u1@e1'))
+ u.addresses.append(Address(email_address='u1@e2'))
+ session = create_session()
+ session.add(u)
+ session.flush()
+ session.clear()
+
+ u = session.query(User).get(u.id)
+ session.delete(u)
+ session.flush()
assert users.count().scalar() == 0
assert addresses.count().scalar() == 0
+ @testing.resolve_artifact_names
+ def test_batch_mode(self):
+ """The 'batch=False' flag on mapper()"""
-
- def test_batchmode(self):
- """test the 'batch=False' flag on mapper()"""
-
- class TestExtension(MapperExtension):
+ class TestExtension(sa.orm.MapperExtension):
def before_insert(self, mapper, connection, instance):
self.current_instance = instance
def after_insert(self, mapper, connection, instance):
assert instance is self.current_instance
- m = mapper(User, users, extension=TestExtension(), batch=False)
- u1 = User()
- u1.username = 'user1'
- u2 = User()
- u2.username = 'user2'
- Session.commit()
- clear_mappers()
+ mapper(User, users, extension=TestExtension(), batch=False)
+ u1 = User(name='user1')
+ u2 = User(name='user2')
+
+ session = create_session()
+ session.add_all((u1, u2))
+ session.flush()
+ session.clear()
+
+ sa.orm.clear_mappers()
m = mapper(User, users, extension=TestExtension())
- u1 = User()
- u1.username = 'user1'
- u2 = User()
- u2.username = 'user2'
+ u1 = User(name='user1')
+ u2 = User(name='user2')
try:
- Session.commit()
+ session.flush()
assert False
except AssertionError:
assert True
-class ManyToOneTest(ORMTest):
- metadata = tables.metadata
-
- def define_tables(self, metadata):
- pass
+class ManyToOneTest(_fixtures.FixtureTest):
- def test_m2o_onetoone(self):
+ @testing.resolve_artifact_names
+ def test_m2o_one_to_one(self):
# TODO: put assertion in here !!!
- m = mapper(Address, addresses, properties = dict(
- user = relation(mapper(User, users), lazy = True, uselist = False)
- ))
+ m = mapper(Address, addresses, properties=dict(
+ user = relation(mapper(User, users), lazy=True, uselist=False)))
+
+ session = create_session()
+
data = [
- {'user_name' : 'thesub' , 'email_address' : 'bar@foo.com'},
- {'user_name' : 'assdkfj' , 'email_address' : 'thesdf@asdf.com'},
- {'user_name' : 'n4knd' , 'email_address' : 'asf3@bar.org'},
- {'user_name' : 'v88f4' , 'email_address' : 'adsd5@llala.net'},
- {'user_name' : 'asdf8d' , 'email_address' : 'theater@foo.com'}
+ {'name': 'thesub' , 'email_address': 'bar@foo.com'},
+ {'name': 'assdkfj' , 'email_address': 'thesdf@asdf.com'},
+ {'name': 'n4knd' , 'email_address': 'asf3@bar.org'},
+ {'name': 'v88f4' , 'email_address': 'adsd5@llala.net'},
+ {'name': 'asdf8d' , 'email_address': 'theater@foo.com'}
]
objects = []
for elem in data:
a = Address()
a.email_address = elem['email_address']
a.user = User()
- a.user.user_name = elem['user_name']
+ a.user.name = elem['name']
objects.append(a)
+ session.add(a)
+
+ session.flush()
- Session.commit()
objects[2].email_address = 'imnew@foo.bar'
objects[3].user = User()
- objects[3].user.user_name = 'imnewlyadded'
- self.assert_sql(testing.db, lambda: Session.commit(), [
- (
- "INSERT INTO users (user_name) VALUES (:user_name)",
- {'user_name': 'imnewlyadded'}
- ),
- {
- "UPDATE email_addresses SET email_address=:email_address WHERE email_addresses.address_id = :email_addresses_address_id":
- lambda ctx: {'email_address': 'imnew@foo.bar', 'email_addresses_address_id': objects[2].address_id}
- ,
-
- "UPDATE email_addresses SET user_id=:user_id WHERE email_addresses.address_id = :email_addresses_address_id":
- lambda ctx: {'user_id': objects[3].user.user_id, 'email_addresses_address_id': objects[3].address_id}
- },
-
- ],
- with_sequences=[
- (
- "INSERT INTO users (user_id, user_name) VALUES (:user_id, :user_name)",
- lambda ctx:{'user_name': 'imnewlyadded', 'user_id':ctx.last_inserted_ids()[0]}
- ),
- {
- "UPDATE email_addresses SET email_address=:email_address WHERE email_addresses.address_id = :email_addresses_address_id":
- lambda ctx: {'email_address': 'imnew@foo.bar', 'email_addresses_address_id': objects[2].address_id}
- ,
-
- "UPDATE email_addresses SET user_id=:user_id WHERE email_addresses.address_id = :email_addresses_address_id":
- lambda ctx: {'user_id': objects[3].user.user_id, 'email_addresses_address_id': objects[3].address_id}
- },
-
- ])
- l = sql.select([users, addresses], sql.and_(users.c.user_id==addresses.c.user_id, addresses.c.address_id==a.address_id)).execute()
- assert l.fetchone().values() == [a.user.user_id, 'asdf8d', a.address_id, a.user_id, 'theater@foo.com']
-
-
- def test_manytoone_1(self):
- m = mapper(Address, addresses, properties = dict(
- user = relation(mapper(User, users), lazy = True)
- ))
- a1 = Address()
- a1.email_address = 'emailaddress1'
- u1 = User()
- u1.user_name='user1'
-
+ objects[3].user.name = 'imnewlyadded'
+ self.assert_sql(testing.db,
+ session.flush,
+ [
+ ("INSERT INTO users (name) VALUES (:name)",
+ {'name': 'imnewlyadded'} ),
+
+ {"UPDATE addresses SET email_address=:email_address "
+ "WHERE addresses.id = :addresses_id":
+ lambda ctx: {'email_address': 'imnew@foo.bar',
+ 'addresses_id': objects[2].id},
+ "UPDATE addresses SET user_id=:user_id "
+ "WHERE addresses.id = :addresses_id":
+ lambda ctx: {'user_id': objects[3].user.id,
+ 'addresses_id': objects[3].id}},
+ ],
+ with_sequences=[
+ ("INSERT INTO users (id, name) VALUES (:id, :name)",
+ lambda ctx:{'name': 'imnewlyadded',
+ 'id':ctx.last_inserted_ids()[0]}),
+ {"UPDATE addresses SET email_address=:email_address "
+ "WHERE addresses.id = :addresses_id":
+ lambda ctx: {'email_address': 'imnew@foo.bar',
+ 'addresses_id': objects[2].id},
+ ("UPDATE addresses SET user_id=:user_id "
+ "WHERE addresses.id = :addresses_id"):
+ lambda ctx: {'user_id': objects[3].user.id,
+ 'addresses_id': objects[3].id}}])
+
+ l = sa.select([users, addresses],
+ sa.and_(users.c.id==addresses.c.user_id,
+ addresses.c.id==a.id)).execute()
+ eq_(l.fetchone().values(),
+ [a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])
+
+ @testing.resolve_artifact_names
+ def test_many_to_one_1(self):
+ m = mapper(Address, addresses, properties=dict(
+ user = relation(mapper(User, users), lazy=True)))
+
+ a1 = Address(email_address='emailaddress1')
+ u1 = User(name='user1')
a1.user = u1
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- u1 = Session.query(User).get(u1.user_id)
+
+ session = create_session()
+ session.add(a1)
+ session.flush()
+ session.clear()
+
+ a1 = session.query(Address).get(a1.id)
+ u1 = session.query(User).get(u1.id)
assert a1.user is u1
a1.user = None
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- u1 = Session.query(User).get(u1.user_id)
+ session.flush()
+ session.clear()
+ a1 = session.query(Address).get(a1.id)
+ u1 = session.query(User).get(u1.id)
assert a1.user is None
- def test_manytoone_2(self):
- m = mapper(Address, addresses, properties = dict(
- user = relation(mapper(User, users), lazy = True)
- ))
- a1 = Address()
- a1.email_address = 'emailaddress1'
- a2 = Address()
- a2.email_address = 'emailaddress2'
- u1 = User()
- u1.user_name='user1'
+ @testing.resolve_artifact_names
+ def test_many_to_one_2(self):
+ m = mapper(Address, addresses, properties=dict(
+ user = relation(mapper(User, users), lazy=True)))
+ a1 = Address(email_address='emailaddress1')
+ a2 = Address(email_address='emailaddress2')
+ u1 = User(name='user1')
a1.user = u1
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- a2 = Session.query(Address).get(a2.address_id)
- u1 = Session.query(User).get(u1.user_id)
+
+ session = create_session()
+ session.add_all((a1, a2))
+ session.flush()
+ session.clear()
+
+ a1 = session.query(Address).get(a1.id)
+ a2 = session.query(Address).get(a2.id)
+ u1 = session.query(User).get(u1.id)
assert a1.user is u1
+
a1.user = None
a2.user = u1
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- a2 = Session.query(Address).get(a2.address_id)
- u1 = Session.query(User).get(u1.user_id)
+ session.flush()
+ session.clear()
+
+ a1 = session.query(Address).get(a1.id)
+ a2 = session.query(Address).get(a2.id)
+ u1 = session.query(User).get(u1.id)
assert a1.user is None
assert a2.user is u1
- def test_manytoone_3(self):
- m = mapper(Address, addresses, properties = dict(
- user = relation(mapper(User, users), lazy = True)
- ))
- a1 = Address()
- a1.email_address = 'emailaddress1'
- u1 = User()
- u1.user_name='user1'
- u2 = User()
- u2.user_name='user2'
+ @testing.resolve_artifact_names
+ def test_many_to_one_3(self):
+ m = mapper(Address, addresses, properties=dict(
+ user = relation(mapper(User, users), lazy=True)))
+ a1 = Address(email_address='emailaddress1')
+ u1 = User(name='user1')
+ u2 = User(name='user2')
a1.user = u1
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- u1 = Session.query(User).get(u1.user_id)
- u2 = Session.query(User).get(u2.user_id)
+
+ session = create_session()
+ session.add_all((a1, u1, u2))
+ session.flush()
+ session.clear()
+
+ a1 = session.query(Address).get(a1.id)
+ u1 = session.query(User).get(u1.id)
+ u2 = session.query(User).get(u2.id)
assert a1.user is u1
a1.user = u2
- Session.commit()
- Session.close()
- a1 = Session.query(Address).get(a1.address_id)
- u1 = Session.query(User).get(u1.user_id)
- u2 = Session.query(User).get(u2.user_id)
+ session.flush()
+ session.clear()
+ a1 = session.query(Address).get(a1.id)
+ u1 = session.query(User).get(u1.id)
+ u2 = session.query(User).get(u2.id)
assert a1.user is u2
- def test_bidirectional_noload(self):
+ @testing.resolve_artifact_names
+ def test_bidirectional_no_load(self):
mapper(User, users, properties={
- 'addresses':relation(Address, backref='user', lazy=None)
- })
+ 'addresses':relation(Address, backref='user', lazy=None)})
mapper(Address, addresses)
- sess = Session()
-
# try it on unsaved objects
- u1 = User()
- a1 = Address()
+ u1 = User(name='u1')
+ a1 = Address(email_address='e1')
a1.user = u1
- sess.save(u1)
- sess.flush()
- sess.clear()
- a1 = sess.query(Address).get(a1.address_id)
-
- a1.user = None
- sess.flush()
- sess.clear()
- assert sess.query(Address).get(a1.address_id).user is None
- assert sess.query(User).get(u1.user_id).addresses == []
+ session = create_session()
+ session.add(u1)
+ session.flush()
+ session.clear()
+ a1 = session.query(Address).get(a1.id)
-class ManyToManyTest(ORMTest):
- metadata = tables.metadata
+ a1.user = None
+ session.flush()
+ session.clear()
+ assert session.query(Address).get(a1.id).user is None
+ assert session.query(User).get(u1.id).addresses == []
- def define_tables(self, metadata):
- pass
- def test_manytomany(self):
- items = orderitems
+class ManyToManyTest(_fixtures.FixtureTest):
+ run_inserts = None
- keywordmapper = mapper(Keyword, keywords)
+ @testing.resolve_artifact_names
+ def test_many_to_many(self):
+ mapper(Keyword, keywords)
- m = mapper(Item, items, properties = dict(
- keywords = relation(keywordmapper, itemkeywords, lazy = False, order_by=keywords.c.name),
- ))
+ m = mapper(Item, items, properties=dict(
+ keywords=relation(Keyword,
+ item_keywords,
+ lazy=False,
+ order_by=keywords.c.name)))
data = [Item,
- {'item_name': 'mm_item1', 'keywords' : (Keyword,[{'name': 'big'},{'name': 'green'}, {'name': 'purple'},{'name': 'round'}])},
- {'item_name': 'mm_item2', 'keywords' : (Keyword,[{'name':'blue'}, {'name':'imnew'},{'name':'round'}, {'name':'small'}])},
- {'item_name': 'mm_item3', 'keywords' : (Keyword,[])},
- {'item_name': 'mm_item4', 'keywords' : (Keyword,[{'name':'big'}, {'name':'blue'},])},
- {'item_name': 'mm_item5', 'keywords' : (Keyword,[{'name':'big'},{'name':'exacting'},{'name':'green'}])},
- {'item_name': 'mm_item6', 'keywords' : (Keyword,[{'name':'red'},{'name':'round'},{'name':'small'}])},
- ]
+ {'description': 'mm_item1',
+ 'keywords' : (Keyword, [{'name': 'big'},
+ {'name': 'green'},
+ {'name': 'purple'},
+ {'name': 'round'}])},
+ {'description': 'mm_item2',
+ 'keywords' : (Keyword, [{'name':'blue'},
+ {'name':'imnew'},
+ {'name':'round'},
+ {'name':'small'}])},
+ {'description': 'mm_item3',
+ 'keywords' : (Keyword, [])},
+ {'description': 'mm_item4',
+ 'keywords' : (Keyword, [{'name':'big'},
+ {'name':'blue'},])},
+ {'description': 'mm_item5',
+ 'keywords' : (Keyword, [{'name':'big'},
+ {'name':'exacting'},
+ {'name':'green'}])},
+ {'description': 'mm_item6',
+ 'keywords' : (Keyword, [{'name':'red'},
+ {'name':'round'},
+ {'name':'small'}])}]
+
+ _fixtures.run_inserts_for(keywords)
+ session = create_session()
+
objects = []
+ _keywords = dict([(k.name, k) for k in session.query(Keyword)])
+
for elem in data[1:]:
- item = Item()
+ item = Item(description=elem['description'])
objects.append(item)
- item.item_name = elem['item_name']
- item.keywords = []
- if elem['keywords'][1]:
- klist = Session.query(keywordmapper).filter(keywords.c.name.in_([e['name'] for e in elem['keywords'][1]]))
- else:
- klist = []
- khash = {}
- for k in klist:
- khash[k.name] = k
- for kname in [e['name'] for e in elem['keywords'][1]]:
+
+ for spec in elem['keywords'][1]:
+ keyword_name = spec['name']
try:
- k = khash[kname]
+ kw = _keywords[keyword_name]
except KeyError:
- k = Keyword()
- k.name = kname
- item.keywords.append(k)
+ _keywords[keyword_name] = kw = Keyword(name=keyword_name)
+ item.keywords.append(kw)
- Session.commit()
+ session.add_all(objects)
+ session.flush()
- l = Session.query(m).filter(items.c.item_name.in_([e['item_name'] for e in data[1:]])).order_by(items.c.item_name).all()
+ l = (session.query(Item).
+ filter(Item.description.in_([e['description']
+ for e in data[1:]])).
+ order_by(Item.description).all())
self.assert_result(l, *data)
- objects[4].item_name = 'item4updated'
+ objects[4].description = 'item4updated'
k = Keyword()
k.name = 'yellow'
objects[5].keywords.append(k)
- self.assert_sql(testing.db, lambda:Session.commit(), [
- {
- "UPDATE items SET item_name=:item_name WHERE items.item_id = :items_item_id":
- {'item_name': 'item4updated', 'items_item_id': objects[4].item_id}
- ,
- "INSERT INTO keywords (name) VALUES (:name)":
- {'name': 'yellow'}
- },
- ("INSERT INTO itemkeywords (item_id, keyword_id) VALUES (:item_id, :keyword_id)",
- lambda ctx: [{'item_id': objects[5].item_id, 'keyword_id': k.keyword_id}]
- )
- ],
-
- with_sequences = [
- {
- "UPDATE items SET item_name=:item_name WHERE items.item_id = :items_item_id":
- {'item_name': 'item4updated', 'items_item_id': objects[4].item_id}
- ,
- "INSERT INTO keywords (keyword_id, name) VALUES (:keyword_id, :name)":
- lambda ctx: {'name': 'yellow', 'keyword_id':ctx.last_inserted_ids()[0]}
- },
- ("INSERT INTO itemkeywords (item_id, keyword_id) VALUES (:item_id, :keyword_id)",
- lambda ctx: [{'item_id': objects[5].item_id, 'keyword_id': k.keyword_id}]
- )
- ]
- )
+ self.assert_sql(testing.db, session.flush, [
+ {"UPDATE items SET description=:description "
+ "WHERE items.id = :items_id":
+ {'description': 'item4updated',
+ 'items_id': objects[4].id},
+ "INSERT INTO keywords (name) "
+ "VALUES (:name)":
+ {'name': 'yellow'}},
+ ("INSERT INTO item_keywords (item_id, keyword_id) "
+ "VALUES (:item_id, :keyword_id)",
+ lambda ctx: [{'item_id': objects[5].id,
+ 'keyword_id': k.id}])],
+ with_sequences = [
+ {"UPDATE items SET description=:description "
+ "WHERE items.id = :items_id":
+ {'description': 'item4updated',
+ 'items_id': objects[4].id},
+ "INSERT INTO keywords (id, name) "
+ "VALUES (:id, :name)":
+ lambda ctx: {'name': 'yellow',
+ 'id':ctx.last_inserted_ids()[0]}},
+ ("INSERT INTO item_keywords (item_id, keyword_id) "
+ "VALUES (:item_id, :keyword_id)",
+ lambda ctx: [{'item_id': objects[5].id,
+ 'keyword_id': k.id}])])
+
objects[2].keywords.append(k)
- dkid = objects[5].keywords[1].keyword_id
+ dkid = objects[5].keywords[1].id
del objects[5].keywords[1]
- self.assert_sql(testing.db, lambda:Session.commit(), [
- (
- "DELETE FROM itemkeywords WHERE itemkeywords.item_id = :item_id AND itemkeywords.keyword_id = :keyword_id",
- [{'item_id': objects[5].item_id, 'keyword_id': dkid}]
- ),
- (
- "INSERT INTO itemkeywords (item_id, keyword_id) VALUES (:item_id, :keyword_id)",
- lambda ctx: [{'item_id': objects[2].item_id, 'keyword_id': k.keyword_id}]
- )
- ])
-
- Session.delete(objects[3])
- Session.commit()
-
- def test_manytomany_remove(self):
- """tests that setting a list-based attribute to '[]' properly affects the history and allows
- the many-to-many rows to be deleted"""
- keywordmapper = mapper(Keyword, keywords)
-
- m = mapper(Item, orderitems, properties = dict(
- keywords = relation(keywordmapper, itemkeywords, lazy = False),
+ self.assert_sql(testing.db, session.flush, [
+ ("DELETE FROM item_keywords "
+ "WHERE item_keywords.item_id = :item_id AND "
+ "item_keywords.keyword_id = :keyword_id",
+ [{'item_id': objects[5].id, 'keyword_id': dkid}]),
+ ("INSERT INTO item_keywords (item_id, keyword_id) "
+ "VALUES (:item_id, :keyword_id)",
+ lambda ctx: [{'item_id': objects[2].id, 'keyword_id': k.id}]
+ )])
+
+ session.delete(objects[3])
+ session.flush()
+
+ @testing.resolve_artifact_names
+ def test_many_to_many_remove(self):
+ """Setting a collection to empty deletes many-to-many rows.
+
+ Tests that setting a list-based attribute to '[]' properly affects the
+ history and allows the many-to-many rows to be deleted
+
+ """
+ mapper(Keyword, keywords)
+ mapper(Item, items, properties=dict(
+ keywords = relation(Keyword, item_keywords, lazy=False),
))
- i = Item()
- k1 = Keyword()
- k2 = Keyword()
+ i = Item(description='i1')
+ k1 = Keyword(name='k1')
+ k2 = Keyword(name='k2')
i.keywords.append(k1)
i.keywords.append(k2)
- Session.commit()
- assert itemkeywords.count().scalar() == 2
+ session = create_session()
+ session.add(i)
+ session.flush()
+
+ assert item_keywords.count().scalar() == 2
i.keywords = []
- Session.commit()
- assert itemkeywords.count().scalar() == 0
+ session.flush()
+ assert item_keywords.count().scalar() == 0
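test_many_to_many_remove above asserts that assigning an empty list to a many-to-many collection makes the next flush() delete the association rows. The following is a minimal sketch of that behavior, written against the modern declarative relationship() API (SQLAlchemy 1.4+) rather than the 0.5-era mapper()/relation() calls used in the suite; the model and table names are illustrative only.

from sqlalchemy import (Column, ForeignKey, Integer, String, Table,
                        create_engine, func, select)
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

# plain association table for the many-to-many
item_keywords = Table(
    "item_keywords", Base.metadata,
    Column("item_id", ForeignKey("items.id"), primary_key=True),
    Column("keyword_id", ForeignKey("keywords.id"), primary_key=True))

class Keyword(Base):
    __tablename__ = "keywords"
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

class Item(Base):
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    description = Column(String(30))
    keywords = relationship(Keyword, secondary=item_keywords)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

def assoc_count(session):
    # count the rows of the association table directly
    return session.execute(
        select(func.count()).select_from(item_keywords)).scalar()

with Session(engine) as session:
    i = Item(description="i1",
             keywords=[Keyword(name="k1"), Keyword(name="k2")])
    session.add(i)
    session.flush()
    assert assoc_count(session) == 2

    # replacing the collection with [] records both members as removed;
    # the next flush deletes the corresponding item_keywords rows
    i.keywords = []
    session.flush()
    assert assoc_count(session) == 0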
+ @testing.resolve_artifact_names
def test_scalar(self):
- """test that dependency.py doesnt try to delete an m2m relation referencing None."""
+ """sa.dependency won't delete an m2m relation referencing None."""
mapper(Keyword, keywords)
- mapper(Item, orderitems, properties = dict(
- keyword = relation(Keyword, secondary=itemkeywords, uselist=False),
- ))
-
- i = Item()
- Session.commit()
- Session.delete(i)
- Session.commit()
+ mapper(Item, items, properties=dict(
+ keyword=relation(Keyword, secondary=item_keywords, uselist=False)))
+ i = Item(description='x')
+ session = create_session()
+ session.add(i)
+ session.flush()
+ session.delete(i)
+ session.flush()
+ @testing.resolve_artifact_names
+ def test_many_to_many_update(self):
+        """Assorted history operations on a many-to-many."""
+ mapper(Keyword, keywords)
+ mapper(Item, items, properties=dict(
+ keywords=relation(Keyword,
+ secondary=item_keywords,
+ lazy=False,
+ order_by=keywords.c.name)))
- def test_manytomany_update(self):
- """tests some history operations on a many to many"""
- class Keyword(object):
- def __init__(self, name):
- self.name = name
- def __eq__(self, other):
- return other.__class__ == Keyword and other.name == self.name
- def __repr__(self):
- return "Keyword(%s, %s)" % (getattr(self, 'keyword_id', 'None'), self.name)
+ k1 = Keyword(name='keyword 1')
+ k2 = Keyword(name='keyword 2')
+ k3 = Keyword(name='keyword 3')
- mapper(Keyword, keywords)
- mapper(Item, orderitems, properties = dict(
- keywords = relation(Keyword, secondary=itemkeywords, lazy=False, order_by=keywords.c.name),
- ))
+ item = Item(description='item 1')
+ item.keywords.extend([k1, k2, k3])
- (k1, k2, k3) = (Keyword('keyword 1'), Keyword('keyword 2'), Keyword('keyword 3'))
- item = Item()
- item.item_name = 'item 1'
- item.keywords.append(k1)
- item.keywords.append(k2)
- item.keywords.append(k3)
- Session.commit()
+ session = create_session()
+ session.add(item)
+ session.flush()
item.keywords = []
item.keywords.append(k1)
item.keywords.append(k2)
- Session.commit()
+ session.flush()
- Session.close()
- item = Session.query(Item).get(item.item_id)
- print [k1, k2]
- print item.keywords
+ session.clear()
+ item = session.query(Item).get(item.id)
assert item.keywords == [k1, k2]
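test_many_to_many_update above is about collection history: three keywords are appended and flushed, then the collection is replaced with a subset and flushed again. Continuing the previous sketch (this fragment assumes the Item/Keyword models and the engine defined there), the attribute history that flush() turns into the single m2m DELETE can be inspected directly; inspect() and the .history accessor are part of the modern API, not of the patch.

from sqlalchemy import inspect
from sqlalchemy.orm import Session

with Session(engine) as session:
    k1, k2, k3 = Keyword(name="w1"), Keyword(name="w2"), Keyword(name="w3")
    item = Item(description="item 1", keywords=[k1, k2, k3])
    session.add(item)
    session.flush()

    # replace the collection with a subset; k3 lands in the "deleted"
    # bucket of the attribute history, and flush() emits one DELETE for it
    item.keywords = [k1, k2]
    history = inspect(item).attrs.keywords.history
    assert k3 in history.deleted

    session.flush()
    session.expire_all()
    assert set(session.get(Item, item.id).keywords) == {k1, k2}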
+ @testing.resolve_artifact_names
def test_association(self):
- """basic test of an association object"""
- class IKAssociation(object):
- def __repr__(self):
- return "\nIKAssociation " + repr(self.item_id) + " " + repr(self.keyword)
-
- items = orderitems
-
- keywordmapper = mapper(Keyword, keywords)
-
- # note that we are breaking a rule here, making a second mapper(Keyword, keywords)
- # the reorganization of mapper construction affected this, but was fixed again
- m = mapper(Item, items, properties = dict(
- keywords = relation(mapper(IKAssociation, itemkeywords, properties = dict(
- keyword = relation(mapper(Keyword, keywords, non_primary=True), lazy = False, uselist = False, order_by=keywords.c.name)
- ), primary_key = [itemkeywords.c.item_id, itemkeywords.c.keyword_id]),
- lazy = False)
- ))
+ """Basic test of an association object"""
- data = [Item,
- {'item_name': 'a_item1', 'keywords' : (IKAssociation,
- [
- {'keyword' : (Keyword, {'name': 'big'})},
- {'keyword' : (Keyword, {'name': 'green'})},
- {'keyword' : (Keyword, {'name': 'purple'})},
- {'keyword' : (Keyword, {'name': 'round'})}
- ]
- )
- },
- {'item_name': 'a_item2', 'keywords' : (IKAssociation,
- [
- {'keyword' : (Keyword, {'name': 'huge'})},
- {'keyword' : (Keyword, {'name': 'violet'})},
- {'keyword' : (Keyword, {'name': 'yellow'})}
- ]
- )
- },
- {'item_name': 'a_item3', 'keywords' : (IKAssociation,
- [
- {'keyword' : (Keyword, {'name': 'big'})},
- {'keyword' : (Keyword, {'name': 'blue'})},
- ]
- )
- }
- ]
- for elem in data[1:]:
- item = Item()
- item.item_name = elem['item_name']
- item.keywords = []
- for kname in [e['keyword'][1]['name'] for e in elem['keywords'][1]]:
- try:
- k = Keyword.query.filter(keywords.c.name == kname)[0]
- except IndexError:
- k = Keyword()
- k.name= kname
- ik = IKAssociation()
- ik.keyword = k
- item.keywords.append(ik)
-
- Session.commit()
- Session.close()
- l = Item.query.filter(items.c.item_name.in_([e['item_name'] for e in data[1:]])).order_by(items.c.item_name).all()
- self.assert_result(l, *data)
+ class IKAssociation(_base.ComparableEntity):
+ pass
+
+ mapper(Keyword, keywords)
-class SaveTest2(ORMTest):
+        # note that we are breaking a rule here, making a second
+        # mapper(Keyword, keywords); the reorganization of mapper
+        # construction affected this, but was fixed again
- def define_tables(self, metadata):
- global users, addresses
- users = Table('users', metadata,
- Column('user_id', Integer, Sequence('user_id_seq', optional=True), primary_key = True),
- Column('user_name', String(20)),
- )
-
- addresses = Table('email_addresses', metadata,
- Column('address_id', Integer, Sequence('address_id_seq', optional=True), primary_key = True),
- Column('rel_user_id', Integer, ForeignKey(users.c.user_id)),
- Column('email_address', String(20)),
- )
+ mapper(IKAssociation, item_keywords,
+ primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
+ properties=dict(
+ keyword=relation(mapper(Keyword, keywords, non_primary=True),
+ lazy=False,
+ uselist=False,
+ order_by=keywords.c.name)))
- def test_m2o_nonmatch(self):
- m = mapper(Address, addresses, properties = dict(
- user = relation(mapper(User, users), lazy = True, uselist = False)
- ))
- data = [
- {'user_name' : 'thesub' , 'email_address' : 'bar@foo.com'},
- {'user_name' : 'assdkfj' , 'email_address' : 'thesdf@asdf.com'},
- ]
- objects = []
- for elem in data:
- a = Address()
- a.email_address = elem['email_address']
- a.user = User()
- a.user.user_name = elem['user_name']
- objects.append(a)
- self.assert_sql(testing.db, lambda: Session.commit(), [
- (
- "INSERT INTO users (user_name) VALUES (:user_name)",
- {'user_name': 'thesub'}
- ),
- (
- "INSERT INTO users (user_name) VALUES (:user_name)",
- {'user_name': 'assdkfj'}
- ),
- (
- "INSERT INTO email_addresses (rel_user_id, email_address) VALUES (:rel_user_id, :email_address)",
- {'rel_user_id': 1, 'email_address': 'bar@foo.com'}
- ),
- (
- "INSERT INTO email_addresses (rel_user_id, email_address) VALUES (:rel_user_id, :email_address)",
- {'rel_user_id': 2, 'email_address': 'thesdf@asdf.com'}
- )
- ],
-
- with_sequences = [
- (
- "INSERT INTO users (user_id, user_name) VALUES (:user_id, :user_name)",
- lambda ctx: {'user_name': 'thesub', 'user_id':ctx.last_inserted_ids()[0]}
- ),
- (
- "INSERT INTO users (user_id, user_name) VALUES (:user_id, :user_name)",
- lambda ctx: {'user_name': 'assdkfj', 'user_id':ctx.last_inserted_ids()[0]}
- ),
- (
- "INSERT INTO email_addresses (address_id, rel_user_id, email_address) VALUES (:address_id, :rel_user_id, :email_address)",
- lambda ctx:{'rel_user_id': 1, 'email_address': 'bar@foo.com', 'address_id':ctx.last_inserted_ids()[0]}
- ),
- (
- "INSERT INTO email_addresses (address_id, rel_user_id, email_address) VALUES (:address_id, :rel_user_id, :email_address)",
- lambda ctx:{'rel_user_id': 2, 'email_address': 'thesdf@asdf.com', 'address_id':ctx.last_inserted_ids()[0]}
- )
- ]
- )
-
-
-class SaveTest3(ORMTest):
- def define_tables(self, metadata):
- global t1, t2, t3
+ mapper(Item, items, properties=dict(
+ keywords=relation(IKAssociation, lazy=False)))
+
+ _fixtures.run_inserts_for(keywords)
+ session = create_session()
+
+ def fixture():
+ _kw = dict([(k.name, k) for k in session.query(Keyword)])
+ for n in ('big', 'green', 'purple', 'round', 'huge',
+ 'violet', 'yellow', 'blue'):
+ if n not in _kw:
+ _kw[n] = Keyword(name=n)
+
+ def assocs(*names):
+ return [IKAssociation(keyword=kw)
+ for kw in [_kw[n] for n in names]]
+
+ return [
+ Item(description='a_item1',
+ keywords=assocs('big', 'green', 'purple', 'round')),
+ Item(description='a_item2',
+ keywords=assocs('huge', 'violet', 'yellow')),
+ Item(description='a_item3',
+ keywords=assocs('big', 'blue'))]
- t1 = Table('items', metadata,
- Column('item_id', INT, Sequence('items_id_seq', optional=True), primary_key = True),
- Column('item_name', VARCHAR(50)),
- )
+ session.add_all(fixture())
+ session.flush()
+ eq_(fixture(), session.query(Item).order_by(Item.description).all())
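test_association maps an association class directly over item_keywords, with a nested eager relation to Keyword. Here is a sketch of the same association-object pattern in the modern declarative spelling (SQLAlchemy 1.4+); the class names and the lazy="joined" choice are illustrative, not part of the patch.

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Keyword(Base):
    __tablename__ = "keywords"
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

class ItemKeyword(Base):
    # the association object: one row of item_keywords, carrying a
    # reference to its Keyword
    __tablename__ = "item_keywords"
    item_id = Column(ForeignKey("items.id"), primary_key=True)
    keyword_id = Column(ForeignKey("keywords.id"), primary_key=True)
    keyword = relationship(Keyword, lazy="joined")

class Item(Base):
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    description = Column(String(30))
    keywords = relationship(ItemKeyword, lazy="joined")

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    item = Item(description="a_item1",
                keywords=[ItemKeyword(keyword=Keyword(name="big")),
                          ItemKeyword(keyword=Keyword(name="green"))])
    session.add(item)
    session.commit()

    loaded = session.get(Item, item.id)
    assert sorted(ik.keyword.name for ik in loaded.keywords) == ["big", "green"]

The non_primary mapper in the patch exists only because Keyword is already mapped once and the old API needed a second mapper inside the association; with a relationship() to the already-mapped Keyword class, as above, no second mapper is involved.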
- t3 = Table('keywords', metadata,
- Column('keyword_id', Integer, Sequence('keyword_id_seq', optional=True), primary_key = True),
- Column('name', VARCHAR(50)),
- )
- t2 = Table('assoc', metadata,
- Column('item_id', INT, ForeignKey("items")),
- Column('keyword_id', INT, ForeignKey("keywords")),
- Column('foo', Boolean, default=True)
- )
+class SaveTest2(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @testing.resolve_artifact_names
+ def test_m2o_nonmatch(self):
+ mapper(User, users)
+ mapper(Address, addresses, properties=dict(
+ user = relation(User, lazy=True, uselist=False)))
+
+ session = create_session()
+
+ def fixture():
+ return [
+ Address(email_address='a1', user=User(name='u1')),
+ Address(email_address='a2', user=User(name='u2'))]
+
+ session.add_all(fixture())
+
+ self.assert_sql(testing.db, session.flush, [
+ ("INSERT INTO users (name) VALUES (:name)",
+ {'name': 'u1'}),
+ ("INSERT INTO users (name) VALUES (:name)",
+ {'name': 'u2'}),
+ ("INSERT INTO addresses (user_id, email_address) "
+ "VALUES (:user_id, :email_address)",
+ {'user_id': 1, 'email_address': 'a1'}),
+ ("INSERT INTO addresses (user_id, email_address) "
+ "VALUES (:user_id, :email_address)",
+ {'user_id': 2, 'email_address': 'a2'})],
+ with_sequences = [
+ ("INSERT INTO users (id, name) "
+ "VALUES (:id, :name)",
+ lambda ctx: {'name': 'u1', 'id':ctx.last_inserted_ids()[0]}),
+ ("INSERT INTO users (id, name) "
+ "VALUES (:id, :name)",
+ lambda ctx: {'name': 'u2', 'id':ctx.last_inserted_ids()[0]}),
+ ("INSERT INTO addresses (id, user_id, email_address) "
+ "VALUES (:id, :user_id, :email_address)",
+ lambda ctx:{'user_id': 1, 'email_address': 'a1',
+ 'id':ctx.last_inserted_ids()[0]}),
+ ("INSERT INTO addresses (id, user_id, email_address) "
+ "VALUES (:id, :user_id, :email_address)",
+ lambda ctx:{'user_id': 2, 'email_address': 'a2',
+ 'id':ctx.last_inserted_ids()[0]})])
+
+
+class SaveTest3(_base.MappedTest):
+ def define_tables(self, metadata):
+ Table('items', metadata,
+ Column('item_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('item_name', String(50)))
+
+ Table('keywords', metadata,
+ Column('keyword_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
+
+ Table('assoc', metadata,
+ Column('item_id', Integer, ForeignKey("items")),
+ Column('keyword_id', Integer, ForeignKey("keywords")),
+ Column('foo', sa.Boolean, default=True))
+
+ def setup_classes(self):
+ class Keyword(_base.BasicEntity):
+ pass
+ class Item(_base.BasicEntity):
+ pass
+ @testing.resolve_artifact_names
def test_manytomany_xtracol_delete(self):
- """test that a many-to-many on a table that has an extra column can properly delete rows from the table
- without referencing the extra column"""
- mapper(Keyword, t3)
+        """A many-to-many on a table that has an extra column can properly
+        delete rows from the table without referencing the extra column."""
- mapper(Item, t1, properties = dict(
- keywords = relation(Keyword, secondary=t2, lazy = False),
- ))
+ mapper(Keyword, keywords)
+ mapper(Item, items, properties=dict(
+ keywords = relation(Keyword, secondary=assoc, lazy=False),))
i = Item()
k1 = Keyword()
k2 = Keyword()
i.keywords.append(k1)
i.keywords.append(k2)
- Session.commit()
- assert t2.count().scalar() == 2
+ session = create_session()
+ session.add(i)
+ session.flush()
+
+ assert assoc.count().scalar() == 2
i.keywords = []
print i.keywords
- Session.commit()
- assert t2.count().scalar() == 0
+ session.flush()
+ assert assoc.count().scalar() == 0
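test_manytomany_xtracol_delete checks that the extra assoc.foo column never needs to be referenced when the unit of work deletes association rows. Below is a compact sketch of the same setup, again in the modern declarative API with illustrative names; note the extra column carries a default, so the ORM's plain two-column INSERT into the secondary still succeeds.

from sqlalchemy import (Boolean, Column, ForeignKey, Integer, String, Table,
                        create_engine, func, select)
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

assoc = Table(
    "assoc", Base.metadata,
    Column("item_id", ForeignKey("items.item_id")),
    Column("keyword_id", ForeignKey("keywords.keyword_id")),
    # extra column: never referenced by the ORM, filled in by its default
    Column("foo", Boolean, default=True))

class Keyword(Base):
    __tablename__ = "keywords"
    keyword_id = Column(Integer, primary_key=True)
    name = Column(String(50))

class Item(Base):
    __tablename__ = "items"
    item_id = Column(Integer, primary_key=True)
    item_name = Column(String(50))
    keywords = relationship(Keyword, secondary=assoc)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    i = Item(item_name="i1",
             keywords=[Keyword(name="k1"), Keyword(name="k2")])
    session.add(i)
    session.flush()

    def count():
        return session.execute(
            select(func.count()).select_from(assoc)).scalar()

    assert count() == 2

    # the DELETE issued here names only item_id and keyword_id
    i.keywords = []
    session.flush()
    assert count() == 0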
-class BooleanColTest(ORMTest):
+class BooleanColTest(_base.MappedTest):
def define_tables(self, metadata):
- global t
- t =Table('t1', metadata,
+ Table('t1_t', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(30)),
- Column('value', Boolean))
+ Column('value', sa.Boolean))
+ @testing.resolve_artifact_names
def test_boolean(self):
# use the regular mapper
- from sqlalchemy.orm import mapper
-
- class T(fixtures.Base):
+ class T(_base.ComparableEntity):
pass
- mapper(T, t)
+ orm_mapper(T, t1_t)
sess = create_session()
t1 = T(value=True, name="t1")
@@ -1846,75 +2002,64 @@ class BooleanColTest(ORMTest):
for clear in (False, True):
if clear:
sess.clear()
- self.assertEquals(sess.query(T).all(), [T(value=True, name="t1"), T(value=False, name="t2"), T(value=True, name="t3")])
+ eq_(sess.query(T).all(), [T(value=True, name="t1"), T(value=False, name="t2"), T(value=True, name="t3")])
if clear:
sess.clear()
- self.assertEquals(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
+ eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
if clear:
sess.clear()
- self.assertEquals(sess.query(T).filter(T.value==False).all(), [T(value=False, name="t2")])
+ eq_(sess.query(T).filter(T.value==False).all(), [T(value=False, name="t2")])
t2 = sess.query(T).get(t2.id)
t2.value = True
sess.flush()
- self.assertEquals(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"), T(value=True, name="t2"), T(value=True, name="t3")])
+ eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"), T(value=True, name="t2"), T(value=True, name="t3")])
t2.value = False
sess.flush()
- self.assertEquals(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
+ eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
-class RowSwitchTest(ORMTest):
+class RowSwitchTest(_base.MappedTest):
def define_tables(self, metadata):
- global t1, t2, t3, t1t3
-
- global T1, T2, T3
-
- Session.remove()
-
# parent
- t1 = Table('t1', metadata,
+ Table('t1', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30), nullable=False))
# onetomany
- t2 = Table('t2', metadata,
+ Table('t2', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30), nullable=False),
- Column('t1id', Integer, ForeignKey('t1.id'),nullable=False),
- )
+ Column('t1id', Integer, ForeignKey('t1.id'),nullable=False))
# associated
- t3 = Table('t3', metadata,
+ Table('t3', metadata,
Column('id', Integer, primary_key=True),
- Column('data', String(30), nullable=False),
- )
+ Column('data', String(30), nullable=False))
#manytomany
- t1t3 = Table('t1t3', metadata,
+ Table('t1t3', metadata,
Column('t1id', Integer, ForeignKey('t1.id'),nullable=False),
- Column('t3id', Integer, ForeignKey('t3.id'),nullable=False),
- )
+ Column('t3id', Integer, ForeignKey('t3.id'),nullable=False))
- class T1(fixtures.Base):
+ def setup_classes(self):
+ class T1(_base.ComparableEntity):
pass
- class T2(fixtures.Base):
+ class T2(_base.ComparableEntity):
pass
- class T3(fixtures.Base):
+ class T3(_base.ComparableEntity):
pass
- def tearDown(self):
- Session.remove()
- super(RowSwitchTest, self).tearDown()
-
+ @testing.resolve_artifact_names
def test_onetomany(self):
mapper(T1, t1, properties={
't2s':relation(T2, cascade="all, delete-orphan")
})
mapper(T2, t2)
- sess = Session(autoflush=False)
+ sess = create_session()
o1 = T1(data='some t1', id=1)
o1.t2s.append(T2(data='some t2', id=1))
@@ -1937,13 +2082,14 @@ class RowSwitchTest(ORMTest):
assert list(sess.execute(t1.select(), mapper=T1)) == [(1, 'some other t1')]
assert list(sess.execute(t2.select(), mapper=T1)) == [(3, 'third t2', 1), (4, 'fourth t2', 1)]
+ @testing.resolve_artifact_names
def test_manytomany(self):
mapper(T1, t1, properties={
't3s':relation(T3, secondary=t1t3, cascade="all, delete-orphan")
})
mapper(T3, t3)
- sess = Session(autoflush=False)
+ sess = create_session()
o1 = T1(data='some t1', id=1)
o1.t3s.append(T3(data='some t3', id=1))
@@ -1953,7 +2099,7 @@ class RowSwitchTest(ORMTest):
sess.flush()
assert list(sess.execute(t1.select(), mapper=T1)) == [(1, 'some t1')]
- assert rowset(sess.execute(t1t3.select(), mapper=T1)) == set([(1,1), (1, 2)])
+ assert testing.rowset(sess.execute(t1t3.select(), mapper=T1)) == set([(1,1), (1, 2)])
assert list(sess.execute(t3.select(), mapper=T1)) == [(1, 'some t3'), (2, 'some other t3')]
o2 = T1(data='some other t1', id=1, t3s=[
@@ -1967,6 +2113,7 @@ class RowSwitchTest(ORMTest):
assert list(sess.execute(t1.select(), mapper=T1)) == [(1, 'some other t1')]
assert list(sess.execute(t3.select(), mapper=T1)) == [(3, 'third t3'), (4, 'fourth t3')]
+ @testing.resolve_artifact_names
def test_manytoone(self):
mapper(T2, t2, properties={
@@ -1974,7 +2121,7 @@ class RowSwitchTest(ORMTest):
})
mapper(T1, t1)
- sess = Session(autoflush=False)
+ sess = create_session()
o1 = T2(data='some t2', id=1)
o1.t1 = T1(data='some t1', id=1)
@@ -1994,18 +2141,15 @@ class RowSwitchTest(ORMTest):
assert list(sess.execute(t1.select(), mapper=T1)) == [(2, 'some other t1')]
assert list(sess.execute(t2.select(), mapper=T1)) == [(1, 'some other t2', 2)]
-class TransactionTest(ORMTest):
- __unsupported_on__ = ('mysql', 'mssql')
+class TransactionTest(_base.MappedTest):
+ __requires__ = ('deferrable_constraints',)
+ __whitelist__ = ('sqlite',)
# sqlite doesn't have deferrable constraints, but it allows them to
# be specified. it'll raise immediately post-INSERT, instead of at
# COMMIT. either way, this test should pass.
def define_tables(self, metadata):
- global t1, T1, t2, T2
-
- Session.remove()
-
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True))
@@ -2014,43 +2158,38 @@ class TransactionTest(ORMTest):
Column('t1_id', Integer,
ForeignKey('t1.id', deferrable=True, initially='deferred')
))
-
- # deferred_constraint = \
- # DDL("ALTER TABLE t2 ADD CONSTRAINT t2_t1_id_fk FOREIGN KEY (t1_id) "
- # "REFERENCES t1 (id) DEFERRABLE INITIALLY DEFERRED")
- # deferred_constraint.execute_at('after-create', t2)
- # t1.create()
- # t2.create()
- # t2.append_constraint(ForeignKeyConstraint(['t1_id'], ['t1.id']))
-
- class T1(fixtures.Base):
+ def setup_classes(self):
+ class T1(_base.ComparableEntity):
pass
- class T2(fixtures.Base):
+ class T2(_base.ComparableEntity):
pass
+ @testing.resolve_artifact_names
+ def setup_mappers(self):
orm_mapper(T1, t1)
orm_mapper(T2, t2)
+ @testing.resolve_artifact_names
def test_close_transaction_on_commit_fail(self):
- Session = sessionmaker(autoflush=False, autocommit=True)
- sess = Session()
+ session = create_session(autocommit=True)
# with a deferred constraint, this fails at COMMIT time instead
# of at INSERT time.
- sess.save(T2(t1_id=123))
+ session.add(T2(t1_id=123))
try:
- sess.flush()
+ session.flush()
assert False
except:
# Flush needs to rollback also when commit fails
- assert sess.transaction is None
+ assert session.transaction is None
# todo: on 8.3 at least, the failed commit seems to close the cursor?
# needs investigation. leaving in the DDL above now to help verify
# that the new deferrable support on FK isn't involved in this issue.
if testing.against('postgres'):
t1.bind.engine.dispose()
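TransactionTest relies on a DEFERRABLE INITIALLY DEFERRED foreign key so that, per the comments above, the violation surfaces at COMMIT on backends that honor deferral and right after the INSERT on sqlite. A small sketch of how such a constraint is declared and how the resulting DDL renders; the in-memory sqlite URL is just an illustrative target.

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, create_engine
from sqlalchemy.schema import CreateTable

metadata = MetaData()
t1 = Table("t1", metadata,
           Column("id", Integer, primary_key=True))
t2 = Table("t2", metadata,
           Column("id", Integer, primary_key=True),
           Column("t1_id", Integer,
                  ForeignKey("t1.id", deferrable=True, initially="DEFERRED")))

# the compiled DDL includes the deferral clause on the t1_id foreign key
print(CreateTable(t2))

# sqlite accepts the clause in its DDL, which is why the test whitelists
# sqlite alongside requiring the deferrable_constraints feature
engine = create_engine("sqlite://")
metadata.create_all(engine)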
+
if __name__ == "__main__":
testenv.main()