summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2016-03-15 16:41:17 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2016-03-15 16:41:17 -0400
commit224b03f9c006b12e3bbae9190ca9d0132e843208 (patch)
tree4c54f3404af67962835c3a78849f8e89ebb98da0 /test
parenta87b3c2101114d82f999c23d113ad2018629ed48 (diff)
parent8bc370ed382a45654101fa34bac4a2886ce089c3 (diff)
downloadsqlalchemy-224b03f9c006b12e3bbae9190ca9d0132e843208.tar.gz
Merge branch 'master' into pr157
Diffstat (limited to 'test')
-rw-r--r--test/aaa_profiling/test_compiler.py4
-rw-r--r--test/aaa_profiling/test_orm.py53
-rw-r--r--test/aaa_profiling/test_resultset.py5
-rw-r--r--test/aaa_profiling/test_zoomark.py1
-rw-r--r--test/aaa_profiling/test_zoomark_orm.py13
-rw-r--r--test/base/test_dependency.py29
-rw-r--r--test/base/test_except.py57
-rw-r--r--test/base/test_tutorials.py145
-rw-r--r--test/base/test_utils.py212
-rwxr-xr-xtest/conftest.py13
-rw-r--r--test/dialect/mssql/test_compiler.py378
-rw-r--r--test/dialect/mssql/test_engine.py11
-rw-r--r--test/dialect/mssql/test_query.py360
-rw-r--r--test/dialect/mssql/test_reflection.py40
-rw-r--r--test/dialect/mssql/test_types.py324
-rw-r--r--test/dialect/mysql/test_compiler.py14
-rw-r--r--test/dialect/mysql/test_dialect.py14
-rw-r--r--test/dialect/mysql/test_query.py79
-rw-r--r--test/dialect/mysql/test_reflection.py360
-rw-r--r--test/dialect/mysql/test_types.py64
-rw-r--r--test/dialect/postgresql/test_compiler.py175
-rw-r--r--test/dialect/postgresql/test_dialect.py55
-rw-r--r--test/dialect/postgresql/test_query.py622
-rw-r--r--test/dialect/postgresql/test_reflection.py70
-rw-r--r--test/dialect/postgresql/test_types.py915
-rw-r--r--test/dialect/test_oracle.py94
-rw-r--r--test/dialect/test_sqlite.py108
-rw-r--r--test/dialect/test_sybase.py14
-rw-r--r--test/engine/test_bind.py7
-rw-r--r--test/engine/test_ddlevents.py301
-rw-r--r--test/engine/test_execute.py505
-rw-r--r--test/engine/test_logging.py130
-rw-r--r--test/engine/test_parseconnect.py102
-rw-r--r--test/engine/test_pool.py400
-rw-r--r--test/engine/test_processors.py15
-rw-r--r--test/engine/test_reconnect.py5
-rw-r--r--test/engine/test_reflection.py128
-rw-r--r--test/engine/test_transaction.py29
-rw-r--r--test/ext/declarative/test_basic.py57
-rw-r--r--test/ext/declarative/test_inheritance.py68
-rw-r--r--test/ext/declarative/test_mixin.py216
-rw-r--r--test/ext/test_associationproxy.py89
-rw-r--r--test/ext/test_baked.py920
-rw-r--r--test/ext/test_extendedattr.py465
-rw-r--r--test/ext/test_hybrid.py1
-rw-r--r--test/ext/test_mutable.py693
-rw-r--r--test/orm/_fixtures.py15
-rw-r--r--test/orm/inheritance/test_basic.py296
-rw-r--r--test/orm/inheritance/test_concrete.py39
-rw-r--r--test/orm/inheritance/test_poly_persistence.py38
-rw-r--r--test/orm/inheritance/test_polymorphic_rel.py75
-rw-r--r--test/orm/inheritance/test_relationship.py305
-rw-r--r--test/orm/inheritance/test_single.py40
-rw-r--r--test/orm/test_attributes.py9
-rw-r--r--test/orm/test_bulk.py114
-rw-r--r--test/orm/test_cascade.py10
-rw-r--r--test/orm/test_composites.py3
-rw-r--r--test/orm/test_cycles.py49
-rw-r--r--test/orm/test_deferred.py91
-rw-r--r--test/orm/test_descriptor.py1
-rw-r--r--test/orm/test_eager_relations.py730
-rw-r--r--test/orm/test_events.py1095
-rw-r--r--test/orm/test_hasparent.py4
-rw-r--r--test/orm/test_joins.py11
-rw-r--r--test/orm/test_lazy_relations.py184
-rw-r--r--test/orm/test_load_on_fks.py3
-rw-r--r--test/orm/test_loading.py18
-rw-r--r--test/orm/test_mapper.py1248
-rw-r--r--test/orm/test_merge.py146
-rw-r--r--test/orm/test_options.py140
-rw-r--r--test/orm/test_query.py1013
-rw-r--r--test/orm/test_rel_fn.py41
-rw-r--r--test/orm/test_relationships.py11
-rw-r--r--test/orm/test_session.py127
-rw-r--r--test/orm/test_transaction.py90
-rw-r--r--test/orm/test_unitofwork.py42
-rw-r--r--test/orm/test_unitofworkv2.py775
-rw-r--r--test/orm/test_update_delete.py97
-rw-r--r--test/orm/test_utils.py50
-rw-r--r--test/orm/test_versioning.py282
-rw-r--r--test/perf/orm2010.py4
-rw-r--r--test/profiles.txt804
-rw-r--r--test/requirements.py95
-rw-r--r--test/sql/test_compiler.py544
-rw-r--r--test/sql/test_constraints.py510
-rw-r--r--test/sql/test_cte.py154
-rw-r--r--test/sql/test_defaults.py93
-rw-r--r--test/sql/test_functions.py145
-rw-r--r--test/sql/test_generative.py23
-rw-r--r--test/sql/test_insert.py258
-rw-r--r--test/sql/test_insert_exec.py445
-rw-r--r--test/sql/test_join_rewriting.py7
-rw-r--r--test/sql/test_labels.py87
-rw-r--r--test/sql/test_metadata.py192
-rw-r--r--test/sql/test_operators.py590
-rw-r--r--test/sql/test_query.py1274
-rw-r--r--test/sql/test_resultset.py1319
-rw-r--r--test/sql/test_returning.py27
-rw-r--r--test/sql/test_rowcount.py17
-rw-r--r--test/sql/test_selectable.py73
-rw-r--r--test/sql/test_text.py43
-rw-r--r--test/sql/test_type_expressions.py11
-rw-r--r--test/sql/test_types.py538
-rw-r--r--test/sql/test_update.py160
104 files changed, 18566 insertions, 4399 deletions
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 5eece4602..5095be103 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -32,8 +32,8 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
for t in (t1, t2):
for c in t.c:
c.type._type_affinity
- from sqlalchemy import types
- for t in list(types._type_map.values()):
+ from sqlalchemy.sql import sqltypes
+ for t in list(sqltypes._type_map.values()):
t._type_affinity
cls.dialect = default.DefaultDialect()
diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py
index 9251e75e1..aeb069d90 100644
--- a/test/aaa_profiling/test_orm.py
+++ b/test/aaa_profiling/test_orm.py
@@ -461,3 +461,56 @@ class SessionTest(fixtures.MappedTest):
def go():
sess.expire_all()
go()
+
+
+class QueryTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'parent',
+ metadata,
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('data1', String(20)),
+ Column('data2', String(20)),
+ Column('data3', String(20)),
+ Column('data4', String(20)),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Parent(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Parent = cls.classes.Parent
+ parent = cls.tables.parent
+
+ mapper(Parent, parent)
+
+ def _fixture(self):
+ Parent = self.classes.Parent
+ sess = Session()
+ sess.add_all([
+ Parent(data1='d1', data2='d2', data3='d3', data4='d4')
+ for i in range(10)
+ ])
+ sess.commit()
+ sess.close()
+
+ def test_query_cols(self):
+ Parent = self.classes.Parent
+ self._fixture()
+ sess = Session()
+
+ @profiling.function_call_count()
+ def go():
+ for i in range(10):
+ q = sess.query(
+ Parent.data1, Parent.data2, Parent.data3, Parent.data4
+ )
+
+ q.all()
+
+ go()
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index a964adcae..9ffa21cb6 100644
--- a/test/aaa_profiling/test_resultset.py
+++ b/test/aaa_profiling/test_resultset.py
@@ -75,7 +75,10 @@ class ExecutionTest(fixtures.TestBase):
@profiling.function_call_count()
def go():
c.execute("select 1")
- go()
+ try:
+ go()
+ finally:
+ c.close()
def test_minimal_engine_execute(self, variance=0.10):
# create an engine without any instrumentation.
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 5627ef8e0..3d97caa6a 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -408,4 +408,3 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
def _baseline_8_drop(self):
self.metadata.drop_all()
-
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index dece30d65..bfc7c114c 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -174,10 +174,14 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
# Animals
- list(self.session.query(Animal).filter(Animal.Species == 'Leopard'))
- list(self.session.query(Animal).filter(Animal.Species == 'Ostrich'))
- list(self.session.query(Animal).filter(Animal.Legs == 1000000))
- list(self.session.query(Animal).filter(Animal.Species == 'Tick'))
+ list(self.session.query(Animal).filter(
+ Animal.Species == 'Leopard'))
+ list(self.session.query(Animal).filter(
+ Animal.Species == 'Ostrich'))
+ list(self.session.query(Animal).filter(
+ Animal.Legs == 1000000))
+ list(self.session.query(Animal).filter(
+ Animal.Species == 'Tick'))
def _baseline_4_expressions(self):
for x in range(ITERATIONS):
@@ -342,4 +346,3 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
def _baseline_7_drop(self):
self.session.rollback()
self.metadata.drop_all()
-
diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py
index b16516f15..a06bd5a51 100644
--- a/test/base/test_dependency.py
+++ b/test/base/test_dependency.py
@@ -14,6 +14,11 @@ class DependencySortTest(fixtures.TestBase):
result = list(topological.sort(tuples, allitems))
assert conforms_partial_ordering(tuples, result)
+ def assert_sort_deterministic(self, tuples, allitems, expected):
+ result = list(topological.sort(tuples, allitems, deterministic_order=True))
+ assert conforms_partial_ordering(tuples, result)
+ assert result == expected
+
def _nodes_from_tuples(self, tups):
s = set()
for tup in tups:
@@ -63,6 +68,30 @@ class DependencySortTest(fixtures.TestBase):
(node3, node2)]
self.assert_sort(tuples)
+ def test_sort_deterministic_one(self):
+ node1 = 'node1'
+ node2 = 'node2'
+ node3 = 'node3'
+ node4 = 'node4'
+ node5 = 'node5'
+ node6 = 'node6'
+ allitems = [node6, node5, node4, node3, node2, node1]
+ tuples = [(node6, node5), (node2, node1)]
+ expected = [node6, node4, node3, node2, node5, node1]
+ self.assert_sort_deterministic(tuples, allitems, expected)
+
+ def test_sort_deterministic_two(self):
+ node1 = 1
+ node2 = 2
+ node3 = 3
+ node4 = 4
+ node5 = 5
+ node6 = 6
+ allitems = [node6, node5, node4, node3, node2, node1]
+ tuples = [(node6, node5), (node4, node3), (node2, node1)]
+ expected = [node6, node4, node2, node5, node3, node1]
+ self.assert_sort_deterministic(tuples, allitems, expected)
+
def test_raise_on_cycle_one(self):
node1 = 'node1'
node2 = 'node2'
diff --git a/test/base/test_except.py b/test/base/test_except.py
index 918e7a042..9e8dd4760 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -4,6 +4,7 @@
from sqlalchemy import exc as sa_exceptions
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
+from sqlalchemy.engine import default
class Error(Exception):
@@ -28,8 +29,28 @@ class OutOfSpec(DatabaseError):
pass
+# exception with a totally different name...
+class WrongNameError(DatabaseError):
+ pass
+
+# but they're going to call it their "IntegrityError"
+IntegrityError = WrongNameError
+
+
+# and they're going to subclass it!
+class SpecificIntegrityError(WrongNameError):
+ pass
+
+
class WrapTest(fixtures.TestBase):
+ def _translating_dialect_fixture(self):
+ d = default.DefaultDialect()
+ d.dbapi_exception_translation_map = {
+ "WrongNameError": "IntegrityError"
+ }
+ return d
+
def test_db_error_normal(self):
try:
raise sa_exceptions.DBAPIError.instance(
@@ -160,6 +181,42 @@ class WrapTest(fixtures.TestBase):
except sa_exceptions.ArgumentError:
self.assert_(False)
+ dialect = self._translating_dialect_fixture()
+ try:
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ sa_exceptions.ArgumentError(), DatabaseError,
+ dialect=dialect)
+ except sa_exceptions.DBAPIError as e:
+ self.assert_(e.__class__ is sa_exceptions.DBAPIError)
+ except sa_exceptions.ArgumentError:
+ self.assert_(False)
+
+ def test_db_error_dbapi_uses_wrong_names(self):
+ dialect = self._translating_dialect_fixture()
+
+ try:
+ raise sa_exceptions.DBAPIError.instance(
+ '', [], IntegrityError(),
+ DatabaseError, dialect=dialect)
+ except sa_exceptions.DBAPIError as e:
+ self.assert_(e.__class__ is sa_exceptions.IntegrityError)
+
+ try:
+ raise sa_exceptions.DBAPIError.instance(
+ '', [], SpecificIntegrityError(),
+ DatabaseError, dialect=dialect)
+ except sa_exceptions.DBAPIError as e:
+ self.assert_(e.__class__ is sa_exceptions.IntegrityError)
+
+ try:
+ raise sa_exceptions.DBAPIError.instance(
+ '', [], SpecificIntegrityError(),
+ DatabaseError)
+ except sa_exceptions.DBAPIError as e:
+ # doesn't work without a dialect
+ self.assert_(e.__class__ is not sa_exceptions.IntegrityError)
+
def test_db_error_keyboard_interrupt(self):
try:
raise sa_exceptions.DBAPIError.instance(
diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py
new file mode 100644
index 000000000..55a0b92d6
--- /dev/null
+++ b/test/base/test_tutorials.py
@@ -0,0 +1,145 @@
+from __future__ import print_function
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import config
+import doctest
+import logging
+import sys
+import re
+import os
+
+
+class DocTest(fixtures.TestBase):
+ def _setup_logger(self):
+ rootlogger = logging.getLogger('sqlalchemy.engine.base.Engine')
+
+ class MyStream(object):
+ def write(self, string):
+ sys.stdout.write(string)
+ sys.stdout.flush()
+
+ def flush(self):
+ pass
+
+ self._handler = handler = logging.StreamHandler(MyStream())
+ handler.setFormatter(logging.Formatter('%(message)s'))
+ rootlogger.addHandler(handler)
+
+ def _teardown_logger(self):
+ rootlogger = logging.getLogger('sqlalchemy.engine.base.Engine')
+ rootlogger.removeHandler(self._handler)
+
+ def _setup_create_table_patcher(self):
+ from sqlalchemy.sql import ddl
+ self.orig_sort = ddl.sort_tables_and_constraints
+
+ def our_sort(tables, **kw):
+ return self.orig_sort(
+ sorted(tables, key=lambda t: t.key), **kw
+ )
+ ddl.sort_tables_and_constraints = our_sort
+
+ def _teardown_create_table_patcher(self):
+ from sqlalchemy.sql import ddl
+ ddl.sort_tables_and_constraints = self.orig_sort
+
+ def setup(self):
+ self._setup_logger()
+ self._setup_create_table_patcher()
+
+ def teardown(self):
+ self._teardown_create_table_patcher()
+ self._teardown_logger()
+
+ def _run_doctest_for_content(self, name, content):
+ optionflags = (
+ doctest.ELLIPSIS |
+ doctest.NORMALIZE_WHITESPACE |
+ doctest.IGNORE_EXCEPTION_DETAIL |
+ _get_allow_unicode_flag()
+ )
+ runner = doctest.DocTestRunner(
+ verbose=None, optionflags=optionflags,
+ checker=_get_unicode_checker())
+ globs = {
+ 'print_function': print_function}
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(content, globs, name, name, 0)
+ runner.run(test)
+ runner.summarize()
+ assert not runner.failures
+
+ def _run_doctest(self, fname):
+ here = os.path.dirname(__file__)
+ sqla_base = os.path.normpath(os.path.join(here, "..", ".."))
+ path = os.path.join(sqla_base, "doc/build", fname)
+ if not os.path.exists(path):
+ config.skip_test("Can't find documentation file %r" % path)
+ with open(path) as file_:
+ content = file_.read()
+ content = re.sub(r'{(?:stop|sql|opensql)}', '', content)
+ self._run_doctest_for_content(fname, content)
+
+ def test_orm(self):
+ self._run_doctest("orm/tutorial.rst")
+
+ def test_core(self):
+ self._run_doctest("core/tutorial.rst")
+
+
+# unicode checker courtesy py.test
+
+
+def _get_unicode_checker():
+ """
+ Returns a doctest.OutputChecker subclass that takes in account the
+ ALLOW_UNICODE option to ignore u'' prefixes in strings. Useful
+ when the same doctest should run in Python 2 and Python 3.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ if hasattr(_get_unicode_checker, 'UnicodeOutputChecker'):
+ return _get_unicode_checker.UnicodeOutputChecker()
+
+ import doctest
+ import re
+
+ class UnicodeOutputChecker(doctest.OutputChecker):
+ """
+ Copied from doctest_nose_plugin.py from the nltk project:
+ https://github.com/nltk/nltk
+ """
+
+ _literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+
+ def check_output(self, want, got, optionflags):
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ if res:
+ return True
+
+ if not (optionflags & _get_allow_unicode_flag()):
+ return False
+
+ else: # pragma: no cover
+ # the code below will end up executed only in Python 2 in
+ # our tests, and our coverage check runs in Python 3 only
+ def remove_u_prefixes(txt):
+ return re.sub(self._literal_re, r'\1\2', txt)
+
+ want = remove_u_prefixes(want)
+ got = remove_u_prefixes(got)
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ return res
+
+ _get_unicode_checker.UnicodeOutputChecker = UnicodeOutputChecker
+ return _get_unicode_checker.UnicodeOutputChecker()
+
+
+def _get_allow_unicode_flag():
+ """
+ Registers and returns the ALLOW_UNICODE flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_UNICODE')
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index df61d7874..fcb9a59a3 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -1,12 +1,14 @@
import copy
+import sys
from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
-from sqlalchemy.testing import eq_, is_, ne_, fails_if
+from sqlalchemy.testing import eq_, is_, ne_, fails_if, mock, expect_warnings
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
-from sqlalchemy.util import langhelpers
+from sqlalchemy.util import langhelpers, compat
+import inspect
class _KeyedTupleTest(object):
@@ -276,6 +278,7 @@ class MemoizedAttrTest(fixtures.TestBase):
val[0] += 1
return v
+ assert inspect.ismethod(Foo().bar)
ne_(Foo.bar, None)
f1 = Foo()
assert 'bar' not in f1.__dict__
@@ -283,6 +286,102 @@ class MemoizedAttrTest(fixtures.TestBase):
eq_(f1.bar(), 20)
eq_(val[0], 21)
+ def test_memoized_slots(self):
+ canary = mock.Mock()
+
+ class Foob(util.MemoizedSlots):
+ __slots__ = ('foo_bar', 'gogo')
+
+ def _memoized_method_gogo(self):
+ canary.method()
+ return "gogo"
+
+ def _memoized_attr_foo_bar(self):
+ canary.attr()
+ return "foobar"
+
+ f1 = Foob()
+ assert_raises(AttributeError, setattr, f1, "bar", "bat")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(canary.mock_calls, [mock.call.attr(), mock.call.method()])
+
+
+class WrapCallableTest(fixtures.TestBase):
+ def test_wrapping_update_wrapper_fn(self):
+ def my_fancy_default():
+ """run the fancy default"""
+ return 10
+
+ c = util.wrap_callable(lambda: my_fancy_default, my_fancy_default)
+
+ eq_(c.__name__, "my_fancy_default")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_fn_nodocstring(self):
+ def my_fancy_default():
+ return 10
+
+ c = util.wrap_callable(lambda: my_fancy_default, my_fancy_default)
+ eq_(c.__name__, "my_fancy_default")
+ eq_(c.__doc__, None)
+
+ def test_wrapping_update_wrapper_cls(self):
+ class MyFancyDefault(object):
+ """a fancy default"""
+
+ def __call__(self):
+ """run the fancy default"""
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_cls_noclsdocstring(self):
+ class MyFancyDefault(object):
+
+ def __call__(self):
+ """run the fancy default"""
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_cls_nomethdocstring(self):
+ class MyFancyDefault(object):
+ """a fancy default"""
+
+ def __call__(self):
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "a fancy default")
+
+ def test_wrapping_update_wrapper_cls_noclsdocstring_nomethdocstring(self):
+ class MyFancyDefault(object):
+
+ def __call__(self):
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, None)
+
class ToListTest(fixtures.TestBase):
def test_from_string(self):
@@ -313,6 +412,20 @@ class ToListTest(fixtures.TestBase):
[1, 2, 3]
)
+ def test_from_bytes(self):
+
+ eq_(
+ util.to_list(compat.b('abc')),
+ [compat.b('abc')]
+ )
+
+ eq_(
+ util.to_list([
+ compat.b('abc'), compat.b('def')]),
+ [compat.b('abc'), compat.b('def')]
+ )
+
+
class ColumnCollectionTest(fixtures.TestBase):
def test_in(self):
@@ -1102,7 +1215,10 @@ class IdentitySetTest(fixtures.TestBase):
return super_, sub_, twin1, twin2, unique1, unique2
def _assert_unorderable_types(self, callable_):
- if util.py3k:
+ if util.py36:
+ assert_raises_message(
+ TypeError, 'not supported between instances of', callable_)
+ elif util.py3k:
assert_raises_message(
TypeError, 'unorderable types', callable_)
else:
@@ -2033,6 +2149,96 @@ class TestClassHierarchy(fixtures.TestBase):
eq_(set(util.class_hierarchy(A)), set((A, B, object)))
+class ReraiseTest(fixtures.TestBase):
+ @testing.requires.python3
+ def test_raise_from_cause_same_cause(self):
+ class MyException(Exception):
+ pass
+
+ def go():
+ try:
+ raise MyException("exc one")
+ except Exception as err:
+ util.raise_from_cause(err)
+
+ try:
+ go()
+ assert False
+ except MyException as err:
+ is_(err.__cause__, None)
+
+ def test_reraise_disallow_same_cause(self):
+ class MyException(Exception):
+ pass
+
+ def go():
+ try:
+ raise MyException("exc one")
+ except Exception as err:
+ type_, value, tb = sys.exc_info()
+ util.reraise(type_, err, tb, value)
+
+ assert_raises_message(
+ AssertionError,
+ "Same cause emitted",
+ go
+ )
+
+ def test_raise_from_cause(self):
+ class MyException(Exception):
+ pass
+
+ class MyOtherException(Exception):
+ pass
+
+ me = MyException("exc on")
+
+ def go():
+ try:
+ raise me
+ except Exception:
+ util.raise_from_cause(MyOtherException("exc two"))
+
+ try:
+ go()
+ assert False
+ except MyOtherException as moe:
+ if testing.requires.python3.enabled:
+ is_(moe.__cause__, me)
+
+ @testing.requires.python2
+ def test_safe_reraise_py2k_warning(self):
+ class MyException(Exception):
+ pass
+
+ class MyOtherException(Exception):
+ pass
+
+ m1 = MyException("exc one")
+ m2 = MyOtherException("exc two")
+
+ def go2():
+ raise m2
+
+ def go():
+ try:
+ raise m1
+ except:
+ with util.safe_reraise():
+ go2()
+
+ with expect_warnings(
+ "An exception has occurred during handling of a previous "
+ "exception. The previous exception "
+ "is:.*MyException.*exc one"
+ ):
+ try:
+ go()
+ assert False
+ except MyOtherException:
+ pass
+
+
class TestClassProperty(fixtures.TestBase):
def test_simple(self):
diff --git a/test/conftest.py b/test/conftest.py
index c697085ee..9488a7159 100755
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -9,11 +9,17 @@ installs SQLAlchemy's testing plugin into the local environment.
import sys
import os
-for pth in ['../lib']:
+if not sys.flags.no_user_site:
+ # this is needed so that test scenarios like "python setup.py test"
+ # work correctly, as well as plain "py.test". These commands assume
+ # that the package in question is locally present, but since we have
+ # ./lib/, we need to punch that in.
+ # We check no_user_site to honor the use of this flag.
sys.path.insert(
0,
- os.path.join(os.path.dirname(os.path.abspath(__file__)), pth))
-
+ os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), '..', 'lib')
+ )
# use bootstrapping so that test plugins are loaded
# without touching the main library before coverage starts
@@ -27,3 +33,4 @@ with open(bootstrap_file) as f:
to_bootstrap = "pytest"
exec(code, globals(), locals())
from pytestplugin import * # noqa
+
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index 3de8ea5c9..b59ca4fd1 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -1,12 +1,14 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_
-from sqlalchemy import *
+from sqlalchemy.testing import eq_, is_
from sqlalchemy import schema
from sqlalchemy.sql import table, column
from sqlalchemy.databases import mssql
from sqlalchemy.dialects.mssql import mxodbc
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import sql
+from sqlalchemy import Integer, String, Table, Column, select, MetaData,\
+ update, delete, insert, extract, union, func, PrimaryKeyConstraint, \
+ UniqueConstraint, Index, Sequence, literal
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -28,22 +30,32 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_select_with_nolock(self):
t = table('sometable', column('somecolumn'))
- self.assert_compile(t.select().with_hint(t, 'WITH (NOLOCK)'),
- 'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')
+ self.assert_compile(
+ t.select().with_hint(t, 'WITH (NOLOCK)'),
+ 'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')
+
+ def test_select_with_nolock_schema(self):
+ m = MetaData()
+ t = Table('sometable', m, Column('somecolumn', Integer),
+ schema='test_schema')
+ self.assert_compile(
+ t.select().with_hint(t, 'WITH (NOLOCK)'),
+ 'SELECT test_schema.sometable.somecolumn '
+ 'FROM test_schema.sometable WITH (NOLOCK)')
def test_join_with_hint(self):
t1 = table('t1',
- column('a', Integer),
- column('b', String),
- column('c', String),
- )
+ column('a', Integer),
+ column('b', String),
+ column('c', String),
+ )
t2 = table('t2',
- column("a", Integer),
- column("b", Integer),
- column("c", Integer),
- )
- join = t1.join(t2, t1.c.a==t2.c.a).\
- select().with_hint(t1, 'WITH (NOLOCK)')
+ column("a", Integer),
+ column("b", Integer),
+ column("c", Integer),
+ )
+ join = t1.join(t2, t1.c.a == t2.c.a).\
+ select().with_hint(t1, 'WITH (NOLOCK)')
self.assert_compile(
join,
'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c '
@@ -69,10 +81,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for darg in ("*", "mssql"):
self.assert_compile(
t.insert().
- values(somecolumn="x").
- with_hint("WITH (PAGLOCK)",
- selectable=targ,
- dialect_name=darg),
+ values(somecolumn="x").
+ with_hint("WITH (PAGLOCK)",
+ selectable=targ,
+ dialect_name=darg),
"INSERT INTO sometable WITH (PAGLOCK) "
"(somecolumn) VALUES (:somecolumn)"
)
@@ -82,11 +94,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
- t.update().where(t.c.somecolumn=="q").
- values(somecolumn="x").
- with_hint("WITH (PAGLOCK)",
- selectable=targ,
- dialect_name=darg),
+ t.update().where(t.c.somecolumn == "q").
+ values(somecolumn="x").
+ with_hint("WITH (PAGLOCK)",
+ selectable=targ,
+ dialect_name=darg),
"UPDATE sometable WITH (PAGLOCK) "
"SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1"
@@ -95,9 +107,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_update_exclude_hint(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(
- t.update().where(t.c.somecolumn=="q").
- values(somecolumn="x").
- with_hint("XYZ", "mysql"),
+ t.update().where(t.c.somecolumn == "q").
+ values(somecolumn="x").
+ with_hint("XYZ", "mysql"),
"UPDATE sometable SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1"
)
@@ -107,10 +119,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
- t.delete().where(t.c.somecolumn=="q").
- with_hint("WITH (PAGLOCK)",
- selectable=targ,
- dialect_name=darg),
+ t.delete().where(t.c.somecolumn == "q").
+ with_hint("WITH (PAGLOCK)",
+ selectable=targ,
+ dialect_name=darg),
"DELETE FROM sometable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = :somecolumn_1"
)
@@ -118,9 +130,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_delete_exclude_hint(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(
- t.delete().\
- where(t.c.somecolumn=="q").\
- with_hint("XYZ", dialect_name="mysql"),
+ t.delete().
+ where(t.c.somecolumn == "q").
+ with_hint("XYZ", dialect_name="mysql"),
"DELETE FROM sometable WHERE "
"sometable.somecolumn = :somecolumn_1"
)
@@ -130,18 +142,51 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
t2 = table('othertable', column('somecolumn'))
for darg in ("*", "mssql"):
self.assert_compile(
- t.update().where(t.c.somecolumn==t2.c.somecolumn).
- values(somecolumn="x").
- with_hint("WITH (PAGLOCK)",
- selectable=t2,
- dialect_name=darg),
+ t.update().where(t.c.somecolumn == t2.c.somecolumn).
+ values(somecolumn="x").
+ with_hint("WITH (PAGLOCK)",
+ selectable=t2,
+ dialect_name=darg),
"UPDATE sometable SET somecolumn=:somecolumn "
"FROM sometable, othertable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = othertable.somecolumn"
)
+ def test_update_to_select_schema(self):
+ meta = MetaData()
+ table = Table(
+ "sometable", meta,
+ Column("sym", String),
+ Column("val", Integer),
+ schema="schema"
+ )
+ other = Table(
+ "#other", meta,
+ Column("sym", String),
+ Column("newval", Integer)
+ )
+ stmt = table.update().values(
+ val=select([other.c.newval]).
+ where(table.c.sym == other.c.sym).as_scalar())
+
+ self.assert_compile(
+ stmt,
+ "UPDATE [schema].sometable SET val="
+ "(SELECT [#other].newval FROM [#other] "
+ "WHERE [schema].sometable.sym = [#other].sym)",
+ )
+
+ stmt = table.update().values(val=other.c.newval).\
+ where(table.c.sym == other.c.sym)
+ self.assert_compile(
+ stmt,
+ "UPDATE [schema].sometable SET val="
+ "[#other].newval FROM [schema].sometable, "
+ "[#other] WHERE [schema].sometable.sym = [#other].sym",
+ )
+
# TODO: not supported yet.
- #def test_delete_from_hint(self):
+ # def test_delete_from_hint(self):
# t = table('sometable', column('somecolumn'))
# t2 = table('othertable', column('somecolumn'))
# for darg in ("*", "mssql"):
@@ -173,8 +218,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"IN ('x', 'y', 'z')",
),
(
- t.c.foo.in_([None]),
- "sometable.foo IN (NULL)"
+ t.c.foo.in_([None]),
+ "sometable.foo IN (NULL)"
)
]:
self.assert_compile(expr, compile, dialect=mxodbc_dialect)
@@ -187,13 +232,13 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
t = table('sometable', column('somecolumn'))
self.assert_compile(t.select().where(t.c.somecolumn
- == t.select()),
+ == t.select()),
'SELECT sometable.somecolumn FROM '
'sometable WHERE sometable.somecolumn = '
'(SELECT sometable.somecolumn FROM '
'sometable)')
self.assert_compile(t.select().where(t.c.somecolumn
- != t.select()),
+ != t.select()),
'SELECT sometable.somecolumn FROM '
'sometable WHERE sometable.somecolumn != '
'(SELECT sometable.somecolumn FROM '
@@ -210,10 +255,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
subqueries"""
table1 = table('mytable',
- column('myid', Integer),
- column('name', String),
- column('description', String),
- )
+ column('myid', Integer),
+ column('name', String),
+ column('description', String),
+ )
q = select([table1.c.myid],
order_by=[table1.c.myid]).alias('foo')
@@ -223,74 +268,83 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"myid FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid")
-
-
def test_delete_schema(self):
metadata = MetaData()
tbl = Table('test', metadata, Column('id', Integer,
- primary_key=True), schema='paj')
+ primary_key=True), schema='paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM paj.test WHERE paj.test.id = '
':id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM paj.test WHERE paj.test.id IN '
- '(SELECT test_1.id FROM paj.test AS test_1 '
- 'WHERE test_1.id = :id_1)')
+ '(SELECT paj.test.id FROM paj.test '
+ 'WHERE paj.test.id = :id_1)')
def test_delete_schema_multipart(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer,
- primary_key=True), schema='banana.paj')
+ tbl = Table(
+ 'test', metadata,
+ Column('id', Integer,
+ primary_key=True),
+ schema='banana.paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM banana.paj.test WHERE '
'banana.paj.test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM banana.paj.test WHERE '
- 'banana.paj.test.id IN (SELECT test_1.id '
- 'FROM banana.paj.test AS test_1 WHERE '
- 'test_1.id = :id_1)')
+ 'banana.paj.test.id IN (SELECT banana.paj.test.id '
+ 'FROM banana.paj.test WHERE '
+ 'banana.paj.test.id = :id_1)')
def test_delete_schema_multipart_needs_quoting(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer,
- primary_key=True), schema='banana split.paj')
+ tbl = Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True),
+ schema='banana split.paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM [banana split].paj.test WHERE '
'[banana split].paj.test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM [banana split].paj.test WHERE '
- '[banana split].paj.test.id IN (SELECT '
- 'test_1.id FROM [banana split].paj.test AS '
- 'test_1 WHERE test_1.id = :id_1)')
+ '[banana split].paj.test.id IN ('
+
+ 'SELECT [banana split].paj.test.id FROM '
+ '[banana split].paj.test WHERE '
+ '[banana split].paj.test.id = :id_1)')
def test_delete_schema_multipart_both_need_quoting(self):
metadata = MetaData()
tbl = Table('test', metadata, Column('id', Integer,
- primary_key=True),
+ primary_key=True),
schema='banana split.paj with a space')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM [banana split].[paj with a '
'space].test WHERE [banana split].[paj '
'with a space].test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
- self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
- 'DELETE FROM [banana split].[paj with a '
- 'space].test WHERE [banana split].[paj '
- 'with a space].test.id IN (SELECT '
- 'test_1.id FROM [banana split].[paj with a '
- 'space].test AS test_1 WHERE test_1.id = '
- ':id_1)')
+ self.assert_compile(
+ tbl.delete().where(tbl.c.id.in_(s)),
+ "DELETE FROM [banana split].[paj with a space].test "
+ "WHERE [banana split].[paj with a space].test.id IN "
+ "(SELECT [banana split].[paj with a space].test.id "
+ "FROM [banana split].[paj with a space].test "
+ "WHERE [banana split].[paj with a space].test.id = :id_1)"
+ )
def test_union(self):
- t1 = table('t1', column('col1'), column('col2'), column('col3'
- ), column('col4'))
- t2 = table('t2', column('col1'), column('col2'), column('col3'
- ), column('col4'))
- s1, s2 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4'
- )], t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
+ t1 = table(
+ 't1', column('col1'), column('col2'),
+ column('col3'), column('col4'))
+ t2 = table(
+ 't2', column('col1'), column('col2'),
+ column('col3'), column('col4'))
+ s1, s2 = select(
+ [t1.c.col3.label('col3'), t1.c.col4.label('col4')],
+ t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
t2.c.col2.in_(['t2col2r2', 't2col2r3']))
u = union(s1, s2, order_by=['col3', 'col4'])
@@ -313,8 +367,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(func.current_time(), 'CURRENT_TIME')
self.assert_compile(func.foo(), 'foo()')
m = MetaData()
- t = Table('sometable', m, Column('col1', Integer), Column('col2'
- , Integer))
+ t = Table(
+ 'sometable', m, Column('col1', Integer), Column('col2', Integer))
self.assert_compile(select([func.max(t.c.col1)]),
'SELECT max(sometable.col1) AS max_1 FROM '
'sometable')
@@ -329,14 +383,17 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for field in 'day', 'month', 'year':
self.assert_compile(
select([extract(field, t.c.col1)]),
- 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
+ 'SELECT DATEPART(%s, t.col1) AS anon_1 FROM t' % field)
def test_update_returning(self):
- table1 = table('mytable', column('myid', Integer), column('name'
- , String(128)), column('description',
- String(128)))
- u = update(table1, values=dict(name='foo'
- )).returning(table1.c.myid, table1.c.name)
+ table1 = table(
+ 'mytable',
+ column('myid', Integer),
+ column('name', String(128)),
+ column('description', String(128)))
+ u = update(
+ table1,
+ values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name')
@@ -345,40 +402,43 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name, '
'inserted.description')
- u = update(table1, values=dict(name='foo'
- )).returning(table1).where(table1.c.name == 'bar')
+ u = update(
+ table1,
+ values=dict(
+ name='foo')).returning(table1).where(table1.c.name == 'bar')
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name, '
'inserted.description WHERE mytable.name = '
':name_1')
u = update(table1, values=dict(name='foo'
- )).returning(func.length(table1.c.name))
+ )).returning(func.length(table1.c.name))
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'LEN(inserted.name) AS length_1')
def test_delete_returning(self):
- table1 = table('mytable', column('myid', Integer), column('name'
- , String(128)), column('description',
- String(128)))
+ table1 = table(
+ 'mytable', column('myid', Integer),
+ column('name', String(128)), column('description', String(128)))
d = delete(table1).returning(table1.c.myid, table1.c.name)
self.assert_compile(d,
'DELETE FROM mytable OUTPUT deleted.myid, '
'deleted.name')
d = delete(table1).where(table1.c.name == 'bar'
).returning(table1.c.myid,
- table1.c.name)
+ table1.c.name)
self.assert_compile(d,
'DELETE FROM mytable OUTPUT deleted.myid, '
'deleted.name WHERE mytable.name = :name_1')
def test_insert_returning(self):
- table1 = table('mytable', column('myid', Integer), column('name'
- , String(128)), column('description',
- String(128)))
- i = insert(table1, values=dict(name='foo'
- )).returning(table1.c.myid, table1.c.name)
+ table1 = table(
+ 'mytable', column('myid', Integer),
+ column('name', String(128)), column('description', String(128)))
+ i = insert(
+ table1,
+ values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
self.assert_compile(i,
'INSERT INTO mytable (name) OUTPUT '
'inserted.myid, inserted.name VALUES '
@@ -389,7 +449,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'inserted.myid, inserted.name, '
'inserted.description VALUES (:name)')
i = insert(table1, values=dict(name='foo'
- )).returning(func.length(table1.c.name))
+ )).returning(func.length(table1.c.name))
self.assert_compile(i,
'INSERT INTO mytable (name) OUTPUT '
'LEN(inserted.name) AS length_1 VALUES '
@@ -398,7 +458,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_limit_using_top(self):
t = table('t', column('x', Integer), column('y', Integer))
- s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10)
+ s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10)
self.assert_compile(
s,
@@ -409,18 +469,21 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_limit_zero_using_top(self):
t = table('t', column('x', Integer), column('y', Integer))
- s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0)
+ s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0)
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
checkparams={'x_1': 5}
)
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 2)
+ assert t.c.x in set(c._create_result_map()['x'][1])
def test_offset_using_window(self):
t = table('t', column('x', Integer), column('y', Integer))
- s = select([t]).where(t.c.x==5).order_by(t.c.y).offset(20)
+ s = select([t]).where(t.c.x == 5).order_by(t.c.y).offset(20)
# test that the select is not altered with subsequent compile
# calls
@@ -434,10 +497,14 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams={'param_1': 20, 'x_1': 5}
)
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 2)
+ assert t.c.x in set(c._create_result_map()['x'][1])
+
def test_limit_offset_using_window(self):
t = table('t', column('x', Integer), column('y', Integer))
- s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10).offset(20)
+ s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
self.assert_compile(
s,
@@ -449,6 +516,34 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
)
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 2)
+ assert t.c.x in set(c._create_result_map()['x'][1])
+ assert t.c.y in set(c._create_result_map()['y'][1])
+
+ def test_limit_offset_w_ambiguous_cols(self):
+ t = table('t', column('x', Integer), column('y', Integer))
+
+ cols = [t.c.x, t.c.x.label('q'), t.c.x.label('p'), t.c.y]
+ s = select(cols).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
+
+ self.assert_compile(
+ s,
+ "SELECT anon_1.x, anon_1.q, anon_1.p, anon_1.y "
+ "FROM (SELECT t.x AS x, t.x AS q, t.x AS p, t.y AS y, "
+ "ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
+ "FROM t "
+ "WHERE t.x = :x_1) AS anon_1 "
+ "WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
+ checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
+ )
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 4)
+
+ result_map = c._create_result_map()
+
+ for col in cols:
+ is_(result_map[col.key][1][0], col)
def test_limit_offset_with_correlated_order_by(self):
t1 = table('t1', column('x', Integer), column('y', Integer))
@@ -471,10 +566,15 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
)
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 2)
+ assert t1.c.x in set(c._create_result_map()['x'][1])
+ assert t1.c.y in set(c._create_result_map()['y'][1])
+
def test_limit_zero_offset_using_window(self):
t = table('t', column('x', Integer), column('y', Integer))
- s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0).offset(0)
+ s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0).offset(0)
# render the LIMIT of zero, but not the OFFSET
# of zero, so produces TOP 0
@@ -489,26 +589,29 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence('', 0), primary_key=True))
- self.assert_compile(schema.CreateTable(tbl),
- "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
- "PRIMARY KEY (id))"
- )
+ self.assert_compile(
+ schema.CreateTable(tbl),
+ "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
+ "PRIMARY KEY (id))"
+ )
def test_sequence_non_primary_key(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence(''), primary_key=False))
- self.assert_compile(schema.CreateTable(tbl),
- "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
- )
+ self.assert_compile(
+ schema.CreateTable(tbl),
+ "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
+ )
def test_sequence_ignore_nullability(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence(''), nullable=True))
- self.assert_compile(schema.CreateTable(tbl),
- "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
- )
+ self.assert_compile(
+ schema.CreateTable(tbl),
+ "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
+ )
def test_table_pkc_clustering(self):
metadata = MetaData()
@@ -516,10 +619,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False),
PrimaryKeyConstraint("x", "y", mssql_clustered=True))
- self.assert_compile(schema.CreateTable(tbl),
- "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
- "PRIMARY KEY CLUSTERED (x, y))"
- )
+ self.assert_compile(
+ schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
+ "PRIMARY KEY CLUSTERED (x, y))"
+ )
def test_table_uc_clustering(self):
metadata = MetaData()
@@ -528,10 +632,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
Column('y', Integer, autoincrement=False),
PrimaryKeyConstraint("x"),
UniqueConstraint("y", mssql_clustered=True))
- self.assert_compile(schema.CreateTable(tbl),
- "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
- "PRIMARY KEY (x), UNIQUE CLUSTERED (y))"
- )
+ self.assert_compile(
+ schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
+ "PRIMARY KEY (x), UNIQUE CLUSTERED (y))"
+ )
def test_index_clustering(self):
metadata = MetaData()
@@ -544,8 +649,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_index_ordering(self):
metadata = MetaData()
- tbl = Table('test', metadata,
- Column('x', Integer), Column('y', Integer), Column('z', Integer))
+ tbl = Table(
+ 'test', metadata,
+ Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x.desc(), "y")
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x DESC, y)"
@@ -554,8 +660,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_create_index_expr(self):
m = MetaData()
t1 = Table('foo', m,
- Column('x', Integer)
- )
+ Column('x', Integer)
+ )
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x > 5)),
"CREATE INDEX bar ON foo (x > 5)"
@@ -564,9 +670,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_drop_index_w_schema(self):
m = MetaData()
t1 = Table('foo', m,
- Column('x', Integer),
- schema='bar'
- )
+ Column('x', Integer),
+ schema='bar'
+ )
self.assert_compile(
schema.DropIndex(Index("idx_foo", t1.c.x)),
"DROP INDEX idx_foo ON bar.foo"
@@ -574,8 +680,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_index_extra_include_1(self):
metadata = MetaData()
- tbl = Table('test', metadata,
- Column('x', Integer), Column('y', Integer), Column('z', Integer))
+ tbl = Table(
+ 'test', metadata,
+ Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x, mssql_include=['y'])
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x) INCLUDE (y)"
@@ -583,8 +690,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_index_extra_include_2(self):
metadata = MetaData()
- tbl = Table('test', metadata,
- Column('x', Integer), Column('y', Integer), Column('z', Integer))
+ tbl = Table(
+ 'test', metadata,
+ Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x) INCLUDE (y)"
@@ -595,14 +703,14 @@ class SchemaTest(fixtures.TestBase):
def setup(self):
t = Table('sometable', MetaData(),
- Column('pk_column', Integer),
- Column('test_column', String)
- )
+ Column('pk_column', Integer),
+ Column('test_column', String)
+ )
self.column = t.c.test_column
dialect = mssql.dialect()
self.ddl_compiler = dialect.ddl_compiler(dialect,
- schema.CreateTable(t))
+ schema.CreateTable(t))
def _column_spec(self):
return self.ddl_compiler.get_column_specification(self.column)
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index a994b1787..929afc8f9 100644
--- a/test/dialect/mssql/test_engine.py
+++ b/test/dialect/mssql/test_engine.py
@@ -170,6 +170,17 @@ class ParseConnectTest(fixtures.TestBase):
engine.connect)
+class EngineFromConfigTest(fixtures.TestBase):
+ def test_legacy_schema_flag(self):
+ cfg = {
+ "sqlalchemy.url": "mssql://foodsn",
+ "sqlalchemy.legacy_schema_aliasing": "false"
+ }
+ e = engine_from_config(
+ cfg, module=Mock(version="MS SQL Server 11.0.92"))
+ eq_(e.dialect.legacy_schema_aliasing, False)
+
+
class VersionDetectionTest(fixtures.TestBase):
def test_pymssql_version(self):
dialect = pymssql.MSDialect_pymssql()
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
index e0affe831..81fa2fb1e 100644
--- a/test/dialect/mssql/test_query.py
+++ b/test/dialect/mssql/test_query.py
@@ -1,25 +1,33 @@
# -*- encoding: utf-8
from sqlalchemy.testing import eq_, engines
-from sqlalchemy import *
from sqlalchemy.sql import table, column
from sqlalchemy.databases import mssql
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, assertions
from sqlalchemy import testing
from sqlalchemy.util import ue
from sqlalchemy import util
from sqlalchemy.testing.assertsql import CursorSQL
+from sqlalchemy import Integer, String, Table, Column, select, MetaData,\
+ func, PrimaryKeyConstraint, desc, Sequence, DDL, ForeignKey, or_, and_
+from sqlalchemy import event
+metadata = None
+cattable = None
+matchtable = None
-class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
- """SQL server cannot reference schema-qualified tables in a SELECT statement, they
- must be aliased.
+class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
+ """Legacy behavior tried to prevent schema-qualified tables
+ from being rendered as dotted names, and were instead aliased.
+
+ This behavior no longer seems to be required.
+
"""
- __dialect__ = mssql.dialect()
def setup(self):
metadata = MetaData()
- self.t1 = table('t1',
+ self.t1 = table(
+ 't1',
column('a', Integer),
column('b', String),
column('c', String),
@@ -29,67 +37,104 @@ class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
Column("a", Integer),
Column("b", Integer),
Column("c", Integer),
- schema = 'schema'
+ schema='schema'
+ )
+
+ def _assert_sql(self, element, legacy_sql, modern_sql=None):
+ dialect = mssql.dialect(legacy_schema_aliasing=True)
+
+ self.assert_compile(
+ element,
+ legacy_sql,
+ dialect=dialect
+ )
+
+ dialect = mssql.dialect()
+ self.assert_compile(
+ element,
+ modern_sql or "foob",
+ dialect=dialect
)
+ def _legacy_dialect(self):
+ return mssql.dialect(legacy_schema_aliasing=True)
+
def test_result_map(self):
s = self.t2.select()
- c = s.compile(dialect=self.__dialect__)
- assert self.t2.c.a in set(c.result_map['a'][1])
+ c = s.compile(dialect=self._legacy_dialect())
+ assert self.t2.c.a in set(c._create_result_map()['a'][1])
def test_result_map_use_labels(self):
s = self.t2.select(use_labels=True)
- c = s.compile(dialect=self.__dialect__)
- assert self.t2.c.a in set(c.result_map['schema_t2_a'][1])
+ c = s.compile(dialect=self._legacy_dialect())
+ assert self.t2.c.a in set(c._create_result_map()['schema_t2_a'][1])
def test_straight_select(self):
- self.assert_compile(self.t2.select(),
- "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1"
+ self._assert_sql(
+ self.t2.select(),
+ "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1",
+ "SELECT [schema].t2.a, [schema].t2.b, "
+ "[schema].t2.c FROM [schema].t2"
)
def test_straight_select_use_labels(self):
- self.assert_compile(
+ self._assert_sql(
self.t2.select(use_labels=True),
"SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, "
- "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1"
+ "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1",
+ "SELECT [schema].t2.a AS schema_t2_a, "
+ "[schema].t2.b AS schema_t2_b, "
+ "[schema].t2.c AS schema_t2_c FROM [schema].t2"
)
def test_join_to_schema(self):
t1, t2 = self.t1, self.t2
- self.assert_compile(
- t1.join(t2, t1.c.a==t2.c.a).select(),
+ self._assert_sql(
+ t1.join(t2, t1.c.a == t2.c.a).select(),
"SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 "
- "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a"
+ "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a",
+
+ "SELECT t1.a, t1.b, t1.c, [schema].t2.a, [schema].t2.b, "
+ "[schema].t2.c FROM t1 JOIN [schema].t2 ON [schema].t2.a = t1.a"
)
def test_union_schema_to_non(self):
t1, t2 = self.t1, self.t2
s = select([t2.c.a, t2.c.b]).apply_labels().\
- union(
- select([t1.c.a, t1.c.b]).apply_labels()
- ).alias().select()
- self.assert_compile(
+ union(
+ select([t1.c.a, t1.c.b]).apply_labels()).alias().select()
+ self._assert_sql(
s,
"SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
"(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b "
"FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, "
+ "t1.b AS t1_b FROM t1) AS anon_1",
+
+ "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM "
+ "(SELECT [schema].t2.a AS schema_t2_a, [schema].t2.b AS "
+ "schema_t2_b FROM [schema].t2 UNION SELECT t1.a AS t1_a, "
"t1.b AS t1_b FROM t1) AS anon_1"
)
def test_column_subquery_to_alias(self):
a1 = self.t2.alias('a1')
s = select([self.t2, select([a1.c.a]).as_scalar()])
- self.assert_compile(
+ self._assert_sql(
s,
"SELECT t2_1.a, t2_1.b, t2_1.c, "
"(SELECT a1.a FROM [schema].t2 AS a1) "
- "AS anon_1 FROM [schema].t2 AS t2_1"
+ "AS anon_1 FROM [schema].t2 AS t2_1",
+
+ "SELECT [schema].t2.a, [schema].t2.b, [schema].t2.c, "
+ "(SELECT a1.a FROM [schema].t2 AS a1) AS anon_1 FROM [schema].t2"
)
+
class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mssql'
__dialect__ = mssql.MSDialect()
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -97,10 +142,10 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
metadata = MetaData(testing.db)
cattable = Table('cattable', metadata,
- Column('id', Integer),
- Column('description', String(50)),
- PrimaryKeyConstraint('id', name='PK_cattable'),
- )
+ Column('id', Integer),
+ Column('description', String(50)),
+ PrimaryKeyConstraint('id', name='PK_cattable'),
+ )
def setup(self):
metadata.create_all()
@@ -110,7 +155,7 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
def test_compiled(self):
self.assert_compile(cattable.insert().values(id=9,
- description='Python'),
+ description='Python'),
'INSERT INTO cattable (id, description) '
'VALUES (:id, :description)')
@@ -127,27 +172,30 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
def test_executemany(self):
cattable.insert().execute([{'id': 89, 'description': 'Python'},
- {'id': 8, 'description': 'Ruby'},
- {'id': 3, 'description': 'Perl'},
- {'id': 1, 'description': 'Java'}])
+ {'id': 8, 'description': 'Ruby'},
+ {'id': 3, 'description': 'Perl'},
+ {'id': 1, 'description': 'Java'}])
cats = cattable.select().order_by(cattable.c.id).execute()
eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')],
list(cats))
cattable.insert().execute([{'description': 'PHP'},
- {'description': 'Smalltalk'}])
+ {'description': 'Smalltalk'}])
lastcats = \
cattable.select().order_by(desc(cattable.c.id)).limit(2).execute()
eq_([(91, 'Smalltalk'), (90, 'PHP')], list(lastcats))
+
class QueryUnicodeTest(fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
def test_convert_unicode(self):
meta = MetaData(testing.db)
- t1 = Table('unitest_table', meta, Column('id', Integer,
- primary_key=True), Column('descr',
- mssql.MSText(convert_unicode=True)))
+ t1 = Table(
+ 'unitest_table', meta,
+ Column('id', Integer, primary_key=True),
+ Column('descr', mssql.MSText(convert_unicode=True)))
meta.create_all()
con = testing.db.connect()
@@ -159,13 +207,16 @@ class QueryUnicodeTest(fixtures.TestBase):
try:
r = t1.select().execute().first()
assert isinstance(r[1], util.text_type), \
- '%s is %s instead of unicode, working on %s' % (r[1],
- type(r[1]), meta.bind)
+ '%s is %s instead of unicode, working on %s' % (
+ r[1],
+ type(r[1]), meta.bind)
finally:
meta.drop_all()
+
class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
def test_fetchid_trigger(self):
"""
@@ -194,27 +245,27 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
with the init parameter 'implicit_returning = False'.
"""
- #todo: this same test needs to be tried in a multithreaded context
+ # todo: this same test needs to be tried in a multithreaded context
# with multiple threads inserting to the same table.
- #todo: check whether this error also occurs with clients other
+ # todo: check whether this error also occurs with clients other
# than the SQL Server Native Client. Maybe an assert_raises
# test should be written.
meta = MetaData(testing.db)
t1 = Table('t1', meta,
- Column('id', Integer, Sequence('fred', 100, 1),
- primary_key=True),
- Column('descr', String(200)),
- # the following flag will prevent the
- # MSSQLCompiler.returning_clause from getting called,
- # though the ExecutionContext will still have a
- # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
- # hopefully be called instead.
- implicit_returning = False
- )
+ Column('id', Integer, Sequence('fred', 100, 1),
+ primary_key=True),
+ Column('descr', String(200)),
+ # the following flag will prevent the
+ # MSSQLCompiler.returning_clause from getting called,
+ # though the ExecutionContext will still have a
+ # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
+ # hopefully be called instead.
+ implicit_returning=False
+ )
t2 = Table('t2', meta,
- Column('id', Integer, Sequence('fred', 200, 1),
- primary_key=True),
- Column('descr', String(200)))
+ Column('id', Integer, Sequence('fred', 200, 1),
+ primary_key=True),
+ Column('descr', String(200)))
meta.create_all()
con = testing.db.connect()
con.execute("""create trigger paj on t1 for insert as
@@ -300,66 +351,117 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
),
)
+ @testing.provide_metadata
def test_insertid_schema(self):
- meta = MetaData(testing.db)
- con = testing.db.connect()
+ meta = self.metadata
+ eng = engines.testing_engine(
+ options=dict(legacy_schema_aliasing=False))
+ meta.bind = eng
+ con = eng.connect()
con.execute('create schema paj')
+
+ @event.listens_for(meta, "after_drop")
+ def cleanup(target, connection, **kw):
+ connection.execute('drop schema paj')
+
tbl = Table('test', meta,
Column('id', Integer, primary_key=True), schema='paj')
tbl.create()
- try:
- tbl.insert().execute({'id':1})
- finally:
- tbl.drop()
- con.execute('drop schema paj')
+ tbl.insert().execute({'id': 1})
+ eq_(tbl.select().scalar(), 1)
+
+ @testing.provide_metadata
+ def test_insertid_schema_legacy(self):
+ meta = self.metadata
+ eng = engines.testing_engine(
+ options=dict(legacy_schema_aliasing=True))
+ meta.bind = eng
+ con = eng.connect()
+ con.execute('create schema paj')
+ @event.listens_for(meta, "after_drop")
+ def cleanup(target, connection, **kw):
+ connection.execute('drop schema paj')
+
+ tbl = Table('test', meta,
+ Column('id', Integer, primary_key=True), schema='paj')
+ tbl.create()
+ tbl.insert().execute({'id': 1})
+ eq_(tbl.select().scalar(), 1)
+
+ @testing.provide_metadata
def test_returning_no_autoinc(self):
- meta = MetaData(testing.db)
- table = Table('t1', meta, Column('id', Integer,
- primary_key=True), Column('data', String(50)))
+ meta = self.metadata
+ table = Table(
+ 't1', meta,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
table.create()
- try:
- result = table.insert().values(id=1,
- data=func.lower('SomeString'
- )).returning(table.c.id, table.c.data).execute()
- eq_(result.fetchall(), [(1, 'somestring')])
- finally:
+ result = table.insert().values(
+ id=1,
+ data=func.lower('SomeString')).\
+ returning(table.c.id, table.c.data).execute()
+ eq_(result.fetchall(), [(1, 'somestring')])
- # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs
- # before the result is fetched
+ @testing.provide_metadata
+ def test_delete_schema(self):
+ meta = self.metadata
+ eng = engines.testing_engine(
+ options=dict(legacy_schema_aliasing=False))
+ meta.bind = eng
+ con = eng.connect()
+ con.execute('create schema paj')
- table.drop()
+ @event.listens_for(meta, "after_drop")
+ def cleanup(target, connection, **kw):
+ connection.execute('drop schema paj')
- def test_delete_schema(self):
- meta = MetaData(testing.db)
- con = testing.db.connect()
+ tbl = Table(
+ 'test', meta,
+ Column('id', Integer, primary_key=True), schema='paj')
+ tbl.create()
+ tbl.insert().execute({'id': 1})
+ eq_(tbl.select().scalar(), 1)
+ tbl.delete(tbl.c.id == 1).execute()
+ eq_(tbl.select().scalar(), None)
+
+ @testing.provide_metadata
+ def test_delete_schema_legacy(self):
+ meta = self.metadata
+ eng = engines.testing_engine(
+ options=dict(legacy_schema_aliasing=True))
+ meta.bind = eng
+ con = eng.connect()
con.execute('create schema paj')
- tbl = Table('test', meta, Column('id', Integer,
- primary_key=True), schema='paj')
+
+ @event.listens_for(meta, "after_drop")
+ def cleanup(target, connection, **kw):
+ connection.execute('drop schema paj')
+
+ tbl = Table(
+ 'test', meta,
+ Column('id', Integer, primary_key=True), schema='paj')
tbl.create()
- try:
- tbl.insert().execute({'id': 1})
- tbl.delete(tbl.c.id == 1).execute()
- finally:
- tbl.drop()
- con.execute('drop schema paj')
+ tbl.insert().execute({'id': 1})
+ eq_(tbl.select().scalar(), 1)
+ tbl.delete(tbl.c.id == 1).execute()
+ eq_(tbl.select().scalar(), None)
+ @testing.provide_metadata
def test_insertid_reserved(self):
- meta = MetaData(testing.db)
+ meta = self.metadata
table = Table(
'select', meta,
Column('col', Integer, primary_key=True)
)
table.create()
- meta2 = MetaData(testing.db)
- try:
- table.insert().execute(col=7)
- finally:
- table.drop()
+ table.insert().execute(col=7)
+ eq_(table.select().scalar(), 7)
class Foo(object):
+
def __init__(self, **kw):
for k in kw:
setattr(self, k, kw[k])
@@ -380,10 +482,12 @@ def full_text_search_missing():
finally:
connection.close()
+
class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mssql'
__skip_if__ = full_text_search_missing,
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -399,29 +503,24 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
Column('title', String(200)),
Column('category_id', Integer, ForeignKey('cattable.id')),
PrimaryKeyConstraint('id', name='PK_matchtable'),
- )
+ )
DDL("""CREATE FULLTEXT INDEX
ON cattable (description)
- KEY INDEX PK_cattable""").execute_at('after-create'
- , matchtable)
+ KEY INDEX PK_cattable""").\
+ execute_at('after-create', matchtable)
DDL("""CREATE FULLTEXT INDEX
ON matchtable (title)
- KEY INDEX PK_matchtable""").execute_at('after-create'
- , matchtable)
+ KEY INDEX PK_matchtable""").\
+ execute_at('after-create', matchtable)
metadata.create_all()
cattable.insert().execute([{'id': 1, 'description': 'Python'},
- {'id': 2, 'description': 'Ruby'}])
- matchtable.insert().execute([{'id': 1, 'title'
- : 'Agile Web Development with Rails'
- , 'category_id': 2}, {'id': 2,
- 'title': 'Dive Into Python',
- 'category_id': 1}, {'id': 3, 'title'
- : "Programming Matz's Ruby",
- 'category_id': 2}, {'id': 4, 'title'
- : 'The Definitive Guide to Django',
- 'category_id': 1}, {'id': 5, 'title'
- : 'Python in a Nutshell',
- 'category_id': 1}])
+ {'id': 2, 'description': 'Ruby'}])
+ matchtable.insert().execute([
+ {'id': 1, 'title': 'Web Development with Rails', 'category_id': 2},
+ {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
+ {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
+ {'id': 4, 'title': 'Guide to Django', 'category_id': 1},
+ {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
DDL("WAITFOR DELAY '00:00:05'"
).execute(bind=engines.testing_engine())
@@ -438,59 +537,60 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
def test_simple_match(self):
results = \
- matchtable.select().where(matchtable.c.title.match('python'
- )).order_by(matchtable.c.id).execute().fetchall()
+ matchtable.select().where(
+ matchtable.c.title.match('python')).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
def test_simple_match_with_apostrophe(self):
results = \
- matchtable.select().where(matchtable.c.title.match("Matz's"
- )).execute().fetchall()
+ matchtable.select().where(
+ matchtable.c.title.match("Matz's")).execute().fetchall()
eq_([3], [r.id for r in results])
def test_simple_prefix_match(self):
results = \
- matchtable.select().where(matchtable.c.title.match('"nut*"'
- )).execute().fetchall()
+ matchtable.select().where(
+ matchtable.c.title.match('"nut*"')).execute().fetchall()
eq_([5], [r.id for r in results])
def test_simple_inflectional_match(self):
results = \
matchtable.select().where(
matchtable.c.title.match('FORMSOF(INFLECTIONAL, "dives")'
- )).execute().fetchall()
+ )).execute().fetchall()
eq_([2], [r.id for r in results])
def test_or_match(self):
results1 = \
- matchtable.select().where(or_(matchtable.c.title.match('nutshell'
- ), matchtable.c.title.match('ruby'
- ))).order_by(matchtable.c.id).execute().fetchall()
+ matchtable.select().where(or_(
+ matchtable.c.title.match('nutshell'),
+ matchtable.c.title.match('ruby'))).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results1])
results2 = \
matchtable.select().where(
- matchtable.c.title.match('nutshell OR ruby'
- )).order_by(matchtable.c.id).execute().fetchall()
+ matchtable.c.title.match(
+ 'nutshell OR ruby')).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results2])
def test_and_match(self):
results1 = \
- matchtable.select().where(and_(matchtable.c.title.match('python'
- ), matchtable.c.title.match('nutshell'
- ))).execute().fetchall()
+ matchtable.select().where(and_(
+ matchtable.c.title.match('python'),
+ matchtable.c.title.match('nutshell'))).execute().fetchall()
eq_([5], [r.id for r in results1])
results2 = \
matchtable.select().where(
matchtable.c.title.match('python AND nutshell'
- )).execute().fetchall()
+ )).execute().fetchall()
eq_([5], [r.id for r in results2])
def test_match_across_joins(self):
- results = matchtable.select().where(and_(cattable.c.id
- == matchtable.c.category_id,
- or_(cattable.c.description.match('Ruby'),
- matchtable.c.title.match('nutshell'
- )))).order_by(matchtable.c.id).execute().fetchall()
+ results = matchtable.select().where(
+ and_(cattable.c.id == matchtable.c.category_id,
+ or_(cattable.c.description.match('Ruby'),
+ matchtable.c.title.match('nutshell')))).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([1, 3, 5], [r.id for r in results])
-
-
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index bee441586..ccde93ba1 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -1,5 +1,5 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_, in_
from sqlalchemy import *
from sqlalchemy import types, schema, event
from sqlalchemy.databases import mssql
@@ -12,6 +12,7 @@ from sqlalchemy import util
class ReflectionTest(fixtures.TestBase, ComparesTables):
__only_on__ = 'mssql'
+ __backend__ = True
@testing.provide_metadata
def test_basic_reflection(self):
@@ -24,14 +25,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('user_name', types.VARCHAR(20), nullable=False),
Column('test1', types.CHAR(5), nullable=False),
Column('test2', types.Float(5), nullable=False),
- Column('test3', types.Text('max')),
+ Column('test3', types.Text()),
Column('test4', types.Numeric, nullable=False),
Column('test5', types.DateTime),
Column('parent_user_id', types.Integer,
ForeignKey('engine_users.user_id')),
Column('test6', types.DateTime, nullable=False),
- Column('test7', types.Text('max')),
- Column('test8', types.LargeBinary('max')),
+ Column('test7', types.Text()),
+ Column('test8', types.LargeBinary()),
Column('test_passivedefault2', types.Integer,
server_default='5'),
Column('test9', types.BINARY(100)),
@@ -171,6 +172,32 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
set([t2.c['x col'], t2.c.y])
)
+ @testing.provide_metadata
+ def test_max_ident_in_varchar_not_present(self):
+ """test [ticket:3504].
+
+ Here we are testing not just that the "max" token comes back
+ as None, but also that these types accept "max" as the value
+ of "length" on construction, which isn't a directly documented
+ pattern however is likely in common use.
+
+ """
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('t1', types.String),
+ Column('t2', types.Text('max')),
+ Column('t3', types.Text('max')),
+ Column('t4', types.LargeBinary('max')),
+ Column('t5', types.VARBINARY('max')),
+ )
+ metadata.create_all()
+ for col in inspect(testing.db).get_columns('t'):
+ is_(col['type'].length, None)
+ in_('max', str(col['type'].compile(dialect=testing.db.dialect)))
+
+
from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
from sqlalchemy.dialects.mssql import base
@@ -187,7 +214,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = :table_name_1",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = :table_name_1",
dialect=dialect
)
@@ -197,12 +224,13 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
class ReflectHugeViewTest(fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
# crashes on freetds 0.91, not worth it
__skip_if__ = (
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 5c9157379..37c0e7060 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -1,5 +1,6 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_, engines, pickleable
+from sqlalchemy.testing import eq_, engines, pickleable, assert_raises_message
+from sqlalchemy.testing import is_, is_not_
import datetime
import os
from sqlalchemy import Table, Column, MetaData, Float, \
@@ -8,7 +9,8 @@ from sqlalchemy import Table, Column, MetaData, Float, \
UnicodeText, LargeBinary
from sqlalchemy import types, schema
from sqlalchemy.databases import mssql
-from sqlalchemy.dialects.mssql.base import TIME
+from sqlalchemy.dialects.mssql.base import TIME, _MSDate
+from sqlalchemy.dialects.mssql.base import MS_2005_VERSION, MS_2008_VERSION
from sqlalchemy.testing import fixtures, \
AssertsExecutionResults, ComparesTables
from sqlalchemy import testing
@@ -33,6 +35,48 @@ class TimeTypeTest(fixtures.TestBase):
result_processor = mssql_time_type.result_processor(None, None)
eq_(expected, result_processor(value))
+ def test_result_processor_invalid(self):
+ mssql_time_type = TIME()
+ result_processor = mssql_time_type.result_processor(None, None)
+ assert_raises_message(
+ ValueError,
+ "could not parse 'abc' as a time value",
+ result_processor, 'abc'
+ )
+
+
+class MSDateTypeTest(fixtures.TestBase):
+
+ def test_result_processor(self):
+ expected = datetime.date(2000, 1, 2)
+ self._assert_result_processor(expected, '2000-01-02')
+
+ def _assert_result_processor(self, expected, value):
+ mssql_date_type = _MSDate()
+ result_processor = mssql_date_type.result_processor(None, None)
+ eq_(expected, result_processor(value))
+
+ def test_result_processor_invalid(self):
+ mssql_date_type = _MSDate()
+ result_processor = mssql_date_type.result_processor(None, None)
+ assert_raises_message(
+ ValueError,
+ "could not parse 'abc' as a date value",
+ result_processor, 'abc'
+ )
+
+ def test_extract(self):
+ from sqlalchemy import extract
+ fivedaysago = datetime.datetime.now() \
+ - datetime.timedelta(days=5)
+ for field, exp in ('year', fivedaysago.year), \
+ ('month', fivedaysago.month), ('day', fivedaysago.day):
+ r = testing.db.execute(
+ select([
+ extract(field, fivedaysago)])
+ ).scalar()
+ eq_(r, exp)
+
class TypeDDLTest(fixtures.TestBase):
@@ -173,6 +217,91 @@ class TypeDDLTest(fixtures.TestBase):
"%s %s" % (col.name, columns[index][3]))
self.assert_(repr(col))
+ def test_dates(self):
+ "Exercise type specification for date types."
+
+ columns = [
+ # column type, args, kwargs, expected ddl
+ (mssql.MSDateTime, [], {},
+ 'DATETIME', None),
+
+ (types.DATE, [], {},
+ 'DATE', None),
+ (types.Date, [], {},
+ 'DATE', None),
+ (types.Date, [], {},
+ 'DATETIME', MS_2005_VERSION),
+ (mssql.MSDate, [], {},
+ 'DATE', None),
+ (mssql.MSDate, [], {},
+ 'DATETIME', MS_2005_VERSION),
+
+ (types.TIME, [], {},
+ 'TIME', None),
+ (types.Time, [], {},
+ 'TIME', None),
+ (mssql.MSTime, [], {},
+ 'TIME', None),
+ (mssql.MSTime, [1], {},
+ 'TIME(1)', None),
+ (types.Time, [], {},
+ 'DATETIME', MS_2005_VERSION),
+ (mssql.MSTime, [], {},
+ 'TIME', None),
+
+ (mssql.MSSmallDateTime, [], {},
+ 'SMALLDATETIME', None),
+
+ (mssql.MSDateTimeOffset, [], {},
+ 'DATETIMEOFFSET', None),
+ (mssql.MSDateTimeOffset, [1], {},
+ 'DATETIMEOFFSET(1)', None),
+
+ (mssql.MSDateTime2, [], {},
+ 'DATETIME2', None),
+ (mssql.MSDateTime2, [0], {},
+ 'DATETIME2(0)', None),
+ (mssql.MSDateTime2, [1], {},
+ 'DATETIME2(1)', None),
+
+ (mssql.MSTime, [0], {},
+ 'TIME(0)', None),
+
+ (mssql.MSDateTimeOffset, [0], {},
+ 'DATETIMEOFFSET(0)', None),
+
+ ]
+
+ metadata = MetaData()
+ table_args = ['test_mssql_dates', metadata]
+ for index, spec in enumerate(columns):
+ type_, args, kw, res, server_version = spec
+ table_args.append(
+ Column('c%s' % index, type_(*args, **kw), nullable=None))
+
+ date_table = Table(*table_args)
+ dialect = mssql.dialect()
+ dialect.server_version_info = MS_2008_VERSION
+ ms_2005_dialect = mssql.dialect()
+ ms_2005_dialect.server_version_info = MS_2005_VERSION
+ gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
+ gen2005 = ms_2005_dialect.ddl_compiler(
+ ms_2005_dialect, schema.CreateTable(date_table))
+
+ for col in date_table.c:
+ index = int(col.name[1:])
+ server_version = columns[index][4]
+ if not server_version:
+ testing.eq_(
+ gen.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
+ else:
+ testing.eq_(
+ gen2005.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
+
+ self.assert_(repr(col))
+
def test_large_type_deprecation(self):
d1 = mssql.dialect(deprecate_large_types=True)
d2 = mssql.dialect(deprecate_large_types=False)
@@ -313,9 +442,7 @@ class TypeRoundTripTest(
def teardown(self):
metadata.drop_all()
- @testing.fails_on_everything_except(
- 'mssql+pyodbc',
- 'this is some pyodbc-specific feature')
+ @testing.fails_on_everything_except('mssql+pyodbc')
def test_decimal_notation(self):
numeric_table = Table(
'numeric_table', metadata,
@@ -466,6 +593,8 @@ class TypeRoundTripTest(
(mssql.MSDateTime2, [], {},
'DATETIME2', ['>=', (10,)]),
+ (mssql.MSDateTime2, [0], {},
+ 'DATETIME2(0)', ['>=', (10,)]),
(mssql.MSDateTime2, [1], {},
'DATETIME2(1)', ['>=', (10,)]),
@@ -597,44 +726,37 @@ class TypeRoundTripTest(
def test_autoincrement(self):
Table(
'ai_1', metadata,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
Column(
- 'int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ 'int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_2', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_3', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_y', Integer, primary_key=True))
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True))
Table(
'ai_4', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_n2', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True),
+ Column('int_n2', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_5', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_6', metadata,
- Column('o1', String(1), DefaultClause('x'),
- primary_key=True),
- Column('int_y', Integer, primary_key=True))
+ Column('o1', String(1), DefaultClause('x'), primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True))
Table(
'ai_7', metadata,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True))
+ Column('int_y', Integer, autoincrement=True, primary_key=True))
Table(
'ai_8', metadata,
Column('o1', String(1), DefaultClause('x'),
@@ -650,13 +772,15 @@ class TypeRoundTripTest(
for name in table_names:
tbl = Table(name, mr, autoload=True)
tbl = metadata.tables[name]
- for c in tbl.c:
- if c.name.startswith('int_y'):
- assert c.autoincrement, name
- assert tbl._autoincrement_column is c, name
- elif c.name.startswith('int_n'):
- assert not c.autoincrement, name
- assert tbl._autoincrement_column is not c, name
+
+ # test that the flag itself reflects appropriately
+ for col in tbl.c:
+ if 'int_y' in col.name:
+ is_(col.autoincrement, True)
+ is_(tbl._autoincrement_column, col)
+ else:
+ eq_(col.autoincrement, 'auto')
+ is_not_(tbl._autoincrement_column, col)
# mxodbc can't handle scope_identity() with DEFAULT VALUES
@@ -712,7 +836,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
- global binary_table, MyPickleType
+ global MyPickleType
class MyPickleType(types.TypeDecorator):
impl = PickleType
@@ -727,9 +851,13 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
value.stuff = 'this is the right stuff'
return value
- binary_table = Table(
+ def teardown(self):
+ self.binary_table.drop(testing.db)
+
+ def _fixture(self, engine):
+ self.binary_table = binary_table = Table(
'binary_table',
- MetaData(testing.db),
+ MetaData(),
Column('primary_id', Integer, Sequence('binary_id_seq',
optional=True), primary_key=True),
Column('data', mssql.MSVarBinary(8000)),
@@ -739,51 +867,55 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
Column('pickled', PickleType),
Column('mypickle', MyPickleType),
)
- binary_table.create()
+ binary_table.create(engine)
+ return binary_table
- def teardown(self):
- binary_table.delete().execute()
+ def test_binary_legacy_types(self):
+ self._test_binary(False)
- @classmethod
- def teardown_class(cls):
- binary_table.drop()
+ @testing.only_on('mssql >= 11')
+ def test_binary_updated_types(self):
+ self._test_binary(True)
- def test_binary(self):
+ def test_binary_none_legacy_types(self):
+ self._test_binary_none(False)
+
+ @testing.only_on('mssql >= 11')
+ def test_binary_none_updated_types(self):
+ self._test_binary_none(True)
+
+ def _test_binary(self, deprecate_large_types):
testobj1 = pickleable.Foo('im foo 1')
testobj2 = pickleable.Foo('im foo 2')
testobj3 = pickleable.Foo('im foo 3')
- stream1 = self.load_stream('binary_data_one.dat')
- stream2 = self.load_stream('binary_data_two.dat')
- binary_table.insert().execute(
- primary_id=1,
- misc='binary_data_one.dat',
- data=stream1,
- data_image=stream1,
- data_slice=stream1[0:100],
- pickled=testobj1,
- mypickle=testobj3,
- )
- binary_table.insert().execute(
- primary_id=2,
- misc='binary_data_two.dat',
- data=stream2,
- data_image=stream2,
- data_slice=stream2[0:99],
- pickled=testobj2,
- )
+ stream1 = self._load_stream('binary_data_one.dat')
+ stream2 = self._load_stream('binary_data_two.dat')
+ engine = engines.testing_engine(
+ options={"deprecate_large_types": deprecate_large_types})
+
+ binary_table = self._fixture(engine)
+
+ with engine.connect() as conn:
+ conn.execute(
+ binary_table.insert(),
+ primary_id=1,
+ misc='binary_data_one.dat',
+ data=stream1,
+ data_image=stream1,
+ data_slice=stream1[0:100],
+ pickled=testobj1,
+ mypickle=testobj3,
+ )
+ conn.execute(
+ binary_table.insert(),
+ primary_id=2,
+ misc='binary_data_two.dat',
+ data=stream2,
+ data_image=stream2,
+ data_slice=stream2[0:99],
+ pickled=testobj2,
+ )
- # TODO: pyodbc does not seem to accept "None" for a VARBINARY
- # column (data=None). error: [Microsoft][ODBC SQL Server
- # Driver][SQL Server]Implicit conversion from data type varchar
- # to varbinary is not allowed. Use the CONVERT function to run
- # this query. (257) binary_table.insert().execute(primary_id=3,
- # misc='binary_data_two.dat', data=None, data_image=None,
- # data_slice=stream2[0:99], pickled=None)
-
- binary_table.insert().execute(
- primary_id=3,
- misc='binary_data_two.dat', data_image=None,
- data_slice=stream2[0:99], pickled=None)
for stmt in \
binary_table.select(order_by=binary_table.c.primary_id), \
text(
@@ -795,7 +927,8 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
data_slice=types.BINARY(100), pickled=PickleType,
mypickle=MyPickleType),
bind=testing.db):
- l = stmt.execute().fetchall()
+ with engine.connect() as conn:
+ l = conn.execute(stmt).fetchall()
eq_(list(stream1), list(l[0]['data']))
paddedstream = list(stream1[0:100])
paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
@@ -807,7 +940,48 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
eq_(testobj3.moredata, l[0]['mypickle'].moredata)
eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
- def load_stream(self, name, len=3000):
+ def _test_binary_none(self, deprecate_large_types):
+ engine = engines.testing_engine(
+ options={"deprecate_large_types": deprecate_large_types})
+
+ binary_table = self._fixture(engine)
+
+ stream2 = self._load_stream('binary_data_two.dat')
+
+ with engine.connect() as conn:
+ conn.execute(
+ binary_table.insert(),
+ primary_id=3,
+ misc='binary_data_two.dat', data_image=None,
+ data_slice=stream2[0:99], pickled=None)
+ for stmt in \
+ binary_table.select(), \
+ text(
+ 'select * from binary_table',
+ typemap=dict(
+ data=mssql.MSVarBinary(8000),
+ data_image=mssql.MSImage,
+ data_slice=types.BINARY(100),
+ pickled=PickleType,
+ mypickle=MyPickleType),
+ bind=testing.db):
+ row = conn.execute(stmt).first()
+ eq_(
+ row['pickled'], None
+ )
+ eq_(
+ row['data_image'], None
+ )
+
+ # the type we used here is 100 bytes
+ # so we will get 100 bytes zero-padded
+ paddedstream = list(stream2[0:99])
+ paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
+ eq_(
+ list(row['data_slice']), paddedstream
+ )
+
+ def _load_stream(self, name, len=3000):
fp = open(
os.path.join(os.path.dirname(__file__), "..", "..", name), 'rb')
stream = fp.read(len)
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
index 304c31012..0571ce526 100644
--- a/test/dialect/mysql/test_compiler.py
+++ b/test/dialect/mysql/test_compiler.py
@@ -184,6 +184,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
schema.CreateTable(t2).compile, dialect=mysql.dialect()
)
+ def test_match(self):
+ matchtable = table('matchtable', column('title', String))
+ self.assert_compile(
+ matchtable.c.title.match('somstr'),
+ "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)")
+
def test_for_update(self):
table1 = table('mytable',
column('myid'), column('name'), column('description'))
@@ -511,9 +517,8 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(schema.CreateTable(t1),
'CREATE TABLE sometable (assigned_id '
'INTEGER NOT NULL, id INTEGER NOT NULL '
- 'AUTO_INCREMENT, PRIMARY KEY (assigned_id, '
- 'id), KEY idx_autoinc_id (id))ENGINE=Inn'
- 'oDB')
+ 'AUTO_INCREMENT, PRIMARY KEY (id, assigned_id)'
+ ')ENGINE=InnoDB')
t1 = Table('sometable', MetaData(),
Column('assigned_id', Integer(), primary_key=True,
@@ -537,8 +542,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
'CREATE TABLE sometable ('
'id INTEGER NOT NULL, '
'`order` INTEGER NOT NULL AUTO_INCREMENT, '
- 'PRIMARY KEY (id, `order`), '
- 'KEY idx_autoinc_order (`order`)'
+ 'PRIMARY KEY (`order`, id)'
')ENGINE=InnoDB')
def test_create_table_with_partition(self):
diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py
index 03f4c494a..1288b50d7 100644
--- a/test/dialect/mysql/test_dialect.py
+++ b/test/dialect/mysql/test_dialect.py
@@ -8,7 +8,10 @@ from sqlalchemy import testing
from sqlalchemy.testing import engines
import datetime
+
class DialectTest(fixtures.TestBase):
+ __backend__ = True
+
def test_ssl_arguments_mysqldb(self):
from sqlalchemy.dialects.mysql import mysqldb
dialect = mysqldb.dialect()
@@ -83,6 +86,17 @@ class DialectTest(fixtures.TestBase):
)[1]
eq_(kw['foo'], "true")
+ @testing.only_on('mysql')
+ @testing.skip_if('mysql+mysqlconnector', "totally broken for the moment")
+ @testing.fails_on('mysql+oursql', "unsupported")
+ def test_special_encodings(self):
+
+ for enc in ['utf8mb4', 'utf8']:
+ eng = engines.testing_engine(
+ options={"connect_args": {'charset': enc, 'use_unicode': 0}})
+ conn = eng.connect()
+ eq_(conn.dialect._connection_charset, enc)
+
class SQLModeDetectionTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
index f19177c2a..c6b7a1036 100644
--- a/test/dialect/mysql/test_query.py
+++ b/test/dialect/mysql/test_query.py
@@ -2,11 +2,11 @@
from sqlalchemy.testing import eq_, is_
from sqlalchemy import *
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy.testing import fixtures
from sqlalchemy import testing
-class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
+class IdiosyncrasyTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
@@ -28,7 +28,7 @@ class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
)
-class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
+class MatchTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
@@ -76,25 +76,6 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
def teardown_class(cls):
metadata.drop_all()
- @testing.fails_on('mysql+mysqlconnector', 'uses pyformat')
- def test_expression_format(self):
- format = testing.db.dialect.paramstyle == 'format' and '%s' or '?'
- self.assert_compile(
- matchtable.c.title.match('somstr'),
- "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
- @testing.fails_on('mysql+mysqldb', 'uses format')
- @testing.fails_on('mysql+pymysql', 'uses format')
- @testing.fails_on('mysql+cymysql', 'uses format')
- @testing.fails_on('mysql+oursql', 'uses format')
- @testing.fails_on('mysql+pyodbc', 'uses format')
- @testing.fails_on('mysql+zxjdbc', 'uses format')
- def test_expression_pyformat(self):
- format = '%(title_1)s'
- self.assert_compile(
- matchtable.c.title.match('somstr'),
- "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
def test_simple_match(self):
results = (matchtable.select().
where(matchtable.c.title.match('python')).
@@ -177,3 +158,57 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
eq_([1, 3, 5], [r.id for r in results])
+class AnyAllTest(fixtures.TablesTest):
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'stuff', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('value', Integer)
+ )
+
+ @classmethod
+ def insert_data(cls):
+ stuff = cls.tables.stuff
+ testing.db.execute(
+ stuff.insert(),
+ [
+ {'id': 1, 'value': 1},
+ {'id': 2, 'value': 2},
+ {'id': 3, 'value': 3},
+ {'id': 4, 'value': 4},
+ {'id': 5, 'value': 5},
+ ]
+ )
+
+ def test_any_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value > any_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(2,), (3,), (4,), (5,)]
+ )
+
+ def test_all_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value >= all_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(5,)]
+ )
+
+ def test_any_literal(self):
+ stuff = self.tables.stuff
+ stmt = select([4 == any_(select([stuff.c.value]))])
+
+ is_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index 39b39e006..44880c36b 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -1,13 +1,196 @@
# coding: utf-8
-from sqlalchemy.testing import eq_
-from sqlalchemy import *
+from sqlalchemy.testing import eq_, is_
+from sqlalchemy import Column, Table, DDL, MetaData, TIMESTAMP, \
+ DefaultClause, String, Integer, Text, UnicodeText, SmallInteger,\
+ NCHAR, LargeBinary, DateTime, select, UniqueConstraint, Unicode,\
+ BigInteger
+from sqlalchemy import event
from sqlalchemy import sql
+from sqlalchemy import inspect
from sqlalchemy.dialects.mysql import base as mysql
+from sqlalchemy.dialects.mysql import reflection as _reflection
from sqlalchemy.testing import fixtures, AssertsExecutionResults
from sqlalchemy import testing
+class TypeReflectionTest(fixtures.TestBase):
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @testing.provide_metadata
+ def _run_test(self, specs, attributes):
+ columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
+
+ # Early 5.0 releases seem to report more "general" for columns
+ # in a view, e.g. char -> varchar, tinyblob -> mediumblob
+ use_views = testing.db.dialect.server_version_info > (5, 0, 10)
+
+ m = self.metadata
+ Table('mysql_types', m, *columns)
+
+ if use_views:
+ event.listen(
+ m, 'after_create',
+ DDL(
+ 'CREATE OR REPLACE VIEW mysql_types_v '
+ 'AS SELECT * from mysql_types')
+ )
+ event.listen(
+ m, 'before_drop',
+ DDL("DROP VIEW IF EXISTS mysql_types_v")
+ )
+ m.create_all()
+
+ m2 = MetaData(testing.db)
+ tables = [
+ Table('mysql_types', m2, autoload=True)
+ ]
+ if use_views:
+ tables.append(Table('mysql_types_v', m2, autoload=True))
+
+ for table in tables:
+ for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
+ expected_spec = spec[1]
+ reflected_type = reflected_col.type
+ is_(type(reflected_type), type(expected_spec))
+
+ for attr in attributes:
+ eq_(
+ getattr(reflected_type, attr),
+ getattr(expected_spec, attr),
+ "Column %s: Attribute %s value of %s does not "
+ "match %s for type %s" % (
+ "c%i" % (i + 1),
+ attr,
+ getattr(reflected_type, attr),
+ getattr(expected_spec, attr),
+ spec[0]
+ )
+ )
+
+ def test_time_types(self):
+ specs = []
+
+ if testing.requires.mysql_fsp.enabled:
+ fsps = [None, 0, 5]
+ else:
+ fsps = [None]
+
+ for type_ in (mysql.TIMESTAMP, mysql.DATETIME, mysql.TIME):
+ # MySQL defaults fsp to 0, and if 0 does not report it.
+ # we don't actually render 0 right now in DDL but even if we do,
+ # it comes back blank
+ for fsp in fsps:
+ if fsp:
+ specs.append((type_(fsp=fsp), type_(fsp=fsp)))
+ else:
+ specs.append((type_(), type_()))
+
+ specs.extend([
+ (TIMESTAMP(), mysql.TIMESTAMP()),
+ (DateTime(), mysql.DATETIME()),
+ ])
+
+ # note 'timezone' should always be None on both
+ self._run_test(specs, ['fsp', 'timezone'])
+
+ def test_year_types(self):
+ specs = [
+ (mysql.YEAR(), mysql.YEAR(display_width=4)),
+ (mysql.YEAR(display_width=2), mysql.YEAR(display_width=2)),
+ (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
+ ]
+
+ self._run_test(specs, ['display_width'])
+
+ def test_string_types(self):
+ specs = [
+ (String(1), mysql.MSString(1)),
+ (String(3), mysql.MSString(3)),
+ (Text(), mysql.MSText()),
+ (Unicode(1), mysql.MSString(1)),
+ (Unicode(3), mysql.MSString(3)),
+ (UnicodeText(), mysql.MSText()),
+ (mysql.MSChar(1), mysql.MSChar(1)),
+ (mysql.MSChar(3), mysql.MSChar(3)),
+ (NCHAR(2), mysql.MSChar(2)),
+ (mysql.MSNChar(2), mysql.MSChar(2)),
+ (mysql.MSNVarChar(22), mysql.MSString(22),),
+ ]
+ self._run_test(specs, ['length'])
+
+ def test_integer_types(self):
+ specs = []
+ for type_ in [
+ mysql.TINYINT, mysql.SMALLINT,
+ mysql.MEDIUMINT, mysql.INTEGER, mysql.BIGINT]:
+ for display_width in [None, 4, 7]:
+ for unsigned in [False, True]:
+ for zerofill in [None, True]:
+ kw = {}
+ if display_width:
+ kw['display_width'] = display_width
+ if unsigned is not None:
+ kw['unsigned'] = unsigned
+ if zerofill is not None:
+ kw['zerofill'] = zerofill
+
+ zerofill = bool(zerofill)
+ source_type = type_(**kw)
+
+ if display_width is None:
+ display_width = {
+ mysql.MEDIUMINT: 9,
+ mysql.SMALLINT: 6,
+ mysql.TINYINT: 4,
+ mysql.INTEGER: 11,
+ mysql.BIGINT: 20
+ }[type_]
+
+ if zerofill:
+ unsigned = True
+
+ expected_type = type_(
+ display_width=display_width,
+ unsigned=unsigned,
+ zerofill=zerofill
+ )
+ specs.append(
+ (source_type, expected_type)
+ )
+
+ specs.extend([
+ (SmallInteger(), mysql.SMALLINT(display_width=6)),
+ (Integer(), mysql.INTEGER(display_width=11)),
+ (BigInteger, mysql.BIGINT(display_width=20))
+ ])
+ self._run_test(specs, ['display_width', 'unsigned', 'zerofill'])
+
+ def test_binary_types(self):
+ specs = [
+ (LargeBinary(3), mysql.TINYBLOB(), ),
+ (LargeBinary(), mysql.BLOB()),
+ (mysql.MSBinary(3), mysql.MSBinary(3), ),
+ (mysql.MSVarBinary(3), mysql.MSVarBinary(3)),
+ (mysql.MSTinyBlob(), mysql.MSTinyBlob()),
+ (mysql.MSBlob(), mysql.MSBlob()),
+ (mysql.MSBlob(1234), mysql.MSBlob()),
+ (mysql.MSMediumBlob(), mysql.MSMediumBlob()),
+ (mysql.MSLongBlob(), mysql.MSLongBlob()),
+ ]
+ self._run_test(specs, [])
+
+ @testing.uses_deprecated('Manually quoting ENUM value literals')
+ def test_legacy_enum_types(self):
+
+ specs = [
+ (mysql.ENUM("''","'fleem'"), mysql.ENUM("''","'fleem'")), # noqa
+ ]
+
+ self._run_test(specs, ['enums'])
+
+
class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'mysql'
@@ -75,7 +258,8 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
def test_reflection_with_table_options(self):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
- def_table = Table('mysql_def', MetaData(testing.db),
+ def_table = Table(
+ 'mysql_def', MetaData(testing.db),
Column('c1', Integer()),
mysql_engine='MEMORY',
mysql_comment=comment,
@@ -88,8 +272,9 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
def_table.create()
try:
- reflected = Table('mysql_def', MetaData(testing.db),
- autoload=True)
+ reflected = Table(
+ 'mysql_def', MetaData(testing.db),
+ autoload=True)
finally:
def_table.drop()
@@ -108,15 +293,16 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
assert reflected.kwargs['mysql_connection'] == 'fish'
# This field doesn't seem to be returned by mysql itself.
- #assert reflected.kwargs['mysql_password'] == 'secret'
+ # assert reflected.kwargs['mysql_password'] == 'secret'
# This is explicitly ignored when reflecting schema.
- #assert reflected.kwargs['mysql_auto_increment'] == '5'
+ # assert reflected.kwargs['mysql_auto_increment'] == '5'
def test_reflection_on_include_columns(self):
"""Test reflection of include_columns to be sure they respect case."""
- case_table = Table('mysql_case', MetaData(testing.db),
+ case_table = Table(
+ 'mysql_case', MetaData(testing.db),
Column('c1', String(10)),
Column('C2', String(10)),
Column('C3', String(10)))
@@ -128,132 +314,68 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
for t in case_table, reflected:
assert 'c1' in t.c.keys()
assert 'C2' in t.c.keys()
- reflected2 = Table('mysql_case', MetaData(testing.db),
- autoload=True, include_columns=['c1', 'c2'])
+ reflected2 = Table(
+ 'mysql_case', MetaData(testing.db),
+ autoload=True, include_columns=['c1', 'c2'])
assert 'c1' in reflected2.c.keys()
for c in ['c2', 'C2', 'C3']:
assert c not in reflected2.c.keys()
finally:
case_table.drop()
- @testing.exclude('mysql', '<', (5, 0, 0), 'early types are squirrely')
- @testing.uses_deprecated('Using String type with no length')
- @testing.uses_deprecated('Manually quoting ENUM value literals')
- def test_type_reflection(self):
- # (ask_for, roundtripped_as_if_different)
- specs = [(String(1), mysql.MSString(1), ),
- (String(3), mysql.MSString(3), ),
- (Text(), mysql.MSText(), ),
- (Unicode(1), mysql.MSString(1), ),
- (Unicode(3), mysql.MSString(3), ),
- (UnicodeText(), mysql.MSText(), ),
- (mysql.MSChar(1), ),
- (mysql.MSChar(3), ),
- (NCHAR(2), mysql.MSChar(2), ),
- (mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
- (mysql.MSNVarChar(22), mysql.MSString(22), ),
- (SmallInteger(), mysql.MSSmallInteger(), ),
- (SmallInteger(), mysql.MSSmallInteger(4), ),
- (mysql.MSSmallInteger(), ),
- (mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
- (mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
- (mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
- (LargeBinary(3), mysql.TINYBLOB(), ),
- (LargeBinary(), mysql.BLOB() ),
- (mysql.MSBinary(3), mysql.MSBinary(3), ),
- (mysql.MSVarBinary(3),),
- (mysql.MSTinyBlob(),),
- (mysql.MSBlob(),),
- (mysql.MSBlob(1234), mysql.MSBlob()),
- (mysql.MSMediumBlob(),),
- (mysql.MSLongBlob(),),
- (mysql.ENUM("''","'fleem'"), ),
- ]
-
- columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
-
- db = testing.db
- m = MetaData(db)
- t_table = Table('mysql_types', m, *columns)
- try:
- m.create_all()
-
- m2 = MetaData(db)
- rt = Table('mysql_types', m2, autoload=True)
- try:
- db.execute('CREATE OR REPLACE VIEW mysql_types_v '
- 'AS SELECT * from mysql_types')
- rv = Table('mysql_types_v', m2, autoload=True)
-
- expected = [len(c) > 1 and c[1] or c[0] for c in specs]
-
- # Early 5.0 releases seem to report more "general" for columns
- # in a view, e.g. char -> varchar, tinyblob -> mediumblob
- #
- # Not sure exactly which point version has the fix.
- if db.dialect.server_version_info < (5, 0, 11):
- tables = rt,
- else:
- tables = rt, rv
-
- for table in tables:
- for i, reflected in enumerate(table.c):
- assert isinstance(reflected.type,
- type(expected[i])), \
- 'element %d: %r not instance of %r' % (i,
- reflected.type, type(expected[i]))
- finally:
- db.execute('DROP VIEW mysql_types_v')
- finally:
- m.drop_all()
-
def test_autoincrement(self):
meta = MetaData(testing.db)
try:
Table('ai_1', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_2', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_3', meta,
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_4', meta,
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
Column('int_n2', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_5', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_6', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_7', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_8', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
meta.create_all()
table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
@@ -276,6 +398,37 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
finally:
meta.drop_all()
+ @testing.provide_metadata
+ def test_view_reflection(self):
+ Table('x', self.metadata, Column('a', Integer), Column('b', String(50)))
+ self.metadata.create_all()
+
+ with testing.db.connect() as conn:
+ conn.execute("CREATE VIEW v1 AS SELECT * FROM x")
+ conn.execute(
+ "CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x")
+ conn.execute(
+ "CREATE ALGORITHM=UNDEFINED VIEW v3 AS SELECT * FROM x")
+ conn.execute(
+ "CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x")
+
+ @event.listens_for(self.metadata, "before_drop")
+ def cleanup(*arg, **kw):
+ with testing.db.connect() as conn:
+ for v in ['v1', 'v2', 'v3', 'v4']:
+ conn.execute("DROP VIEW %s" % v)
+
+ insp = inspect(testing.db)
+ for v in ['v1', 'v2', 'v3', 'v4']:
+ eq_(
+ [
+ (col['name'], col['type'].__class__)
+ for col in insp.get_columns(v)
+ ],
+ [('a', mysql.INTEGER), ('b', mysql.VARCHAR)]
+ )
+
+
@testing.exclude('mysql', '<', (5, 0, 0), 'no information_schema support')
def test_system_views(self):
dialect = testing.db.dialect
@@ -309,7 +462,7 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
["t TIMESTAMP"],
["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"]
]):
- Table("nn_t%d" % idx, meta) # to allow DROP
+ Table("nn_t%d" % idx, meta) # to allow DROP
testing.db.execute("""
CREATE TABLE nn_t%d (
@@ -380,7 +533,8 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
class RawReflectionTest(fixtures.TestBase):
def setup(self):
dialect = mysql.dialect()
- self.parser = mysql.MySQLTableDefinitionParser(dialect, dialect.identifier_preparer)
+ self.parser = _reflection.MySQLTableDefinitionParser(
+ dialect, dialect.identifier_preparer)
def test_key_reflection(self):
regex = self.parser._re_key
@@ -391,10 +545,14 @@ class RawReflectionTest(fixtures.TestBase):
assert regex.match(' PRIMARY KEY (`id`)')
assert regex.match(' PRIMARY KEY USING BTREE (`id`)')
assert regex.match(' PRIMARY KEY (`id`) USING BTREE')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE 16')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE=16')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = 16')
- assert not regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = = 16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE 16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE=16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = 16')
+ assert not regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = = 16')
def test_fk_reflection(self):
regex = self.parser._re_constraint
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 7c279ffbf..e570e0db1 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -1,6 +1,6 @@
# coding: utf-8
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, is_
from sqlalchemy import *
from sqlalchemy import sql, exc, schema
from sqlalchemy.util import u
@@ -10,6 +10,7 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionRes
from sqlalchemy import testing
import datetime
import decimal
+from sqlalchemy import types as sqltypes
class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -602,6 +603,49 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(colspec(table.c.y5), 'y5 YEAR(4)')
+class JSONTest(fixtures.TestBase):
+ __requires__ = ('json_type', )
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @testing.provide_metadata
+ def test_reflection(self):
+
+ Table(
+ 'mysql_json', self.metadata,
+ Column('foo', mysql.JSON)
+ )
+ self.metadata.create_all()
+
+ reflected = Table('mysql_json', MetaData(), autoload_with=testing.db)
+ is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
+ assert isinstance(reflected.c.foo.type, mysql.JSON)
+
+ @testing.provide_metadata
+ def test_rudimental_round_trip(self):
+ # note that test_suite has many more JSON round trip tests
+ # using the backend-agnostic JSON type
+
+ mysql_json = Table(
+ 'mysql_json', self.metadata,
+ Column('foo', mysql.JSON)
+ )
+ self.metadata.create_all()
+
+ value = {
+ 'json': {'foo': 'bar'},
+ 'recs': ['one', 'two']
+ }
+
+ with testing.db.connect() as conn:
+ conn.execute(mysql_json.insert(), foo=value)
+
+ eq_(
+ conn.scalar(select([mysql_json.c.foo])),
+ value
+ )
+
+
class EnumSetTest(
fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -932,12 +976,12 @@ class EnumSetTest(
eq_(
t2.c.value.type.enums[0:2],
- (u('réveillé'), u('drôle')) # u'S’il') # eh ?
+ [u('réveillé'), u('drôle')] # u'S’il') # eh ?
)
eq_(
t2.c.value2.type.enums[0:2],
- (u('réveillé'), u('drôle')) # u'S’il') # eh ?
+ [u('réveillé'), u('drôle')] # u'S’il') # eh ?
)
def test_enum_compile(self):
@@ -975,13 +1019,13 @@ class EnumSetTest(
reflected = Table('mysql_enum', MetaData(testing.db),
autoload=True)
for t in enum_table, reflected:
- eq_(t.c.e1.type.enums, ("a",))
- eq_(t.c.e2.type.enums, ("",))
- eq_(t.c.e3.type.enums, ("a",))
- eq_(t.c.e4.type.enums, ("",))
- eq_(t.c.e5.type.enums, ("a", ""))
- eq_(t.c.e6.type.enums, ("", "a"))
- eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
+ eq_(t.c.e1.type.enums, ["a"])
+ eq_(t.c.e2.type.enums, [""])
+ eq_(t.c.e3.type.enums, ["a"])
+ eq_(t.c.e4.type.enums, [""])
+ eq_(t.c.e5.type.enums, ["a", ""])
+ eq_(t.c.e6.type.enums, ["", "a"])
+ eq_(t.c.e7.type.enums, ["", "'a'", "b'b", "'"])
@testing.provide_metadata
@testing.exclude('mysql', '<', (5,))
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index 5717df9f7..87e48d3f2 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -5,14 +5,17 @@ from sqlalchemy.testing.assertions import AssertsCompiledSQL, is_, \
from sqlalchemy.testing import engines, fixtures
from sqlalchemy import testing
from sqlalchemy import Sequence, Table, Column, Integer, update, String,\
- insert, func, MetaData, Enum, Index, and_, delete, select, cast, text
+ insert, func, MetaData, Enum, Index, and_, delete, select, cast, text, \
+ Text
from sqlalchemy.dialects.postgresql import ExcludeConstraint, array
from sqlalchemy import exc, schema
-from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import TSRANGE
from sqlalchemy.orm import mapper, aliased, Session
-from sqlalchemy.sql import table, column, operators
+from sqlalchemy.sql import table, column, operators, literal_column
+from sqlalchemy.sql import util as sql_util
from sqlalchemy.util import u
+from sqlalchemy.dialects.postgresql import aggregate_order_by
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -20,7 +23,7 @@ class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_format(self):
seq = Sequence('my_seq_no_schema')
- dialect = postgresql.PGDialect()
+ dialect = postgresql.dialect()
assert dialect.identifier_preparer.format_sequence(seq) \
== 'my_seq_no_schema'
seq = Sequence('my_seq', schema='some_schema')
@@ -166,6 +169,24 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"VARCHAR(1), CHECK (somecolumn IN ('x', "
"'y', 'z')))")
+ def test_create_type_schema_translate(self):
+ e1 = Enum('x', 'y', 'z', name='somename')
+ e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
+ schema_translate_map = {None: "foo", "someschema": "bar"}
+
+ self.assert_compile(
+ postgresql.CreateEnumType(e1),
+ "CREATE TYPE foo.somename AS ENUM ('x', 'y', 'z')",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ postgresql.CreateEnumType(e2),
+ "CREATE TYPE bar.somename AS ENUM ('x', 'y', 'z')",
+ schema_translate_map=schema_translate_map
+ )
+
+
def test_create_table_with_tablespace(self):
m = MetaData()
tbl = Table(
@@ -369,6 +390,28 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'USING hash (data)',
dialect=postgresql.dialect())
+ def test_create_index_with_with(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', String))
+
+ idx1 = Index('test_idx1', tbl.c.data)
+ idx2 = Index(
+ 'test_idx2', tbl.c.data, postgresql_with={"fillfactor": 50})
+ idx3 = Index('test_idx3', tbl.c.data, postgresql_using="gist",
+ postgresql_with={"buffering": "off"})
+
+ self.assert_compile(schema.CreateIndex(idx1),
+ 'CREATE INDEX test_idx1 ON testtbl '
+ '(data)')
+ self.assert_compile(schema.CreateIndex(idx2),
+ 'CREATE INDEX test_idx2 ON testtbl '
+ '(data) '
+ 'WITH (fillfactor = 50)')
+ self.assert_compile(schema.CreateIndex(idx3),
+ 'CREATE INDEX test_idx3 ON testtbl '
+ 'USING gist (data) '
+ 'WITH (buffering = off)')
+
def test_create_index_expr_gets_parens(self):
m = MetaData()
tbl = Table('testtbl', m, Column('x', Integer), Column('y', Integer))
@@ -389,6 +432,16 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE INDEX test_idx1 ON testtbl ((data + 5))"
)
+ def test_create_index_concurrently(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', Integer))
+
+ idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True)
+ self.assert_compile(
+ schema.CreateIndex(idx1),
+ "CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data)"
+ )
+
def test_exclude_constraint_min(self):
m = MetaData()
tbl = Table('testtbl', m,
@@ -433,7 +486,59 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
tbl.append_constraint(cons_copy)
self.assert_compile(schema.AddConstraint(cons_copy),
'ALTER TABLE testtbl ADD EXCLUDE USING gist '
- '(room WITH =)',
+ '(room WITH =)')
+
+ def test_exclude_constraint_text(self):
+ m = MetaData()
+ cons = ExcludeConstraint((text('room::TEXT'), '='))
+ Table(
+ 'testtbl', m,
+ Column('room', String),
+ cons)
+ self.assert_compile(
+ schema.AddConstraint(cons),
+ 'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+ '(room::TEXT WITH =)')
+
+ def test_exclude_constraint_cast(self):
+ m = MetaData()
+ tbl = Table(
+ 'testtbl', m,
+ Column('room', String)
+ )
+ cons = ExcludeConstraint((cast(tbl.c.room, Text), '='))
+ tbl.append_constraint(cons)
+ self.assert_compile(
+ schema.AddConstraint(cons),
+ 'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+ '(CAST(room AS TEXT) WITH =)'
+ )
+
+ def test_exclude_constraint_cast_quote(self):
+ m = MetaData()
+ tbl = Table(
+ 'testtbl', m,
+ Column('Room', String)
+ )
+ cons = ExcludeConstraint((cast(tbl.c.Room, Text), '='))
+ tbl.append_constraint(cons)
+ self.assert_compile(
+ schema.AddConstraint(cons),
+ 'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+ '(CAST("Room" AS TEXT) WITH =)'
+ )
+
+ def test_exclude_constraint_when(self):
+ m = MetaData()
+ tbl = Table(
+ 'testtbl', m,
+ Column('room', String)
+ )
+ cons = ExcludeConstraint(('room', '='), where=tbl.c.room.in_(['12']))
+ tbl.append_constraint(cons)
+ self.assert_compile(schema.AddConstraint(cons),
+ 'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+ '(room WITH =) WHERE (testtbl.room IN (\'12\'))',
dialect=postgresql.dialect())
def test_substring(self):
@@ -506,6 +611,22 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE mytable_1.myid = %(myid_1)s FOR UPDATE OF mytable_1"
)
+ def test_for_update_with_schema(self):
+ m = MetaData()
+ table1 = Table(
+ 'mytable', m,
+ Column('myid'),
+ Column('name'),
+ schema='testschema'
+ )
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(of=table1),
+ "SELECT testschema.mytable.myid, testschema.mytable.name "
+ "FROM testschema.mytable "
+ "WHERE testschema.mytable.myid = %(myid_1)s "
+ "FOR UPDATE OF mytable")
+
def test_reserved_words(self):
table = Table("pg_table", MetaData(),
Column("col1", Integer),
@@ -621,7 +742,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self._test_array_zero_indexes(False)
def test_array_literal_type(self):
- is_(postgresql.array([1, 2]).type._type_affinity, postgresql.ARRAY)
+ isinstance(postgresql.array([1, 2]).type, postgresql.ARRAY)
is_(postgresql.array([1, 2]).type.item_type._type_affinity, Integer)
is_(postgresql.array([1, 2], type_=String).
@@ -728,6 +849,48 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=postgresql.dialect()
)
+ def test_aggregate_order_by_one(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ # note this tests that the object exports FROM objects
+ # correctly
+ self.assert_compile(
+ stmt,
+ "SELECT array_agg(table1.a ORDER BY table1.b DESC) "
+ "AS array_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_two(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.string_agg(
+ table.c.a,
+ aggregate_order_by(literal_column("','"), table.c.a)
+ )
+ stmt = select([expr])
+
+ self.assert_compile(
+ stmt,
+ "SELECT string_agg(table1.a, ',' ORDER BY table1.a) "
+ "AS string_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_adapt(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ a1 = table.alias('foo')
+ stmt2 = sql_util.ClauseAdapter(a1).traverse(stmt)
+ self.assert_compile(
+ stmt2,
+ "SELECT array_agg(foo.a ORDER BY foo.b DESC) AS array_agg_1 FROM table1 AS foo"
+ )
+
class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py
index 9f86aaa7a..c0e1819d6 100644
--- a/test/dialect/postgresql/test_dialect.py
+++ b/test/dialect/postgresql/test_dialect.py
@@ -14,6 +14,10 @@ from sqlalchemy.dialects.postgresql import base as postgresql
import logging
import logging.handlers
from sqlalchemy.testing.mock import Mock
+from sqlalchemy.engine import engine_from_config
+from sqlalchemy.engine import url
+from sqlalchemy.testing import is_
+from sqlalchemy.testing import expect_deprecated
class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -59,26 +63,36 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(testing.db.dialect._get_server_version_info(mock_conn(string)),
version)
- @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+ @testing.requires.psycopg2_compatibility
def test_psycopg2_version(self):
v = testing.db.dialect.psycopg2_version
assert testing.db.dialect.dbapi.__version__.\
startswith(".".join(str(x) for x in v))
- @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+ @testing.requires.psycopg2_compatibility
def test_psycopg2_non_standard_err(self):
- from psycopg2.extensions import TransactionRollbackError
- import psycopg2
+ # under pypy the name here is psycopg2cffi
+ psycopg2 = testing.db.dialect.dbapi
+ TransactionRollbackError = __import__(
+ "%s.extensions" % psycopg2.__name__
+ ).extensions.TransactionRollbackError
exception = exc.DBAPIError.instance(
"some statement", {}, TransactionRollbackError("foo"),
psycopg2.Error)
assert isinstance(exception, exc.OperationalError)
+ def test_deprecated_dialect_name_still_loads(self):
+ with expect_deprecated(
+ "The 'postgres' dialect name "
+ "has been renamed to 'postgresql'"):
+ dialect = url.URL("postgres").get_dialect()
+ is_(dialect, postgresql.dialect)
+
# currently not passing with pg 9.3 that does not seem to generate
# any notices here, would rather find a way to mock this
@testing.requires.no_coverage
- @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+ @testing.requires.psycopg2_compatibility
def _test_notice_logging(self):
log = logging.getLogger('sqlalchemy.dialects.postgresql')
buf = logging.handlers.BufferingHandler(100)
@@ -99,11 +113,11 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert 'will create implicit sequence' in msgs
assert 'will create implicit index' in msgs
- @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+ @testing.requires.psycopg2_or_pg8000_compatibility
@engines.close_open_connections
def test_client_encoding(self):
c = testing.db.connect()
- current_encoding = c.connection.connection.encoding
+ current_encoding = c.execute("show client_encoding").fetchone()[0]
c.close()
# attempt to use an encoding that's not
@@ -115,12 +129,26 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
e = engines.testing_engine(options={'client_encoding': test_encoding})
c = e.connect()
- eq_(c.connection.connection.encoding, test_encoding)
+ new_encoding = c.execute("show client_encoding").fetchone()[0]
+ eq_(new_encoding, test_encoding)
+
+ @testing.requires.psycopg2_compatibility
+ def test_pg_dialect_use_native_unicode_from_config(self):
+ config = {
+ 'sqlalchemy.url': testing.db.url,
+ 'sqlalchemy.use_native_unicode': "false"}
+
+ e = engine_from_config(config, _initialize=False)
+ eq_(e.dialect.use_native_unicode, False)
+
+ config = {
+ 'sqlalchemy.url': testing.db.url,
+ 'sqlalchemy.use_native_unicode': "true"}
+
+ e = engine_from_config(config, _initialize=False)
+ eq_(e.dialect.use_native_unicode, True)
- @testing.only_on(
- ['postgresql+psycopg2', 'postgresql+pg8000',
- 'postgresql+psycopg2cffi'],
- 'psycopg2 / pg8000 - specific feature')
+ @testing.requires.psycopg2_or_pg8000_compatibility
@engines.close_open_connections
def test_autocommit_isolation_level(self):
c = testing.db.connect().execution_options(
@@ -214,8 +242,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
testing.db.execute('drop table speedy_users')
@testing.fails_on('+zxjdbc', 'psycopg2/pg8000 specific assertion')
- @testing.fails_on('pypostgresql',
- 'psycopg2/pg8000 specific assertion')
+ @testing.requires.psycopg2_or_pg8000_compatibility
def test_numeric_raise(self):
stmt = text(
"select cast('hi' as char) as hi", typemap={'hi': Numeric})
diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py
index 27cb958fd..9f92a7830 100644
--- a/test/dialect/postgresql/test_query.py
+++ b/test/dialect/postgresql/test_query.py
@@ -12,7 +12,7 @@ from sqlalchemy import exc
from sqlalchemy.dialects import postgresql
import datetime
-metadata = matchtable = cattable = None
+matchtable = cattable = None
class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@@ -22,23 +22,19 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
- global metadata
- cls.engine = testing.db
- metadata = MetaData(testing.db)
+ cls.metadata = MetaData(testing.db)
def teardown(self):
- metadata.drop_all()
- metadata.clear()
- if self.engine is not testing.db:
- self.engine.dispose()
+ self.metadata.drop_all()
+ self.metadata.clear()
def test_compiled_insert(self):
table = Table(
- 'testtable', metadata, Column(
+ 'testtable', self.metadata, Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
ins = table.insert(
inline=True,
values={'data': bindparam('x')}).compile()
@@ -49,17 +45,18 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
)
def test_foreignkey_missing_insert(self):
- t1 = Table('t1', metadata, Column('id', Integer,
- primary_key=True))
+ Table(
+ 't1', self.metadata,
+ Column('id', Integer, primary_key=True))
t2 = Table(
't2',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
ForeignKey('t1.id'),
primary_key=True))
- metadata.create_all()
+ self.metadata.create_all()
# want to ensure that "null value in column "id" violates not-
# null constraint" is raised (IntegrityError on psycoopg2, but
@@ -72,14 +69,16 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
engines.testing_engine(options={'implicit_returning': False}),
engines.testing_engine(options={'implicit_returning': True})
]:
- assert_raises_message(exc.DBAPIError,
- 'violates not-null constraint',
- eng.execute, t2.insert())
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ eng.execute, t2.insert()
+ )
def test_sequence_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -88,14 +87,14 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_with_sequence(table, 'my_seq')
@testing.requires.returning
def test_sequence_returning_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -104,57 +103,57 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_with_sequence_returning(table, 'my_seq')
def test_opt_sequence_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, Sequence(
'my_seq', optional=True), primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement(table)
@testing.requires.returning
def test_opt_sequence_returning_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, Sequence(
'my_seq', optional=True), primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_autoincrement_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement(table)
@testing.requires.returning
def test_autoincrement_returning_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_noautoincrement_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -163,42 +162,45 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_noautoincrement(table)
def _assert_data_autoincrement(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
+ with self.sql_execution_asserter(engine) as asserter:
- # execute with explicit id
+ with engine.connect() as conn:
+ # execute with explicit id
- r = table.insert().execute({'id': 30, 'data': 'd1'})
- assert r.inserted_primary_key == [30]
+ r = conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ eq_(r.inserted_primary_key, [30])
- # execute with prefetch id
+ # execute with prefetch id
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [1]
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [1])
- # executemany with explicit ids
+ # executemany with explicit ids
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
- # executemany, uses SERIAL
+ # executemany, uses SERIAL
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
- # single execute, explicit id, inline
+ # single execute, explicit id, inline
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ conn.execute(
+ table.insert(inline=True),
+ {'id': 33, 'data': 'd7'})
- # single execute, inline, uses SERIAL
+ # single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data': 'd8'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL(
@@ -221,37 +223,41 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
[{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
- table.delete().execute()
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [5]
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [5])
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL(
@@ -273,283 +279,310 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
'INSERT INTO testtable (data) VALUES (:data)',
[{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (5, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (6, 'd5'),
- (7, 'd6'),
- (33, 'd7'),
- (8, 'd8'),
- ]
- )
- table.delete().execute()
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (5, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (6, 'd5'),
+ (7, 'd6'),
+ (33, 'd7'),
+ (8, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
def _assert_data_autoincrement_returning(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': True})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
- # execute with explicit id
+ # execute with explicit id
- r = table.insert().execute({'id': 30, 'data': 'd1'})
- assert r.inserted_primary_key == [30]
+ r = conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ eq_(r.inserted_primary_key, [30])
- # execute with prefetch id
+ # execute with prefetch id
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [1]
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [1])
- # executemany with explicit ids
+ # executemany with explicit ids
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
- # executemany, uses SERIAL
+ # executemany, uses SERIAL
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
- # single execute, explicit id, inline
+ # single execute, explicit id, inline
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
- # single execute, inline, uses SERIAL
+ # single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data': 'd8'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL('INSERT INTO testtable (data) VALUES (:data) RETURNING '
- 'testtable.id', {'data': 'd2'}),
+ 'testtable.id', {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
+ [{'id': 33, 'data': 'd7'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd8'}]),
+ [{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
- table.delete().execute()
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [5]
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [5])
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL('INSERT INTO testtable (data) VALUES (:data) RETURNING '
- 'testtable.id', {'data': 'd2'}),
+ 'testtable.id', {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL('INSERT INTO testtable (data) VALUES (:data)', [{'data': 'd8'}]),
- )
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (5, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (6, 'd5'),
- (7, 'd6'),
- (33, 'd7'),
- (8, 'd8'),
- ]
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ 'INSERT INTO testtable (data) VALUES (:data)',
+ [{'data': 'd8'}]),
)
- table.delete().execute()
+
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (5, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (6, 'd5'),
+ (7, 'd6'),
+ (33, 'd7'),
+ (8, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
def _assert_data_with_sequence(self, table, seqname):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- table.insert().execute({'data': 'd2'})
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ conn.execute(table.insert(), {'data': 'd2'})
+ conn.execute(table.insert(),
+ {'id': 31, 'data': 'd3'},
+ {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(inline=True),
+ {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
CursorSQL("select nextval('my_seq')"),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 1, 'data': 'd2'}),
+ {'id': 1, 'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd8'}]),
- )
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
)
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
# cant test reflection here since the Sequence must be
# explicitly specified
def _assert_data_with_sequence_returning(self, table, seqname):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': True})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- table.insert().execute({'data': 'd2'})
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ conn.execute(table.insert(), {'data': 'd2'})
+ conn.execute(table.insert(),
+ {'id': 31, 'data': 'd3'},
+ {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL("INSERT INTO testtable (id, data) VALUES "
- "(nextval('my_seq'), :data) RETURNING testtable.id",
- {'data': 'd2'}),
+ "(nextval('my_seq'), :data) RETURNING testtable.id",
+ {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd8'}]),
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
- # cant test reflection here since the Sequence must be
- # explicitly specified
+ # cant test reflection here since the Sequence must be
+ # explicitly specified
def _assert_data_noautoincrement(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- table.insert().execute({'id': 30, 'data': 'd1'})
- if self.engine.driver == 'pg8000':
- exception_cls = exc.ProgrammingError
- elif self.engine.driver == 'pypostgresql':
- exception_cls = Exception
- else:
- exception_cls = exc.IntegrityError
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
- 'data': 'd3'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4')]
- table.delete().execute()
+
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
+ conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
+ eq_(conn.execute(table.select()).fetchall(), [
+ (30, 'd1'),
+ (31, 'd2'),
+ (32, 'd3'),
+ (33, 'd4')])
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- table.insert().execute({'id': 30, 'data': 'd1'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
- 'data': 'd3'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4')]
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
+ conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
+ eq_(conn.execute(table.select()).fetchall(), [
+ (30, 'd1'),
+ (31, 'd2'),
+ (32, 'd3'),
+ (33, 'd4')])
class ServerSideCursorsTest(fixtures.TestBase, AssertsExecutionResults):
- __only_on__ = 'postgresql+psycopg2'
+ __requires__ = 'psycopg2_compatibility',
def _fixture(self, server_side_cursors):
self.engine = engines.testing_engine(
@@ -837,6 +870,19 @@ class ExtractTest(fixtures.TablesTest):
run_deletes = None
@classmethod
+ def setup_bind(cls):
+ from sqlalchemy import event
+ eng = engines.testing_engine()
+
+ @event.listens_for(eng, "connect")
+ def connect(dbapi_conn, rec):
+ cursor = dbapi_conn.cursor()
+ cursor.execute("SET SESSION TIME ZONE 0")
+ cursor.close()
+
+ return eng
+
+ @classmethod
def define_tables(cls, metadata):
Table('t', metadata,
Column('id', Integer, primary_key=True),
@@ -856,23 +902,17 @@ class ExtractTest(fixtures.TablesTest):
def utcoffset(self, dt):
return datetime.timedelta(hours=4)
- with testing.db.connect() as conn:
-
- # we aren't resetting this at the moment but we don't have
- # any other tests that are TZ specific
- conn.execute("SET SESSION TIME ZONE 0")
- conn.execute(
- cls.tables.t.insert(),
- {
- 'dtme': datetime.datetime(2012, 5, 10, 12, 15, 25),
- 'dt': datetime.date(2012, 5, 10),
- 'tm': datetime.time(12, 15, 25),
- 'intv': datetime.timedelta(seconds=570),
- 'dttz':
- datetime.datetime(2012, 5, 10, 12, 15, 25,
- tzinfo=TZ())
- },
- )
+ cls.bind.execute(
+ cls.tables.t.insert(),
+ {
+ 'dtme': datetime.datetime(2012, 5, 10, 12, 15, 25),
+ 'dt': datetime.date(2012, 5, 10),
+ 'tm': datetime.time(12, 15, 25),
+ 'intv': datetime.timedelta(seconds=570),
+ 'dttz': datetime.datetime(2012, 5, 10, 12, 15, 25,
+ tzinfo=TZ())
+ },
+ )
def _test(self, expr, field="all", overrides=None):
t = self.tables.t
@@ -898,7 +938,7 @@ class ExtractTest(fixtures.TablesTest):
fields.update(overrides)
for field in fields:
- result = testing.db.scalar(
+ result = self.bind.scalar(
select([extract(field, expr)]).select_from(t))
eq_(result, fields[field])
@@ -912,9 +952,9 @@ class ExtractTest(fixtures.TablesTest):
overrides={"epoch": 1336652695.0, "minute": 24})
def test_three(self):
- t = self.tables.t
+ self.tables.t
- actual_ts = testing.db.scalar(func.current_timestamp()) - \
+ actual_ts = self.bind.scalar(func.current_timestamp()) - \
datetime.timedelta(days=5)
self._test(func.current_timestamp() - datetime.timedelta(days=5),
{"hour": actual_ts.hour, "year": actual_ts.year,
@@ -963,7 +1003,7 @@ class ExtractTest(fixtures.TablesTest):
def test_twelve(self):
t = self.tables.t
- actual_ts = testing.db.scalar(
+ actual_ts = self.bind.scalar(
func.current_timestamp()).replace(tzinfo=None) - \
datetime.datetime(2012, 5, 10, 12, 15, 25)
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 0dda1fa45..8da18108f 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -12,6 +12,8 @@ from sqlalchemy import Table, Column, MetaData, Integer, String, \
from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects.postgresql import ARRAY
+import re
class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
@@ -70,7 +72,7 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
eq_(names, ['testtable'])
-class MaterialiedViewReflectionTest(
+class MaterializedViewReflectionTest(
fixtures.TablesTest, AssertsExecutionResults):
"""Test reflection on materialized views"""
@@ -129,6 +131,15 @@ class MaterialiedViewReflectionTest(
insp = inspect(testing.db)
eq_(set(insp.get_view_names()), set(['test_mview', 'test_regview']))
+ def test_get_view_definition(self):
+ insp = inspect(testing.db)
+ eq_(
+ re.sub(
+ r'[\n\t ]+', ' ',
+ insp.get_view_definition("test_mview").strip()),
+ "SELECT testtable.id, testtable.data FROM testtable;"
+ )
+
class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
"""Test PostgreSQL domains"""
@@ -191,7 +202,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
table = Table('enum_test', metadata, autoload=True)
eq_(
table.c.data.type.enums,
- ('test', )
+ ['test']
)
def test_table_is_reflected_test_schema(self):
@@ -672,6 +683,60 @@ class ReflectionTest(fixtures.TestBase):
eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
conn.close()
+ @testing.fails_if("postgresql < 8.2", "reloptions not supported")
+ @testing.provide_metadata
+ def test_index_reflection_with_storage_options(self):
+ """reflect indexes with storage options set"""
+
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer)
+ )
+ metadata.create_all()
+
+ with testing.db.connect().execution_options(autocommit=True) as conn:
+ conn.execute("CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)")
+
+ ind = testing.db.dialect.get_indexes(conn, "t", None)
+ eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1',
+ 'dialect_options':
+ {"postgresql_with": {"fillfactor": "50"}}}])
+
+ m = MetaData()
+ t1 = Table('t', m, autoload_with=conn)
+ eq_(
+ list(t1.indexes)[0].dialect_options['postgresql']['with'],
+ {"fillfactor": "50"}
+ )
+
+ @testing.provide_metadata
+ def test_index_reflection_with_access_method(self):
+ """reflect indexes with storage options set"""
+
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', ARRAY(Integer))
+ )
+ metadata.create_all()
+ with testing.db.connect().execution_options(autocommit=True) as conn:
+ conn.execute("CREATE INDEX idx1 ON t USING gin (x)")
+
+ ind = testing.db.dialect.get_indexes(conn, "t", None)
+ eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1',
+ 'dialect_options': {'postgresql_using': 'gin'}}])
+ m = MetaData()
+ t1 = Table('t', m, autoload_with=conn)
+ eq_(
+ list(t1.indexes)[0].dialect_options['postgresql']['using'],
+ 'gin'
+ )
+
@testing.provide_metadata
def test_foreign_key_option_inspection(self):
metadata = self.metadata
@@ -817,6 +882,7 @@ class ReflectionTest(fixtures.TestBase):
}])
@testing.provide_metadata
+ @testing.only_on("postgresql >= 8.5")
def test_reflection_with_unique_constraint(self):
insp = inspect(testing.db)
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index 36f4fdc3f..8818a9941 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -7,10 +7,11 @@ from sqlalchemy import testing
import datetime
from sqlalchemy import Table, MetaData, Column, Integer, Enum, Float, select, \
func, DateTime, Numeric, exc, String, cast, REAL, TypeDecorator, Unicode, \
- Text, null, text
+ Text, null, text, column, ARRAY, any_, all_
from sqlalchemy.sql import operators
from sqlalchemy import types
-from sqlalchemy.dialects.postgresql import base as postgresql
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \
JSON, JSONB
@@ -19,6 +20,8 @@ from sqlalchemy import util
from sqlalchemy.testing.util import round_decimal
from sqlalchemy import inspect
from sqlalchemy import event
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import Session
tztable = notztable = metadata = table = None
@@ -167,11 +170,12 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
t2 = Table('table', m2, autoload=True)
eq_(
t2.c.value.type.enums,
- (util.u('réveillé'), util.u('drôle'), util.u('S’il'))
+ [util.u('réveillé'), util.u('drôle'), util.u('S’il')]
)
- def test_non_native_type(self):
- metadata = MetaData()
+ @testing.provide_metadata
+ def test_non_native_enum(self):
+ metadata = self.metadata
t1 = Table(
'foo',
metadata,
@@ -187,14 +191,53 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
def go():
t1.create(testing.db)
- try:
- self.assert_sql(
- testing.db, go, [
- ("CREATE TABLE foo (\tbar "
- "VARCHAR(5), \tCONSTRAINT myenum CHECK "
- "(bar IN ('one', 'two', 'three')))", {})])
- finally:
- metadata.drop_all(testing.db)
+ self.assert_sql(
+ testing.db, go, [
+ ("CREATE TABLE foo (\tbar "
+ "VARCHAR(5), \tCONSTRAINT myenum CHECK "
+ "(bar IN ('one', 'two', 'three')))", {})])
+ with testing.db.begin() as conn:
+ conn.execute(
+ t1.insert(), {'bar': 'two'}
+ )
+ eq_(
+ conn.scalar(select([t1.c.bar])), 'two'
+ )
+
+ @testing.provide_metadata
+ def test_non_native_enum_w_unicode(self):
+ metadata = self.metadata
+ t1 = Table(
+ 'foo',
+ metadata,
+ Column(
+ 'bar',
+ Enum('B', util.u('Ü'), name='myenum', native_enum=False)))
+
+ def go():
+ t1.create(testing.db)
+
+ self.assert_sql(
+ testing.db,
+ go,
+ [
+ (
+ util.u(
+ "CREATE TABLE foo (\tbar "
+ "VARCHAR(1), \tCONSTRAINT myenum CHECK "
+ "(bar IN ('B', 'Ü')))"
+ ),
+ {}
+ )
+ ])
+
+ with testing.db.begin() as conn:
+ conn.execute(
+ t1.insert(), {'bar': util.u('Ü')}
+ )
+ eq_(
+ conn.scalar(select([t1.c.bar])), util.u('Ü')
+ )
@testing.provide_metadata
def test_disable_create(self):
@@ -237,6 +280,104 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
metadata.create_all(checkfirst=False)
metadata.drop_all(checkfirst=False)
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+
+ @testing.provide_metadata
+ def test_generate_alone_on_metadata(self):
+ """Test that the same enum twice only generates once
+ for the create_all() call, without using checkfirst.
+
+ A 'memo' collection held by the DDL runner
+ now handles this.
+
+ """
+ metadata = self.metadata
+
+ e1 = Enum('one', 'two', 'three',
+ name="myenum", metadata=self.metadata)
+
+ metadata.create_all(checkfirst=False)
+ assert 'myenum' in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+ metadata.drop_all(checkfirst=False)
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+
+ @testing.provide_metadata
+ def test_generate_multiple_on_metadata(self):
+ metadata = self.metadata
+
+ e1 = Enum('one', 'two', 'three',
+ name="myenum", metadata=metadata)
+
+ t1 = Table('e1', metadata,
+ Column('c1', e1)
+ )
+
+ t2 = Table('e2', metadata,
+ Column('c1', e1)
+ )
+
+ metadata.create_all(checkfirst=False)
+ assert 'myenum' in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+ metadata.drop_all(checkfirst=False)
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+
+ e1.create() # creates ENUM
+ t1.create() # does not create ENUM
+ t2.create() # does not create ENUM
+
+ @testing.provide_metadata
+ def test_drops_on_table(self):
+ metadata = self.metadata
+
+ e1 = Enum('one', 'two', 'three',
+ name="myenum")
+ table = Table(
+ 'e1', metadata,
+ Column('c1', e1)
+ )
+
+ table.create()
+ table.drop()
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+ table.create()
+ assert 'myenum' in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+ table.drop()
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+
+ @testing.provide_metadata
+ def test_remain_on_table_metadata_wide(self):
+ metadata = self.metadata
+
+ e1 = Enum('one', 'two', 'three',
+ name="myenum", metadata=metadata)
+ table = Table(
+ 'e1', metadata,
+ Column('c1', e1)
+ )
+
+ # need checkfirst here, otherwise enum will not be created
+ assert_raises_message(
+ sa.exc.ProgrammingError,
+ '.*type "myenum" does not exist',
+ table.create,
+ )
+ table.create(checkfirst=True)
+ table.drop()
+ table.create(checkfirst=True)
+ table.drop()
+ assert 'myenum' in [
+ e['name'] for e in inspect(testing.db).get_enums()]
+ metadata.drop_all()
+ assert 'myenum' not in [
+ e['name'] for e in inspect(testing.db).get_enums()]
def test_non_native_dialect(self):
engine = engines.testing_engine()
@@ -304,8 +445,9 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
e.connect()
assert not dialect.supports_native_enum
+ @testing.provide_metadata
def test_reflection(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
etype = Enum('four', 'five', 'six', name='fourfivesixtype',
metadata=metadata)
t1 = Table(
@@ -317,18 +459,16 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
'one', 'two', 'three', name='onetwothreetype')),
Column('value2', etype))
metadata.create_all()
- try:
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
- assert t2.c.value.type.enums == ('one', 'two', 'three')
- assert t2.c.value.type.name == 'onetwothreetype'
- assert t2.c.value2.type.enums == ('four', 'five', 'six')
- assert t2.c.value2.type.name == 'fourfivesixtype'
- finally:
- metadata.drop_all()
+ m2 = MetaData(testing.db)
+ t2 = Table('table', m2, autoload=True)
+ eq_(t2.c.value.type.enums, ['one', 'two', 'three'])
+ eq_(t2.c.value.type.name, 'onetwothreetype')
+ eq_(t2.c.value2.type.enums, ['four', 'five', 'six'])
+ eq_(t2.c.value2.type.name, 'fourfivesixtype')
+ @testing.provide_metadata
def test_schema_reflection(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
etype = Enum(
'four',
'five',
@@ -337,7 +477,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
schema='test_schema',
metadata=metadata,
)
- t1 = Table(
+ Table(
'table', metadata,
Column(
'id', Integer, primary_key=True),
@@ -347,16 +487,41 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
name='onetwothreetype', schema='test_schema')),
Column('value2', etype))
metadata.create_all()
- try:
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
- assert t2.c.value.type.enums == ('one', 'two', 'three')
- assert t2.c.value.type.name == 'onetwothreetype'
- assert t2.c.value2.type.enums == ('four', 'five', 'six')
- assert t2.c.value2.type.name == 'fourfivesixtype'
- assert t2.c.value2.type.schema == 'test_schema'
- finally:
- metadata.drop_all()
+ m2 = MetaData(testing.db)
+ t2 = Table('table', m2, autoload=True)
+ eq_(t2.c.value.type.enums, ['one', 'two', 'three'])
+ eq_(t2.c.value.type.name, 'onetwothreetype')
+ eq_(t2.c.value2.type.enums, ['four', 'five', 'six'])
+ eq_(t2.c.value2.type.name, 'fourfivesixtype')
+ eq_(t2.c.value2.type.schema, 'test_schema')
+
+ @testing.provide_metadata
+ def test_custom_subclass(self):
+ class MyEnum(TypeDecorator):
+ impl = Enum('oneHI', 'twoHI', 'threeHI', name='myenum')
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value += "HI"
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value += "THERE"
+ return value
+
+ t1 = Table(
+ 'table1', self.metadata,
+ Column('data', MyEnum())
+ )
+ self.metadata.create_all(testing.db)
+
+ with testing.db.connect() as conn:
+ conn.execute(t1.insert(), {"data": "two"})
+ eq_(
+ conn.scalar(select([t1.c.data])),
+ "twoHITHERE"
+ )
class OIDTest(fixtures.TestBase):
@@ -420,6 +585,14 @@ class NumericInterpretationTest(fixtures.TestBase):
)
+class PythonTypeTest(fixtures.TestBase):
+ def test_interval(self):
+ is_(
+ postgresql.INTERVAL().python_type,
+ datetime.timedelta
+ )
+
+
class TimezoneTest(fixtures.TestBase):
__backend__ = True
@@ -559,7 +732,187 @@ class TimePrecisionTest(fixtures.TestBase, AssertsCompiledSQL):
eq_(t2.c.c6.type.timezone, True)
-class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
+class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
+ __dialect__ = 'postgresql'
+
+ def test_array_int_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[3]]),
+ "SELECT x[%(x_1)s] AS anon_1",
+ checkparams={'x_1': 3}
+ )
+
+ def test_array_any(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.any(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ANY (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_all(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.all(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ALL (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_contains(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contains(array([4, 5, 6]))]),
+ "SELECT x @> ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_contains_override_raises(self):
+ col = column('x', postgresql.ARRAY(Integer))
+
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
+ def test_array_contained_by(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contained_by(array([4, 5, 6]))]),
+ "SELECT x <@ ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_overlap(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.overlap(array([4, 5, 6]))]),
+ "SELECT x && ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_slice_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[5:10]]),
+ "SELECT x[%(x_1)s:%(x_2)s] AS anon_1",
+ checkparams={'x_2': 10, 'x_1': 5}
+ )
+
+ def test_array_dim_index(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=2))
+ self.assert_compile(
+ select([col[3][5]]),
+ "SELECT x[%(x_1)s][%(param_1)s] AS anon_1",
+ checkparams={'x_1': 3, 'param_1': 5}
+ )
+
+ def test_array_concat(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ literal = array([4, 5])
+
+ self.assert_compile(
+ select([col + literal]),
+ "SELECT x || ARRAY[%(param_1)s, %(param_2)s] AS anon_1",
+ checkparams={'param_1': 4, 'param_2': 5}
+ )
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=3))
+ is_(
+ col[5].type._type_affinity, ARRAY
+ )
+ assert isinstance(
+ col[5].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, ARRAY
+ )
+ assert isinstance(
+ col[5][6].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+
+ # type affinity is Array...
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, postgresql.ARRAY)
+ assert isinstance(arrtable.c.strarr[1:3].type, postgresql.ARRAY)
+
+ def test_array_functions_plus_getitem(self):
+ """test parenthesizing of functions plus indexing, which seems
+ to be required by Postgresql.
+
+ """
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT (array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))"
+ "[%(param_7)s:%(param_8)s] AS anon_1"
+ )
+
+ self.assert_compile(
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[3],
+ "(array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))[%(array_cat_1)s]"
+ )
+
+ def test_array_agg_generic(self):
+ expr = func.array_agg(column('q', Integer))
+ is_(expr.type.__class__, types.ARRAY)
+ is_(expr.type.item_type.__class__, Integer)
+
+ def test_array_agg_specific(self):
+ from sqlalchemy.dialects.postgresql import array_agg
+ expr = array_agg(column('q', Integer))
+ is_(expr.type.__class__, postgresql.ARRAY)
+ is_(expr.type.item_type.__class__, Integer)
+
+
+class ArrayRoundTripTest(fixtures.TablesTest, AssertsExecutionResults):
__only_on__ = 'postgresql'
__backend__ = True
@@ -615,6 +968,89 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
+ @testing.provide_metadata
+ def test_array_agg(self):
+ values_table = Table('values', self.metadata, Column('value', Integer))
+ self.metadata.create_all(testing.db)
+ testing.db.execute(
+ values_table.insert(),
+ [{'value': i} for i in range(1, 10)]
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ list(range(1, 10))
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 3
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[2:4]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ def test_array_index_slice_exprs(self):
+ """test a variety of expressions that sometimes need parenthesizing"""
+
+ stmt = select([array([1, 2, 3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2, 3, 4])[2]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 2
+ )
+
+ stmt = select([(array([1, 2]) + array([3, 4]))[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2]) + array([3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [1, 2, 4]
+ )
+
+ stmt = select([array([1, 2])[2:3] + array([3, 4])])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), [2, 3, 4, 5]
+ )
+
+ def test_any_all_exprs(self):
+ stmt = select([
+ 3 == any_(func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ ))
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
def test_insert_array(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
@@ -648,13 +1084,13 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
def test_array_comparison(self):
arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[1, 2, 3],
+ arrtable.insert().execute(id=5, intarr=[1, 2, 3],
strarr=[util.u('abc'), util.u('def')])
results = select([arrtable.c.id]).\
where(arrtable.c.intarr < [4, 5, 6]).execute()\
.fetchall()
eq_(len(results), 1)
- eq_(results[0][0], 3)
+ eq_(results[0][0], 5)
def test_array_subtype_resultprocessor(self):
arrtable = self.tables.arrtable
@@ -689,16 +1125,6 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
), True
)
- def test_array_getitem_single_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1].type._type_affinity, Integer)
- is_(arrtable.c.strarr[1].type._type_affinity, String)
-
- def test_array_getitem_slice_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1:3].type._type_affinity, postgresql.ARRAY)
- is_(arrtable.c.strarr[1:3].type._type_affinity, postgresql.ARRAY)
-
def test_array_getitem_single_exec(self):
arrtable = self.tables.arrtable
self._fixture_456(arrtable)
@@ -787,6 +1213,14 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
lambda elem: (
x for x in elem))
+ def test_multi_dim_roundtrip(self):
+ arrtable = self.tables.arrtable
+ testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ eq_(
+ testing.db.scalar(select([arrtable.c.dimarr])),
+ [[-1, 0, 1], [2, 3, 4]]
+ )
+
def test_array_contained_by_exec(self):
arrtable = self.tables.arrtable
with testing.db.connect() as conn:
@@ -891,12 +1325,98 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
)
- def test_dimension(self):
- arrtable = self.tables.arrtable
- testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ def test_array_plus_native_enum_create(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column(
+ 'data_1',
+ postgresql.ARRAY(
+ postgresql.ENUM('a', 'b', 'c', name='my_enum_1')
+ )
+ ),
+ Column(
+ 'data_2',
+ postgresql.ARRAY(
+ types.Enum('a', 'b', 'c', name='my_enum_2')
+ )
+ )
+ )
+
+ t.create(testing.db)
eq_(
- testing.db.scalar(select([arrtable.c.dimarr])),
- [[-1, 0, 1], [2, 3, 4]]
+ set(e['name'] for e in inspect(testing.db).get_enums()),
+ set(['my_enum_1', 'my_enum_2'])
+ )
+ t.drop(testing.db)
+ eq_(inspect(testing.db).get_enums(), [])
+
+
+class HashableFlagORMTest(fixtures.TestBase):
+ """test the various 'collection' types that they flip the 'hashable' flag
+ appropriately. [ticket:3499]"""
+
+ __only_on__ = 'postgresql'
+
+ def _test(self, type_, data):
+ Base = declarative_base(metadata=self.metadata)
+
+ class A(Base):
+ __tablename__ = 'a1'
+ id = Column(Integer, primary_key=True)
+ data = Column(type_)
+ Base.metadata.create_all(testing.db)
+ s = Session(testing.db)
+ s.add_all([
+ A(data=elem) for elem in data
+ ])
+ s.commit()
+
+ eq_(
+ [(obj.A.id, obj.data) for obj in
+ s.query(A, A.data).order_by(A.id)],
+ list(enumerate(data, 1))
+ )
+
+ @testing.provide_metadata
+ def test_array(self):
+ self._test(
+ postgresql.ARRAY(Text()),
+ [['a', 'b', 'c'], ['d', 'e', 'f']]
+ )
+
+ @testing.requires.hstore
+ @testing.provide_metadata
+ def test_hstore(self):
+ self._test(
+ postgresql.HSTORE(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': '5', 'f': '6'}
+ ]
+ )
+
+ @testing.provide_metadata
+ def test_json(self):
+ self._test(
+ postgresql.JSON(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
+ )
+
+ @testing.requires.postgresql_jsonb
+ @testing.provide_metadata
+ def test_jsonb(self):
+ self._test(
+ postgresql.JSONB(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
)
@@ -912,6 +1432,16 @@ class TimestampTest(fixtures.TestBase, AssertsExecutionResults):
result = connection.execute(s).first()
eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0))
+ def test_interval_arithmetic(self):
+ # basically testing that we get timedelta back for an INTERVAL
+ # result. more of a driver assertion.
+ engine = testing.db
+ connection = engine.connect()
+
+ s = select([text("timestamp '2007-12-25' - timestamp '2007-11-15'")])
+ result = connection.execute(s).first()
+ eq_(result[0], datetime.timedelta(40))
+
class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
@@ -1233,6 +1763,19 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
{"key1": "value1", "key2": "value2"}
)
+ def test_ret_type_text(self):
+ col = column('x', HSTORE())
+
+ is_(col['foo'].type.__class__, Text)
+
+ def test_ret_type_custom(self):
+ class MyType(types.UserDefinedType):
+ pass
+
+ col = column('x', HSTORE(text_type=MyType))
+
+ is_(col['foo'].type.__class__, MyType)
+
def test_where_has_key(self):
self._test_where(
# hide from 2to3
@@ -1255,7 +1798,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_defined(self):
self._test_where(
self.hashcol.defined('foo'),
- "defined(test_table.hash, %(param_1)s)"
+ "defined(test_table.hash, %(defined_1)s)"
)
def test_where_contains(self):
@@ -1273,7 +1816,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_getitem(self):
self._test_where(
self.hashcol['bar'] == None,
- "(test_table.hash -> %(hash_1)s) IS NULL"
+ "test_table.hash -> %(hash_1)s IS NULL"
)
def test_cols_get(self):
@@ -1286,7 +1829,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_single_key(self):
self._test_cols(
self.hashcol.delete('foo'),
- "delete(test_table.hash, %(param_1)s) AS delete_1",
+ "delete(test_table.hash, %(delete_2)s) AS delete_1",
True
)
@@ -1301,7 +1844,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_matching_pairs(self):
self._test_cols(
self.hashcol.delete(hstore('1', '2')),
- ("delete(test_table.hash, hstore(%(param_1)s, %(param_2)s)) "
+ ("delete(test_table.hash, hstore(%(hstore_1)s, %(hstore_2)s)) "
"AS delete_1"),
True
)
@@ -1317,7 +1860,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_hstore_pair_text(self):
self._test_cols(
hstore('foo', '3')['foo'],
- "hstore(%(param_1)s, %(param_2)s) -> %(hstore_1)s AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) -> %(hstore_3)s AS anon_1",
False
)
@@ -1342,21 +1885,21 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
self._test_cols(
self.hashcol.concat(hstore(cast(self.test_table.c.id, Text), '3')),
("test_table.hash || hstore(CAST(test_table.id AS TEXT), "
- "%(param_1)s) AS anon_1"),
+ "%(hstore_1)s) AS anon_1"),
True
)
def test_cols_concat_op(self):
self._test_cols(
hstore('foo', 'bar') + self.hashcol,
- "hstore(%(param_1)s, %(param_2)s) || test_table.hash AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) || test_table.hash AS anon_1",
True
)
def test_cols_concat_get(self):
self._test_cols(
(self.hashcol + self.hashcol)['foo'],
- "test_table.hash || test_table.hash -> %(param_1)s AS anon_1"
+ "(test_table.hash || test_table.hash) -> %(param_1)s AS anon_1"
)
def test_cols_keys(self):
@@ -1428,7 +1971,7 @@ class HStoreRoundTripTest(fixtures.TablesTest):
self._assert_data([{"k1": "r1v1", "k2": "r1v2"}])
def _non_native_engine(self):
- if testing.against("postgresql+psycopg2"):
+ if testing.requires.psycopg2_native_hstore.enabled:
engine = engines.testing_engine(
options=dict(
use_native_hstore=False))
@@ -1442,7 +1985,22 @@ class HStoreRoundTripTest(fixtures.TablesTest):
cols = insp.get_columns('data_table')
assert isinstance(cols[2]['type'], HSTORE)
- @testing.only_on("postgresql+psycopg2")
+ def test_literal_round_trip(self):
+ # in particular, this tests that the array index
+ # operator against the function is handled by PG; with some
+ # array functions it requires outer parenthezisation on the left and
+ # we may not be doing that here
+ expr = hstore(
+ postgresql.array(['1', '2']),
+ postgresql.array(['3', None]))['1']
+ eq_(
+ testing.db.scalar(
+ select([expr])
+ ),
+ "3"
+ )
+
+ @testing.requires.psycopg2_native_hstore
def test_insert_native(self):
engine = testing.db
self._test_insert(engine)
@@ -1451,7 +2009,7 @@ class HStoreRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_insert(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_hstore
def test_criterion_native(self):
engine = testing.db
self._fixture_data(engine)
@@ -1485,7 +2043,7 @@ class HStoreRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_fixed_round_trip(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_hstore
def test_fixed_round_trip_native(self):
engine = testing.db
self._test_fixed_round_trip(engine)
@@ -1506,12 +2064,12 @@ class HStoreRoundTripTest(fixtures.TablesTest):
}
)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_hstore
def test_unicode_round_trip_python(self):
engine = self._non_native_engine()
self._test_unicode_round_trip(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_hstore
def test_unicode_round_trip_native(self):
engine = testing.db
self._test_unicode_round_trip(engine)
@@ -1520,7 +2078,7 @@ class HStoreRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_escaped_quotes_round_trip(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_hstore
def test_escaped_quotes_round_trip_native(self):
engine = testing.db
self._test_escaped_quotes_round_trip(engine)
@@ -1552,14 +2110,16 @@ class HStoreRoundTripTest(fixtures.TablesTest):
class _RangeTypeMixin(object):
- __requires__ = 'range_types',
- __dialect__ = 'postgresql+psycopg2'
+ __requires__ = 'range_types', 'psycopg2_compatibility'
__backend__ = True
def extras(self):
# done this way so we don't get ImportErrors with
# older psycopg2 versions.
- from psycopg2 import extras
+ if testing.against("postgresql+psycopg2cffi"):
+ from psycopg2cffi import extras
+ else:
+ from psycopg2 import extras
return extras
@classmethod
@@ -1827,7 +2387,7 @@ class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest):
def tstzs(self):
if self._tstzs is None:
- lower = testing.db.connect().scalar(
+ lower = testing.db.scalar(
func.current_timestamp().select()
)
upper = lower + datetime.timedelta(1)
@@ -1871,104 +2431,77 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
) % expected
)
- def test_bind_serialize_default(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc({"A": [1, 2, 3, True, False]}),
- '{"A": [1, 2, 3, true, false]}'
+ # This test is a bit misleading -- in real life you will need to cast to
+ # do anything
+ def test_where_getitem(self):
+ self._test_where(
+ self.jsoncol['bar'] == None,
+ "test_table.test_column -> %(test_column_1)s IS NULL"
)
- def test_bind_serialize_None(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc(None),
- 'null'
+ def test_where_path(self):
+ self._test_where(
+ self.jsoncol[("foo", 1)] == None,
+ "test_table.test_column #> %(test_column_1)s IS NULL"
)
- def test_bind_serialize_none_as_null(self):
- dialect = postgresql.dialect()
- proc = JSON(none_as_null=True)._cached_bind_processor(
- dialect)
- eq_(
- proc(None),
- None
+ def test_path_typing(self):
+ col = column('x', JSON())
+ is_(
+ col['q'].type._type_affinity, types.JSON
)
- eq_(
- proc(null()),
- None
+ is_(
+ col[('q', )].type._type_affinity, types.JSON
)
-
- def test_bind_serialize_null(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc(null()),
- None
+ is_(
+ col['q']['p'].type._type_affinity, types.JSON
)
-
- def test_result_deserialize_default(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc('{"A": [1, 2, 3, true, false]}'),
- {"A": [1, 2, 3, True, False]}
+ is_(
+ col[('q', 'p')].type._type_affinity, types.JSON
)
- def test_result_deserialize_null(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc('null'),
- None
- )
+ def test_custom_astext_type(self):
+ class MyType(types.UserDefinedType):
+ pass
- def test_result_deserialize_None(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc(None),
- None
+ col = column('x', JSON(astext_type=MyType))
+
+ is_(
+ col['q'].astext.type.__class__, MyType
)
- # This test is a bit misleading -- in real life you will need to cast to
- # do anything
- def test_where_getitem(self):
- self._test_where(
- self.jsoncol['bar'] == None,
- "(test_table.test_column -> %(test_column_1)s) IS NULL"
+ is_(
+ col[('q', 'p')].astext.type.__class__, MyType
)
- def test_where_path(self):
- self._test_where(
- self.jsoncol[("foo", 1)] == None,
- "(test_table.test_column #> %(test_column_1)s) IS NULL"
+ is_(
+ col['q']['p'].astext.type.__class__, MyType
)
def test_where_getitem_as_text(self):
self._test_where(
self.jsoncol['bar'].astext == None,
- "(test_table.test_column ->> %(test_column_1)s) IS NULL"
+ "test_table.test_column ->> %(test_column_1)s IS NULL"
)
- def test_where_getitem_as_cast(self):
+ def test_where_getitem_astext_cast(self):
self._test_where(
- self.jsoncol['bar'].cast(Integer) == 5,
+ self.jsoncol['bar'].astext.cast(Integer) == 5,
"CAST(test_table.test_column ->> %(test_column_1)s AS INTEGER) "
"= %(param_1)s"
)
+ def test_where_getitem_json_cast(self):
+ self._test_where(
+ self.jsoncol['bar'].cast(Integer) == 5,
+ "CAST(test_table.test_column -> %(test_column_1)s AS INTEGER) "
+ "= %(param_1)s"
+ )
+
def test_where_path_as_text(self):
self._test_where(
self.jsoncol[("foo", 1)].astext == None,
- "(test_table.test_column #>> %(test_column_1)s) IS NULL"
+ "test_table.test_column #>> %(test_column_1)s IS NULL"
)
def test_cols_get(self):
@@ -2003,6 +2536,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
{'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
{'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
{'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2", "k3": 5}},
+ {'name': 'r6', 'data': {"k1": {"r6v1": {'subr': [1, 2, 3]}}}},
)
def _assert_data(self, compare, column='data'):
@@ -2023,6 +2557,15 @@ class JSONRoundTripTest(fixtures.TablesTest):
).fetchall()
eq_([d for d, in data], [None])
+ def _assert_column_is_JSON_NULL(self, column='data'):
+ col = self.tables.data_table.c[column]
+
+ data = testing.db.execute(
+ select([col]).
+ where(cast(col, String) == "null")
+ ).fetchall()
+ eq_([d for d, in data], [None])
+
def _test_insert(self, engine):
engine.execute(
self.tables.data_table.insert(),
@@ -2044,6 +2587,13 @@ class JSONRoundTripTest(fixtures.TablesTest):
)
self._assert_column_is_NULL(column='nulldata')
+ def _test_insert_nulljson_into_none_as_null(self, engine):
+ engine.execute(
+ self.tables.data_table.insert(),
+ {'name': 'r1', 'nulldata': JSON.NULL}
+ )
+ self._assert_column_is_JSON_NULL(column='nulldata')
+
def _non_native_engine(self, json_serializer=None, json_deserializer=None):
if json_serializer is not None or json_deserializer is not None:
options = {
@@ -2077,21 +2627,26 @@ class JSONRoundTripTest(fixtures.TablesTest):
cols = insp.get_columns('data_table')
assert isinstance(cols[2]['type'], self.test_type)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_insert_native(self):
engine = testing.db
self._test_insert(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_insert_native_nulls(self):
engine = testing.db
self._test_insert_nulls(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_insert_native_none_as_null(self):
engine = testing.db
self._test_insert_none_as_null(engine)
+ @testing.requires.psycopg2_native_json
+ def test_insert_native_nulljson_into_none_as_null(self):
+ engine = testing.db
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def test_insert_python(self):
engine = self._non_native_engine()
self._test_insert(engine)
@@ -2104,6 +2659,10 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_insert_none_as_null(engine)
+ def test_insert_python_nulljson_into_none_as_null(self):
+ engine = self._non_native_engine()
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def _test_custom_serialize_deserialize(self, native):
import json
@@ -2145,15 +2704,15 @@ class JSONRoundTripTest(fixtures.TablesTest):
},
)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_custom_native(self):
self._test_custom_serialize_deserialize(True)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_custom_python(self):
self._test_custom_serialize_deserialize(False)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_criterion_native(self):
engine = testing.db
self._fixture_data(engine)
@@ -2168,12 +2727,28 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._fixture_data(engine)
data_table = self.tables.data_table
+
+ result = engine.execute(
+ select([data_table.c.name]).where(
+ data_table.c.data[('k1', 'r6v1', 'subr')].astext == "[1, 2, 3]"
+ )
+ )
+ eq_(result.scalar(), 'r6')
+
+ @testing.fails_on(
+ "postgresql < 9.4",
+ "Improvement in Postgresql behavior?")
+ def test_multi_index_query(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+
result = engine.execute(
- select([data_table.c.data]).where(
- data_table.c.data[('k1',)].astext == 'r3v1'
+ select([data_table.c.name]).where(
+ data_table.c.data['k1']['r6v1']['subr'].astext == "[1, 2, 3]"
)
- ).first()
- eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+ )
+ eq_(result.scalar(), 'r6')
def test_query_returned_as_text(self):
engine = testing.db
@@ -2189,7 +2764,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
self._fixture_data(engine)
data_table = self.tables.data_table
result = engine.execute(
- select([data_table.c.data['k3'].cast(Integer)]).where(
+ select([data_table.c.data['k3'].astext.cast(Integer)]).where(
data_table.c.name == 'r5')
).first()
assert isinstance(result[0], int)
@@ -2225,7 +2800,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_fixed_round_trip(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_fixed_round_trip_native(self):
engine = testing.db
self._test_fixed_round_trip(engine)
@@ -2252,11 +2827,41 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_unicode_round_trip(engine)
- @testing.only_on("postgresql+psycopg2")
+ @testing.requires.psycopg2_native_json
def test_unicode_round_trip_native(self):
engine = testing.db
self._test_unicode_round_trip(engine)
+ def test_eval_none_flag_orm(self):
+ Base = declarative_base()
+
+ class Data(Base):
+ __table__ = self.tables.data_table
+
+ s = Session(testing.db)
+
+ d1 = Data(name='d1', data=None, nulldata=None)
+ s.add(d1)
+ s.commit()
+
+ s.bulk_insert_mappings(
+ Data, [{"name": "d2", "data": None, "nulldata": None}]
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd1').first(),
+ ("null", None)
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd2').first(),
+ ("null", None)
+ )
+
class JSONBTest(JSONTest):
@@ -2303,7 +2908,7 @@ class JSONBTest(JSONTest):
class JSONBRoundTripTest(JSONRoundTripTest):
- __only_on__ = ('postgresql >= 9.4',)
+ __requires__ = ('postgresql_jsonb', )
test_type = JSONB
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 3c67f1590..1f3e63040 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_
from sqlalchemy import *
from sqlalchemy import types as sqltypes, exc, schema
from sqlalchemy.sql import table, column
+from sqlalchemy.sql.elements import quoted_name
from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.util import u, b
@@ -22,6 +23,7 @@ from sqlalchemy.testing.mock import Mock
class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -54,6 +56,7 @@ class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
class CXOracleArgsTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def test_autosetinputsizes(self):
dialect = cx_oracle.dialect()
@@ -75,6 +78,7 @@ class CXOracleArgsTest(fixtures.TestBase):
class QuotedBindRoundTripTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_table_round_trip(self):
@@ -229,7 +233,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
t = table('sometable', column('col1'), column('col2'))
s = select([t])
c = s.compile(dialect=oracle.OracleDialect())
- assert t.c.col1 in set(c.result_map['col1'][1])
+ assert t.c.col1 in set(c._create_result_map()['col1'][1])
s = select([t]).limit(10).offset(20)
self.assert_compile(s,
'SELECT col1, col2 FROM (SELECT col1, '
@@ -240,9 +244,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams={'param_1': 10, 'param_2': 20})
c = s.compile(dialect=oracle.OracleDialect())
- assert t.c.col1 in set(c.result_map['col1'][1])
- s = select([s.c.col1, s.c.col2])
- self.assert_compile(s,
+ eq_(len(c._result_columns), 2)
+ assert t.c.col1 in set(c._create_result_map()['col1'][1])
+
+ s2 = select([s.c.col1, s.c.col2])
+ self.assert_compile(s2,
'SELECT col1, col2 FROM (SELECT col1, col2 '
'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
'FROM (SELECT sometable.col1 AS col1, '
@@ -251,13 +257,16 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
':param_2)',
checkparams={'param_1': 10, 'param_2': 20})
- self.assert_compile(s,
+ self.assert_compile(s2,
'SELECT col1, col2 FROM (SELECT col1, col2 '
'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
'FROM (SELECT sometable.col1 AS col1, '
'sometable.col2 AS col2 FROM sometable) '
'WHERE ROWNUM <= :param_1 + :param_2) WHERE ora_rn > '
':param_2)')
+ c = s2.compile(dialect=oracle.OracleDialect())
+ eq_(len(c._result_columns), 2)
+ assert s.c.col1 in set(c._create_result_map()['col1'][1])
s = select([t]).limit(10).offset(20).order_by(t.c.col2)
self.assert_compile(s,
@@ -269,6 +278,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
':param_1 + :param_2) WHERE ora_rn > :param_2',
checkparams={'param_1': 10, 'param_2': 20}
)
+ c = s.compile(dialect=oracle.OracleDialect())
+ eq_(len(c._result_columns), 2)
+ assert t.c.col1 in set(c._create_result_map()['col1'][1])
s = select([t], for_update=True).limit(10).order_by(t.c.col2)
self.assert_compile(s,
@@ -339,7 +351,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = select([type_coerce(column('x'), MyType).label('foo')]).limit(1)
dialect = oracle.dialect()
compiled = stmt.compile(dialect=dialect)
- assert isinstance(compiled.result_map['foo'][-1], MyType)
+ assert isinstance(compiled._create_result_map()['foo'][-1], MyType)
def test_use_binds_for_limits_disabled(self):
t = table('sometable', column('col1'), column('col2'))
@@ -651,7 +663,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = t1.insert().values(c1=1).returning(fn, t1.c.c3)
compiled = stmt.compile(dialect=oracle.dialect())
eq_(
- compiled.result_map,
+ compiled._create_result_map(),
{'ret_1': ('ret_1', (t1.c.c3, 'c3', 'c3'), t1.c.c3.type),
'ret_0': ('ret_0', (fn, 'lower', None), fn.type)}
@@ -825,6 +837,7 @@ class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -852,7 +865,7 @@ create table local_table(
create synonym %(test_schema)s.ptable for %(test_schema)s.parent;
create synonym %(test_schema)s.ctable for %(test_schema)s.child;
-create synonym %(test_schema)s_ptable for %(test_schema)s.parent;
+create synonym %(test_schema)s_pt for %(test_schema)s.parent;
create synonym %(test_schema)s.local_table for local_table;
@@ -874,7 +887,7 @@ drop table %(test_schema)s.parent;
drop table local_table;
drop synonym %(test_schema)s.ctable;
drop synonym %(test_schema)s.ptable;
-drop synonym %(test_schema)s_ptable;
+drop synonym %(test_schema)s_pt;
drop synonym %(test_schema)s.local_table;
""" % {"test_schema": testing.config.test_schema}).split(";"):
@@ -901,11 +914,12 @@ drop synonym %(test_schema)s.local_table;
def test_reflect_alt_table_owner_local_synonym(self):
meta = MetaData(testing.db)
- parent = Table('test_schema_ptable', meta, autoload=True,
+ parent = Table('%s_pt' % testing.config.test_schema, meta, autoload=True,
oracle_resolve_synonyms=True)
self.assert_compile(parent.select(),
- "SELECT test_schema_ptable.id, "
- "test_schema_ptable.data FROM test_schema_ptable")
+ "SELECT %(test_schema)s_pt.id, "
+ "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
+ % {"test_schema": testing.config.test_schema})
select([parent]).execute().fetchall()
def test_reflect_alt_synonym_owner_local_table(self):
@@ -1036,6 +1050,7 @@ drop synonym %(test_schema)s.local_table;
class ConstraintTest(fixtures.TablesTest):
__only_on__ = 'oracle'
+ __backend__ = True
run_deletes = None
@classmethod
@@ -1062,6 +1077,7 @@ class TwoPhaseTest(fixtures.TablesTest):
so requires a carefully written test."""
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
@classmethod
def define_tables(cls, metadata):
@@ -1226,6 +1242,7 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
class TypesTest(fixtures.TestBase):
__only_on__ = 'oracle'
__dialect__ = oracle.OracleDialect()
+ __backend__ = True
@testing.fails_on('+zxjdbc', 'zxjdbc lacks the FIXED_CHAR dbapi type')
@@ -1683,6 +1700,7 @@ class EuroNumericTest(fixtures.TestBase):
"""test the numeric output_type_handler when using non-US locale for NLS_LANG."""
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def setup(self):
self.old_nls_lang = os.environ.get('NLS_LANG', False)
@@ -1720,6 +1738,7 @@ class DontReflectIOTTest(fixtures.TestBase):
table_names."""
__only_on__ = 'oracle'
+ __backend__ = True
def setup(self):
testing.db.execute("""
@@ -1748,6 +1767,7 @@ class DontReflectIOTTest(fixtures.TestBase):
class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -1785,6 +1805,7 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
class UnsupportedIndexReflectTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.emits_warning("No column names")
@testing.provide_metadata
@@ -1804,6 +1825,9 @@ class UnsupportedIndexReflectTest(fixtures.TestBase):
def all_tables_compression_missing():
try:
testing.db.execute('SELECT compression FROM all_tables')
+ if "Enterprise Edition" not in testing.db.scalar(
+ "select * from v$version"):
+ return True
return False
except:
return True
@@ -1812,6 +1836,9 @@ def all_tables_compression_missing():
def all_tables_compress_for_missing():
try:
testing.db.execute('SELECT compress_for FROM all_tables')
+ if "Enterprise Edition" not in testing.db.scalar(
+ "select * from v$version"):
+ return True
return False
except:
return True
@@ -1819,6 +1846,7 @@ def all_tables_compress_for_missing():
class TableReflectionTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
@testing.fails_if(all_tables_compression_missing)
@@ -1851,9 +1879,35 @@ class TableReflectionTest(fixtures.TestBase):
tbl = Table('test_compress', m2, autoload=True)
assert tbl.dialect_options['oracle']['compress'] == "OLTP"
+ @testing.provide_metadata
+ def test_reflect_lowercase_forced_tables(self):
+ metadata = self.metadata
+
+ Table(
+ quoted_name('t1', quote=True), metadata,
+ Column('id', Integer, primary_key=True),
+ )
+ Table(
+ quoted_name('t2', quote=True), metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t1id', ForeignKey('t1.id'))
+ )
+ metadata.create_all()
+
+ m2 = MetaData(testing.db)
+ t2_ref = Table(quoted_name('t2', quote=True), m2, autoload=True)
+ t1_ref = m2.tables['t1']
+ assert t2_ref.c.t1id.references(t1_ref.c.id)
+
+ m3 = MetaData(testing.db)
+ m3.reflect(only=lambda name, m: name.lower() in ('t1', 't2'))
+ assert m3.tables['t2'].c.t1id.references(m3.tables['t1'].c.id)
+
+
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_basic(self):
@@ -1949,6 +2003,7 @@ class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
class ExecuteTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
def test_basic(self):
eq_(testing.db.execute('/*+ this is a comment */ SELECT 1 FROM '
@@ -2001,6 +2056,7 @@ class ExecuteTest(fixtures.TestBase):
class UnicodeSchemaTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_quoted_column_non_unicode(self):
@@ -2038,12 +2094,16 @@ class UnicodeSchemaTest(fixtures.TestBase):
class DBLinkReflectionTest(fixtures.TestBase):
__requires__ = 'oracle_test_dblink',
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
from sqlalchemy.testing import config
cls.dblink = config.file_config.get('sqla_testing', 'oracle_db_link')
+ # note that the synonym here is still not totally functional
+ # when accessing via a different username as we do with the multiprocess
+ # test suite, so testing here is minimal
with testing.db.connect() as conn:
conn.execute(
"create table test_table "
@@ -2057,15 +2117,6 @@ class DBLinkReflectionTest(fixtures.TestBase):
conn.execute("drop synonym test_table_syn")
conn.execute("drop table test_table")
- def test_hello_world(self):
- """test that the synonym/dblink is functional."""
- testing.db.execute("insert into test_table_syn (id, data) "
- "values (1, 'some data')")
- eq_(
- testing.db.execute("select * from test_table_syn").first(),
- (1, 'some data')
- )
-
def test_reflection(self):
"""test the resolution of the synonym/dblink. """
m = MetaData()
@@ -2078,6 +2129,7 @@ class DBLinkReflectionTest(fixtures.TestBase):
class ServiceNameTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def test_cx_oracle_service_name(self):
url_string = 'oracle+cx_oracle://scott:tiger@host/?service_name=hr'
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 44e4eda42..33903ff89 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
assert_raises_message, is_
from sqlalchemy import Table, select, bindparam, Column,\
MetaData, func, extract, ForeignKey, text, DefaultClause, and_, \
- create_engine, UniqueConstraint
+ create_engine, UniqueConstraint, Index
from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time
from sqlalchemy import types as sqltypes
from sqlalchemy import event, inspect
@@ -20,7 +20,7 @@ from sqlalchemy.engine.url import make_url
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults, engines
from sqlalchemy import testing
-from sqlalchemy.schema import CreateTable
+from sqlalchemy.schema import CreateTable, FetchedValue
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.testing import mock
@@ -535,29 +535,12 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
assert e.pool.__class__ is pool.NullPool
-
-class AttachedMemoryDBTest(fixtures.TestBase):
+class AttachedDBTest(fixtures.TestBase):
__only_on__ = 'sqlite'
- dbname = None
-
- def setUp(self):
- self.conn = conn = testing.db.connect()
- if self.dbname is None:
- dbname = ':memory:'
- else:
- dbname = self.dbname
- conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname)
- self.metadata = MetaData()
-
- def tearDown(self):
- self.metadata.drop_all(self.conn)
- self.conn.execute('DETACH DATABASE test_schema')
- if self.dbname:
- os.remove(self.dbname)
-
def _fixture(self):
meta = self.metadata
+ self.conn = testing.db.connect()
ct = Table(
'created', meta,
Column('id', Integer),
@@ -567,6 +550,14 @@ class AttachedMemoryDBTest(fixtures.TestBase):
meta.create_all(self.conn)
return ct
+ def setup(self):
+ self.conn = testing.db.connect()
+ self.metadata = MetaData()
+
+ def teardown(self):
+ self.metadata.drop_all(self.conn)
+ self.conn.close()
+
def test_no_tables(self):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), [])
@@ -581,6 +572,18 @@ class AttachedMemoryDBTest(fixtures.TestBase):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), ["created"])
+ def test_schema_names(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
+ # implicitly creates a "temp" schema
+ self.conn.execute("select * from sqlite_temp_master")
+
+ # we're not including it
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
def test_reflect_system_table(self):
meta = MetaData(self.conn)
alt_master = Table(
@@ -633,10 +636,6 @@ class AttachedMemoryDBTest(fixtures.TestBase):
eq_(row['name'], 'foo')
-class AttachedFileDBTest(AttachedMemoryDBTest):
- dbname = 'attached_db.db'
-
-
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
"""Tests SQLite-dialect specific compilation."""
@@ -732,6 +731,38 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
")"
)
+ def test_create_partial_index(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', Integer))
+ idx = Index('test_idx1', tbl.c.data,
+ sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10))
+
+ # test quoting and all that
+
+ idx2 = Index('test_idx2', tbl.c.data,
+ sqlite_where=and_(tbl.c.data > 'a', tbl.c.data
+ < "b's"))
+ self.assert_compile(schema.CreateIndex(idx),
+ 'CREATE INDEX test_idx1 ON testtbl (data) '
+ 'WHERE data > 5 AND data < 10',
+ dialect=sqlite.dialect())
+ self.assert_compile(schema.CreateIndex(idx2),
+ "CREATE INDEX test_idx2 ON testtbl (data) "
+ "WHERE data > 'a' AND data < 'b''s'",
+ dialect=sqlite.dialect())
+
+ def test_no_autoinc_on_composite_pk(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column('x', Integer, primary_key=True, autoincrement=True),
+ Column('y', Integer, primary_key=True))
+ assert_raises_message(
+ exc.CompileError,
+ "SQLite does not support autoincrement for composite",
+ CreateTable(t).compile, dialect=sqlite.dialect()
+ )
+
class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@@ -761,23 +792,46 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk2(self):
+ # now raises CompileError due to [ticket:3216]
assert_raises(
- exc.DBAPIError, self._test_empty_insert,
+ exc.CompileError, self._test_empty_insert,
Table(
'b', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
- def test_empty_insert_pk3(self):
+ def test_empty_insert_pk2_fv(self):
assert_raises(
exc.DBAPIError, self._test_empty_insert,
Table(
+ 'b', MetaData(testing.db),
+ Column('x', Integer, primary_key=True,
+ server_default=FetchedValue()),
+ Column('y', Integer, primary_key=True,
+ server_default=FetchedValue())))
+
+ @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
+ def test_empty_insert_pk3(self):
+ # now raises CompileError due to [ticket:3216]
+ assert_raises(
+ exc.CompileError, self._test_empty_insert,
+ Table(
'c', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, DefaultClause('123'), primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
+ def test_empty_insert_pk3_fv(self):
+ assert_raises(
+ exc.DBAPIError, self._test_empty_insert,
+ Table(
+ 'c', MetaData(testing.db),
+ Column('x', Integer, primary_key=True,
+ server_default=FetchedValue()),
+ Column('y', Integer, DefaultClause('123'), primary_key=True)))
+
+ @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk4(self):
self._test_empty_insert(
Table(
diff --git a/test/dialect/test_sybase.py b/test/dialect/test_sybase.py
index 1318a282b..d8f7d3aae 100644
--- a/test/dialect/test_sybase.py
+++ b/test/dialect/test_sybase.py
@@ -1,7 +1,8 @@
-from sqlalchemy import *
+from sqlalchemy import extract, select
from sqlalchemy import sql
from sqlalchemy.databases import sybase
-from sqlalchemy.testing import *
+from sqlalchemy.testing import assert_raises_message, \
+ fixtures, AssertsCompiledSQL
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -17,12 +18,19 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'milliseconds': 'millisecond',
'millisecond': 'millisecond',
'year': 'year',
- }
+ }
for field, subst in list(mapping.items()):
self.assert_compile(
select([extract(field, t.c.col1)]),
'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
+ def test_offset_not_supported(self):
+ stmt = select([1]).offset(10)
+ assert_raises_message(
+ NotImplementedError,
+ "Sybase ASE does not support OFFSET",
+ stmt.compile, dialect=self.__dialect__
+ )
diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py
index 8f6c547f1..69ab721c1 100644
--- a/test/engine/test_bind.py
+++ b/test/engine/test_bind.py
@@ -11,6 +11,7 @@ import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.testing import fixtures
+
class BindTest(fixtures.TestBase):
def test_bind_close_engine(self):
e = testing.db
@@ -76,7 +77,8 @@ class BindTest(fixtures.TestBase):
]:
assert_raises_message(
exc.UnboundExecutionError,
- "Table object 'test_table' is not bound to an Engine or Connection.",
+ ("Table object 'test_table' is not bound to an Engine or "
+ "Connection."),
meth
)
@@ -163,7 +165,6 @@ class BindTest(fixtures.TestBase):
finally:
metadata.drop_all(bind=conn)
-
def test_clauseelement(self):
metadata = MetaData()
table = Table('test_table', metadata,
@@ -198,5 +199,3 @@ class BindTest(fixtures.TestBase):
if isinstance(bind, engine.Connection):
bind.close()
metadata.drop_all(bind=testing.db)
-
-
diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py
index 0d828b340..8beb255eb 100644
--- a/test/engine/test_ddlevents.py
+++ b/test/engine/test_ddlevents.py
@@ -11,38 +11,10 @@ from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import AssertsCompiledSQL, eq_
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
class DDLEventTest(fixtures.TestBase):
- class Canary(object):
- def __init__(self, schema_item, bind):
- self.state = None
- self.schema_item = schema_item
- self.bind = bind
-
- def before_create(self, schema_item, bind, **kw):
- assert self.state is None
- assert schema_item is self.schema_item
- assert bind is self.bind
- self.state = 'before-create'
-
- def after_create(self, schema_item, bind, **kw):
- assert self.state in ('before-create', 'skipped')
- assert schema_item is self.schema_item
- assert bind is self.bind
- self.state = 'after-create'
-
- def before_drop(self, schema_item, bind, **kw):
- assert self.state is None
- assert schema_item is self.schema_item
- assert bind is self.bind
- self.state = 'before-drop'
-
- def after_drop(self, schema_item, bind, **kw):
- assert self.state in ('before-drop', 'skipped')
- assert schema_item is self.schema_item
- assert bind is self.bind
- self.state = 'after-drop'
def setup(self):
self.bind = engines.mock_engine()
@@ -51,128 +23,276 @@ class DDLEventTest(fixtures.TestBase):
def test_table_create_before(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'before_create', canary.before_create)
table.create(bind)
- assert canary.state == 'before-create'
table.drop(bind)
- assert canary.state == 'before-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY)
+ ]
+ )
def test_table_create_after(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'after_create', canary.after_create)
- canary.state = 'skipped'
table.create(bind)
- assert canary.state == 'after-create'
table.drop(bind)
- assert canary.state == 'after-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.after_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY)
+ ]
+ )
def test_table_create_both(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'before_create', canary.before_create)
event.listen(table, 'after_create', canary.after_create)
table.create(bind)
- assert canary.state == 'after-create'
table.drop(bind)
- assert canary.state == 'after-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ mock.call.after_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY)
+ ]
+ )
def test_table_drop_before(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'before_drop', canary.before_drop)
table.create(bind)
- assert canary.state is None
table.drop(bind)
- assert canary.state == 'before-drop'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ ]
+ )
def test_table_drop_after(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'after_drop', canary.after_drop)
table.create(bind)
- assert canary.state is None
canary.state = 'skipped'
table.drop(bind)
- assert canary.state == 'after-drop'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.after_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ ]
+ )
def test_table_drop_both(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'before_drop', canary.before_drop)
event.listen(table, 'after_drop', canary.after_drop)
table.create(bind)
- assert canary.state is None
table.drop(bind)
- assert canary.state == 'after-drop'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ mock.call.after_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ ]
+ )
def test_table_all(self):
table, bind = self.table, self.bind
- canary = self.Canary(table, bind)
+ canary = mock.Mock()
event.listen(table, 'before_create', canary.before_create)
event.listen(table, 'after_create', canary.after_create)
event.listen(table, 'before_drop', canary.before_drop)
event.listen(table, 'after_drop', canary.after_drop)
- assert canary.state is None
table.create(bind)
- assert canary.state == 'after-create'
- canary.state = None
table.drop(bind)
- assert canary.state == 'after-drop'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ mock.call.after_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ mock.call.before_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ mock.call.after_drop(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ ]
+ )
- def test_table_create_before(self):
+ def test_metadata_create_before(self):
metadata, bind = self.metadata, self.bind
- canary = self.Canary(metadata, bind)
+ canary = mock.Mock()
event.listen(metadata, 'before_create', canary.before_create)
metadata.create_all(bind)
- assert canary.state == 'before-create'
metadata.drop_all(bind)
- assert canary.state == 'before-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_create(
+ # checkfirst is False because of the MockConnection
+ # used in the current testing strategy.
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
def test_metadata_create_after(self):
metadata, bind = self.metadata, self.bind
- canary = self.Canary(metadata, bind)
+ canary = mock.Mock()
event.listen(metadata, 'after_create', canary.after_create)
- canary.state = 'skipped'
metadata.create_all(bind)
- assert canary.state == 'after-create'
metadata.drop_all(bind)
- assert canary.state == 'after-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.after_create(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
def test_metadata_create_both(self):
metadata, bind = self.metadata, self.bind
- canary = self.Canary(metadata, bind)
+ canary = mock.Mock()
event.listen(metadata, 'before_create', canary.before_create)
event.listen(metadata, 'after_create', canary.after_create)
metadata.create_all(bind)
- assert canary.state == 'after-create'
metadata.drop_all(bind)
- assert canary.state == 'after-create'
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_create(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ mock.call.after_create(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
+
+ def test_metadata_drop_before(self):
+ metadata, bind = self.metadata, self.bind
+ canary = mock.Mock()
+ event.listen(metadata, 'before_drop', canary.before_drop)
+
+ metadata.create_all(bind)
+ metadata.drop_all(bind)
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_drop(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
+
+ def test_metadata_drop_after(self):
+ metadata, bind = self.metadata, self.bind
+ canary = mock.Mock()
+ event.listen(metadata, 'after_drop', canary.after_drop)
+
+ metadata.create_all(bind)
+ metadata.drop_all(bind)
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.after_drop(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
+
+ def test_metadata_drop_both(self):
+ metadata, bind = self.metadata, self.bind
+ canary = mock.Mock()
+
+ event.listen(metadata, 'before_drop', canary.before_drop)
+ event.listen(metadata, 'after_drop', canary.after_drop)
+
+ metadata.create_all(bind)
+ metadata.drop_all(bind)
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.before_drop(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ mock.call.after_drop(
+ metadata, self.bind, checkfirst=False,
+ tables=list(metadata.tables.values()),
+ _ddl_runner=mock.ANY),
+ ]
+ )
def test_metadata_table_isolation(self):
- metadata, table, bind = self.metadata, self.table, self.bind
- table_canary = self.Canary(table, bind)
+ metadata, table = self.metadata, self.table
+ table_canary = mock.Mock()
+ metadata_canary = mock.Mock()
event.listen(table, 'before_create', table_canary.before_create)
- metadata_canary = self.Canary(metadata, bind)
event.listen(metadata, 'before_create', metadata_canary.before_create)
self.table.create(self.bind)
- assert metadata_canary.state == None
+ eq_(
+ table_canary.mock_calls,
+ [
+ mock.call.before_create(
+ table, self.bind, checkfirst=False,
+ _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY),
+ ]
+ )
+ eq_(
+ metadata_canary.mock_calls,
+ []
+ )
def test_append_listener(self):
metadata, table, bind = self.metadata, self.table, self.bind
@@ -266,16 +386,16 @@ class DDLExecutionTest(fixtures.TestBase):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
users.append_ddl_listener('before-create',
- lambda e, t, b:canary.append('mxyzptlk')
+ lambda e, t, b: canary.append('mxyzptlk')
)
users.append_ddl_listener('after-create',
- lambda e, t, b:canary.append('klptzyxm')
+ lambda e, t, b: canary.append('klptzyxm')
)
users.append_ddl_listener('before-drop',
- lambda e, t, b:canary.append('xyzzy')
+ lambda e, t, b: canary.append('xyzzy')
)
users.append_ddl_listener('after-drop',
- lambda e, t, b:canary.append('fnord')
+ lambda e, t, b: canary.append('fnord')
)
metadata.create_all()
@@ -295,16 +415,16 @@ class DDLExecutionTest(fixtures.TestBase):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
metadata.append_ddl_listener('before-create',
- lambda e, t, b, tables=None:canary.append('mxyzptlk')
+ lambda e, t, b, tables=None: canary.append('mxyzptlk')
)
metadata.append_ddl_listener('after-create',
- lambda e, t, b, tables=None:canary.append('klptzyxm')
+ lambda e, t, b, tables=None: canary.append('klptzyxm')
)
metadata.append_ddl_listener('before-drop',
- lambda e, t, b, tables=None:canary.append('xyzzy')
+ lambda e, t, b, tables=None: canary.append('xyzzy')
)
metadata.append_ddl_listener('after-drop',
- lambda e, t, b, tables=None:canary.append('fnord')
+ lambda e, t, b, tables=None: canary.append('fnord')
)
metadata.create_all()
@@ -369,8 +489,8 @@ class DDLExecutionTest(fixtures.TestBase):
metadata, users, engine = self.metadata, self.users, self.engine
nonpg_mock = engines.mock_engine(dialect_name='sqlite')
pg_mock = engines.mock_engine(dialect_name='postgresql')
- constraint = CheckConstraint('a < b', name='my_test_constraint'
- , table=users)
+ constraint = CheckConstraint('a < b', name='my_test_constraint',
+ table=users)
# by placing the constraint in an Add/Drop construct, the
# 'inline_ddl' flag is set to False
@@ -405,8 +525,8 @@ class DDLExecutionTest(fixtures.TestBase):
metadata, users, engine = self.metadata, self.users, self.engine
nonpg_mock = engines.mock_engine(dialect_name='sqlite')
pg_mock = engines.mock_engine(dialect_name='postgresql')
- constraint = CheckConstraint('a < b', name='my_test_constraint'
- , table=users)
+ constraint = CheckConstraint('a < b', name='my_test_constraint',
+ table=users)
# by placing the constraint in an Add/Drop construct, the
# 'inline_ddl' flag is set to False
@@ -489,8 +609,6 @@ class DDLExecutionTest(fixtures.TestBase):
)
-
-
class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
def mock_engine(self):
executor = lambda *a, **kw: None
@@ -527,12 +645,11 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect)
self.assert_compile(ddl.against(sane_schema), 'S S-T T-s.t-b',
dialect=dialect)
- self.assert_compile(ddl.against(insane_alone), 'S S-T T-"t t"-b'
- , dialect=dialect)
+ self.assert_compile(ddl.against(insane_alone), 'S S-T T-"t t"-b',
+ dialect=dialect)
self.assert_compile(ddl.against(insane_schema),
'S S-T T-"s s"."t t"-b', dialect=dialect)
-
def test_filter(self):
cx = self.mock_engine()
@@ -543,10 +660,10 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
assert DDL('').execute_if(dialect=target)._should_execute(tbl, cx)
assert not DDL('').execute_if(dialect='bogus').\
_should_execute(tbl, cx)
- assert DDL('').execute_if(callable_=lambda d, y,z, **kw: True).\
+ assert DDL('').execute_if(callable_=lambda d, y, z, **kw: True).\
_should_execute(tbl, cx)
assert(DDL('').execute_if(
- callable_=lambda d, y,z, **kw: z.engine.name
+ callable_=lambda d, y, z, **kw: z.engine.name
!= 'bogus').
_should_execute(tbl, cx))
@@ -561,16 +678,14 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
assert DDL('', on=target)._should_execute_deprecated('x', tbl, cx)
assert not DDL('', on='bogus').\
_should_execute_deprecated('x', tbl, cx)
- assert DDL('', on=lambda d, x,y,z: True).\
+ assert DDL('', on=lambda d, x, y, z: True).\
_should_execute_deprecated('x', tbl, cx)
- assert(DDL('', on=lambda d, x,y,z: z.engine.name != 'bogus').
+ assert(DDL('', on=lambda d, x, y, z: z.engine.name != 'bogus').
_should_execute_deprecated('x', tbl, cx))
def test_repr(self):
assert repr(DDL('s'))
assert repr(DDL('s', on='engine'))
assert repr(DDL('s', on=lambda x: 1))
- assert repr(DDL('s', context={'a':1}))
- assert repr(DDL('s', on='engine', context={'a':1}))
-
-
+ assert repr(DDL('s', context={'a': 1}))
+ assert repr(DDL('s', on='engine', context={'a': 1}))
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 730ef4446..5ea5d3515 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -1,7 +1,7 @@
# coding: utf-8
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
- config, is_
+ config, is_, is_not_, le_
import re
from sqlalchemy.testing.util import picklers
from sqlalchemy.interfaces import ConnectionProxy
@@ -21,6 +21,8 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock, call, patch
from contextlib import contextmanager
from sqlalchemy.util import nested
+from sqlalchemy.testing.assertsql import CompiledSQL
+
users, metadata, users_autoinc = None, None, None
@@ -484,6 +486,32 @@ class ExecuteTest(fixtures.TestBase):
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@testing.requires.ad_hoc_engines
+ def test_dispose_event(self):
+ canary = Mock()
+ eng = create_engine(testing.db.url)
+ event.listen(eng, "engine_disposed", canary)
+
+ conn = eng.connect()
+ conn.close()
+ eng.dispose()
+
+
+ conn = eng.connect()
+ conn.close()
+
+ eq_(
+ canary.mock_calls,
+ [call(eng)]
+ )
+
+ eng.dispose()
+
+ eq_(
+ canary.mock_calls,
+ [call(eng), call(eng)]
+ )
+
+ @testing.requires.ad_hoc_engines
def test_autocommit_option_no_issue_first_connect(self):
eng = create_engine(testing.db.url)
eng.update_execution_options(autocommit=True)
@@ -779,6 +807,40 @@ class CompiledCacheTest(fixtures.TestBase):
eq_(compile_mock.call_count, 1)
eq_(len(cache), 1)
+ @testing.requires.schemas
+ @testing.provide_metadata
+ def test_schema_translate_in_key(self):
+ Table(
+ 'x', self.metadata, Column('q', Integer))
+ Table(
+ 'x', self.metadata, Column('q', Integer),
+ schema=config.test_schema)
+ self.metadata.create_all()
+
+ m = MetaData()
+ t1 = Table('x', m, Column('q', Integer))
+ ins = t1.insert()
+ stmt = select([t1.c.q])
+
+ cache = {}
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ ) as conn:
+ conn.execute(ins, {"q": 1})
+ eq_(conn.scalar(stmt), 1)
+
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ schema_translate_map={None: config.test_schema}
+ ) as conn:
+ conn.execute(ins, {"q": 2})
+ eq_(conn.scalar(stmt), 2)
+
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ ) as conn:
+ eq_(conn.scalar(stmt), 1)
+
class MockStrategyTest(fixtures.TestBase):
@@ -963,6 +1025,156 @@ class ResultProxyTest(fixtures.TestBase):
finally:
r.close()
+class SchemaTranslateTest(fixtures.TestBase, testing.AssertsExecutionResults):
+ __requires__ = 'schemas',
+ __backend__ = True
+
+ def test_create_table(self):
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t1 = Table('t1', metadata, Column('x', Integer))
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+ t3 = Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+
+ t1.create(conn)
+ t2.create(conn)
+ t3.create(conn)
+
+ t3.drop(conn)
+ t2.drop(conn)
+ t1.drop(conn)
+
+ asserter.assert_(
+ CompiledSQL("CREATE TABLE %s.t1 (x INTEGER)" % config.test_schema),
+ CompiledSQL("CREATE TABLE %s.t2 (x INTEGER)" % config.test_schema),
+ CompiledSQL("CREATE TABLE t3 (x INTEGER)"),
+ CompiledSQL("DROP TABLE t3"),
+ CompiledSQL("DROP TABLE %s.t2" % config.test_schema),
+ CompiledSQL("DROP TABLE %s.t1" % config.test_schema)
+ )
+
+ def _fixture(self):
+ metadata = self.metadata
+ Table(
+ 't1', metadata, Column('x', Integer),
+ schema=config.test_schema)
+ Table(
+ 't2', metadata, Column('x', Integer),
+ schema=config.test_schema)
+ Table('t3', metadata, Column('x', Integer), schema=None)
+ metadata.create_all()
+
+ def test_ddl_hastable(self):
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ Table('t1', metadata, Column('x', Integer))
+ Table('t2', metadata, Column('x', Integer), schema="foo")
+ Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+ metadata.create_all(conn)
+
+ assert config.db.has_table('t1', schema=config.test_schema)
+ assert config.db.has_table('t2', schema=config.test_schema)
+ assert config.db.has_table('t3', schema=None)
+
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+ metadata.drop_all(conn)
+
+ assert not config.db.has_table('t1', schema=config.test_schema)
+ assert not config.db.has_table('t2', schema=config.test_schema)
+ assert not config.db.has_table('t3', schema=None)
+
+ @testing.provide_metadata
+ def test_crud(self):
+ self._fixture()
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t1 = Table('t1', metadata, Column('x', Integer))
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+ t3 = Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+
+ conn.execute(t1.insert(), {'x': 1})
+ conn.execute(t2.insert(), {'x': 1})
+ conn.execute(t3.insert(), {'x': 1})
+
+ conn.execute(t1.update().values(x=1).where(t1.c.x == 1))
+ conn.execute(t2.update().values(x=2).where(t2.c.x == 1))
+ conn.execute(t3.update().values(x=3).where(t3.c.x == 1))
+
+ eq_(conn.scalar(select([t1.c.x])), 1)
+ eq_(conn.scalar(select([t2.c.x])), 2)
+ eq_(conn.scalar(select([t3.c.x])), 3)
+
+ conn.execute(t1.delete())
+ conn.execute(t2.delete())
+ conn.execute(t3.delete())
+
+ asserter.assert_(
+ CompiledSQL(
+ "INSERT INTO %s.t1 (x) VALUES (:x)" % config.test_schema),
+ CompiledSQL(
+ "INSERT INTO %s.t2 (x) VALUES (:x)" % config.test_schema),
+ CompiledSQL(
+ "INSERT INTO t3 (x) VALUES (:x)"),
+ CompiledSQL(
+ "UPDATE %s.t1 SET x=:x WHERE %s.t1.x = :x_1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL(
+ "UPDATE %s.t2 SET x=:x WHERE %s.t2.x = :x_1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("UPDATE t3 SET x=:x WHERE t3.x = :x_1"),
+ CompiledSQL("SELECT %s.t1.x FROM %s.t1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("SELECT %s.t2.x FROM %s.t2" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("SELECT t3.x FROM t3"),
+ CompiledSQL("DELETE FROM %s.t1" % config.test_schema),
+ CompiledSQL("DELETE FROM %s.t2" % config.test_schema),
+ CompiledSQL("DELETE FROM t3")
+ )
+
+ @testing.provide_metadata
+ def test_via_engine(self):
+ self._fixture()
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ eng = config.db.execution_options(schema_translate_map=map_)
+ conn = eng.connect()
+ conn.execute(select([t2.c.x]))
+ asserter.assert_(
+ CompiledSQL("SELECT %s.t2.x FROM %s.t2" % (
+ config.test_schema, config.test_schema)),
+ )
+
class ExecutionOptionsTest(fixtures.TestBase):
@@ -1021,54 +1233,110 @@ class ExecutionOptionsTest(fixtures.TestBase):
)
-class AlternateResultProxyTest(fixtures.TestBase):
+class AlternateResultProxyTest(fixtures.TablesTest):
__requires__ = ('sqlite', )
@classmethod
- def setup_class(cls):
+ def setup_bind(cls):
cls.engine = engine = testing_engine('sqlite://')
- m = MetaData()
- cls.table = t = Table('test', m,
- Column('x', Integer, primary_key=True),
- Column('y', String(50, convert_unicode='force'))
- )
- m.create_all(engine)
- engine.execute(t.insert(), [
+ return engine
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'test', metadata,
+ Column('x', Integer, primary_key=True),
+ Column('y', String(50, convert_unicode='force'))
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.engine.execute(cls.tables.test.insert(), [
{'x': i, 'y': "t_%d" % i} for i in range(1, 12)
])
- def _test_proxy(self, cls):
+ @contextmanager
+ def _proxy_fixture(self, cls):
+ self.table = self.tables.test
+
class ExcCtx(default.DefaultExecutionContext):
def get_result_proxy(self):
return cls(self)
- self.engine.dialect.execution_ctx_cls = ExcCtx
- rows = []
- r = self.engine.execute(select([self.table]))
- assert isinstance(r, cls)
- for i in range(5):
- rows.append(r.fetchone())
- eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
-
- rows = r.fetchmany(3)
- eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)])
-
- rows = r.fetchall()
- eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
-
- r = self.engine.execute(select([self.table]))
- rows = r.fetchmany(None)
- eq_(rows[0], (1, "t_1"))
- # number of rows here could be one, or the whole thing
- assert len(rows) == 1 or len(rows) == 11
-
- r = self.engine.execute(select([self.table]).limit(1))
- r.fetchone()
- eq_(r.fetchone(), None)
-
- r = self.engine.execute(select([self.table]).limit(5))
- rows = r.fetchmany(6)
- eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
+ self.patcher = patch.object(
+ self.engine.dialect, "execution_ctx_cls", ExcCtx)
+ with self.patcher:
+ yield
+
+ def _test_proxy(self, cls):
+ with self._proxy_fixture(cls):
+ rows = []
+ r = self.engine.execute(select([self.table]))
+ assert isinstance(r, cls)
+ for i in range(5):
+ rows.append(r.fetchone())
+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
+
+ rows = r.fetchmany(3)
+ eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)])
+
+ rows = r.fetchall()
+ eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
+
+ r = self.engine.execute(select([self.table]))
+ rows = r.fetchmany(None)
+ eq_(rows[0], (1, "t_1"))
+ # number of rows here could be one, or the whole thing
+ assert len(rows) == 1 or len(rows) == 11
+
+ r = self.engine.execute(select([self.table]).limit(1))
+ r.fetchone()
+ eq_(r.fetchone(), None)
+
+ r = self.engine.execute(select([self.table]).limit(5))
+ rows = r.fetchmany(6)
+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
+
+ # result keeps going just fine with blank results...
+ eq_(r.fetchmany(2), [])
+
+ eq_(r.fetchmany(2), [])
+
+ eq_(r.fetchall(), [])
+
+ eq_(r.fetchone(), None)
+
+ # until we close
+ r.close()
+
+ self._assert_result_closed(r)
+
+ r = self.engine.execute(select([self.table]).limit(5))
+ eq_(r.first(), (1, "t_1"))
+ self._assert_result_closed(r)
+
+ r = self.engine.execute(select([self.table]).limit(5))
+ eq_(r.scalar(), 1)
+ self._assert_result_closed(r)
+
+ def _assert_result_closed(self, r):
+ assert_raises_message(
+ tsa.exc.ResourceClosedError,
+ "object is closed",
+ r.fetchone
+ )
+
+ assert_raises_message(
+ tsa.exc.ResourceClosedError,
+ "object is closed",
+ r.fetchmany, 2
+ )
+
+ assert_raises_message(
+ tsa.exc.ResourceClosedError,
+ "object is closed",
+ r.fetchall
+ )
def test_plain(self):
self._test_proxy(_result.ResultProxy)
@@ -1082,6 +1350,42 @@ class AlternateResultProxyTest(fixtures.TestBase):
def test_buffered_column_result_proxy(self):
self._test_proxy(_result.BufferedColumnResultProxy)
+ def test_buffered_row_growth(self):
+ with self._proxy_fixture(_result.BufferedRowResultProxy):
+ with self.engine.connect() as conn:
+ conn.execute(self.table.insert(), [
+ {'x': i, 'y': "t_%d" % i} for i in range(15, 1200)
+ ])
+ result = conn.execute(self.table.select())
+ checks = {
+ 0: 5, 1: 10, 9: 20, 135: 250, 274: 500,
+ 1351: 1000
+ }
+ for idx, row in enumerate(result, 0):
+ if idx in checks:
+ eq_(result._bufsize, checks[idx])
+ le_(
+ len(result._BufferedRowResultProxy__rowbuffer),
+ 1000
+ )
+
+ def test_max_row_buffer_option(self):
+ with self._proxy_fixture(_result.BufferedRowResultProxy):
+ with self.engine.connect() as conn:
+ conn.execute(self.table.insert(), [
+ {'x': i, 'y': "t_%d" % i} for i in range(15, 1200)
+ ])
+ result = conn.execution_options(max_row_buffer=27).execute(
+ self.table.select()
+ )
+ for idx, row in enumerate(result, 0):
+ if idx in (16, 70, 150, 250):
+ eq_(result._bufsize, 27)
+ le_(
+ len(result._BufferedRowResultProxy__rowbuffer),
+ 27
+ )
+
class EngineEventsTest(fixtures.TestBase):
__requires__ = 'ad_hoc_engines',
@@ -1902,6 +2206,47 @@ class HandleErrorTest(fixtures.TestBase):
self._test_alter_disconnect(True, False)
self._test_alter_disconnect(False, False)
+ @testing.requires.independent_connections
+ def _test_alter_invalidate_pool_to_false(self, set_to_false):
+ orig_error = True
+
+ engine = engines.testing_engine()
+
+ @event.listens_for(engine, "handle_error")
+ def evt(ctx):
+ if set_to_false:
+ ctx.invalidate_pool_on_disconnect = False
+
+ c1, c2, c3 = engine.pool.connect(), \
+ engine.pool.connect(), engine.pool.connect()
+ crecs = [conn._connection_record for conn in (c1, c2, c3)]
+ c1.close()
+ c2.close()
+ c3.close()
+
+ with patch.object(engine.dialect, "is_disconnect",
+ Mock(return_value=orig_error)):
+
+ with engine.connect() as c:
+ target_crec = c.connection._connection_record
+ try:
+ c.execute("SELECT x FROM nonexistent")
+ assert False
+ except tsa.exc.StatementError as st:
+ eq_(st.connection_invalidated, True)
+
+ for crec in crecs:
+ if crec is target_crec or not set_to_false:
+ is_not_(crec.connection, crec.get_connection())
+ else:
+ is_(crec.connection, crec.get_connection())
+
+ def test_alter_invalidate_pool_to_false(self):
+ self._test_alter_invalidate_pool_to_false(True)
+
+ def test_alter_invalidate_pool_stays_true(self):
+ self._test_alter_invalidate_pool_to_false(False)
+
def test_handle_error_event_connect_isolation_level(self):
engine = engines.testing_engine()
@@ -2491,3 +2836,87 @@ class DialectEventTest(fixtures.TestBase):
def test_cursor_execute_wo_replace(self):
self._test_cursor_execute(False)
+
+ def test_connect_replace_params(self):
+ e = engines.testing_engine(options={"_initialize": False})
+
+ @event.listens_for(e, "do_connect")
+ def evt(dialect, conn_rec, cargs, cparams):
+ cargs[:] = ['foo', 'hoho']
+ cparams.clear()
+ cparams['bar'] = 'bat'
+ conn_rec.info['boom'] = "bap"
+
+ m1 = Mock()
+ e.dialect.connect = m1.real_connect
+
+ with e.connect() as conn:
+ eq_(m1.mock_calls, [call.real_connect('foo', 'hoho', bar='bat')])
+ eq_(conn.info['boom'], 'bap')
+
+ def test_connect_do_connect(self):
+ e = engines.testing_engine(options={"_initialize": False})
+
+ m1 = Mock()
+
+ @event.listens_for(e, "do_connect")
+ def evt1(dialect, conn_rec, cargs, cparams):
+ cargs[:] = ['foo', 'hoho']
+ cparams.clear()
+ cparams['bar'] = 'bat'
+ conn_rec.info['boom'] = "one"
+
+ @event.listens_for(e, "do_connect")
+ def evt2(dialect, conn_rec, cargs, cparams):
+ conn_rec.info['bap'] = "two"
+ return m1.our_connect(cargs, cparams)
+
+ with e.connect() as conn:
+ # called with args
+ eq_(
+ m1.mock_calls,
+ [call.our_connect(['foo', 'hoho'], {'bar': 'bat'})])
+
+ eq_(conn.info['boom'], "one")
+ eq_(conn.info['bap'], "two")
+
+ # returned our mock connection
+ is_(conn.connection.connection, m1.our_connect())
+
+ def test_connect_do_connect_info_there_after_recycle(self):
+ # test that info is maintained after the do_connect()
+ # event for a soft invalidation.
+
+ e = engines.testing_engine(options={"_initialize": False})
+
+ @event.listens_for(e, "do_connect")
+ def evt1(dialect, conn_rec, cargs, cparams):
+ conn_rec.info['boom'] = "one"
+
+ conn = e.connect()
+ eq_(conn.info['boom'], "one")
+
+ conn.connection.invalidate(soft=True)
+ conn.close()
+ conn = e.connect()
+ eq_(conn.info['boom'], "one")
+
+ def test_connect_do_connect_info_there_after_invalidate(self):
+ # test that info is maintained after the do_connect()
+ # event for a hard invalidation.
+
+ e = engines.testing_engine(options={"_initialize": False})
+
+ @event.listens_for(e, "do_connect")
+ def evt1(dialect, conn_rec, cargs, cparams):
+ assert not conn_rec.info
+ conn_rec.info['boom'] = "one"
+
+ conn = e.connect()
+ eq_(conn.info['boom'], "one")
+
+ conn.connection.invalidate()
+ conn = e.connect()
+ eq_(conn.info['boom'], "one")
+
+
diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py
index 180ea9388..51ebc5250 100644
--- a/test/engine/test_logging.py
+++ b/test/engine/test_logging.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import eq_, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises_message, eq_regex
from sqlalchemy import select
import sqlalchemy as tsa
from sqlalchemy.testing import engines
@@ -6,6 +6,7 @@ import logging.handlers
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
from sqlalchemy.testing.util import lazy_gc
+from sqlalchemy import util
class LogParamsTest(fixtures.TestBase):
@@ -53,6 +54,133 @@ class LogParamsTest(fixtures.TestBase):
"bound parameter sets ... ('98',), ('99',)]"
)
+ def test_log_large_parameter_single(self):
+ import random
+ largeparam = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+
+ self.eng.execute(
+ "INSERT INTO foo (data) values (?)",
+ (largeparam, )
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+
+ def test_log_large_multi_parameter(self):
+ import random
+ lp1 = ''.join(chr(random.randint(52, 85)) for i in range(5))
+ lp2 = ''.join(chr(random.randint(52, 85)) for i in range(8))
+ lp3 = ''.join(chr(random.randint(52, 85)) for i in range(670))
+
+ self.eng.execute(
+ "SELECT ?, ?, ?",
+ (lp1, lp2, lp3)
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s', '%s', '%s ... (372 characters truncated) ... %s')" % (
+ lp1, lp2, lp3[0:149], lp3[-149:]
+ )
+ )
+
+ def test_log_large_parameter_multiple(self):
+ import random
+ lp1 = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+ lp2 = ''.join(chr(random.randint(52, 85)) for i in range(200))
+ lp3 = ''.join(chr(random.randint(52, 85)) for i in range(670))
+
+ self.eng.execute(
+ "INSERT INTO foo (data) values (?)",
+ [(lp1, ), (lp2, ), (lp3, )]
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "[('%s ... (4702 characters truncated) ... %s',), ('%s',), "
+ "('%s ... (372 characters truncated) ... %s',)]" % (
+ lp1[0:149], lp1[-149:], lp2, lp3[0:149], lp3[-149:]
+ )
+ )
+
+ def test_exception_format_dict_param(self):
+ exception = tsa.exc.IntegrityError("foo", {"x": "y"}, None)
+ eq_regex(
+ str(exception),
+ r"\(.*.NoneType\) None \[SQL: 'foo'\] \[parameters: {'x': 'y'}\]"
+ )
+
+ def test_exception_format_unexpected_parameter(self):
+ # test that if the parameters aren't any known type, we just
+ # run through repr()
+ exception = tsa.exc.IntegrityError("foo", "bar", "bat")
+ eq_regex(
+ str(exception),
+ r"\(.*.str\) bat \[SQL: 'foo'\] \[parameters: 'bar'\]"
+ )
+
+ def test_exception_format_unexpected_member_parameter(self):
+ # test that if the parameters aren't any known type, we just
+ # run through repr()
+ exception = tsa.exc.IntegrityError("foo", ["bar", "bat"], "hoho")
+ eq_regex(
+ str(exception),
+ r"\(.*.str\) hoho \[SQL: 'foo'\] \[parameters: \['bar', 'bat'\]\]"
+ )
+
+ def test_result_large_param(self):
+ import random
+ largeparam = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+
+ self.eng.echo = 'debug'
+ result = self.eng.execute(
+ "SELECT ?",
+ (largeparam, )
+ )
+
+ row = result.first()
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+
+ if util.py3k:
+ eq_(
+ self.buf.buffer[3].message,
+ "Row ('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+ else:
+ eq_(
+ self.buf.buffer[3].message,
+ "Row (u'%s ... (4703 characters truncated) ... %s',)" % (
+ largeparam[0:148], largeparam[-149:]
+ )
+ )
+
+ if util.py3k:
+ eq_(
+ repr(row),
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+ else:
+ eq_(
+ repr(row),
+ "(u'%s ... (4703 characters truncated) ... %s',)" % (
+ largeparam[0:148], largeparam[-149:]
+ )
+ )
+
def test_error_large_dict(self):
assert_raises_message(
tsa.exc.DBAPIError,
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index e53a99e15..0e1f6c3d2 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -5,9 +5,9 @@ from sqlalchemy.engine.default import DefaultDialect
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
-from sqlalchemy.testing.mock import Mock, MagicMock
-from sqlalchemy import event
-from sqlalchemy import select
+from sqlalchemy.testing.mock import Mock, MagicMock, call
+from sqlalchemy.dialects import registry
+from sqlalchemy.dialects import plugins
dialect = None
@@ -138,9 +138,40 @@ class CreateEngineTest(fixtures.TestBase):
'z=somevalue')
assert e.echo is True
+ def test_pool_threadlocal_from_config(self):
+ dbapi = mock_dbapi
+
+ config = {
+ 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test',
+ 'sqlalchemy.pool_threadlocal': "false"}
+
+ e = engine_from_config(config, module=dbapi, _initialize=False)
+ eq_(e.pool._use_threadlocal, False)
+
+ config = {
+ 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test',
+ 'sqlalchemy.pool_threadlocal': "true"}
+
+ e = engine_from_config(config, module=dbapi, _initialize=False)
+ eq_(e.pool._use_threadlocal, True)
+
+ def test_pool_reset_on_return_from_config(self):
+ dbapi = mock_dbapi
+
+ for value, expected in [
+ ("rollback", pool.reset_rollback),
+ ("commit", pool.reset_commit),
+ ("none", pool.reset_none)
+ ]:
+ config = {
+ 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test',
+ 'sqlalchemy.pool_reset_on_return': value}
+
+ e = engine_from_config(config, module=dbapi, _initialize=False)
+ eq_(e.pool._reset_on_return, expected)
+
def test_engine_from_config_custom(self):
from sqlalchemy import util
- from sqlalchemy.dialects import registry
tokens = __name__.split(".")
class MyDialect(MockDialect):
@@ -293,21 +324,18 @@ class CreateEngineTest(fixtures.TestBase):
class TestRegNewDBAPI(fixtures.TestBase):
def test_register_base(self):
- from sqlalchemy.dialects import registry
registry.register("mockdialect", __name__, "MockDialect")
e = create_engine("mockdialect://")
assert isinstance(e.dialect, MockDialect)
def test_register_dotted(self):
- from sqlalchemy.dialects import registry
registry.register("mockdialect.foob", __name__, "MockDialect")
e = create_engine("mockdialect+foob://")
assert isinstance(e.dialect, MockDialect)
def test_register_legacy(self):
- from sqlalchemy.dialects import registry
tokens = __name__.split(".")
global dialect
@@ -319,12 +347,70 @@ class TestRegNewDBAPI(fixtures.TestBase):
assert isinstance(e.dialect, MockDialect)
def test_register_per_dbapi(self):
- from sqlalchemy.dialects import registry
registry.register("mysql.my_mock_dialect", __name__, "MockDialect")
e = create_engine("mysql+my_mock_dialect://")
assert isinstance(e.dialect, MockDialect)
+ @testing.requires.sqlite
+ def test_wrapper_hooks(self):
+ def get_dialect_cls(url):
+ url.drivername = "sqlite"
+ return url.get_dialect()
+
+ global WrapperFactory
+ WrapperFactory = Mock()
+ WrapperFactory.get_dialect_cls.side_effect = get_dialect_cls
+
+ registry.register("wrapperdialect", __name__, "WrapperFactory")
+
+ from sqlalchemy.dialects import sqlite
+ e = create_engine("wrapperdialect://")
+
+ eq_(e.dialect.name, "sqlite")
+ assert isinstance(e.dialect, sqlite.dialect)
+
+ eq_(
+ WrapperFactory.mock_calls,
+ [
+ call.get_dialect_cls(url.make_url("sqlite://")),
+ call.engine_created(e)
+ ]
+ )
+
+ @testing.requires.sqlite
+ def test_plugin_registration(self):
+ from sqlalchemy.dialects import sqlite
+
+ global MyEnginePlugin
+
+ def side_effect(url, kw):
+ eq_(kw, {"logging_name": "foob"})
+ kw['logging_name'] = 'bar'
+ return MyEnginePlugin
+
+ MyEnginePlugin = Mock(side_effect=side_effect)
+
+ plugins.register("engineplugin", __name__, "MyEnginePlugin")
+
+ e = create_engine(
+ "sqlite:///?plugin=engineplugin&foo=bar", logging_name='foob')
+ eq_(e.dialect.name, "sqlite")
+ eq_(e.logging_name, "bar")
+ assert isinstance(e.dialect, sqlite.dialect)
+
+ eq_(
+ MyEnginePlugin.mock_calls,
+ [
+ call(e.url, {}),
+ call.engine_created(e)
+ ]
+ )
+ eq_(
+ str(MyEnginePlugin.mock_calls[0][1][0]),
+ "sqlite:///?foo=bar"
+ )
+
class MockDialect(DefaultDialect):
@classmethod
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 0c4557d49..8551e1fcb 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -4,11 +4,13 @@ from sqlalchemy import pool, select, event
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing.util import gc_collect, lazy_gc
-from sqlalchemy.testing import eq_, assert_raises, is_not_
+from sqlalchemy.testing import eq_, assert_raises, is_not_, is_
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing import fixtures
import random
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock, call, patch, ANY
+import weakref
+import collections
join_timeout = 10
@@ -25,21 +27,34 @@ def MockDBAPI():
db.connect = Mock(side_effect=Exception("connect failed"))
else:
db.connect = Mock(side_effect=connect)
+ db.is_shutdown = value
db = Mock(
connect=Mock(side_effect=connect),
- shutdown=shutdown, _shutdown=False)
+ shutdown=shutdown,
+ is_shutdown=False)
return db
class PoolTestBase(fixtures.TestBase):
def setup(self):
pool.clear_managers()
+ self._teardown_conns = []
+
+ def teardown(self):
+ for ref in self._teardown_conns:
+ conn = ref()
+ if conn:
+ conn.close()
@classmethod
def teardown_class(cls):
pool.clear_managers()
+ def _with_teardown(self, connection):
+ self._teardown_conns.append(weakref.ref(connection))
+ return connection
+
def _queuepool_fixture(self, **kw):
dbapi, pool = self._queuepool_dbapi_fixture(**kw)
return pool
@@ -49,6 +64,7 @@ class PoolTestBase(fixtures.TestBase):
return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
**kw)
+
class PoolTest(PoolTestBase):
def test_manager(self):
manager = pool.manage(MockDBAPI(), use_threadlocal=True)
@@ -86,7 +102,6 @@ class PoolTest(PoolTestBase):
]
)
-
def test_bad_args(self):
manager = pool.manage(MockDBAPI())
manager.connect(None)
@@ -218,6 +233,7 @@ class PoolTest(PoolTestBase):
class PoolDialectTest(PoolTestBase):
def _dialect(self):
canary = []
+
class PoolDialect(object):
def do_rollback(self, dbapi_connection):
canary.append('R')
@@ -266,6 +282,7 @@ class PoolEventsTest(PoolTestBase):
def _first_connect_event_fixture(self):
p = self._queuepool_fixture()
canary = []
+
def first_connect(*arg, **kw):
canary.append('first_connect')
@@ -276,8 +293,10 @@ class PoolEventsTest(PoolTestBase):
def _connect_event_fixture(self):
p = self._queuepool_fixture()
canary = []
+
def connect(*arg, **kw):
canary.append('connect')
+
event.listen(p, 'connect', connect)
return p, canary
@@ -285,6 +304,7 @@ class PoolEventsTest(PoolTestBase):
def _checkout_event_fixture(self):
p = self._queuepool_fixture()
canary = []
+
def checkout(*arg, **kw):
canary.append('checkout')
event.listen(p, 'checkout', checkout)
@@ -294,6 +314,7 @@ class PoolEventsTest(PoolTestBase):
def _checkin_event_fixture(self):
p = self._queuepool_fixture()
canary = []
+
def checkin(*arg, **kw):
canary.append('checkin')
event.listen(p, 'checkin', checkin)
@@ -303,6 +324,7 @@ class PoolEventsTest(PoolTestBase):
def _reset_event_fixture(self):
p = self._queuepool_fixture()
canary = []
+
def reset(*arg, **kw):
canary.append('reset')
event.listen(p, 'reset', reset)
@@ -316,6 +338,13 @@ class PoolEventsTest(PoolTestBase):
return p, canary
+ def _soft_invalidate_event_fixture(self):
+ p = self._queuepool_fixture()
+ canary = Mock()
+ event.listen(p, 'soft_invalidate', canary)
+
+ return p, canary
+
def test_first_connect_event(self):
p, canary = self._first_connect_event_fixture()
@@ -419,6 +448,31 @@ class PoolEventsTest(PoolTestBase):
c1.close()
eq_(canary, ['reset'])
+ def test_soft_invalidate_event_no_exception(self):
+ p, canary = self._soft_invalidate_event_fixture()
+
+ c1 = p.connect()
+ c1.close()
+ assert not canary.called
+ c1 = p.connect()
+ dbapi_con = c1.connection
+ c1.invalidate(soft=True)
+ assert canary.call_args_list[0][0][0] is dbapi_con
+ assert canary.call_args_list[0][0][2] is None
+
+ def test_soft_invalidate_event_exception(self):
+ p, canary = self._soft_invalidate_event_fixture()
+
+ c1 = p.connect()
+ c1.close()
+ assert not canary.called
+ c1 = p.connect()
+ dbapi_con = c1.connection
+ exc = Exception("hi")
+ c1.invalidate(exc, soft=True)
+ assert canary.call_args_list[0][0][0] is dbapi_con
+ assert canary.call_args_list[0][0][2] is exc
+
def test_invalidate_event_no_exception(self):
p, canary = self._invalidate_event_fixture()
@@ -470,12 +524,16 @@ class PoolEventsTest(PoolTestBase):
def test_listen_targets_scope(self):
canary = []
+
def listen_one(*args):
canary.append("listen_one")
+
def listen_two(*args):
canary.append("listen_two")
+
def listen_three(*args):
canary.append("listen_three")
+
def listen_four(*args):
canary.append("listen_four")
@@ -492,13 +550,17 @@ class PoolEventsTest(PoolTestBase):
)
def test_listen_targets_per_subclass(self):
- """test that listen() called on a subclass remains specific to that subclass."""
+ """test that listen() called on a subclass remains specific to
+ that subclass."""
canary = []
+
def listen_one(*args):
canary.append("listen_one")
+
def listen_two(*args):
canary.append("listen_two")
+
def listen_three(*args):
canary.append("listen_three")
@@ -526,6 +588,7 @@ class PoolEventsTest(PoolTestBase):
# going
pool.Pool.dispatch._clear()
+
class PoolFirstConnectSyncTest(PoolTestBase):
# test [ticket:2964]
@@ -560,11 +623,14 @@ class PoolFirstConnectSyncTest(PoolTestBase):
th.join(join_timeout)
eq_(evt.mock_calls,
- [call.first_connect(), call.connect(), call.connect(), call.connect()]
+ [
+ call.first_connect(),
+ call.connect(),
+ call.connect(),
+ call.connect()]
)
-
class DeprecatedPoolListenerTest(PoolTestBase):
@testing.requires.predictable_gc
@testing.uses_deprecated(r".*Use event.listen")
@@ -580,38 +646,45 @@ class DeprecatedPoolListenerTest(PoolTestBase):
if hasattr(self, 'checkin'):
self.checkin = self.inst_checkin
self.clear()
+
def clear(self):
self.connected = []
self.first_connected = []
self.checked_out = []
self.checked_in = []
+
def assert_total(innerself, conn, fconn, cout, cin):
eq_(len(innerself.connected), conn)
eq_(len(innerself.first_connected), fconn)
eq_(len(innerself.checked_out), cout)
eq_(len(innerself.checked_in), cin)
+
def assert_in(innerself, item, in_conn, in_fconn,
in_cout, in_cin):
self.assert_((item in innerself.connected) == in_conn)
self.assert_((item in innerself.first_connected) == in_fconn)
self.assert_((item in innerself.checked_out) == in_cout)
self.assert_((item in innerself.checked_in) == in_cin)
+
def inst_connect(self, con, record):
print("connect(%s, %s)" % (con, record))
assert con is not None
assert record is not None
self.connected.append(con)
+
def inst_first_connect(self, con, record):
print("first_connect(%s, %s)" % (con, record))
assert con is not None
assert record is not None
self.first_connected.append(con)
+
def inst_checkout(self, con, record, proxy):
print("checkout(%s, %s, %s)" % (con, record, proxy))
assert con is not None
assert record is not None
assert proxy is not None
self.checked_out.append(con)
+
def inst_checkin(self, con, record):
print("checkin(%s, %s)" % (con, record))
# con can be None if invalidated
@@ -620,15 +693,19 @@ class DeprecatedPoolListenerTest(PoolTestBase):
class ListenAll(tsa.interfaces.PoolListener, InstrumentingListener):
pass
+
class ListenConnect(InstrumentingListener):
def connect(self, con, record):
pass
+
class ListenFirstConnect(InstrumentingListener):
def first_connect(self, con, record):
pass
+
class ListenCheckOut(InstrumentingListener):
def checkout(self, con, record, proxy, num):
pass
+
class ListenCheckIn(InstrumentingListener):
def checkin(self, con, record):
pass
@@ -746,8 +823,10 @@ class DeprecatedPoolListenerTest(PoolTestBase):
def test_listeners_callables(self):
def connect(dbapi_con, con_record):
counts[0] += 1
+
def checkout(dbapi_con, con_record, con_proxy):
counts[1] += 1
+
def checkin(dbapi_con, con_record):
counts[2] += 1
@@ -884,6 +963,7 @@ class QueuePoolTest(PoolTestBase):
pool_size=2,
max_overflow=1, use_threadlocal=False, timeout=3)
timeouts = []
+
def checkout():
for x in range(1):
now = time.time()
@@ -915,6 +995,7 @@ class QueuePoolTest(PoolTestBase):
dbapi = MockDBAPI()
mutex = threading.Lock()
+
def creator():
time.sleep(.05)
with mutex:
@@ -924,6 +1005,7 @@ class QueuePoolTest(PoolTestBase):
pool_size=3, timeout=2,
max_overflow=max_overflow)
peaks = []
+
def whammy():
for i in range(10):
try:
@@ -947,7 +1029,6 @@ class QueuePoolTest(PoolTestBase):
lazy_gc()
assert not pool._refs
-
def test_overflow_reset_on_failed_connect(self):
dbapi = Mock()
@@ -956,13 +1037,14 @@ class QueuePoolTest(PoolTestBase):
raise Exception("connection failed")
creator = dbapi.connect
+
def create():
return creator()
p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3)
- c1 = p.connect()
- c2 = p.connect()
- c3 = p.connect()
+ c1 = self._with_teardown(p.connect())
+ c2 = self._with_teardown(p.connect())
+ c3 = self._with_teardown(p.connect())
eq_(p._overflow, 1)
creator = failing_dbapi
assert_raises(Exception, p.connect)
@@ -1029,7 +1111,6 @@ class QueuePoolTest(PoolTestBase):
call("overflow_one")]
)
-
@testing.requires.threading_with_mock
@testing.requires.timing_intensive
def test_waiters_handled(self):
@@ -1039,6 +1120,7 @@ class QueuePoolTest(PoolTestBase):
"""
mutex = threading.Lock()
dbapi = MockDBAPI()
+
def creator():
mutex.acquire()
try:
@@ -1052,6 +1134,7 @@ class QueuePoolTest(PoolTestBase):
p = pool.QueuePool(creator=creator,
pool_size=2, timeout=timeout,
max_overflow=max_overflow)
+
def waiter(p, timeout, max_overflow):
success_key = (timeout, max_overflow)
conn = p.connect()
@@ -1082,18 +1165,58 @@ class QueuePoolTest(PoolTestBase):
eq_(len(success), 12, "successes: %s" % success)
+ def test_connrec_invalidated_within_checkout_no_race(self):
+ """Test that a concurrent ConnectionRecord.invalidate() which
+ occurs after the ConnectionFairy has called _ConnectionRecord.checkout()
+ but before the ConnectionFairy tests "fairy.connection is None"
+ will not result in an InvalidRequestError.
+
+ This use case assumes that a listener on the checkout() event
+ will be raising DisconnectionError so that a reconnect attempt
+ may occur.
+
+ """
+ dbapi = MockDBAPI()
+
+ def creator():
+ return dbapi.connect()
+
+ p = pool.QueuePool(creator=creator, pool_size=1, max_overflow=0)
+
+ conn = p.connect()
+ conn.close()
+
+ _existing_checkout = pool._ConnectionRecord.checkout
+
+ @classmethod
+ def _decorate_existing_checkout(cls, *arg, **kw):
+ fairy = _existing_checkout(*arg, **kw)
+ connrec = fairy._connection_record
+ connrec.invalidate()
+ return fairy
+
+ with patch(
+ "sqlalchemy.pool._ConnectionRecord.checkout",
+ _decorate_existing_checkout):
+ conn = p.connect()
+ is_(conn._connection_record.connection, None)
+ conn.close()
+
+
@testing.requires.threading_with_mock
@testing.requires.timing_intensive
def test_notify_waiters(self):
dbapi = MockDBAPI()
canary = []
+
def creator():
canary.append(1)
return dbapi.connect()
p1 = pool.QueuePool(creator=creator,
pool_size=1, timeout=None,
max_overflow=0)
+
def waiter(p):
conn = p.connect()
canary.append(2)
@@ -1165,7 +1288,8 @@ class QueuePoolTest(PoolTestBase):
def test_mixed_close(self):
pool._refs.clear()
- p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
+ p = self._queuepool_fixture(pool_size=3, max_overflow=-1,
+ use_threadlocal=True)
c1 = p.connect()
c2 = p.connect()
assert c1 is c2
@@ -1191,6 +1315,7 @@ class QueuePoolTest(PoolTestBase):
# disable weakref collection of the
# underlying connections
strong_refs = set()
+
def _conn():
c = p.connect()
strong_refs.add(c.connection)
@@ -1271,35 +1396,74 @@ class QueuePoolTest(PoolTestBase):
c2 = p.connect()
assert id(c2.connection) == c_id
+ c2_rec = c2._connection_record
p._invalidate(c2)
+ assert c2_rec.connection is None
+ c2.close()
+ time.sleep(.5)
+ c3 = p.connect()
+ assert id(c3.connection) != c_id
+
+ @testing.requires.timing_intensive
+ def test_recycle_on_soft_invalidate(self):
+ p = self._queuepool_fixture(pool_size=1,
+ max_overflow=0)
+ c1 = p.connect()
+ c_id = id(c1.connection)
+ c1.close()
+ c2 = p.connect()
+ assert id(c2.connection) == c_id
+
+ c2_rec = c2._connection_record
+ c2.invalidate(soft=True)
+ assert c2_rec.connection is c2.connection
+
c2.close()
time.sleep(.5)
c3 = p.connect()
assert id(c3.connection) != c_id
+ assert c3._connection_record is c2_rec
+ assert c2_rec.connection is c3.connection
+
+ def _no_wr_finalize(self):
+ finalize_fairy = pool._finalize_fairy
+
+ def assert_no_wr_callback(
+ connection, connection_record,
+ pool, ref, echo, fairy=None):
+ if fairy is None:
+ raise AssertionError(
+ "finalize fairy was called as a weakref callback")
+ return finalize_fairy(
+ connection, connection_record, pool, ref, echo, fairy)
+ return patch.object(
+ pool, '_finalize_fairy', assert_no_wr_callback)
def _assert_cleanup_on_pooled_reconnect(self, dbapi, p):
# p is QueuePool with size=1, max_overflow=2,
# and one connection in the pool that will need to
# reconnect when next used (either due to recycle or invalidate)
- eq_(p.checkedout(), 0)
- eq_(p._overflow, 0)
- dbapi.shutdown(True)
- assert_raises(
- Exception,
- p.connect
- )
- eq_(p._overflow, 0)
- eq_(p.checkedout(), 0) # and not 1
- dbapi.shutdown(False)
+ with self._no_wr_finalize():
+ eq_(p.checkedout(), 0)
+ eq_(p._overflow, 0)
+ dbapi.shutdown(True)
+ assert_raises(
+ Exception,
+ p.connect
+ )
+ eq_(p._overflow, 0)
+ eq_(p.checkedout(), 0) # and not 1
- c1 = p.connect()
- assert p._pool.empty() # poolsize is one, so we're empty OK
- c2 = p.connect()
- eq_(p._overflow, 1) # and not 2
+ dbapi.shutdown(False)
- # this hangs if p._overflow is 2
- c3 = p.connect()
+ c1 = self._with_teardown(p.connect())
+ assert p._pool.empty() # poolsize is one, so we're empty OK
+ c2 = self._with_teardown(p.connect())
+ eq_(p._overflow, 1) # and not 2
+
+ # this hangs if p._overflow is 2
+ c3 = self._with_teardown(p.connect())
def test_error_on_pooled_reconnect_cleanup_invalidate(self):
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=2)
@@ -1317,6 +1481,112 @@ class QueuePoolTest(PoolTestBase):
time.sleep(1.5)
self._assert_cleanup_on_pooled_reconnect(dbapi, p)
+ def test_connect_handler_not_called_for_recycled(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ canary = Mock()
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ event.listen(p, "connect", canary.connect)
+ event.listen(p, "checkout", canary.checkout)
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+ eq_(
+ canary.mock_calls,
+ [
+ call.connect(ANY, ANY),
+ call.checkout(ANY, ANY, ANY)
+ ]
+ )
+
+ def test_connect_checkout_handler_always_gets_info(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ @event.listens_for(p, "connect")
+ def connect(conn, conn_rec):
+ conn_rec.info['x'] = True
+
+ @event.listens_for(p, "checkout")
+ def checkout(conn, conn_rec, conn_f):
+ assert 'x' in conn_rec.info
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+
+ def test_error_on_pooled_reconnect_cleanup_wcheckout_event(self):
+ dbapi, p = self._queuepool_dbapi_fixture(pool_size=1,
+ max_overflow=2)
+
+ c1 = p.connect()
+ c1.close()
+
+ @event.listens_for(p, "checkout")
+ def handle_checkout_event(dbapi_con, con_record, con_proxy):
+ if dbapi.is_shutdown:
+ raise tsa.exc.DisconnectionError()
+
+ self._assert_cleanup_on_pooled_reconnect(dbapi, p)
+
@testing.requires.timing_intensive
def test_recycle_pool_no_race(self):
def slow_close():
@@ -1334,6 +1604,7 @@ class QueuePoolTest(PoolTestBase):
dialect.dbapi.Error = Error
pools = []
+
class TrackQueuePool(pool.QueuePool):
def __init__(self, *arg, **kw):
pools.append(self)
@@ -1357,11 +1628,13 @@ class QueuePoolTest(PoolTestBase):
def attempt(conn):
time.sleep(random.random())
try:
- conn._handle_dbapi_exception(Error(), "statement", {}, Mock(), Mock())
+ conn._handle_dbapi_exception(Error(), "statement", {},
+ Mock(), Mock())
except tsa.exc.DBAPIError:
pass
- # run an error + invalidate operation on the remaining 7 open connections
+ # run an error + invalidate operation on the remaining 7 open
+ # connections
threads = []
for conn in conns:
t = threading.Thread(target=attempt, args=(conn, ))
@@ -1399,7 +1672,8 @@ class QueuePoolTest(PoolTestBase):
assert c1.connection.id != c_id
def test_recreate(self):
- p = self._queuepool_fixture(reset_on_return=None, pool_size=1, max_overflow=0)
+ p = self._queuepool_fixture(reset_on_return=None, pool_size=1,
+ max_overflow=0)
p2 = p.recreate()
assert p2.size() == 1
assert p2._reset_on_return is pool.reset_none
@@ -1454,16 +1728,19 @@ class QueuePoolTest(PoolTestBase):
eq_(c2_con.close.call_count, 0)
def test_threadfairy(self):
- p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
+ p = self._queuepool_fixture(pool_size=3, max_overflow=-1,
+ use_threadlocal=True)
c1 = p.connect()
c1.close()
c2 = p.connect()
assert c2.connection is not None
+
class ResetOnReturnTest(PoolTestBase):
def _fixture(self, **kw):
dbapi = Mock()
- return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), **kw)
+ return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
+ **kw)
def test_plain_rollback(self):
dbapi, p = self._fixture(reset_on_return='rollback')
@@ -1550,6 +1827,7 @@ class ResetOnReturnTest(PoolTestBase):
assert not dbapi.connect().rollback.called
assert dbapi.connect().commit.called
+
class SingletonThreadPoolTest(PoolTestBase):
@testing.requires.threading_with_mock
@@ -1567,6 +1845,7 @@ class SingletonThreadPoolTest(PoolTestBase):
dbapi = MockDBAPI()
lock = threading.Lock()
+
def creator():
# the mock iterator isn't threadsafe...
with lock:
@@ -1575,6 +1854,7 @@ class SingletonThreadPoolTest(PoolTestBase):
if strong_refs:
sr = set()
+
def _conn():
c = p.connect()
sr.add(c.connection)
@@ -1604,6 +1884,7 @@ class SingletonThreadPoolTest(PoolTestBase):
still_opened = len([c for c in sr if not c.close.call_count])
eq_(still_opened, 3)
+
class AssertionPoolTest(PoolTestBase):
def test_connect_error(self):
dbapi = MockDBAPI()
@@ -1622,6 +1903,7 @@ class AssertionPoolTest(PoolTestBase):
c3 = p.connect()
assert_raises(AssertionError, p.connect)
+
class NullPoolTest(PoolTestBase):
def test_reconnect(self):
dbapi = MockDBAPI()
@@ -1649,3 +1931,53 @@ class StaticPoolTest(PoolTestBase):
p = pool.StaticPool(creator)
p2 = p.recreate()
assert p._creator is p2._creator
+
+
+class CreatorCompatibilityTest(PoolTestBase):
+ def test_creator_callable_outside_noarg(self):
+ e = testing_engine()
+
+ creator = e.pool._creator
+ try:
+ conn = creator()
+ finally:
+ conn.close()
+
+ def test_creator_callable_outside_witharg(self):
+ e = testing_engine()
+
+ creator = e.pool._creator
+ try:
+ conn = creator(Mock())
+ finally:
+ conn.close()
+
+ def test_creator_patching_arg_to_noarg(self):
+ e = testing_engine()
+ creator = e.pool._creator
+ try:
+ # the creator is the two-arg form
+ conn = creator(Mock())
+ finally:
+ conn.close()
+
+ def mock_create():
+ return creator()
+
+ conn = e.connect()
+ conn.invalidate()
+ conn.close()
+
+ # test that the 'should_wrap_creator' status
+ # will dynamically switch if the _creator is monkeypatched.
+
+ # patch it with a zero-arg form
+ with patch.object(e.pool, "_creator", mock_create):
+ conn = e.connect()
+ conn.invalidate()
+ conn.close()
+
+ conn = e.connect()
+ conn.close()
+
+
diff --git a/test/engine/test_processors.py b/test/engine/test_processors.py
index b1c482f09..f4df7827c 100644
--- a/test/engine/test_processors.py
+++ b/test/engine/test_processors.py
@@ -57,8 +57,10 @@ class PyDateProcessorTest(_DateProcessorTest):
)
)
+
class CDateProcessorTest(_DateProcessorTest):
__requires__ = ('cextensions',)
+
@classmethod
def setup_class(cls):
from sqlalchemy import cprocessors
@@ -104,7 +106,8 @@ class _DistillArgsTest(fixtures.TestBase):
def test_distill_single_list_tuples(self):
eq_(
- self.module._distill_params(([("foo", "bar"), ("bat", "hoho")],), {}),
+ self.module._distill_params(
+ ([("foo", "bar"), ("bat", "hoho")],), {}),
[('foo', 'bar'), ('bat', 'hoho')]
)
@@ -117,9 +120,7 @@ class _DistillArgsTest(fixtures.TestBase):
def test_distill_multi_list_tuple(self):
eq_(
self.module._distill_params(
- ([("foo", "bar")], [("bar", "bat")]),
- {}
- ),
+ ([("foo", "bar")], [("bar", "bat")]), {}),
([('foo', 'bar')], [('bar', 'bat')])
)
@@ -131,7 +132,8 @@ class _DistillArgsTest(fixtures.TestBase):
def test_distill_single_list_dicts(self):
eq_(
- self.module._distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {}),
+ self.module._distill_params(
+ ([{"foo": "bar"}, {"foo": "hoho"}],), {}),
[{'foo': 'bar'}, {'foo': 'hoho'}]
)
@@ -148,7 +150,6 @@ class _DistillArgsTest(fixtures.TestBase):
)
-
class PyDistillArgsTest(_DistillArgsTest):
@classmethod
def setup_class(cls):
@@ -160,8 +161,10 @@ class PyDistillArgsTest(_DistillArgsTest):
)
)
+
class CDistillArgsTest(_DistillArgsTest):
__requires__ = ('cextensions', )
+
@classmethod
def setup_class(cls):
from sqlalchemy import cutils as util
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 619319693..0183df71b 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -370,6 +370,9 @@ class MockReconnectTest(fixtures.TestBase):
mock_dialect = Mock()
class MyURL(URL):
+ def _get_entrypoint(self):
+ return Dialect
+
def get_dialect(self):
return Dialect
@@ -420,6 +423,8 @@ class CursorErrTest(fixtures.TestBase):
from sqlalchemy.engine import default
url = Mock(
get_dialect=lambda: default.DefaultDialect,
+ _get_entrypoint=lambda: default.DefaultDialect,
+ _instantiate_plugins=lambda kwargs: (),
translate_connect_args=lambda: {}, query={},)
eng = testing_engine(
url, options=dict(module=dbapi, _initialize=initialize))
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index 087610333..f9799fda0 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -1,19 +1,19 @@
-import operator
-
import unicodedata
import sqlalchemy as sa
-from sqlalchemy import schema, events, event, inspect
+from sqlalchemy import schema, inspect
from sqlalchemy import MetaData, Integer, String
-from sqlalchemy.testing import (ComparesTables, engines, AssertsCompiledSQL,
+from sqlalchemy.testing import (
+ ComparesTables, engines, AssertsCompiledSQL,
fixtures, skip)
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import testing
from sqlalchemy.util import ue
-
+from sqlalchemy.testing import config
metadata, users = None, None
+
class ReflectionTest(fixtures.TestBase, ComparesTables):
__backend__ = True
@@ -253,7 +253,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, sa.ForeignKey('a.id')))
+ Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
@@ -275,7 +275,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, sa.ForeignKey('a.id')))
+ Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
@@ -310,22 +310,22 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Don't mark this test as unsupported for any backend !
- (technically it fails with MySQL InnoDB since "id" comes before "id2")
-
"""
meta = self.metadata
- Table('test', meta,
+ Table(
+ 'test', meta,
Column('id', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
- mysql_engine='MyISAM'
+ mysql_engine='InnoDB'
)
- Table('test2', meta,
- Column('id', sa.Integer, sa.ForeignKey('test.id'),
- primary_key=True),
+ Table(
+ 'test2', meta,
+ Column(
+ 'id', sa.Integer, sa.ForeignKey('test.id'), primary_key=True),
Column('id2', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
- mysql_engine='MyISAM'
+ mysql_engine='InnoDB'
)
meta.create_all()
m2 = MetaData(testing.db)
@@ -333,7 +333,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
assert t1a._autoincrement_column is t1a.c.id
t2a = Table('test2', m2, autoload=True)
- assert t2a._autoincrement_column is t2a.c.id2
+ assert t2a._autoincrement_column is None
+
@skip('sqlite')
@testing.provide_metadata
@@ -404,7 +405,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
eq_(list(table.primary_key), [table.c.col1])
eq_(table.c.col1.primary_key, True)
-
@testing.provide_metadata
def test_override_pkfk(self):
"""test that you can override columns which contain foreign keys
@@ -419,7 +419,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('id', sa.Integer, primary_key=True),
Column('street', sa.String(30)))
-
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
@@ -541,8 +540,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
assert f1 in b1.constraints
assert len(b1.constraints) == 2
-
-
@testing.provide_metadata
def test_override_keys(self):
"""test that columns can be overridden with a 'key',
@@ -654,12 +651,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
backends with {dialect}.get_foreign_keys() support)"""
if testing.against('postgresql'):
- test_attrs = ('match', 'onupdate', 'ondelete', 'deferrable', 'initially')
+ test_attrs = ('match', 'onupdate', 'ondelete',
+ 'deferrable', 'initially')
addresses_user_id_fkey = sa.ForeignKey(
# Each option is specifically not a Postgres default, or
# it won't be returned by PG's inspection
'users.id',
- name = 'addresses_user_id_fkey',
+ name='addresses_user_id_fkey',
match='FULL',
onupdate='RESTRICT',
ondelete='RESTRICT',
@@ -672,7 +670,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
# elided by MySQL's inspection
addresses_user_id_fkey = sa.ForeignKey(
'users.id',
- name = 'addresses_user_id_fkey',
+ name='addresses_user_id_fkey',
onupdate='CASCADE',
ondelete='CASCADE'
)
@@ -726,11 +724,12 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('slot', sa.String(128)),
)
- assert_raises_message(sa.exc.InvalidRequestError,
- "Foreign key associated with column 'slots.pkg_id' "
- "could not find table 'pkgs' with which to generate "
- "a foreign key to target column 'pkg_id'",
- metadata.create_all)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Foreign key associated with column 'slots.pkg_id' "
+ "could not find table 'pkgs' with which to generate "
+ "a foreign key to target column 'pkg_id'",
+ metadata.create_all)
def test_composite_pks(self):
"""test reflection of a composite primary key"""
@@ -797,7 +796,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
table.c.multi_hoho
== table2.c.lala).compare(j.onclause))
-
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
@testing.requires.check_constraints
@testing.provide_metadata
@@ -869,7 +867,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
def test_reflect_uses_bind_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
-
@testing.provide_metadata
def test_reflect_all(self):
existing = testing.db.table_names()
@@ -1053,6 +1050,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
finally:
_drop_views(metadata.bind)
+
class CreateDropTest(fixtures.TestBase):
__backend__ = True
@@ -1101,7 +1099,6 @@ class CreateDropTest(fixtures.TestBase):
eq_(ua, ['users', 'email_addresses'])
eq_(oi, ['orders', 'items'])
-
def test_checkfirst(self):
try:
assert not users.exists(testing.db)
@@ -1141,6 +1138,7 @@ class CreateDropTest(fixtures.TestBase):
- set(testing.db.table_names()))
metadata.drop_all(bind=testing.db)
+
class SchemaManipulationTest(fixtures.TestBase):
__backend__ = True
@@ -1159,6 +1157,7 @@ class SchemaManipulationTest(fixtures.TestBase):
assert len(addresses.c.user_id.foreign_keys) == 1
assert addresses.constraints == set([addresses.primary_key, fk])
+
class UnicodeReflectionTest(fixtures.TestBase):
__backend__ = True
@@ -1170,16 +1169,40 @@ class UnicodeReflectionTest(fixtures.TestBase):
('plain', 'col_plain', 'ix_plain')
])
no_has_table = [
- ('no_has_table_1', ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')),
- ('no_has_table_2', ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
+ (
+ 'no_has_table_1',
+ ue('col_Unit\u00e9ble'),
+ ue('ix_Unit\u00e9ble')
+ ),
+ (
+ 'no_has_table_2',
+ ue('col_\u6e2c\u8a66'),
+ ue('ix_\u6e2c\u8a66')
+ ),
]
no_case_sensitivity = [
- (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
- (ue('unit\u00e9ble'), ue('col_unit\u00e9ble'), ue('ix_unit\u00e9ble')),
+ (
+ ue('\u6e2c\u8a66'),
+ ue('col_\u6e2c\u8a66'),
+ ue('ix_\u6e2c\u8a66')
+ ),
+ (
+ ue('unit\u00e9ble'),
+ ue('col_unit\u00e9ble'),
+ ue('ix_unit\u00e9ble')
+ ),
]
full = [
- (ue('Unit\u00e9ble'), ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')),
- (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
+ (
+ ue('Unit\u00e9ble'),
+ ue('col_Unit\u00e9ble'),
+ ue('ix_Unit\u00e9ble')
+ ),
+ (
+ ue('\u6e2c\u8a66'),
+ ue('col_\u6e2c\u8a66'),
+ ue('ix_\u6e2c\u8a66')
+ ),
]
# as you can see, our options for this kind of thing
@@ -1268,6 +1291,7 @@ class UnicodeReflectionTest(fixtures.TestBase):
[(names[tname][1], names[tname][0])]
)
+
class SchemaTest(fixtures.TestBase):
__backend__ = True
@@ -1320,6 +1344,18 @@ class SchemaTest(fixtures.TestBase):
metadata.drop_all()
@testing.requires.schemas
+ @testing.provide_metadata
+ def test_schema_translation(self):
+ Table('foob', self.metadata, Column('q', Integer), schema=config.test_schema)
+ self.metadata.create_all()
+
+ m = MetaData()
+ map_ = {"foob": config.test_schema}
+ with config.db.connect().execution_options(schema_translate_map=map_) as conn:
+ t = Table('foob', m, schema="foob", autoload_with=conn)
+ eq_(t.schema, "foob")
+ eq_(t.c.keys(), ['q'])
+ @testing.requires.schemas
@testing.fails_on('sybase', 'FIXME: unknown')
def test_explicit_default_schema_metadata(self):
engine = testing.db
@@ -1398,8 +1434,6 @@ class SchemaTest(fixtures.TestBase):
)
-
-
# Tests related to engine.reflection
@@ -1432,7 +1466,8 @@ def createTables(meta, schema=None):
dingalings = Table("dingalings", meta,
Column('dingaling_id', sa.Integer, primary_key=True),
Column('address_id', sa.Integer,
- sa.ForeignKey('%semail_addresses.address_id' % schema_prefix)),
+ sa.ForeignKey(
+ '%semail_addresses.address_id' % schema_prefix)),
Column('data', sa.String(30)),
schema=schema, test_needs_fk=True,
)
@@ -1448,6 +1483,7 @@ def createTables(meta, schema=None):
return (users, addresses, dingalings)
+
def createIndexes(con, schema=None):
fullname = 'users'
if schema:
@@ -1455,6 +1491,7 @@ def createIndexes(con, schema=None):
query = "CREATE INDEX users_t_idx ON %s (test1, test2)" % fullname
con.execute(sa.sql.text(query))
+
@testing.requires.views
def _create_views(con, schema=None):
for table_name in ('users', 'email_addresses'):
@@ -1462,10 +1499,10 @@ def _create_views(con, schema=None):
if schema:
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + '_v'
- query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name,
- fullname)
+ query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, fullname)
con.execute(sa.sql.text(query))
+
@testing.requires.views
def _drop_views(con, schema=None):
for table_name in ('email_addresses', 'users'):
@@ -1504,6 +1541,7 @@ class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
'weird_casing."Col2", weird_casing."col3" '
'FROM weird_casing')
+
class CaseSensitiveTest(fixtures.TablesTest):
"""Nail down case sensitive behaviors, mostly on MySQL."""
__backend__ = True
@@ -1539,7 +1577,8 @@ class CaseSensitiveTest(fixtures.TablesTest):
)
def test_reflect_via_fk(self):
m = MetaData()
- t2 = Table("SomeOtherTable", m, autoload=True, autoload_with=testing.db)
+ t2 = Table("SomeOtherTable", m, autoload=True,
+ autoload_with=testing.db)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
@@ -1551,7 +1590,6 @@ class CaseSensitiveTest(fixtures.TablesTest):
eq_(t2.name, "sOmEtAbLe")
-
class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
__backend__ = True
@@ -1584,6 +1622,7 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
from sqlalchemy.schema import Table
m = MetaData(testing.db)
+
def column_reflect(insp, table, column_info):
if column_info['name'] == col:
column_info.update(update)
@@ -1620,6 +1659,7 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
def test_override_key_fk(self):
m = MetaData(testing.db)
+
def column_reflect(insp, table, column_info):
if column_info['name'] == 'q':
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index b662c7fcd..c81a7580f 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -12,6 +12,8 @@ from sqlalchemy.testing import fixtures
users, metadata = None, None
+
+
class TransactionTest(fixtures.TestBase):
__backend__ = True
@@ -20,7 +22,7 @@ class TransactionTest(fixtures.TestBase):
global users, metadata
metadata = MetaData()
users = Table('query_users', metadata,
- Column('user_id', INT, primary_key = True),
+ Column('user_id', INT, primary_key=True),
Column('user_name', VARCHAR(20)),
test_needs_acid=True,
)
@@ -216,6 +218,27 @@ class TransactionTest(fixtures.TestBase):
finally:
connection.close()
+ @testing.requires.python2
+ @testing.requires.savepoints_w_release
+ def test_savepoint_release_fails_warning(self):
+ with testing.db.connect() as connection:
+ connection.begin()
+
+ with expect_warnings(
+ "An exception has occurred during handling of a previous "
+ "exception. The previous exception "
+ "is:.*..SQL\:.*RELEASE SAVEPOINT"
+ ):
+ def go():
+ with connection.begin_nested() as savepoint:
+ connection.dialect.do_release_savepoint(
+ connection, savepoint._savepoint)
+ assert_raises_message(
+ exc.DBAPIError,
+ ".*SQL\:.*ROLLBACK TO SAVEPOINT",
+ go
+ )
+
def test_retains_through_options(self):
connection = testing.db.connect()
try:
@@ -497,6 +520,7 @@ class TransactionTest(fixtures.TestBase):
order_by(users.c.user_id))
eq_(result.fetchall(), [])
+
class ResetAgentTest(fixtures.TestBase):
__backend__ = True
@@ -600,6 +624,7 @@ class ResetAgentTest(fixtures.TestBase):
trans.rollback()
assert connection.connection._reset_agent is None
+
class AutoRollbackTest(fixtures.TestBase):
__backend__ = True
@@ -633,6 +658,7 @@ class AutoRollbackTest(fixtures.TestBase):
users.drop(conn2)
conn2.close()
+
class ExplicitAutoCommitTest(fixtures.TestBase):
"""test the 'autocommit' flag on select() and text() objects.
@@ -1440,4 +1466,3 @@ class IsolationLevelTest(fixtures.TestBase):
eq_(conn.get_isolation_level(),
self._non_default_isolation_level())
eq_(c2.get_isolation_level(), self._non_default_isolation_level())
-
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index 3fac39cac..ae1a85f8b 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -13,7 +13,10 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
column_property, composite, Session, properties
from sqlalchemy.util import with_metaclass
from sqlalchemy.ext.declarative import declared_attr, synonym_for
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, mock
+from sqlalchemy.orm.events import MapperEvents
+from sqlalchemy.orm import mapper
+from sqlalchemy import event
Base = None
@@ -99,6 +102,29 @@ class DeclarativeTest(DeclarativeTestBase):
assert User.addresses.property.mapper.class_ is Address
+ def test_unicode_string_resolve_backref(self):
+ class User(Base, fixtures.ComparableEntity):
+ __tablename__ = 'users'
+
+ id = Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column('name', String(50))
+
+ class Address(Base, fixtures.ComparableEntity):
+ __tablename__ = 'addresses'
+
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ email = Column(String(50), key='_email')
+ user_id = Column('user_id', Integer, ForeignKey('users.id'),
+ key='_user_id')
+ user = relationship(
+ User,
+ backref=backref("addresses",
+ order_by=util.u("Address.email")))
+
+ assert Address.user.property.mapper.class_ is User
+
def test_no_table(self):
def go():
class User(Base):
@@ -1567,8 +1593,7 @@ class DeclarativeTest(DeclarativeTestBase):
meta = MetaData(testing.db)
t1 = Table(
't1', meta,
- Column('id', String(50),
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True),
Column('data', String(50)))
meta.create_all()
try:
@@ -1671,6 +1696,32 @@ class DeclarativeTest(DeclarativeTestBase):
))
)
+ @testing.teardown_events(MapperEvents)
+ def test_instrument_class_before_instrumentation(self):
+ # test #3388
+
+ canary = mock.Mock()
+
+ @event.listens_for(mapper, "instrument_class")
+ def instrument_class(mp, cls):
+ canary.instrument_class(mp, cls)
+
+ @event.listens_for(object, "class_instrument")
+ def class_instrument(cls):
+ canary.class_instrument(cls)
+
+ class Test(Base):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ # MARKMARK
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.instrument_class(Test.__mapper__, Test),
+ mock.call.class_instrument(Test)
+ ]
+ )
+
def _produce_test(inline, stringbased):
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index 6ea37e4d3..274a6aa28 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -485,6 +485,41 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
).one(),
Engineer(name='vlad', primary_language='cobol'))
+ def test_single_constraint_on_sub(self):
+ """test the somewhat unusual case of [ticket:3341]"""
+
+ class Person(Base, fixtures.ComparableEntity):
+
+ __tablename__ = 'people'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column(String(50))
+ discriminator = Column('type', String(50))
+ __mapper_args__ = {'polymorphic_on': discriminator}
+
+ class Engineer(Person):
+
+ __mapper_args__ = {'polymorphic_identity': 'engineer'}
+ primary_language = Column(String(50))
+
+ __hack_args_one__ = sa.UniqueConstraint(
+ Person.name, primary_language)
+ __hack_args_two__ = sa.CheckConstraint(
+ Person.name != primary_language)
+
+ uq = [c for c in Person.__table__.constraints
+ if isinstance(c, sa.UniqueConstraint)][0]
+ ck = [c for c in Person.__table__.constraints
+ if isinstance(c, sa.CheckConstraint)][0]
+ eq_(
+ list(uq.columns),
+ [Person.__table__.c.name, Person.__table__.c.primary_language]
+ )
+ eq_(
+ list(ck.columns),
+ [Person.__table__.c.name, Person.__table__.c.primary_language]
+ )
+
@testing.skip_if(lambda: testing.against('oracle'),
"Test has an empty insert in it at the moment")
def test_columns_single_inheritance_conflict_resolution(self):
@@ -1416,4 +1451,35 @@ class ConcreteExtensionConfigTest(
"actual_documents.send_method AS send_method, "
"actual_documents.id AS id, 'actual' AS type "
"FROM actual_documents) AS pjoin"
- ) \ No newline at end of file
+ )
+
+ def test_column_attr_names(self):
+ """test #3480"""
+
+ class Document(Base, AbstractConcreteBase):
+ documentType = Column('documenttype', String)
+
+ class Offer(Document):
+ __tablename__ = 'offers'
+
+ id = Column(Integer, primary_key=True)
+ __mapper_args__ = {
+ 'polymorphic_identity': 'offer'
+ }
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.documenttype AS pjoin_documenttype, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
+
+ self.assert_compile(
+ session.query(Document.documentType),
+ "SELECT pjoin.documenttype AS pjoin_documenttype FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py
index db86927a1..1f9fa1dfa 100644
--- a/test/ext/declarative/test_mixin.py
+++ b/test/ext/declarative/test_mixin.py
@@ -9,7 +9,8 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
configure_mappers, clear_mappers, \
deferred, column_property, Session, base as orm_base
from sqlalchemy.util import classproperty
-from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.ext.declarative import declared_attr, declarative_base
+from sqlalchemy.orm import events as orm_events
from sqlalchemy.testing import fixtures, mock
from sqlalchemy.testing.util import gc_collect
@@ -438,6 +439,90 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(MyModel.__table__.kwargs, {'mysql_engine': 'InnoDB'})
+ @testing.teardown_events(orm_events.MapperEvents)
+ def test_declare_first_mixin(self):
+ canary = mock.Mock()
+
+ class MyMixin(object):
+ @classmethod
+ def __declare_first__(cls):
+ canary.declare_first__(cls)
+
+ @classmethod
+ def __declare_last__(cls):
+ canary.declare_last__(cls)
+
+ class MyModel(Base, MyMixin):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+
+ configure_mappers()
+
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.declare_first__(MyModel),
+ mock.call.declare_last__(MyModel),
+ ]
+ )
+
+ @testing.teardown_events(orm_events.MapperEvents)
+ def test_declare_first_base(self):
+ canary = mock.Mock()
+
+ class MyMixin(object):
+ @classmethod
+ def __declare_first__(cls):
+ canary.declare_first__(cls)
+
+ @classmethod
+ def __declare_last__(cls):
+ canary.declare_last__(cls)
+
+ class Base(MyMixin):
+ pass
+ Base = declarative_base(cls=Base)
+
+ class MyModel(Base):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+
+ configure_mappers()
+
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.declare_first__(MyModel),
+ mock.call.declare_last__(MyModel),
+ ]
+ )
+
+ @testing.teardown_events(orm_events.MapperEvents)
+ def test_declare_first_direct(self):
+ canary = mock.Mock()
+
+ class MyOtherModel(Base):
+ __tablename__ = 'test2'
+ id = Column(Integer, primary_key=True)
+
+ @classmethod
+ def __declare_first__(cls):
+ canary.declare_first__(cls)
+
+ @classmethod
+ def __declare_last__(cls):
+ canary.declare_last__(cls)
+
+ configure_mappers()
+
+ eq_(
+ canary.mock_calls,
+ [
+ mock.call.declare_first__(MyOtherModel),
+ mock.call.declare_last__(MyOtherModel)
+ ]
+ )
+
def test_mapper_args_declared_attr(self):
class ComputedMapperArgs:
@@ -1356,7 +1441,7 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
"SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b"
)
-
+ @testing.requires.predictable_gc
def test_singleton_gc(self):
counter = mock.Mock()
@@ -1392,6 +1477,39 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
getattr, Mixin, "my_prop"
)
+ def test_non_decl_access(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def __tablename__(cls):
+ counter(cls)
+ return "foo"
+
+ class Foo(Mixin, Base):
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def x(cls):
+ cls.__tablename__
+
+ @declared_attr
+ def y(cls):
+ cls.__tablename__
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(Foo)]
+ )
+
+ eq_(Foo.__tablename__, 'foo')
+ eq_(Foo.__tablename__, 'foo')
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(Foo), mock.call(Foo), mock.call(Foo)]
+ )
+
def test_property_noncascade(self):
counter = mock.Mock()
@@ -1432,6 +1550,59 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
eq_(counter.mock_calls, [mock.call(A), mock.call(B)])
+ def test_col_prop_attrs_associated_w_class_for_mapper_args(self):
+ from sqlalchemy import Column
+ import collections
+
+ asserted = collections.defaultdict(set)
+
+ class Mixin(object):
+ @declared_attr.cascading
+ def my_attr(cls):
+ if decl.has_inherited_table(cls):
+ id = Column(ForeignKey('a.my_attr'), primary_key=True)
+ asserted['b'].add(id)
+ else:
+ id = Column(Integer, primary_key=True)
+ asserted['a'].add(id)
+ return id
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ @declared_attr
+ def __mapper_args__(cls):
+ asserted['a'].add(cls.my_attr)
+ return {}
+
+ # here:
+ # 1. A is mapped. so A.my_attr is now the InstrumentedAttribute.
+ # 2. B wants to call my_attr also. Due to .cascading, it has been
+ # invoked specific to B, and is present in the dict_ that will
+ # be used when we map the class. But except for the
+ # special setattr() we do in _scan_attributes() in this case, would
+ # otherwise not have been set on the class as anything from this call;
+ # the usual mechanics of calling it from the descriptor also do not
+ # work because A is fully mapped and, because A set it up, the attribute
+ # is currently that unexpected InstrumentedAttribute, which replaces
+ # the descriptor and prevents it from being invoked.
+
+ class B(A):
+ __tablename__ = 'b'
+
+ @declared_attr
+ def __mapper_args__(cls):
+ asserted['b'].add(cls.my_attr)
+ return {}
+
+ eq_(
+ asserted,
+ {
+ 'a': set([A.my_attr.property.columns[0]]),
+ 'b': set([B.my_attr.property.columns[0]])
+ }
+ )
+
def test_column_pre_map(self):
counter = mock.Mock()
@@ -1517,3 +1688,44 @@ class AbstractTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
eq_(set(Base.metadata.tables), set(['y', 'z', 'q']))
+
+ def test_middle_abstract_attributes(self):
+ # test for [ticket:3219]
+ class A(Base):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String)
+
+ class B(A):
+ __abstract__ = True
+ data = Column(String)
+
+ class C(B):
+ c_value = Column(String)
+
+ eq_(
+ sa.inspect(C).attrs.keys(), ['id', 'name', 'data', 'c_value']
+ )
+
+ def test_middle_abstract_inherits(self):
+ # test for [ticket:3240]
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ class AAbs(A):
+ __abstract__ = True
+
+ class B1(A):
+ __tablename__ = 'b1'
+ id = Column(ForeignKey('a.id'), primary_key=True)
+
+ class B2(AAbs):
+ __tablename__ = 'b2'
+ id = Column(ForeignKey('a.id'), primary_key=True)
+
+ assert B1.__mapper__.inherits is A.__mapper__
+
+ assert B2.__mapper__.inherits is A.__mapper__
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index 67e474705..98e40b11e 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -13,6 +13,7 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.assertions import expect_warnings
class DictCollection(dict):
@collection.appender
@@ -912,6 +913,22 @@ class LazyLoadTest(fixtures.TestBase):
self.assert_('_children' in p.__dict__)
self.assert_(len(p._children) == 3)
+ def test_slicing_list(self):
+ Parent, Child = self.Parent, self.Child
+
+ mapper(Parent, self.table, properties={
+ '_children': relationship(Child, lazy='select',
+ collection_class=list)})
+
+ p = Parent('p')
+ p.children = ['a', 'b', 'c']
+
+ p = self.roundtrip(p)
+
+ self.assert_(len(p._children) == 3)
+ eq_('b', p.children[1])
+ eq_(['b', 'c'], p.children[-2:])
+
def test_lazy_scalar(self):
Parent, Child = self.Parent, self.Child
@@ -1072,7 +1089,8 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def define_tables(cls, metadata):
Table('userkeywords', metadata,
Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True),
- Column('user_id', Integer, ForeignKey('users.id'))
+ Column('user_id', Integer, ForeignKey('users.id')),
+ Column('value', String(50))
)
Table('users', metadata,
Column('id', Integer,
@@ -1111,6 +1129,9 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
# nonuselist
singular_value = association_proxy('singular', 'value')
+ # o2m -> scalar
+ singular_collection = association_proxy('user_keywords', 'value')
+
class Keyword(cls.Comparable):
def __init__(self, keyword):
self.keyword = keyword
@@ -1178,8 +1199,9 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
for jj in words[(ii % len(words)):((ii + 3) % len(words))]:
k = Keyword(jj)
user.keywords.append(k)
- if ii % 3 == None:
+ if ii % 2 == 0:
user.singular.keywords.append(k)
+ user.user_keywords[-1].value = "singular%d" % ii
orphan = Keyword('orphan')
orphan.user_keyword = UserKeyword(keyword=orphan, user=None)
@@ -1196,6 +1218,27 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def _equivalent(self, q_proxy, q_direct):
eq_(q_proxy.all(), q_direct.all())
+ def test_filter_any_criterion_ul_scalar(self):
+ UserKeyword, User = self.classes.UserKeyword, self.classes.User
+
+ q1 = self.session.query(User).filter(
+ User.singular_collection.any(UserKeyword.value == 'singular8'))
+ self.assert_compile(
+ q1,
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "users.singular_id AS users_singular_id "
+ "FROM users "
+ "WHERE EXISTS (SELECT 1 "
+ "FROM userkeywords "
+ "WHERE users.id = userkeywords.user_id AND "
+ "userkeywords.value = :value_1)",
+ checkparams={'value_1': 'singular8'}
+ )
+
+ q2 = self.session.query(User).filter(
+ User.user_keywords.any(UserKeyword.value == 'singular8'))
+ self._equivalent(q1, q2)
+
def test_filter_any_kwarg_ul_nul(self):
UserKeyword, User = self.classes.UserKeyword, self.classes.User
@@ -1284,16 +1327,18 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def test_filter_contains_nul_ul(self):
User, Singular = self.classes.User, self.classes.Singular
- self._equivalent(
- self.session.query(User).filter(
- User.singular_keywords.contains(self.kw)
- ),
- self.session.query(User).filter(
- User.singular.has(
- Singular.keywords.contains(self.kw)
- )
- ),
- )
+ with expect_warnings(
+ "Got None for value of column keywords.singular_id;"):
+ self._equivalent(
+ self.session.query(User).filter(
+ User.singular_keywords.contains(self.kw)
+ ),
+ self.session.query(User).filter(
+ User.singular.has(
+ Singular.keywords.contains(self.kw)
+ )
+ ),
+ )
def test_filter_eq_nul_nul(self):
Keyword = self.classes.Keyword
@@ -1548,3 +1593,23 @@ class DictOfTupleUpdateTest(fixtures.TestBase):
a1.elements.update,
(("B", 3), 'elem2'), (("C", 4), "elem3")
)
+
+
+class InfoTest(fixtures.TestBase):
+ def test_constructor(self):
+ assoc = association_proxy('a', 'b', info={'some_assoc': 'some_value'})
+ eq_(assoc.info, {"some_assoc": "some_value"})
+
+ def test_empty(self):
+ assoc = association_proxy('a', 'b')
+ eq_(assoc.info, {})
+
+ def test_via_cls(self):
+ class Foob(object):
+ assoc = association_proxy('a', 'b')
+
+ eq_(Foob.assoc.info, {})
+
+ Foob.assoc.info["foo"] = 'bar'
+
+ eq_(Foob.assoc.info, {'foo': 'bar'})
diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py
new file mode 100644
index 000000000..8bfa58403
--- /dev/null
+++ b/test/ext/test_baked.py
@@ -0,0 +1,920 @@
+from sqlalchemy.orm import Session, subqueryload, \
+ mapper, relationship, lazyload, clear_mappers
+from sqlalchemy.testing import eq_, is_, is_not_
+from sqlalchemy.testing import assert_raises, assert_raises_message
+from sqlalchemy import testing
+from test.orm import _fixtures
+from sqlalchemy.ext.baked import BakedQuery, baked_lazyload, BakedLazyLoader
+from sqlalchemy.ext import baked
+from sqlalchemy import bindparam, func
+from sqlalchemy.orm import exc as orm_exc
+import itertools
+from sqlalchemy.testing import mock
+
+
+class BakedTest(_fixtures.FixtureTest):
+ run_setup_mappers = 'once'
+ run_inserts = 'once'
+ run_deletes = None
+
+ def setup(self):
+ self.bakery = baked.bakery()
+
+
+class StateChangeTest(BakedTest):
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+
+ mapper(User, cls.tables.users)
+
+ def _assert_cache_key(self, key, elements):
+ eq_(
+ key,
+ tuple(elem.__code__ for elem in elements)
+ )
+
+ def test_initial_key(self):
+ User = self.classes.User
+ session = Session()
+ l1 = lambda: session.query(User)
+ q1 = self.bakery(l1)
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1]
+ )
+ eq_(q1.steps, [l1])
+
+ def test_inplace_add(self):
+ User = self.classes.User
+ session = Session()
+ l1 = lambda: session.query(User)
+ l2 = lambda q: q.filter(User.name == bindparam('name'))
+ q1 = self.bakery(l1)
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1]
+ )
+ eq_(q1.steps, [l1])
+
+ q2 = q1.add_criteria(l2)
+ is_(q2, q1)
+
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1, l2]
+ )
+ eq_(q1.steps, [l1, l2])
+
+ def test_inplace_add_operator(self):
+ User = self.classes.User
+ session = Session()
+ l1 = lambda: session.query(User)
+ l2 = lambda q: q.filter(User.name == bindparam('name'))
+ q1 = self.bakery(l1)
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1]
+ )
+
+ q1 += l2
+
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1, l2]
+ )
+
+ def test_chained_add(self):
+ User = self.classes.User
+ session = Session()
+ l1 = lambda: session.query(User)
+ l2 = lambda q: q.filter(User.name == bindparam('name'))
+ q1 = self.bakery(l1)
+
+ q2 = q1.with_criteria(l2)
+ is_not_(q2, q1)
+
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1]
+ )
+ self._assert_cache_key(
+ q2._cache_key,
+ [l1, l2]
+ )
+
+ def test_chained_add_operator(self):
+ User = self.classes.User
+ session = Session()
+ l1 = lambda: session.query(User)
+ l2 = lambda q: q.filter(User.name == bindparam('name'))
+ q1 = self.bakery(l1)
+
+ q2 = q1 + l2
+ is_not_(q2, q1)
+
+ self._assert_cache_key(
+ q1._cache_key,
+ [l1]
+ )
+ self._assert_cache_key(
+ q2._cache_key,
+ [l1, l2]
+ )
+
+
+class LikeQueryTest(BakedTest):
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+
+ mapper(User, cls.tables.users)
+
+ def test_first_no_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'asdf')
+
+ eq_(
+ bq(Session()).first(),
+ None
+ )
+
+ def test_first_multiple_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User.id))
+ bq += lambda q: q.filter(User.name.like('%ed%')).order_by(User.id)
+
+ eq_(
+ bq(Session()).first(),
+ (8, )
+ )
+
+ def test_one_or_none_no_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'asdf')
+
+ eq_(
+ bq(Session()).one_or_none(),
+ None
+ )
+
+ def test_one_or_none_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one_or_none()
+ eq_(u1.name, 'ed')
+
+ def test_one_or_none_multiple_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
+ assert_raises_message(
+ orm_exc.MultipleResultsFound,
+ "Multiple rows were found for one_or_none()",
+ bq(Session()).one_or_none
+ )
+
+ def test_one_no_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'asdf')
+
+ assert_raises_message(
+ orm_exc.NoResultFound,
+ "No row was found for one()",
+ bq(Session()).one
+ )
+
+ def test_one_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one()
+ eq_(u1.name, 'ed')
+
+ def test_one_multiple_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
+ assert_raises_message(
+ orm_exc.MultipleResultsFound,
+ "Multiple rows were found for one()",
+ bq(Session()).one
+ )
+
+ def test_get(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+
+ sess = Session()
+
+ def go():
+ u1 = bq(sess).get(7)
+ eq_(u1.name, 'jack')
+ self.assert_sql_count(testing.db, go, 1)
+
+ u1 = sess.query(User).get(7) # noqa
+
+ def go():
+ u2 = bq(sess).get(7)
+ eq_(u2.name, 'jack')
+ self.assert_sql_count(testing.db, go, 0)
+
+ def go():
+ u2 = bq(sess).get(8)
+ eq_(u2.name, 'ed')
+ self.assert_sql_count(testing.db, go, 1)
+
+ def test_get_pk_w_null(self):
+ """test the re-implementation of logic to do get with IS NULL."""
+
+ class AddressUser(object):
+ pass
+ mapper(
+ AddressUser,
+ self.tables.users.outerjoin(self.tables.addresses),
+ properties={
+ "id": self.tables.users.c.id,
+ "address_id": self.tables.addresses.c.id
+ }
+ )
+
+ bq = self.bakery(lambda s: s.query(AddressUser))
+
+ sess = Session()
+
+ def go():
+ u1 = bq(sess).get((10, None))
+ eq_(u1.name, 'chuck')
+ self.assert_sql_count(testing.db, go, 1)
+
+ u1 = sess.query(AddressUser).get((10, None)) # noqa
+
+ def go():
+ u2 = bq(sess).get((10, None))
+ eq_(u2.name, 'chuck')
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_get_includes_getclause(self):
+ # test issue #3597
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+
+ for i in range(5):
+ sess = Session()
+ u1 = bq(sess).get(7)
+ eq_(u1.name, 'jack')
+ sess.close()
+
+ eq_(len(bq._bakery), 2)
+
+ # simulate race where mapper._get_clause
+ # may be generated more than once
+ from sqlalchemy import inspect
+ del inspect(User).__dict__['_get_clause']
+
+ for i in range(5):
+ sess = Session()
+ u1 = bq(sess).get(7)
+ eq_(u1.name, 'jack')
+ sess.close()
+ eq_(len(bq._bakery), 4)
+
+
+class ResultTest(BakedTest):
+ __backend__ = True
+
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+ Address = cls.classes.Address
+
+ mapper(User, cls.tables.users, properties={
+ "addresses": relationship(
+ Address, order_by=cls.tables.addresses.c.id)
+ })
+ mapper(Address, cls.tables.addresses)
+
+ def test_cachekeys_on_constructor(self):
+ User = self.classes.User
+
+ queue = [7, 8]
+ fn = lambda s: s.query(User.id).filter_by(id=queue.pop(0))
+ bq1 = self.bakery(fn, 7)
+ bq2 = self.bakery(fn, 8)
+
+ for i in range(3):
+ session = Session(autocommit=True)
+ eq_(
+ bq1(session).all(),
+ [(7,)]
+ )
+
+ eq_(
+ bq2(session).all(),
+ [(8,)]
+ )
+
+ def test_no_steps(self):
+ User = self.classes.User
+
+ bq = self.bakery(
+ lambda s: s.query(User.id, User.name).order_by(User.id))
+
+ for i in range(3):
+ session = Session(autocommit=True)
+ eq_(
+ bq(session).all(),
+ [(7, 'jack'), (8, 'ed'), (9, 'fred'), (10, 'chuck')]
+ )
+
+ def test_different_limits(self):
+ User = self.classes.User
+
+ bq = self.bakery(
+ lambda s: s.query(User.id, User.name).order_by(User.id))
+
+ bq += lambda q: q.limit(bindparam('limit')).offset(bindparam('offset'))
+ session = Session(autocommit=True)
+
+ for i in range(4):
+ for limit, offset, exp in [
+ (2, 1, [(8, 'ed'), (9, 'fred')]),
+ (3, 0, [(7, 'jack'), (8, 'ed'), (9, 'fred')]),
+ (1, 2, [(9, 'fred')])
+ ]:
+ eq_(
+ bq(session).params(limit=limit, offset=offset).all(),
+ exp
+ )
+
+ def test_spoiled_full_w_params(self):
+ User = self.classes.User
+
+ canary = mock.Mock()
+
+ def fn1(s):
+ canary.fn1()
+ return s.query(User.id, User.name).order_by(User.id)
+
+ def fn2(q):
+ canary.fn2()
+ return q.filter(User.id == bindparam('id'))
+
+ def fn3(q):
+ canary.fn3()
+ return q
+
+ for x in range(3):
+ bq = self.bakery(fn1)
+
+ bq += fn2
+
+ sess = Session(autocommit=True)
+ eq_(
+ bq.spoil(full=True).add_criteria(fn3)(sess).params(id=7).all(),
+ [(7, 'jack')]
+ )
+
+ eq_(
+ canary.mock_calls,
+ [mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
+ mock.call.fn1(), mock.call.fn2(), mock.call.fn3(),
+ mock.call.fn1(), mock.call.fn2(), mock.call.fn3()]
+ )
+
+ def test_spoiled_half_w_params(self):
+ User = self.classes.User
+
+ canary = mock.Mock()
+
+ def fn1(s):
+ canary.fn1()
+ return s.query(User.id, User.name).order_by(User.id)
+
+ def fn2(q):
+ canary.fn2()
+ return q.filter(User.id == bindparam('id'))
+
+ def fn3(q):
+ canary.fn3()
+ return q
+
+ bq = self.bakery(fn1)
+
+ bq += fn2
+
+ for x in range(3):
+ bq = self.bakery(fn1)
+
+ bq += fn2
+
+ sess = Session(autocommit=True)
+ eq_(
+ bq.spoil().add_criteria(fn3)(sess).params(id=7).all(),
+ [(7, 'jack')]
+ )
+
+ eq_(
+ canary.mock_calls,
+ [mock.call.fn1(), mock.call.fn2(),
+ mock.call.fn3(), mock.call.fn3(), mock.call.fn3()]
+ )
+
+ def test_w_new_entities(self):
+ """Test that the query can have its entities modified in
+ an arbitrary callable, and that this new entity list is preserved
+ when the query is invoked.
+
+ """
+ User = self.classes.User
+
+ bq = self.bakery(
+ lambda s: s.query(User.id, User.name))
+
+ bq += lambda q: q.from_self().with_entities(
+ func.count(User.id))
+
+ for i in range(3):
+ session = Session(autocommit=True)
+ eq_(
+ bq(session).all(),
+ [(4, )]
+ )
+
+ def test_conditional_step(self):
+ """Test a large series of conditionals and assert that
+ results remain correct between all of them within a series
+ of loops.
+
+ """
+ User = self.classes.User
+
+ base_bq = self.bakery(
+ lambda s: s.query(User.id, User.name))
+
+ base_bq += lambda q: q.order_by(User.id)
+
+ for i in range(4):
+ for cond1, cond2, cond3, cond4 in itertools.product(
+ *[(False, True) for j in range(4)]):
+ bq = base_bq._clone()
+ if cond1:
+ bq += lambda q: q.filter(User.name != 'jack')
+ if cond2:
+ bq += lambda q: q.join(User.addresses)
+ else:
+ bq += lambda q: q.outerjoin(User.addresses)
+ elif cond3:
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+ else:
+ bq += lambda q: q.filter(User.name == 'jack')
+
+ if cond4:
+ bq += lambda q: q.from_self().with_entities(
+ func.count(User.id))
+ sess = Session(autocommit=True)
+ result = bq(sess).all()
+ if cond4:
+ if cond1:
+ if cond2:
+ eq_(result, [(4,)])
+ else:
+ eq_(result, [(5,)])
+ elif cond3:
+ eq_(result, [(2,)])
+ else:
+ eq_(result, [(1,)])
+ else:
+ if cond1:
+ if cond2:
+ eq_(
+ result,
+ [(8, 'ed'), (8, 'ed'), (8, 'ed'),
+ (9, 'fred')]
+ )
+ else:
+ eq_(
+ result,
+ [(8, 'ed'), (8, 'ed'), (8, 'ed'),
+ (9, 'fred'), (10, 'chuck')]
+ )
+ elif cond3:
+ eq_(result, [(8, 'ed'), (9, 'fred')])
+ else:
+ eq_(result, [(7, 'jack')])
+
+ sess.close()
+
+ def test_conditional_step_oneline(self):
+ User = self.classes.User
+
+ base_bq = self.bakery(
+ lambda s: s.query(User.id, User.name))
+
+ base_bq += lambda q: q.order_by(User.id)
+
+ for i in range(4):
+ for cond1 in (False, True):
+ bq = base_bq._clone()
+
+ # we were using (filename, firstlineno) as cache key,
+ # which fails for this kind of thing!
+ bq += (lambda q: q.filter(User.name != 'jack')) if cond1 else (lambda q: q.filter(User.name == 'jack')) # noqa
+ sess = Session(autocommit=True)
+ result = bq(sess).all()
+
+ if cond1:
+ eq_(result, [(8, u'ed'), (9, u'fred'), (10, u'chuck')])
+ else:
+ eq_(result, [(7, 'jack')])
+
+ sess.close()
+
+ def test_subquery_eagerloading(self):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ base_bq = self.bakery(
+ lambda s: s.query(User))
+
+ base_bq += lambda q: q.options(subqueryload(User.addresses))
+ base_bq += lambda q: q.order_by(User.id)
+
+ assert_result = [
+ User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')]),
+ User(id=8, addresses=[
+ Address(id=2, email_address='ed@wood.com'),
+ Address(id=3, email_address='ed@bettyboop.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=10, addresses=[])
+ ]
+
+ for i in range(4):
+ for cond1, cond2 in itertools.product(
+ *[(False, True) for j in range(2)]):
+ bq = base_bq._clone()
+
+ sess = Session()
+
+ if cond1:
+ bq += lambda q: q.filter(User.name == 'jack')
+ else:
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
+ if cond2:
+ ct = func.count(Address.id).label('count')
+ subq = sess.query(
+ ct,
+ Address.user_id).group_by(Address.user_id).\
+ having(ct > 2).subquery()
+
+ bq += lambda q: q.join(subq)
+
+ if cond2:
+ if cond1:
+ def go():
+ result = bq(sess).all()
+ eq_([], result)
+ self.assert_sql_count(testing.db, go, 1)
+ else:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[1:2], result)
+ self.assert_sql_count(testing.db, go, 2)
+ else:
+ if cond1:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[0:1], result)
+ self.assert_sql_count(testing.db, go, 2)
+ else:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[1:3], result)
+ self.assert_sql_count(testing.db, go, 2)
+
+ sess.close()
+
+
+class LazyLoaderTest(BakedTest):
+ run_setup_mappers = 'each'
+
+ def _o2m_fixture(self, lazy="select", **kw):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ mapper(User, self.tables.users, properties={
+ 'addresses': relationship(
+ Address, order_by=self.tables.addresses.c.id,
+ lazy=lazy, **kw)
+ })
+ mapper(Address, self.tables.addresses)
+ return User, Address
+
+ def _m2o_fixture(self):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ mapper(User, self.tables.users)
+ mapper(Address, self.tables.addresses, properties={
+ 'user': relationship(User)
+ })
+ return User, Address
+
+ def test_strategy_lookup(self):
+ """test that the lazy loader strategies aren't getting mixed up
+ with BakedLazyLoader as a subclass.
+
+ """
+ User, Address = self._o2m_fixture()
+
+ ll = User.addresses.property._get_strategy((('lazy', 'select'),))
+ assert not isinstance(ll, BakedLazyLoader)
+ eq_(ll._strategy_keys, [(('lazy', 'select'),), (('lazy', True),)])
+
+ ll = User.addresses.property._get_strategy((('lazy', True),))
+ assert not isinstance(ll, BakedLazyLoader)
+ eq_(ll._strategy_keys, [(('lazy', 'select'),), (('lazy', True),)])
+
+ bl = User.addresses.property._get_strategy((('lazy', 'baked_select'),))
+ assert isinstance(bl, BakedLazyLoader)
+ eq_(bl._strategy_keys, [(('lazy', 'baked_select'),)])
+
+ def test_invocation_per_state(self):
+ """test that BakedLazyLoader is getting invoked with the
+ baked_lazyload() loader.
+
+ """
+ User, Address = self._o2m_fixture()
+
+ sess = Session()
+ q = sess.query(User)
+
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # not invoked
+ eq_(el.mock_calls, [])
+
+ sess = Session()
+ q = sess.query(User).options(baked_lazyload(User.addresses))
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # invoked
+ is_(
+ el.mock_calls[0][1][1],
+ u1._sa_instance_state
+ )
+
+ def test_invocation_per_mapper(self):
+ """test that BakedLazyLoader is getting invoked with the
+ "baked_select" lazy setting.
+
+ """
+ User, Address = self._o2m_fixture(lazy="baked_select")
+
+ sess = Session()
+ q = sess.query(User).options(lazyload(User.addresses))
+
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # not invoked
+ eq_(el.mock_calls, [])
+
+ sess = Session()
+ q = sess.query(User)
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # invoked
+ is_(
+ el.mock_calls[0][1][1],
+ u1._sa_instance_state
+ )
+
+ def test_systemwide_loaders_loadable_via_lazyloader(self):
+ from sqlalchemy.orm import configure_mappers
+ from sqlalchemy.orm.strategies import LazyLoader
+
+ baked.bake_lazy_loaders()
+ try:
+ User, Address = self._o2m_fixture(lazy='joined')
+
+ configure_mappers()
+
+ is_(
+ User.addresses.property.
+ _get_strategy_by_cls(LazyLoader).__class__,
+ BakedLazyLoader
+ )
+ finally:
+ baked.unbake_lazy_loaders()
+
+ def test_invocation_systemwide_loaders(self):
+ baked.bake_lazy_loaders()
+ try:
+ User, Address = self._o2m_fixture()
+
+ sess = Session()
+ q = sess.query(User).options(lazyload(User.addresses))
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # invoked
+ is_(
+ el.mock_calls[0][1][1],
+ u1._sa_instance_state
+ )
+ finally:
+ baked.unbake_lazy_loaders()
+
+ clear_mappers()
+ User, Address = self._o2m_fixture()
+ sess = Session()
+ q = sess.query(User).options(lazyload(User.addresses))
+
+ with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
+ u1 = q.first()
+ u1.addresses
+ # not invoked
+ eq_(el.mock_calls, [])
+
+ def test_baked_lazy_loading_relationship_flag_true(self):
+ self._test_baked_lazy_loading_relationship_flag(True)
+
+ def test_baked_lazy_loading_relationship_flag_false(self):
+ self._test_baked_lazy_loading_relationship_flag(False)
+
+ def _test_baked_lazy_loading_relationship_flag(self, flag):
+ baked.bake_lazy_loaders()
+ try:
+ User, Address = self._o2m_fixture(bake_queries=flag)
+
+ sess = Session()
+ u1 = sess.query(User).first()
+
+ from sqlalchemy.orm import Query
+
+ canary = mock.Mock()
+
+ # I would think Mock can do this but apparently
+ # it cannot (wrap / autospec don't work together)
+ real_compile_context = Query._compile_context
+
+ def _my_compile_context(*arg, **kw):
+ if arg[0].column_descriptions[0]['entity'] is Address:
+ canary()
+ return real_compile_context(*arg, **kw)
+
+ with mock.patch.object(
+ Query,
+ "_compile_context",
+ _my_compile_context
+ ):
+ u1.addresses
+
+ sess.expire(u1)
+ u1.addresses
+ finally:
+ baked.unbake_lazy_loaders()
+
+ if flag:
+ eq_(canary.call_count, 1)
+ else:
+ eq_(canary.call_count, 2)
+
+ def test_baked_lazy_loading_option_o2m(self):
+ User, Address = self._o2m_fixture()
+ self._test_baked_lazy_loading(set_option=True)
+
+ def test_baked_lazy_loading_mapped_o2m(self):
+ User, Address = self._o2m_fixture(lazy="baked_select")
+ self._test_baked_lazy_loading(set_option=False)
+
+ def _test_baked_lazy_loading(self, set_option):
+ User, Address = self.classes.User, self.classes.Address
+
+ base_bq = self.bakery(
+ lambda s: s.query(User))
+
+ if set_option:
+ base_bq += lambda q: q.options(baked_lazyload(User.addresses))
+
+ base_bq += lambda q: q.order_by(User.id)
+
+ assert_result = self.static.user_address_result
+
+ for i in range(4):
+ for cond1, cond2 in itertools.product(
+ *[(False, True) for j in range(2)]):
+ bq = base_bq._clone()
+
+ sess = Session()
+
+ if cond1:
+ bq += lambda q: q.filter(User.name == 'jack')
+ else:
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
+ if cond2:
+ ct = func.count(Address.id).label('count')
+ subq = sess.query(
+ ct,
+ Address.user_id).group_by(Address.user_id).\
+ having(ct > 2).subquery()
+
+ bq += lambda q: q.join(subq)
+
+ if cond2:
+ if cond1:
+ def go():
+ result = bq(sess).all()
+ eq_([], result)
+ self.assert_sql_count(testing.db, go, 1)
+ else:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[1:2], result)
+ self.assert_sql_count(testing.db, go, 2)
+ else:
+ if cond1:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[0:1], result)
+ self.assert_sql_count(testing.db, go, 2)
+ else:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[1:3], result)
+ self.assert_sql_count(testing.db, go, 3)
+
+ sess.close()
+
+ def test_baked_lazy_loading_m2o(self):
+ User, Address = self._m2o_fixture()
+
+ base_bq = self.bakery(
+ lambda s: s.query(Address))
+
+ base_bq += lambda q: q.options(baked_lazyload(Address.user))
+ base_bq += lambda q: q.order_by(Address.id)
+
+ assert_result = self.static.address_user_result
+
+ for i in range(4):
+ for cond1 in (False, True):
+ bq = base_bq._clone()
+
+ sess = Session()
+
+ if cond1:
+ bq += lambda q: q.filter(
+ Address.email_address == 'jack@bean.com')
+ else:
+ bq += lambda q: q.filter(
+ Address.email_address.like('ed@%'))
+
+ if cond1:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[0:1], result)
+ self.assert_sql_count(testing.db, go, 2)
+ else:
+ def go():
+ result = bq(sess).all()
+ eq_(assert_result[1:4], result)
+ self.assert_sql_count(testing.db, go, 2)
+
+ sess.close()
+
+ # additional tests:
+ # 1. m2m w lazyload
+ # 2. o2m lazyload where m2o backrefs have an eager load, test
+ # that eager load is canceled out
+ # 3. uselist = False, uselist=False assertion
+
diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py
index c7627c8b2..c4147ed85 100644
--- a/test/ext/test_extendedattr.py
+++ b/test/ext/test_extendedattr.py
@@ -1,17 +1,20 @@
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, ne_
from sqlalchemy import util
+import sqlalchemy as sa
+from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import attributes
-from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute
+from sqlalchemy.orm.attributes import set_attribute, \
+ get_attribute, del_attribute
from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.orm import clear_mappers
-from sqlalchemy import testing
from sqlalchemy.testing import fixtures
from sqlalchemy.ext import instrumentation
-from sqlalchemy.orm.instrumentation import register_class
+from sqlalchemy.orm.instrumentation import register_class, manager_of_class
from sqlalchemy.testing.util import decorator
from sqlalchemy.orm import events
from sqlalchemy import event
+
@decorator
def modifies_instrumentation_finders(fn, *args, **kw):
pristine = instrumentation.instrumentation_finders[:]
@@ -21,15 +24,11 @@ def modifies_instrumentation_finders(fn, *args, **kw):
del instrumentation.instrumentation_finders[:]
instrumentation.instrumentation_finders.extend(pristine)
-def with_lookup_strategy(strategy):
- @decorator
- def decorate(fn, *args, **kw):
- try:
- ext_instrumentation._install_instrumented_lookups()
- return fn(*args, **kw)
- finally:
- ext_instrumentation._reinstall_default_lookups()
- return decorate
+
+class _ExtBase(object):
+ @classmethod
+ def teardown_class(cls):
+ instrumentation._reinstall_default_lookups()
class MyTypesManager(instrumentation.InstrumentationManager):
@@ -58,16 +57,19 @@ class MyTypesManager(instrumentation.InstrumentationManager):
def state_getter(self, class_):
return lambda instance: instance.__dict__['_my_state']
+
class MyListLike(list):
# add @appender, @remover decorators as needed
_sa_iterator = list.__iter__
_sa_linker = None
_sa_converter = None
+
def _sa_appender(self, item, _sa_initiator=None):
if _sa_initiator is not False:
self._sa_adapter.fire_append_event(item, _sa_initiator)
list.append(self, item)
append = _sa_appender
+
def _sa_remover(self, item, _sa_initiator=None):
self._sa_adapter.fire_pre_remove_event(_sa_initiator)
if _sa_initiator is not False:
@@ -75,57 +77,64 @@ class MyListLike(list):
list.remove(self, item)
remove = _sa_remover
-class MyBaseClass(object):
- __sa_instrumentation_manager__ = instrumentation.InstrumentationManager
-
-class MyClass(object):
-
- # This proves that a staticmethod will work here; don't
- # flatten this back to a class assignment!
- def __sa_instrumentation_manager__(cls):
- return MyTypesManager(cls)
-
- __sa_instrumentation_manager__ = staticmethod(__sa_instrumentation_manager__)
-
- # This proves SA can handle a class with non-string dict keys
- if not util.pypy and not util.jython:
- locals()[42] = 99 # Don't remove this line!
-
- def __init__(self, **kwargs):
- for k in kwargs:
- setattr(self, k, kwargs[k])
-
- def __getattr__(self, key):
- if is_instrumented(self, key):
- return get_attribute(self, key)
- else:
- try:
- return self._goofy_dict[key]
- except KeyError:
- raise AttributeError(key)
-
- def __setattr__(self, key, value):
- if is_instrumented(self, key):
- set_attribute(self, key, value)
- else:
- self._goofy_dict[key] = value
-
- def __hasattr__(self, key):
- if is_instrumented(self, key):
- return True
- else:
- return key in self._goofy_dict
-
- def __delattr__(self, key):
- if is_instrumented(self, key):
- del_attribute(self, key)
- else:
- del self._goofy_dict[key]
-
-class UserDefinedExtensionTest(fixtures.ORMTest):
+
+MyBaseClass, MyClass = None, None
+
+
+class UserDefinedExtensionTest(_ExtBase, fixtures.ORMTest):
+
@classmethod
- def teardown_class(cls):
- instrumentation._reinstall_default_lookups()
+ def setup_class(cls):
+ global MyBaseClass, MyClass
+
+ class MyBaseClass(object):
+ __sa_instrumentation_manager__ = \
+ instrumentation.InstrumentationManager
+
+ class MyClass(object):
+
+ # This proves that a staticmethod will work here; don't
+ # flatten this back to a class assignment!
+ def __sa_instrumentation_manager__(cls):
+ return MyTypesManager(cls)
+
+ __sa_instrumentation_manager__ = staticmethod(
+ __sa_instrumentation_manager__)
+
+ # This proves SA can handle a class with non-string dict keys
+ if not util.pypy and not util.jython:
+ locals()[42] = 99 # Don't remove this line!
+
+ def __init__(self, **kwargs):
+ for k in kwargs:
+ setattr(self, k, kwargs[k])
+
+ def __getattr__(self, key):
+ if is_instrumented(self, key):
+ return get_attribute(self, key)
+ else:
+ try:
+ return self._goofy_dict[key]
+ except KeyError:
+ raise AttributeError(key)
+
+ def __setattr__(self, key, value):
+ if is_instrumented(self, key):
+ set_attribute(self, key, value)
+ else:
+ self._goofy_dict[key] = value
+
+ def __hasattr__(self, key):
+ if is_instrumented(self, key):
+ return True
+ else:
+ return key in self._goofy_dict
+
+ def __delattr__(self, key):
+ if is_instrumented(self, key):
+ del_attribute(self, key)
+ else:
+ del self._goofy_dict[key]
def teardown(self):
clear_mappers()
@@ -135,15 +144,25 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
pass
register_class(User)
- attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
- attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
- attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
+ attributes.register_attribute(
+ User, 'user_id', uselist=False, useobject=False)
+ attributes.register_attribute(
+ User, 'user_name', uselist=False, useobject=False)
+ attributes.register_attribute(
+ User, 'email_address', uselist=False, useobject=False)
u = User()
u.user_id = 7
u.user_name = 'john'
u.email_address = 'lala@123.com'
- self.assert_(u.__dict__ == {'_my_state':u._my_state, '_goofy_dict':{'user_id':7, 'user_name':'john', 'email_address':'lala@123.com'}}, u.__dict__)
+ eq_(
+ u.__dict__,
+ {
+ '_my_state': u._my_state,
+ '_goofy_dict': {
+ 'user_id': 7, 'user_name': 'john',
+ 'email_address': 'lala@123.com'}}
+ )
def test_basic(self):
for base in (object, MyBaseClass, MyClass):
@@ -151,29 +170,40 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
pass
register_class(User)
- attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
- attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
- attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
+ attributes.register_attribute(
+ User, 'user_id', uselist=False, useobject=False)
+ attributes.register_attribute(
+ User, 'user_name', uselist=False, useobject=False)
+ attributes.register_attribute(
+ User, 'email_address', uselist=False, useobject=False)
u = User()
u.user_id = 7
u.user_name = 'john'
u.email_address = 'lala@123.com'
- self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
- attributes.instance_state(u)._commit_all(attributes.instance_dict(u))
- self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
+ eq_(u.user_id, 7)
+ eq_(u.user_name, "john")
+ eq_(u.email_address, "lala@123.com")
+ attributes.instance_state(u)._commit_all(
+ attributes.instance_dict(u))
+ eq_(u.user_id, 7)
+ eq_(u.user_name, "john")
+ eq_(u.email_address, "lala@123.com")
u.user_name = 'heythere'
u.email_address = 'foo@bar.com'
- self.assert_(u.user_id == 7 and u.user_name == 'heythere' and u.email_address == 'foo@bar.com')
+ eq_(u.user_id, 7)
+ eq_(u.user_name, "heythere")
+ eq_(u.email_address, "foo@bar.com")
def test_deferred(self):
for base in (object, MyBaseClass, MyClass):
class Foo(base):
pass
- data = {'a':'this is a', 'b':12}
+ data = {'a': 'this is a', 'b': 12}
+
def loader(state, keys):
for k in keys:
state.dict[k] = data[k]
@@ -181,30 +211,38 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
manager = register_class(Foo)
manager.deferred_scalar_loader = loader
- attributes.register_attribute(Foo, 'a', uselist=False, useobject=False)
- attributes.register_attribute(Foo, 'b', uselist=False, useobject=False)
+ attributes.register_attribute(
+ Foo, 'a', uselist=False, useobject=False)
+ attributes.register_attribute(
+ Foo, 'b', uselist=False, useobject=False)
if base is object:
- assert Foo not in instrumentation._instrumentation_factory._state_finders
+ assert Foo not in \
+ instrumentation._instrumentation_factory._state_finders
else:
- assert Foo in instrumentation._instrumentation_factory._state_finders
+ assert Foo in \
+ instrumentation._instrumentation_factory._state_finders
f = Foo()
- attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
+ attributes.instance_state(f)._expire(
+ attributes.instance_dict(f), set())
eq_(f.a, "this is a")
eq_(f.b, 12)
f.a = "this is some new a"
- attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
+ attributes.instance_state(f)._expire(
+ attributes.instance_dict(f), set())
eq_(f.a, "this is a")
eq_(f.b, 12)
- attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
+ attributes.instance_state(f)._expire(
+ attributes.instance_dict(f), set())
f.a = "this is another new a"
eq_(f.a, "this is another new a")
eq_(f.b, 12)
- attributes.instance_state(f)._expire(attributes.instance_dict(f), set())
+ attributes.instance_state(f)._expire(
+ attributes.instance_dict(f), set())
eq_(f.a, "this is a")
eq_(f.b, 12)
@@ -212,7 +250,8 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
eq_(f.a, None)
eq_(f.b, 12)
- attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
+ attributes.instance_state(f)._commit_all(
+ attributes.instance_dict(f))
eq_(f.a, None)
eq_(f.b, 12)
@@ -220,27 +259,32 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
"""tests that attributes are polymorphic"""
for base in (object, MyBaseClass, MyClass):
- class Foo(base):pass
- class Bar(Foo):pass
+ class Foo(base):
+ pass
+
+ class Bar(Foo):
+ pass
register_class(Foo)
register_class(Bar)
def func1(state, passive):
return "this is the foo attr"
+
def func2(state, passive):
return "this is the bar attr"
+
def func3(state, passive):
return "this is the shared attr"
attributes.register_attribute(Foo, 'element',
- uselist=False, callable_=func1,
- useobject=True)
+ uselist=False, callable_=func1,
+ useobject=True)
attributes.register_attribute(Foo, 'element2',
- uselist=False, callable_=func3,
- useobject=True)
+ uselist=False, callable_=func3,
+ useobject=True)
attributes.register_attribute(Bar, 'element',
- uselist=False, callable_=func2,
- useobject=True)
+ uselist=False, callable_=func2,
+ useobject=True)
x = Foo()
y = Bar()
@@ -251,15 +295,20 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
def test_collection_with_backref(self):
for base in (object, MyBaseClass, MyClass):
- class Post(base):pass
- class Blog(base):pass
+ class Post(base):
+ pass
+
+ class Blog(base):
+ pass
register_class(Post)
register_class(Blog)
- attributes.register_attribute(Post, 'blog', uselist=False,
- backref='posts', trackparent=True, useobject=True)
- attributes.register_attribute(Blog, 'posts', uselist=True,
- backref='blog', trackparent=True, useobject=True)
+ attributes.register_attribute(
+ Post, 'blog', uselist=False,
+ backref='posts', trackparent=True, useobject=True)
+ attributes.register_attribute(
+ Blog, 'posts', uselist=True,
+ backref='blog', trackparent=True, useobject=True)
b = Blog()
(p1, p2, p3) = (Post(), Post(), Post())
b.posts.append(p1)
@@ -287,47 +336,77 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
for base in (object, MyBaseClass, MyClass):
class Foo(base):
pass
+
class Bar(base):
pass
register_class(Foo)
register_class(Bar)
- attributes.register_attribute(Foo, "name", uselist=False, useobject=False)
- attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True)
- attributes.register_attribute(Bar, "name", uselist=False, useobject=False)
-
+ attributes.register_attribute(
+ Foo, "name", uselist=False, useobject=False)
+ attributes.register_attribute(
+ Foo, "bars", uselist=True, trackparent=True, useobject=True)
+ attributes.register_attribute(
+ Bar, "name", uselist=False, useobject=False)
f1 = Foo()
f1.name = 'f1'
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), (['f1'], (), ()))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1), 'name'),
+ (['f1'], (), ()))
b1 = Bar()
b1.name = 'b1'
f1.bars.append(b1)
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b1], [], []))
-
- attributes.instance_state(f1)._commit_all(attributes.instance_dict(f1))
- attributes.instance_state(b1)._commit_all(attributes.instance_dict(b1))
-
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), ((), ['f1'], ()))
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ((), [b1], ()))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1), 'bars'),
+ ([b1], [], []))
+
+ attributes.instance_state(f1)._commit_all(
+ attributes.instance_dict(f1))
+ attributes.instance_state(b1)._commit_all(
+ attributes.instance_dict(b1))
+
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1),
+ 'name'),
+ ((), ['f1'], ()))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1),
+ 'bars'),
+ ((), [b1], ()))
f1.name = 'f1mod'
b2 = Bar()
b2.name = 'b2'
f1.bars.append(b2)
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), (['f1mod'], (), ['f1']))
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b2], [b1], []))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1), 'name'),
+ (['f1mod'], (), ['f1']))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1), 'bars'),
+ ([b2], [b1], []))
f1.bars.remove(b1)
- eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b2], [], [b1]))
+ eq_(
+ attributes.get_state_history(
+ attributes.instance_state(f1), 'bars'),
+ ([b2], [], [b1]))
def test_null_instrumentation(self):
class Foo(MyBaseClass):
pass
register_class(Foo)
- attributes.register_attribute(Foo, "name", uselist=False, useobject=False)
- attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True)
+ attributes.register_attribute(
+ Foo, "name", uselist=False, useobject=False)
+ attributes.register_attribute(
+ Foo, "bars", uselist=True, trackparent=True, useobject=True)
assert Foo.name == attributes.manager_of_class(Foo)['name']
assert Foo.bars == attributes.manager_of_class(Foo)['bars']
@@ -335,8 +414,11 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
def test_alternate_finders(self):
"""Ensure the generic finder front-end deals with edge cases."""
- class Unknown(object): pass
- class Known(MyBaseClass): pass
+ class Unknown(object):
+ pass
+
+ class Known(MyBaseClass):
+ pass
register_class(Known)
k, u = Known(), Unknown()
@@ -347,61 +429,105 @@ class UserDefinedExtensionTest(fixtures.ORMTest):
assert attributes.instance_state(k) is not None
assert_raises((AttributeError, KeyError),
- attributes.instance_state, u)
+ attributes.instance_state, u)
assert_raises((AttributeError, KeyError),
- attributes.instance_state, None)
+ attributes.instance_state, None)
+
+ def test_unmapped_not_type_error(self):
+ """extension version of the same test in test_mapper.
+
+ fixes #3408
+ """
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ "Class object expected, got '5'.",
+ class_mapper, 5
+ )
+
+ def test_unmapped_not_type_error_iter_ok(self):
+ """extension version of the same test in test_mapper.
+ fixes #3408
+ """
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ r"Class object expected, got '\(5, 6\)'.",
+ class_mapper, (5, 6)
+ )
+
+
+class FinderTest(_ExtBase, fixtures.ORMTest):
-class FinderTest(fixtures.ORMTest):
def test_standard(self):
- class A(object): pass
+ class A(object):
+ pass
register_class(A)
- eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
+ eq_(
+ type(manager_of_class(A)),
+ instrumentation.ClassManager)
def test_nativeext_interfaceexact(self):
class A(object):
- __sa_instrumentation_manager__ = instrumentation.InstrumentationManager
+ __sa_instrumentation_manager__ = \
+ instrumentation.InstrumentationManager
register_class(A)
- ne_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
+ ne_(
+ type(manager_of_class(A)),
+ instrumentation.ClassManager)
def test_nativeext_submanager(self):
- class Mine(instrumentation.ClassManager): pass
+ class Mine(instrumentation.ClassManager):
+ pass
+
class A(object):
__sa_instrumentation_manager__ = Mine
register_class(A)
- eq_(type(instrumentation.manager_of_class(A)), Mine)
+ eq_(type(manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_greedy(self):
- class Mine(instrumentation.ClassManager): pass
- class A(object): pass
+ class Mine(instrumentation.ClassManager):
+ pass
+
+ class A(object):
+ pass
+
def find(cls):
return Mine
instrumentation.instrumentation_finders.insert(0, find)
register_class(A)
- eq_(type(instrumentation.manager_of_class(A)), Mine)
+ eq_(type(manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_pass(self):
- class A(object): pass
+ class A(object):
+ pass
+
def find(cls):
return None
instrumentation.instrumentation_finders.insert(0, find)
register_class(A)
- eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
-class InstrumentationCollisionTest(fixtures.ORMTest):
+ eq_(
+ type(manager_of_class(A)),
+ instrumentation.ClassManager)
+
+
+class InstrumentationCollisionTest(_ExtBase, fixtures.ORMTest):
+
def test_none(self):
- class A(object): pass
+ class A(object):
+ pass
register_class(A)
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+
class B(object):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
register_class(B)
@@ -411,79 +537,114 @@ class InstrumentationCollisionTest(fixtures.ORMTest):
register_class(C)
def test_single_down(self):
- class A(object): pass
+ class A(object):
+ pass
register_class(A)
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B)
+ assert_raises_message(
+ TypeError, "multiple instrumentation implementations",
+ register_class, B)
def test_single_up(self):
- class A(object): pass
+ class A(object):
+ pass
# delay registration
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
register_class(B)
- assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, A)
+ assert_raises_message(
+ TypeError, "multiple instrumentation implementations",
+ register_class, A)
def test_diamond_b1(self):
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
- class A(object): pass
- class B1(A): pass
+ class A(object):
+ pass
+
+ class B1(A):
+ pass
+
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- class C(object): pass
- assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1)
+ class C(object):
+ pass
+
+ assert_raises_message(
+ TypeError, "multiple instrumentation implementations",
+ register_class, B1)
def test_diamond_b2(self):
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
- class A(object): pass
- class B1(A): pass
+ class A(object):
+ pass
+
+ class B1(A):
+ pass
+
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- class C(object): pass
+
+ class C(object):
+ pass
register_class(B2)
- assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1)
+ assert_raises_message(
+ TypeError, "multiple instrumentation implementations",
+ register_class, B1)
def test_diamond_c_b(self):
mgr_factory = lambda cls: instrumentation.ClassManager(cls)
- class A(object): pass
- class B1(A): pass
+ class A(object):
+ pass
+
+ class B1(A):
+ pass
+
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- class C(object): pass
+
+ class C(object):
+ pass
register_class(C)
- assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1)
+ assert_raises_message(
+ TypeError, "multiple instrumentation implementations",
+ register_class, B1)
-class ExtendedEventsTest(fixtures.ORMTest):
+class ExtendedEventsTest(_ExtBase, fixtures.ORMTest):
+
"""Allow custom Events implementations."""
@modifies_instrumentation_finders
def test_subclassed(self):
class MyEvents(events.InstanceEvents):
pass
+
class MyClassManager(instrumentation.ClassManager):
dispatch = event.dispatcher(MyEvents)
- instrumentation.instrumentation_finders.insert(0, lambda cls: MyClassManager)
+ instrumentation.instrumentation_finders.insert(
+ 0, lambda cls: MyClassManager)
- class A(object): pass
+ class A(object):
+ pass
register_class(A)
manager = instrumentation.manager_of_class(A)
assert issubclass(manager.dispatch._events, MyEvents)
-
diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py
index b895d2fb2..e36b8f7e9 100644
--- a/test/ext/test_hybrid.py
+++ b/test/ext/test_hybrid.py
@@ -7,6 +7,7 @@ from sqlalchemy.testing import eq_, AssertsCompiledSQL, assert_raises_message
from sqlalchemy.testing import fixtures
from sqlalchemy import inspect
+
class PropertyComparatorTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py
index f2d0123bd..1e1a75e7e 100644
--- a/test/ext/test_mutable.py
+++ b/test/ext/test_mutable.py
@@ -4,11 +4,11 @@ from sqlalchemy.orm import mapper, Session, composite
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.instrumentation import ClassManager
from sqlalchemy.testing.schema import Table, Column
-from sqlalchemy.testing import eq_, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing import fixtures
from sqlalchemy.ext.mutable import MutableComposite
-from sqlalchemy.ext.mutable import MutableDict
+from sqlalchemy.ext.mutable import MutableDict, MutableList, MutableSet
class Foo(fixtures.BasicEntity):
@@ -20,6 +20,7 @@ class SubFoo(Foo):
class FooWithEq(object):
+
def __init__(self, **kw):
for k in kw:
setattr(self, k, kw[k])
@@ -32,6 +33,7 @@ class FooWithEq(object):
class Point(MutableComposite):
+
def __init__(self, x, y):
self.x = x
self.y = y
@@ -56,6 +58,7 @@ class Point(MutableComposite):
class MyPoint(Point):
+
@classmethod
def coerce(cls, key, value):
if isinstance(value, tuple):
@@ -63,23 +66,25 @@ class MyPoint(Point):
return value
-class _MutableDictTestBase(object):
- run_define_tables = 'each'
-
+class _MutableDictTestFixture(object):
@classmethod
def _type_fixture(cls):
return MutableDict
- def setup_mappers(cls):
- foo = cls.tables.foo
-
- mapper(Foo, foo)
-
def teardown(self):
# clear out mapper events
Mapper.dispatch._clear()
ClassManager.dispatch._clear()
- super(_MutableDictTestBase, self).teardown()
+ super(_MutableDictTestFixture, self).teardown()
+
+
+class _MutableDictTestBase(_MutableDictTestFixture):
+ run_define_tables = 'each'
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
def test_coerce_none(self):
sess = Session()
@@ -131,6 +136,53 @@ class _MutableDictTestBase(object):
eq_(f1.data, {'a': 'z'})
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data={'a': 'b', 'c': 'd'})
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop('a'), 'b')
+ sess.commit()
+
+ assert_raises(KeyError, f1.data.pop, 'g')
+
+ eq_(f1.data, {'c': 'd'})
+
+ def test_pop_default(self):
+ sess = Session()
+
+ f1 = Foo(data={'a': 'b', 'c': 'd'})
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop('a', 'q'), 'b')
+ eq_(f1.data.pop('a', 'q'), 'q')
+ sess.commit()
+
+ eq_(f1.data, {'c': 'd'})
+
+ def test_popitem(self):
+ sess = Session()
+
+ orig = {'a': 'b', 'c': 'd'}
+
+ # the orig dict remains unchanged when we assign,
+ # but just making this future-proof
+ data = dict(orig)
+ f1 = Foo(data=data)
+ sess.add(f1)
+ sess.commit()
+
+ k, v = f1.data.popitem()
+ assert k in ('a', 'c')
+ orig.pop(k)
+
+ sess.commit()
+
+ eq_(f1.data, orig)
+
def test_setdefault(self):
sess = Session()
@@ -208,24 +260,440 @@ class _MutableDictTestBase(object):
eq_(f1.non_mutable_data, {'a': 'b'})
+
+class _MutableListTestFixture(object):
+ @classmethod
+ def _type_fixture(cls):
+ return MutableList
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch._clear()
+ ClassManager.dispatch._clear()
+ super(_MutableListTestFixture, self).teardown()
+
+
+class _MutableListTestBase(_MutableListTestFixture):
+ run_define_tables = 'each'
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
+
+ def test_coerce_none(self):
+ sess = Session()
+ f1 = Foo(data=None)
+ sess.add(f1)
+ sess.commit()
+ eq_(f1.data, None)
+
+ def test_coerce_raise(self):
+ assert_raises_message(
+ ValueError,
+ "Attribute 'data' does not accept objects of type",
+ Foo, data=set([1, 2, 3])
+ )
+
+ def test_in_place_mutation(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data[0] = 3
+ sess.commit()
+
+ eq_(f1.data, [3, 2])
+
+ def test_in_place_slice_mutation(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3, 4])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data[1:3] = 5, 6
+ sess.commit()
+
+ eq_(f1.data, [1, 5, 6, 4])
+
+ def test_del_slice(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3, 4])
+ sess.add(f1)
+ sess.commit()
+
+ del f1.data[1:3]
+ sess.commit()
+
+ eq_(f1.data, [1, 4])
+
+ def test_clear(self):
+ if not hasattr(list, 'clear'):
+ # py2 list doesn't have 'clear'
+ return
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.clear()
+ sess.commit()
+
+ eq_(f1.data, [])
+
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3])
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop(), 3)
+ eq_(f1.data.pop(0), 1)
+ sess.commit()
+
+ assert_raises(IndexError, f1.data.pop, 5)
+
+ eq_(f1.data, [2])
+
+ def test_append(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.append(5)
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 5])
+
+ def test_extend(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.extend([5])
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 5])
+
+ def test_insert(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.insert(1, 5)
+ sess.commit()
+
+ eq_(f1.data, [1, 5, 2])
+
+ def test_remove(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.remove(2)
+ sess.commit()
+
+ eq_(f1.data, [1, 3])
+
+ def test_sort(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 3, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.sort()
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 3])
+
+ def test_reverse(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 3, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.reverse()
+ sess.commit()
+
+ eq_(f1.data, [2, 3, 1])
+
+ def test_pickle_parent(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+ f1.data
+ sess.close()
+
+ for loads, dumps in picklers():
+ sess = Session()
+ f2 = loads(dumps(f1))
+ sess.add(f2)
+ f2.data[0] = 3
+ assert f2 in sess.dirty
+
+ def test_unrelated_flush(self):
+ sess = Session()
+ f1 = Foo(data=[1, 2], unrelated_data="unrelated")
+ sess.add(f1)
+ sess.flush()
+ f1.unrelated_data = "unrelated 2"
+ sess.flush()
+ f1.data[0] = 3
+ sess.commit()
+ eq_(f1.data[0], 3)
+
+
+class _MutableSetTestFixture(object):
+ @classmethod
+ def _type_fixture(cls):
+ return MutableSet
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch._clear()
+ ClassManager.dispatch._clear()
+ super(_MutableSetTestFixture, self).teardown()
+
+
+class _MutableSetTestBase(_MutableSetTestFixture):
+ run_define_tables = 'each'
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
+
+ def test_coerce_none(self):
+ sess = Session()
+ f1 = Foo(data=None)
+ sess.add(f1)
+ sess.commit()
+ eq_(f1.data, None)
+
+ def test_coerce_raise(self):
+ assert_raises_message(
+ ValueError,
+ "Attribute 'data' does not accept objects of type",
+ Foo, data=[1, 2, 3]
+ )
+
+ def test_clear(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.clear()
+ sess.commit()
+
+ eq_(f1.data, set())
+
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1]))
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop(), 1)
+ sess.commit()
+
+ assert_raises(KeyError, f1.data.pop)
+
+ eq_(f1.data, set())
+
+ def test_add(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.add(5)
+ sess.commit()
+
+ eq_(f1.data, set([1, 2, 5]))
+
+ def test_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1, 2, 5]))
+
+ def test_intersection_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.intersection_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([2]))
+
+ def test_difference_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.difference_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1]))
+
+ def test_symmetric_difference_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.symmetric_difference_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1, 5]))
+
+ def test_remove(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2, 3]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.remove(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ def test_discard(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2, 3]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.discard(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ f1.data.discard(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ def test_pickle_parent(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+ f1.data
+ sess.close()
+
+ for loads, dumps in picklers():
+ sess = Session()
+ f2 = loads(dumps(f1))
+ sess.add(f2)
+ f2.data.add(3)
+ assert f2 in sess.dirty
+
+ def test_unrelated_flush(self):
+ sess = Session()
+ f1 = Foo(data=set([1, 2]), unrelated_data="unrelated")
+ sess.add(f1)
+ sess.flush()
+ f1.unrelated_data = "unrelated 2"
+ sess.flush()
+ f1.data.add(3)
+ sess.commit()
+ eq_(f1.data, set([1, 2, 3]))
+
+
+class MutableColumnDefaultTest(_MutableDictTestFixture, fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ MutableDict = cls._type_fixture()
+
+ mutable_pickle = MutableDict.as_mutable(PickleType)
+ Table(
+ 'foo', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', mutable_pickle, default={}),
+ )
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
+
+ def test_evt_on_flush_refresh(self):
+ # test for #3427
+
+ sess = Session()
+
+ f1 = Foo()
+ sess.add(f1)
+ sess.flush()
+ assert isinstance(f1.data, self._type_fixture())
+ assert f1 not in sess.dirty
+ f1.data['foo'] = 'bar'
+ assert f1 in sess.dirty
+
+
+
class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
MutableDict = cls._type_fixture()
mutable_pickle = MutableDict.as_mutable(PickleType)
Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('skip', mutable_pickle),
- Column('data', mutable_pickle),
- Column('non_mutable_data', PickleType),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', mutable_pickle),
+ Column('data', mutable_pickle),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
def test_non_mutable(self):
self._test_non_mutable()
+
class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
import json
@@ -247,29 +715,68 @@ class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
MutableDict = cls._type_fixture()
Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', MutableDict.as_mutable(JSONEncodedDict)),
- Column('non_mutable_data', JSONEncodedDict),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', MutableDict.as_mutable(JSONEncodedDict)),
+ Column('non_mutable_data', JSONEncodedDict),
+ Column('unrelated_data', String(50))
+ )
def test_non_mutable(self):
self._test_non_mutable()
-class MutableAssocWithAttrInheritTest(_MutableDictTestBase, fixtures.MappedTest):
+
+class MutableListWithScalarPickleTest(_MutableListTestBase, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
+ MutableList = cls._type_fixture()
+ mutable_pickle = MutableList.as_mutable(PickleType)
Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', PickleType),
- Column('non_mutable_data', PickleType),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', mutable_pickle),
+ Column('data', mutable_pickle),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
+
+
+class MutableSetWithScalarPickleTest(_MutableSetTestBase, fixtures.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ MutableSet = cls._type_fixture()
+
+ mutable_pickle = MutableSet.as_mutable(PickleType)
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', mutable_pickle),
+ Column('data', mutable_pickle),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
+
+
+class MutableAssocWithAttrInheritTest(_MutableDictTestBase,
+ fixtures.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', PickleType),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
Table('subfoo', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
+ )
def setup_mappers(cls):
foo = cls.tables.foo
@@ -301,20 +808,27 @@ class MutableAssocWithAttrInheritTest(_MutableDictTestBase, fixtures.MappedTest)
sess.commit()
eq_(f1.data, {'b': 'c'})
-class MutableAssociationScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest):
+
+class MutableAssociationScalarPickleTest(_MutableDictTestBase,
+ fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
MutableDict = cls._type_fixture()
MutableDict.associate_with(PickleType)
Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('skip', PickleType),
- Column('data', PickleType),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', PickleType),
+ Column('data', PickleType),
+ Column('unrelated_data', String(50))
+ )
+
+
+class MutableAssociationScalarJSONTest(_MutableDictTestBase,
+ fixtures.MappedTest):
-class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
import json
@@ -337,21 +851,24 @@ class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest
MutableDict.associate_with(JSONEncodedDict)
Table('foo', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', JSONEncodedDict),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', JSONEncodedDict),
+ Column('unrelated_data', String(50))
+ )
-class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
+class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase,
+ fixtures.MappedTest):
CustomMutableDict = None
@classmethod
def _type_fixture(cls):
if not(getattr(cls, 'CustomMutableDict')):
- MutableDict = super(CustomMutableAssociationScalarJSONTest, cls)._type_fixture()
+ MutableDict = super(
+ CustomMutableAssociationScalarJSONTest, cls)._type_fixture()
+
class CustomMutableDict(MutableDict):
pass
cls.CustomMutableDict = CustomMutableDict
@@ -379,14 +896,15 @@ class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.Mapp
CustomMutableDict.associate_with(JSONEncodedDict)
Table('foo', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', JSONEncodedDict),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', JSONEncodedDict),
+ Column('unrelated_data', String(50))
+ )
def test_pickle_parent(self):
- # Picklers don't know how to pickle CustomMutableDict, but we aren't testing that here
+ # Picklers don't know how to pickle CustomMutableDict,
+ # but we aren't testing that here
pass
def test_coerce(self):
@@ -398,22 +916,22 @@ class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.Mapp
class _CompositeTestBase(object):
+
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('unrelated_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('unrelated_data', String(50))
+ )
def setup(self):
from sqlalchemy.ext import mutable
mutable._setup_composite_listener()
super(_CompositeTestBase, self).setup()
-
def teardown(self):
# clear out mapper events
Mapper.dispatch._clear()
@@ -423,9 +941,46 @@ class _CompositeTestBase(object):
@classmethod
def _type_fixture(cls):
-
return Point
+
+class MutableCompositeColumnDefaultTest(_CompositeTestBase,
+ fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer, default=5),
+ Column('y', Integer, default=9),
+ Column('unrelated_data', String(50))
+ )
+
+ @classmethod
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ cls.Point = cls._type_fixture()
+
+ mapper(Foo, foo, properties={
+ 'data': composite(cls.Point, foo.c.x, foo.c.y)
+ })
+
+ def test_evt_on_flush_refresh(self):
+ # this still worked prior to #3427 being fixed in any case
+
+ sess = Session()
+
+ f1 = Foo(data=self.Point(None, None))
+ sess.add(f1)
+ sess.flush()
+ eq_(f1.data, self.Point(5, 9))
+ assert f1 not in sess.dirty
+ f1.data.x = 10
+ assert f1 in sess.dirty
+
+
class MutableCompositesUnpickleTest(_CompositeTestBase, fixtures.MappedTest):
@classmethod
@@ -443,6 +998,7 @@ class MutableCompositesUnpickleTest(_CompositeTestBase, fixtures.MappedTest):
for loads, dumps in picklers():
loads(dumps(u1))
+
class MutableCompositesTest(_CompositeTestBase, fixtures.MappedTest):
@classmethod
@@ -516,6 +1072,7 @@ class MutableCompositesTest(_CompositeTestBase, fixtures.MappedTest):
eq_(f1.data.x, 5)
+
class MutableCompositeCallableTest(_CompositeTestBase, fixtures.MappedTest):
@classmethod
@@ -542,13 +1099,14 @@ class MutableCompositeCallableTest(_CompositeTestBase, fixtures.MappedTest):
eq_(f1.data.x, 3)
-class MutableCompositeCustomCoerceTest(_CompositeTestBase, fixtures.MappedTest):
+class MutableCompositeCustomCoerceTest(_CompositeTestBase,
+ fixtures.MappedTest):
+
@classmethod
def _type_fixture(cls):
return MyPoint
-
@classmethod
def setup_mappers(cls):
foo = cls.tables.foo
@@ -576,16 +1134,18 @@ class MutableCompositeCustomCoerceTest(_CompositeTestBase, fixtures.MappedTest):
class MutableInheritedCompositesTest(_CompositeTestBase, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('x', Integer),
- Column('y', Integer)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer)
+ )
Table('subfoo', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
@@ -628,4 +1188,3 @@ class MutableInheritedCompositesTest(_CompositeTestBase, fixtures.MappedTest):
sess.add(f2)
f2.data.y = 12
assert f2 in sess.dirty
-
diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py
index 0f6e522d4..4a2b8993e 100644
--- a/test/orm/_fixtures.py
+++ b/test/orm/_fixtures.py
@@ -294,6 +294,7 @@ class FixtureTest(fixtures.MappedTest):
def static(self):
return CannedResults(self)
+
class CannedResults(object):
"""Built on demand, instances use mappers in effect at time of call."""
@@ -329,6 +330,20 @@ class CannedResults(object):
User(id=10, addresses=[])]
@property
+ def address_user_result(self):
+ User, Address = self.test.classes.User, self.test.classes.Address
+ u7 = User(id=7)
+ u8 = User(id=8)
+ u9 = User(id=9)
+ return [
+ Address(id=1, email_address='jack@bean.com', user=u7),
+ Address(id=2, email_address='ed@wood.com', user=u8),
+ Address(id=3, email_address='ed@bettyboop.com', user=u8),
+ Address(id=4, email_address='ed@lala.com', user=u8),
+ Address(id=5, user=u9)
+ ]
+
+ @property
def user_all_result(self):
User, Address, Order, Item = self.test.classes.User, \
self.test.classes.Address, self.test.classes.Order, \
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index d8b2a44af..3717fafa0 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -5,7 +5,7 @@ from sqlalchemy import exc as sa_exc, util, event
from sqlalchemy.orm import *
from sqlalchemy.orm.util import instance_str
from sqlalchemy.orm import exc as orm_exc, attributes
-from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, Or
+from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, RegexSQL, Or
from sqlalchemy.sql import table, column
from sqlalchemy import testing
from sqlalchemy.testing import engines
@@ -1148,6 +1148,298 @@ class FlushTest(fixtures.MappedTest):
sess.flush()
assert user_roles.count().scalar() == 1
+
+class PassiveDeletesTest(fixtures.MappedTest):
+ __requires__ = ('foreign_keys',)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(30))
+ )
+ Table(
+ "b", metadata,
+ Column(
+ 'id', Integer, ForeignKey('a.id', ondelete="CASCADE"),
+ primary_key=True),
+ Column('data', String(10))
+ )
+
+ Table(
+ "c", metadata,
+ Column('cid', Integer, primary_key=True),
+ Column('bid', ForeignKey('b.id', ondelete="CASCADE"))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(A):
+ pass
+
+ class C(B):
+ pass
+
+ def _fixture(self, a_p=False, b_p=False, c_p=False):
+ A, B, C = self.classes("A", "B", "C")
+ a, b, c = self.tables("a", "b", "c")
+
+ mapper(
+ A, a, passive_deletes=a_p,
+ polymorphic_on=a.c.type, polymorphic_identity='a')
+ mapper(
+ B, b, inherits=A, passive_deletes=b_p, polymorphic_identity='b')
+ mapper(
+ C, c, inherits=B, passive_deletes=c_p, polymorphic_identity='c')
+
+ def test_none(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture()
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(B).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ RegexSQL(
+ "SELECT .* "
+ "FROM c WHERE :param_1 = c.bid",
+ [{'param_1': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM c WHERE c.cid = :cid",
+ [{'cid': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_c_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(c_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 2}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(A).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_b_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(b_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ c1.id
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_a_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(a_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(A).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+
+class OptimizedGetOnDeferredTest(fixtures.MappedTest):
+ """test that the 'optimized get' path accommodates deferred columns."""
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ )
+ Table(
+ "b", metadata,
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('data', String(10))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(A):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A, B = cls.classes("A", "B")
+ a, b = cls.tables("a", "b")
+
+ mapper(A, a)
+ mapper(B, b, inherits=A, properties={
+ 'data': deferred(b.c.data),
+ 'expr': column_property(b.c.data + 'q', deferred=True)
+ })
+
+ def test_column_property(self):
+ A, B = self.classes("A", "B")
+ sess = Session()
+ b1 = B(data='x')
+ sess.add(b1)
+ sess.flush()
+
+ eq_(b1.expr, 'xq')
+
+ def test_expired_column(self):
+ A, B = self.classes("A", "B")
+ sess = Session()
+ b1 = B(data='x')
+ sess.add(b1)
+ sess.flush()
+ sess.expire(b1, ['data'])
+
+ eq_(b1.data, 'x')
+
+
class JoinedNoFKSortingTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -2042,7 +2334,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
)
class TransientInheritingGCTest(fixtures.TestBase):
- __requires__ = ('cpython',)
+ __requires__ = ('cpython', 'no_coverage')
def _fixture(self):
Base = declarative_base()
diff --git a/test/orm/inheritance/test_concrete.py b/test/orm/inheritance/test_concrete.py
index 573913f74..2539d4737 100644
--- a/test/orm/inheritance/test_concrete.py
+++ b/test/orm/inheritance/test_concrete.py
@@ -486,6 +486,45 @@ class PropertyInheritanceTest(fixtures.MappedTest):
assert dest1.many_b == [b1, b2]
assert sess.query(B).filter(B.bname == 'b1').one() is b1
+ def test_overlapping_backref_relationship(self):
+ A, B, b_table, a_table, Dest, dest_table = (
+ self.classes.A,
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
+
+ # test issue #3630, no error or warning is generated
+ mapper(A, a_table)
+ mapper(B, b_table, inherits=A, concrete=True)
+ mapper(Dest, dest_table, properties={
+ 'a': relationship(A, backref='dest'),
+ 'a1': relationship(B, backref='dest')
+ })
+ configure_mappers()
+
+ def test_overlapping_forwards_relationship(self):
+ A, B, b_table, a_table, Dest, dest_table = (
+ self.classes.A,
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
+
+ # this is the opposite mapping as that of #3630, never generated
+ # an error / warning
+ mapper(A, a_table, properties={
+ 'dest': relationship(Dest, backref='a')
+ })
+ mapper(B, b_table, inherits=A, concrete=True, properties={
+ 'dest': relationship(Dest, backref='a1')
+ })
+ mapper(Dest, dest_table)
+ configure_mappers()
+
+
def test_polymorphic_backref(self):
"""test multiple backrefs to the same polymorphically-loading
attribute."""
diff --git a/test/orm/inheritance/test_poly_persistence.py b/test/orm/inheritance/test_poly_persistence.py
index c6a54c0b5..361377de8 100644
--- a/test/orm/inheritance/test_poly_persistence.py
+++ b/test/orm/inheritance/test_poly_persistence.py
@@ -1,6 +1,6 @@
"""tests basic polymorphic mapper loading/saving, minimal relationships"""
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.orm import exc as orm_exc
@@ -271,18 +271,30 @@ def _generate_round_trip_test(include_base, lazy_relationship,
# into the "person_join" conversion.
palias = people.alias("palias")
dilbert = session.query(Person).get(dilbert.person_id)
- assert dilbert is session.query(Person).filter(
- (palias.c.name=='dilbert') & \
- (palias.c.person_id==Person.person_id)).first()
- assert dilbert is session.query(Engineer).filter(
- (palias.c.name=='dilbert') & \
- (palias.c.person_id==Person.person_id)).first()
- assert dilbert is session.query(Person).filter(
- (Engineer.engineer_name=="engineer1") & \
- (engineers.c.person_id==people.c.person_id)
- ).first()
- assert dilbert is session.query(Engineer).\
- filter(Engineer.engineer_name=="engineer1")[0]
+ is_(
+ dilbert,
+ session.query(Person).filter(
+ (palias.c.name == 'dilbert') &
+ (palias.c.person_id == Person.person_id)).first()
+ )
+ is_(
+ dilbert,
+ session.query(Engineer).filter(
+ (palias.c.name == 'dilbert') &
+ (palias.c.person_id == Person.person_id)).first()
+ )
+ is_(
+ dilbert,
+ session.query(Person).filter(
+ (Engineer.engineer_name == "engineer1") &
+ (engineers.c.person_id == people.c.person_id)
+ ).first()
+ )
+ is_(
+ dilbert,
+ session.query(Engineer).
+ filter(Engineer.engineer_name == "engineer1")[0]
+ )
session.flush()
session.expunge_all()
diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py
index 29fbcff85..c82c30d59 100644
--- a/test/orm/inheritance/test_polymorphic_rel.py
+++ b/test/orm/inheritance/test_polymorphic_rel.py
@@ -1,7 +1,7 @@
from sqlalchemy import func, desc
from sqlalchemy.orm import interfaces, create_session, joinedload, joinedload_all, \
subqueryload, subqueryload_all, aliased,\
- class_mapper
+ class_mapper, with_polymorphic
from sqlalchemy import exc as sa_exc
from sqlalchemy import testing
@@ -1250,6 +1250,44 @@ class _PolymorphicTestBase(object):
assert row.name == 'dilbert'
assert row.primary_language == 'java'
+ def test_correlation_one(self):
+ sess = create_session()
+
+ # unfortunately this pattern can't yet work for PolymorphicAliased
+ # and PolymorphicUnions, because the subquery does not compile
+ # out including the polymorphic selectable; only if Person is in
+ # the query() list does that happen.
+ eq_(sess.query(Person.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == Person.company_id).
+ correlate(Person).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_two(self):
+ sess = create_session()
+
+ paliased = aliased(Person)
+
+ eq_(sess.query(paliased.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == paliased.company_id).
+ correlate(paliased).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_three(self):
+ sess = create_session()
+
+ paliased = aliased(Person, flat=True)
+
+ eq_(sess.query(paliased.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == paliased.company_id).
+ correlate(paliased).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
class PolymorphicTest(_PolymorphicTestBase, _Polymorphic):
def test_join_to_subclass_four(self):
sess = create_session()
@@ -1266,6 +1304,31 @@ class PolymorphicTest(_PolymorphicTestBase, _Polymorphic):
.filter(Machine.name.ilike("%ibm%")).all(),
[e1, e3])
+ def test_correlation_w_polymorphic(self):
+
+ sess = create_session()
+
+ p_poly = with_polymorphic(Person, '*')
+
+ eq_(sess.query(p_poly.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == p_poly.company_id).
+ correlate(p_poly).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_w_polymorphic_flat(self):
+
+ sess = create_session()
+
+ p_poly = with_polymorphic(Person, '*', flat=True)
+
+ eq_(sess.query(p_poly.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == p_poly.company_id).
+ correlate(p_poly).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
def test_join_to_subclass_ten(self):
pass
@@ -1377,10 +1440,16 @@ class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
class PolymorphicUnionsTest(_PolymorphicTestBase, _PolymorphicUnions):
- pass
+
+ @testing.fails()
+ def test_correlation_one(self):
+ super(PolymorphicUnionsTest, self).test_correlation_one()
+
class PolymorphicAliasedJoinsTest(_PolymorphicTestBase, _PolymorphicAliasedJoins):
- pass
+ @testing.fails()
+ def test_correlation_one(self):
+ super(PolymorphicAliasedJoinsTest, self).test_correlation_one()
class PolymorphicJoinsTest(_PolymorphicTestBase, _PolymorphicJoins):
pass
diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py
index 4889ca59b..01167b23e 100644
--- a/test/orm/inheritance/test_relationship.py
+++ b/test/orm/inheritance/test_relationship.py
@@ -1,6 +1,6 @@
from sqlalchemy.orm import create_session, relationship, mapper, \
contains_eager, joinedload, subqueryload, subqueryload_all,\
- Session, aliased, with_polymorphic
+ Session, aliased, with_polymorphic, joinedload_all
from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.engine import default
@@ -571,20 +571,20 @@ class SelfReferentialM2MTest(fixtures.MappedTest, AssertsCompiledSQL):
# test that the splicing of the join works here, doesn't break in
# the middle of "parent join child1"
q = sess.query(Child1).options(joinedload('left_child2'))
- self.assert_compile(q.limit(1).with_labels().statement,
- "SELECT anon_1.child1_id AS anon_1_child1_id, anon_1.parent_id "
- "AS anon_1_parent_id, anon_1.parent_cls AS anon_1_parent_cls, "
- "child2_1.id AS child2_1_id, parent_1.id AS "
- "parent_1_id, parent_1.cls AS parent_1_cls FROM "
- "(SELECT child1.id AS child1_id, parent.id AS parent_id, "
- "parent.cls AS parent_cls "
+ self.assert_compile(
+ q.limit(1).with_labels().statement,
+ "SELECT child1.id AS child1_id, parent.id AS parent_id, "
+ "parent.cls AS parent_cls, child2_1.id AS child2_1_id, "
+ "parent_1.id AS parent_1_id, parent_1.cls AS parent_1_cls "
"FROM parent JOIN child1 ON parent.id = child1.id "
- "LIMIT :param_1) AS anon_1 LEFT OUTER JOIN "
- "(secondary AS secondary_1 JOIN "
+ "LEFT OUTER JOIN (secondary AS secondary_1 JOIN "
"(parent AS parent_1 JOIN child2 AS child2_1 "
- "ON parent_1.id = child2_1.id) ON parent_1.id = secondary_1.left_id) "
- "ON anon_1.parent_id = secondary_1.right_id",
- {'param_1':1})
+ "ON parent_1.id = child2_1.id) "
+ "ON parent_1.id = secondary_1.left_id) "
+ "ON parent.id = secondary_1.right_id "
+ "LIMIT :param_1",
+ checkparams={'param_1': 1}
+ )
# another way to check
assert q.limit(1).with_labels().subquery().count().scalar() == 1
@@ -1360,6 +1360,276 @@ class SubClassToSubClassMultiTest(AssertsCompiledSQL, fixtures.MappedTest):
"JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
)
+
+class JoinedloadSinglePolysubSingle(
+ fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
+ """exercise issue #3611, using the test from dupe issue 3614"""
+
+ run_define_tables = None
+ __dialect__ = 'default'
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class User(Base):
+ __tablename__ = 'users'
+ id = Column(Integer, primary_key=True)
+
+ class UserRole(Base):
+ __tablename__ = 'user_roles'
+
+ id = Column(Integer, primary_key=True)
+
+ row_type = Column(String(50), nullable=False)
+ __mapper_args__ = {'polymorphic_on': row_type}
+
+ user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
+ user = relationship('User', lazy=False)
+
+ class Admin(UserRole):
+ __tablename__ = 'admins'
+ __mapper_args__ = {'polymorphic_identity': 'admin'}
+
+ id = Column(Integer, ForeignKey('user_roles.id'), primary_key=True)
+
+ class Thing(Base):
+ __tablename__ = 'things'
+
+ id = Column(Integer, primary_key=True)
+
+ admin_id = Column(Integer, ForeignKey('admins.id'))
+ admin = relationship('Admin', lazy=False)
+
+ def test_query(self):
+ Thing = self.classes.Thing
+ sess = Session()
+ self.assert_compile(
+ sess.query(Thing),
+ "SELECT things.id AS things_id, "
+ "things.admin_id AS things_admin_id, "
+ "users_1.id AS users_1_id, admins_1.id AS admins_1_id, "
+ "user_roles_1.id AS user_roles_1_id, "
+ "user_roles_1.row_type AS user_roles_1_row_type, "
+ "user_roles_1.user_id AS user_roles_1_user_id FROM things "
+ "LEFT OUTER JOIN (user_roles AS user_roles_1 JOIN admins "
+ "AS admins_1 ON user_roles_1.id = admins_1.id) ON "
+ "admins_1.id = things.admin_id "
+ "LEFT OUTER JOIN users AS "
+ "users_1 ON users_1.id = user_roles_1.user_id"
+ )
+
+
+class JoinedloadOverWPolyAliased(
+ fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
+ """exercise issues in #3593 and #3611"""
+
+ run_setup_mappers = 'each'
+ run_setup_classes = 'each'
+ run_define_tables = 'each'
+ __dialect__ = 'default'
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Owner(Base):
+ __tablename__ = 'owner'
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'with_polymorphic': ('*', None),
+ }
+
+ class SubOwner(Owner):
+ __mapper_args__ = {'polymorphic_identity': 'so'}
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'with_polymorphic': ('*', None),
+ }
+
+ class Sub1(Parent):
+ __mapper_args__ = {'polymorphic_identity': 's1'}
+
+ class Link(Base):
+ __tablename__ = 'link'
+
+ parent_id = Column(
+ Integer, ForeignKey('parent.id'), primary_key=True)
+ child_id = Column(
+ Integer, ForeignKey('parent.id'), primary_key=True)
+
+ def _fixture_from_base(self):
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Parent.links = relationship(
+ Link,
+ primaryjoin=Parent.id == Link.parent_id,
+ )
+ return Parent
+
+ def _fixture_from_subclass(self):
+ Sub1 = self.classes.Sub1
+ Link = self.classes.Link
+ Parent = self.classes.Parent
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Sub1.links = relationship(
+ Link,
+ primaryjoin=Sub1.id == Link.parent_id,
+ )
+ return Sub1
+
+ def _fixture_to_subclass_to_base(self):
+ Owner = self.classes.Owner
+ Parent = self.classes.Parent
+ Sub1 = self.classes.Sub1
+ Link = self.classes.Link
+
+ # Link -> Sub1 -> Owner
+
+ Link.child = relationship(
+ Sub1, primaryjoin=Link.child_id == Sub1.id)
+
+ Parent.owner_id = Column(ForeignKey('owner.id'))
+
+ Parent.owner = relationship(Owner)
+ return Parent
+
+ def _fixture_to_base_to_base(self):
+ Owner = self.classes.Owner
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+
+ # Link -> Parent -> Owner
+
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Parent.owner_id = Column(ForeignKey('owner.id'))
+
+ Parent.owner = relationship(Owner)
+ return Parent
+
+ def test_from_base(self):
+ self._test_poly_single_poly(self._fixture_from_base)
+
+ def test_from_sub(self):
+ self._test_poly_single_poly(self._fixture_from_subclass)
+
+ def test_to_sub_to_base(self):
+ self._test_single_poly_poly(self._fixture_to_subclass_to_base)
+
+ def test_to_base_to_base(self):
+ self._test_single_poly_poly(self._fixture_to_base_to_base)
+
+ def _test_poly_single_poly(self, fn):
+ cls = fn()
+ Link = self.classes.Link
+
+ session = Session()
+ q = session.query(cls).options(
+ joinedload_all(
+ cls.links,
+ Link.child,
+ cls.links
+ )
+ )
+ if cls is self.classes.Sub1:
+ extra = " WHERE parent.type IN (:type_1)"
+ else:
+ extra = ""
+
+ self.assert_compile(
+ q,
+ "SELECT parent.id AS parent_id, parent.type AS parent_type, "
+ "link_1.parent_id AS link_1_parent_id, "
+ "link_1.child_id AS link_1_child_id, "
+ "parent_1.id AS parent_1_id, parent_1.type AS parent_1_type, "
+ "link_2.parent_id AS link_2_parent_id, "
+ "link_2.child_id AS link_2_child_id "
+ "FROM parent "
+ "LEFT OUTER JOIN link AS link_1 ON parent.id = link_1.parent_id "
+ "LEFT OUTER JOIN parent "
+ "AS parent_1 ON link_1.child_id = parent_1.id "
+ "LEFT OUTER JOIN link AS link_2 "
+ "ON parent_1.id = link_2.parent_id" + extra
+ )
+
+ def _test_single_poly_poly(self, fn):
+ parent_cls = fn()
+ Link = self.classes.Link
+
+ session = Session()
+ q = session.query(Link).options(
+ joinedload_all(
+ Link.child,
+ parent_cls.owner
+ )
+ )
+
+ if Link.child.property.mapper.class_ is self.classes.Sub1:
+ extra = "AND parent_1.type IN (:type_1) "
+ else:
+ extra = ""
+
+ self.assert_compile(
+ q,
+ "SELECT link.parent_id AS link_parent_id, "
+ "link.child_id AS link_child_id, parent_1.id AS parent_1_id, "
+ "parent_1.type AS parent_1_type, "
+ "parent_1.owner_id AS parent_1_owner_id, "
+ "owner_1.id AS owner_1_id, owner_1.type AS owner_1_type "
+ "FROM link LEFT OUTER JOIN parent AS parent_1 "
+ "ON link.child_id = parent_1.id " + extra +
+ "LEFT OUTER JOIN owner AS owner_1 "
+ "ON owner_1.id = parent_1.owner_id"
+ )
+
+ def test_local_wpoly(self):
+ Sub1 = self._fixture_from_subclass()
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+
+ poly = with_polymorphic(Parent, [Sub1])
+
+ session = Session()
+ q = session.query(poly).options(
+ joinedload(poly.Sub1.links).
+ joinedload(Link.child.of_type(Sub1)).
+ joinedload(poly.Sub1.links)
+ )
+ self.assert_compile(
+ q,
+ "SELECT parent.id AS parent_id, parent.type AS parent_type, "
+ "link_1.parent_id AS link_1_parent_id, "
+ "link_1.child_id AS link_1_child_id, "
+ "parent_1.id AS parent_1_id, parent_1.type AS parent_1_type, "
+ "link_2.parent_id AS link_2_parent_id, "
+ "link_2.child_id AS link_2_child_id FROM parent "
+ "LEFT OUTER JOIN link AS link_1 ON parent.id = link_1.parent_id "
+ "LEFT OUTER JOIN parent AS parent_1 "
+ "ON link_1.child_id = parent_1.id "
+ "LEFT OUTER JOIN link AS link_2 ON parent_1.id = link_2.parent_id"
+ )
+
+
class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
"""test long join paths with a joined-inh in the middle, where we go multiple
@@ -1564,12 +1834,9 @@ class MultipleAdaptUsesEntityOverTableTest(AssertsCompiledSQL, fixtures.MappedTe
bname, cname, dname = q._entities
- b_name_adapted = bname._resolve_expr_against_query_aliases(
- q, bname.column, None)
- c_name_adapted = cname._resolve_expr_against_query_aliases(
- q, cname.column, None)
- d_name_adapted = dname._resolve_expr_against_query_aliases(
- q, dname.column, None)
+ b_name_adapted = q._adapt_clause(bname.column, False, True)
+ c_name_adapted = q._adapt_clause(cname.column, False, True)
+ d_name_adapted = q._adapt_clause(dname.column, False, True)
assert bool(b_name_adapted == a.c.name)
assert bool(c_name_adapted == ac_adapted.c.name)
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index dbbe4c435..0d102c065 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -9,6 +9,8 @@ from sqlalchemy.testing.schema import Table, Column
class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
+ __dialect__ = 'default'
+
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
@@ -208,6 +210,19 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
eq_(sess.query(Manager).filter(Manager.name.like('%m%')).count(), 2)
eq_(sess.query(Employee).filter(Employee.name.like('%m%')).count(), 3)
+ def test_exists_standalone(self):
+ Engineer = self.classes.Engineer
+
+ sess = create_session()
+
+ self.assert_compile(
+ sess.query(
+ sess.query(Engineer).filter(Engineer.name == 'foo').exists()),
+ "SELECT EXISTS (SELECT 1 FROM employees WHERE "
+ "employees.name = :name_1 AND employees.type "
+ "IN (:type_1, :type_2)) AS anon_1"
+ )
+
def test_type_filtering(self):
Employee, Manager, reports, Engineer = (self.classes.Employee,
self.classes.Manager,
@@ -410,6 +425,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
"AND employees_1.type IN (:type_1)"
)
+ def test_join_explicit_onclause_no_discriminator(self):
+ # test issue #3462
+ Company, Employee, Engineer = (
+ self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee)
+ })
+ mapper(Employee, employees)
+ mapper(Engineer, inherits=Employee)
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer.name).join(
+ Engineer, Company.company_id == Engineer.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.name AS employees_name "
+ "FROM companies JOIN "
+ "employees ON companies.company_id = employees.company_id"
+ )
+
def test_outer_join_prop(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index b22fff1a9..80d8cdc04 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -123,7 +123,7 @@ class AttributeImplAPITest(fixtures.MappedTest):
assert_raises_message(
ValueError,
- r"list.remove\(x\): x not in list",
+ r"list.remove\(.*?\): .* not in list",
A.b.impl.remove,
attributes.instance_state(a1),
attributes.instance_dict(a1), b2, None
@@ -1524,6 +1524,13 @@ class HistoryTest(fixtures.TestBase):
f.someattr = 3
eq_(self._someattr_committed_state(f), None)
+ def test_committed_value_set_active_hist(self):
+ Foo = self._fixture(uselist=False, useobject=False,
+ active_history=True)
+ f = Foo()
+ f.someattr = 3
+ eq_(self._someattr_committed_state(f), None)
+
def test_committed_value_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
active_history=False)
diff --git a/test/orm/test_bulk.py b/test/orm/test_bulk.py
index e27d3b73c..878560cf6 100644
--- a/test/orm/test_bulk.py
+++ b/test/orm/test_bulk.py
@@ -2,7 +2,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey, FetchedValue
from sqlalchemy.orm import mapper, Session
from sqlalchemy.testing.assertsql import CompiledSQL
from test.orm import _fixtures
@@ -96,14 +96,118 @@ class BulkInsertUpdateTest(BulkTest, _fixtures.FixtureTest):
asserter.assert_(
CompiledSQL(
- "UPDATE users SET id=:id, name=:name WHERE "
+ "UPDATE users SET name=:name WHERE "
"users.id = :users_id",
- [{'users_id': 1, 'id': 1, 'name': 'u1new'},
- {'users_id': 2, 'id': 2, 'name': 'u2'},
- {'users_id': 3, 'id': 3, 'name': 'u3new'}]
+ [{'users_id': 1, 'name': 'u1new'},
+ {'users_id': 2, 'name': 'u2'},
+ {'users_id': 3, 'name': 'u3new'}]
)
)
+ def test_bulk_update(self):
+ User, = self.classes("User",)
+
+ s = Session(expire_on_commit=False)
+ objects = [
+ User(name="u1"),
+ User(name="u2"),
+ User(name="u3")
+ ]
+ s.add_all(objects)
+ s.commit()
+
+ s = Session()
+ with self.sql_execution_asserter() as asserter:
+ s.bulk_update_mappings(
+ User,
+ [{'id': 1, 'name': 'u1new'},
+ {'id': 2, 'name': 'u2'},
+ {'id': 3, 'name': 'u3new'}]
+ )
+
+ asserter.assert_(
+ CompiledSQL(
+ "UPDATE users SET name=:name WHERE users.id = :users_id",
+ [{'users_id': 1, 'name': 'u1new'},
+ {'users_id': 2, 'name': 'u2'},
+ {'users_id': 3, 'name': 'u3new'}]
+ )
+ )
+
+ def test_bulk_insert(self):
+ User, = self.classes("User",)
+
+ s = Session()
+ with self.sql_execution_asserter() as asserter:
+ s.bulk_insert_mappings(
+ User,
+ [{'id': 1, 'name': 'u1new'},
+ {'id': 2, 'name': 'u2'},
+ {'id': 3, 'name': 'u3new'}]
+ )
+
+ asserter.assert_(
+ CompiledSQL(
+ "INSERT INTO users (id, name) VALUES (:id, :name)",
+ [{'id': 1, 'name': 'u1new'},
+ {'id': 2, 'name': 'u2'},
+ {'id': 3, 'name': 'u3new'}]
+ )
+ )
+
+
+class BulkUDPostfetchTest(BulkTest, fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer,
+ server_default=FetchedValue(),
+ server_onupdate=FetchedValue()))
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A = cls.classes.A
+ a = cls.tables.a
+
+ mapper(A, a)
+
+ def test_insert_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1)
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ def test_update_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1, y=2)
+ s.add(a1)
+ s.commit()
+
+ eq_(a1.id, 1) # force a load
+ a1.x = 5
+ s.expire(a1, ['y'])
+ assert 'y' not in a1.__dict__
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ eq_(a1.x, 5)
+ eq_(a1.y, 2)
+
class BulkInheritanceTest(BulkTest, fixtures.MappedTest):
@classmethod
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index e39911d0f..f104ee34c 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -2,7 +2,7 @@ import copy
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import Integer, String, ForeignKey, Sequence, \
- exc as sa_exc
+ exc as sa_exc, util
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, \
sessionmaker, class_mapper, backref, Session, util as orm_util,\
@@ -119,6 +119,14 @@ class CascadeArgTest(fixtures.MappedTest):
'refresh-expire', 'save-update'])
)
+ def test_cascade_unicode(self):
+ User, Address = self.classes.User, self.classes.Address
+ users, addresses = self.tables.users, self.tables.addresses
+
+ rel = relationship(Address)
+ rel.cascade = util.u('save-update, merge, expunge')
+ eq_(rel.cascade, set(['save-update', 'merge', 'expunge']))
+
class O2MCascadeDeleteOrphanTest(fixtures.MappedTest):
run_inserts = None
diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py
index 8b777dcdf..48027ec2d 100644
--- a/test/orm/test_composites.py
+++ b/test/orm/test_composites.py
@@ -313,8 +313,7 @@ class PrimaryKeyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('graphs', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True),
Column('version_id', Integer, primary_key=True,
nullable=True),
Column('name', String(30)))
diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py
index c95b8d152..b5c1b6467 100644
--- a/test/orm/test_cycles.py
+++ b/test/orm/test_cycles.py
@@ -10,7 +10,7 @@ from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, sessionmaker
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
from sqlalchemy.testing.assertsql import RegexSQL, CompiledSQL, AllOf
from sqlalchemy.testing import fixtures
@@ -816,6 +816,39 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
{'id': b4.id}])
)
+ def test_post_update_m2o_detect_none(self):
+ person, ball, Ball, Person = (
+ self.tables.person,
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
+
+ mapper(Ball, ball, properties={
+ 'person': relationship(
+ Person, post_update=True,
+ primaryjoin=person.c.id == ball.c.person_id)
+ })
+ mapper(Person, person)
+
+ sess = create_session(autocommit=False, expire_on_commit=True)
+ sess.add(Ball(person=Person()))
+ sess.commit()
+ b1 = sess.query(Ball).first()
+
+ # needs to be unloaded
+ assert 'person' not in b1.__dict__
+ b1.person = None
+
+ self.assert_sql_execution(
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "UPDATE ball SET person_id=:person_id WHERE ball.id = :ball_id",
+ lambda ctx: {'person_id': None, 'ball_id': b1.id})
+ )
+
+ is_(b1.person, None)
+
class SelfReferentialPostUpdateTest(fixtures.MappedTest):
"""Post_update on a single self-referential mapper.
@@ -1181,9 +1214,10 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
testing.db,
sess.flush,
CompiledSQL(
- "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
- "c3_id=:c3_id WHERE parent.id = :parent_id",
- lambda ctx: {'c2_id': c23.id, 'parent_id': p1.id, 'c1_id': c12.id, 'c3_id': c31.id}
+ "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, c3_id=:c3_id "
+ "WHERE parent.id = :parent_id",
+ lambda ctx: {'c2_id': c23.id, 'parent_id': p1.id,
+ 'c1_id': c12.id, 'c3_id': c31.id}
)
)
@@ -1193,8 +1227,9 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
testing.db,
sess.flush,
CompiledSQL(
- "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
- "c3_id=:c3_id WHERE parent.id = :parent_id",
- lambda ctx: {'c2_id': None, 'parent_id': p1.id, 'c1_id': None, 'c3_id': None}
+ "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, c3_id=:c3_id "
+ "WHERE parent.id = :parent_id",
+ lambda ctx: {'c2_id': None, 'parent_id': p1.id,
+ 'c1_id': None, 'c3_id': None}
)
)
diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py
index 1b777b527..7f449c40a 100644
--- a/test/orm/test_deferred.py
+++ b/test/orm/test_deferred.py
@@ -320,6 +320,64 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
"FROM orders ORDER BY orders.id",
{})])
+ def test_undefer_group_multi(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='secondary'))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.options(
+ undefer_group('primary'), undefer_group('secondary')).all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
+ def test_undefer_group_multi_pathed(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='secondary'))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.options(
+ Load(Order).undefer_group('primary').undefer_group('secondary')).all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
def test_undefer_star(self):
orders, Order = self.tables.orders, self.classes.Order
@@ -341,7 +399,8 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
)
def test_locates_col(self):
- """Manually adding a column to the result undefers the column."""
+ """changed in 1.0 - we don't search for deferred cols in the result
+ now. """
orders, Order = self.tables.orders, self.classes.Order
@@ -350,18 +409,40 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
'description': deferred(orders.c.description)})
sess = create_session()
- o1 = sess.query(Order).order_by(Order.id).first()
+ o1 = (sess.query(Order).
+ order_by(Order.id).
+ add_column(orders.c.description).first())[0]
def go():
eq_(o1.description, 'order 1')
+ # prior to 1.0 we'd search in the result for this column
+ # self.sql_count_(0, go)
self.sql_count_(1, go)
+ def test_locates_col_rowproc_only(self):
+ """changed in 1.0 - we don't search for deferred cols in the result
+ now.
+
+ Because the loading for ORM Query and Query from a core select
+ is now split off, we test loading from a plain select()
+ separately.
+
+ """
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+
+ mapper(Order, orders, properties={
+ 'description': deferred(orders.c.description)})
+
sess = create_session()
+ stmt = sa.select([Order]).order_by(Order.id)
o1 = (sess.query(Order).
- order_by(Order.id).
- add_column(orders.c.description).first())[0]
+ from_statement(stmt).all())[0]
def go():
eq_(o1.description, 'order 1')
- self.sql_count_(0, go)
+ # prior to 1.0 we'd search in the result for this column
+ # self.sql_count_(0, go)
+ self.sql_count_(1, go)
def test_deep_options(self):
users, items, order_items, Order, Item, User, orders = (self.tables.users,
diff --git a/test/orm/test_descriptor.py b/test/orm/test_descriptor.py
index 2134d87b2..d9aca30e5 100644
--- a/test/orm/test_descriptor.py
+++ b/test/orm/test_descriptor.py
@@ -125,3 +125,4 @@ class DescriptorInstrumentationTest(fixtures.ORMTest):
str(aliased(Foo).foo == 'ed'),
"foobar(foo_1.name) = foobar(:foobar_1)"
)
+
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 4c6d9bbe1..3ad641b8f 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -1,11 +1,11 @@
"""tests of joined-eager loaded attributes"""
-from sqlalchemy.testing import eq_, is_, is_not_
+from sqlalchemy.testing import eq_, is_, is_not_, in_
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import joinedload, deferred, undefer, \
joinedload_all, backref, Session,\
- defaultload, Load
+ defaultload, Load, load_only, contains_eager
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \
func, text
from sqlalchemy.testing.schema import Table, Column
@@ -294,20 +294,21 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
sess.expunge_all()
a = sess.query(Address).filter(Address.id == 1).all()[0]
+ # 1.0 change! we don't automatically undefer user_id here.
+ # if the user wants a column undeferred, add the option.
def go():
eq_(a.user_id, 7)
- # assert that the eager loader added 'user_id' to the row and deferred
- # loading of that col was disabled
- self.assert_sql_count(testing.db, go, 0)
+ # self.assert_sql_count(testing.db, go, 0)
+ self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
a = sess.query(Address).filter(Address.id == 1).first()
def go():
eq_(a.user_id, 7)
- # assert that the eager loader added 'user_id' to the row and deferred
- # loading of that col was disabled
- self.assert_sql_count(testing.db, go, 0)
+ # same, 1.0 doesn't check these
+ # self.assert_sql_count(testing.db, go, 0)
+ self.assert_sql_count(testing.db, go, 1)
# do the mapping in reverse
# (we would have just used an "addresses" backref but the test
@@ -1072,6 +1073,32 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_([User(id=7, address=Address(id=1))], l)
self.assert_sql_count(testing.db, go, 1)
+ def test_one_to_many_scalar_subq_wrapping(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ mapper(User, users, properties=dict(
+ address=relationship(mapper(Address, addresses),
+ lazy='joined', uselist=False)
+ ))
+ q = create_session().query(User)
+ q = q.filter(users.c.id == 7).limit(1)
+
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM users LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON users.id = addresses_1.user_id "
+ "WHERE users.id = :id_1 "
+ "LIMIT :param_1",
+ checkparams={'id_1': 7, 'param_1': 1}
+ )
+
def test_many_to_one(self):
users, Address, addresses, User = (
self.tables.users,
@@ -1672,6 +1699,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
"ON users.id = addresses_1.user_id"
)
+
def test_catch_the_right_target(self):
# test eager join chaining to the "nested" join on the left,
# a new feature as of [ticket:2369]
@@ -1979,11 +2007,442 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
])
+class InnerJoinSplicingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+ __backend__ = True # exercise hardcore join nesting on backends
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('a', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+
+ Table('b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('a_id', Integer, ForeignKey('a.id')),
+ Column('value', String(10)),
+ )
+ Table('c1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('b_id', Integer, ForeignKey('b.id')),
+ Column('value', String(10)),
+ )
+ Table('c2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('b_id', Integer, ForeignKey('b.id')),
+ Column('value', String(10)),
+ )
+ Table('d1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('c1_id', Integer, ForeignKey('c1.id')),
+ Column('value', String(10)),
+ )
+ Table('d2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('c2_id', Integer, ForeignKey('c2.id')),
+ Column('value', String(10)),
+ )
+ Table('e1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('d1_id', Integer, ForeignKey('d1.id')),
+ Column('value', String(10)),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+
+ class A(cls.Comparable):
+ pass
+
+ class B(cls.Comparable):
+ pass
+
+ class C1(cls.Comparable):
+ pass
+
+ class C2(cls.Comparable):
+ pass
+
+ class D1(cls.Comparable):
+ pass
+
+ class D2(cls.Comparable):
+ pass
+
+ class E1(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A, B, C1, C2, D1, D2, E1 = (
+ cls.classes.A, cls.classes.B, cls.classes.C1,
+ cls.classes.C2, cls.classes.D1, cls.classes.D2, cls.classes.E1)
+ mapper(A, cls.tables.a, properties={
+ 'bs': relationship(B)
+ })
+ mapper(B, cls.tables.b, properties=odict([
+ ('c1s', relationship(C1, order_by=cls.tables.c1.c.id)),
+ ('c2s', relationship(C2, order_by=cls.tables.c2.c.id))
+ ]))
+ mapper(C1, cls.tables.c1, properties={
+ 'd1s': relationship(D1, order_by=cls.tables.d1.c.id)
+ })
+ mapper(C2, cls.tables.c2, properties={
+ 'd2s': relationship(D2, order_by=cls.tables.d2.c.id)
+ })
+ mapper(D1, cls.tables.d1, properties={
+ 'e1s': relationship(E1, order_by=cls.tables.e1.c.id)
+ })
+ mapper(D2, cls.tables.d2)
+ mapper(E1, cls.tables.e1)
+
+ @classmethod
+ def _fixture_data(cls):
+ A, B, C1, C2, D1, D2, E1 = (
+ cls.classes.A, cls.classes.B, cls.classes.C1,
+ cls.classes.C2, cls.classes.D1, cls.classes.D2, cls.classes.E1)
+ return [
+ A(id=1, bs=[
+ B(
+ id=1,
+ c1s=[C1(
+ id=1, value='C11',
+ d1s=[
+ D1(id=1, e1s=[E1(id=1)]), D1(id=2, e1s=[E1(id=2)])
+ ]
+ )
+ ],
+ c2s=[C2(id=1, value='C21', d2s=[D2(id=3)]),
+ C2(id=2, value='C22', d2s=[D2(id=4)])]
+ ),
+ B(
+ id=2,
+ c1s=[
+ C1(
+ id=4, value='C14',
+ d1s=[D1(
+ id=3, e1s=[
+ E1(id=3, value='E13'),
+ E1(id=4, value="E14")
+ ]),
+ D1(id=4, e1s=[E1(id=5)])
+ ]
+ )
+ ],
+ c2s=[C2(id=4, value='C24', d2s=[])]
+ ),
+ ]),
+ A(id=2, bs=[
+ B(
+ id=3,
+ c1s=[
+ C1(
+ id=8,
+ d1s=[D1(id=5, value='D15', e1s=[E1(id=6)])]
+ )
+ ],
+ c2s=[C2(id=8, d2s=[D2(id=6, value='D26')])]
+ )
+ ])
+ ]
+
+ @classmethod
+ def insert_data(cls):
+ s = Session(testing.db)
+ s.add_all(cls._fixture_data())
+ s.commit()
+
+ def _assert_result(self, query):
+ eq_(
+ query.all(),
+ self._fixture_data()
+ )
+
+ def test_nested_innerjoin_propagation_multiple_paths_one(self):
+ A, B, C1, C2 = (
+ self.classes.A, self.classes.B, self.classes.C1,
+ self.classes.C2)
+
+ s = Session()
+
+ q = s.query(A).options(
+ joinedload(A.bs, innerjoin=False).
+ joinedload(B.c1s, innerjoin=True).
+ joinedload(C1.d1s, innerjoin=True),
+ defaultload(A.bs).joinedload(B.c2s, innerjoin=True).
+ joinedload(C2.d2s, innerjoin=False)
+ )
+ self.assert_compile(
+ q,
+ "SELECT a.id AS a_id, d1_1.id AS d1_1_id, "
+ "d1_1.c1_id AS d1_1_c1_id, d1_1.value AS d1_1_value, "
+ "c1_1.id AS c1_1_id, c1_1.b_id AS c1_1_b_id, "
+ "c1_1.value AS c1_1_value, d2_1.id AS d2_1_id, "
+ "d2_1.c2_id AS d2_1_c2_id, d2_1.value AS d2_1_value, "
+ "c2_1.id AS c2_1_id, c2_1.b_id AS c2_1_b_id, "
+ "c2_1.value AS c2_1_value, b_1.id AS b_1_id, "
+ "b_1.a_id AS b_1_a_id, b_1.value AS b_1_value "
+ "FROM a "
+ "LEFT OUTER JOIN "
+ "(b AS b_1 JOIN c2 AS c2_1 ON b_1.id = c2_1.b_id "
+ "JOIN c1 AS c1_1 ON b_1.id = c1_1.b_id "
+ "JOIN d1 AS d1_1 ON c1_1.id = d1_1.c1_id) ON a.id = b_1.a_id "
+ "LEFT OUTER JOIN d2 AS d2_1 ON c2_1.id = d2_1.c2_id "
+ "ORDER BY c1_1.id, d1_1.id, c2_1.id, d2_1.id"
+ )
+ self._assert_result(q)
+
+ def test_nested_innerjoin_propagation_multiple_paths_two(self):
+ # test #3447
+ A = self.classes.A
+
+ s = Session()
+
+ q = s.query(A).options(
+ joinedload('bs'),
+ joinedload('bs.c2s', innerjoin=True),
+ joinedload('bs.c1s', innerjoin=True),
+ joinedload('bs.c1s.d1s')
+ )
+ self.assert_compile(
+ q,
+ "SELECT a.id AS a_id, d1_1.id AS d1_1_id, "
+ "d1_1.c1_id AS d1_1_c1_id, d1_1.value AS d1_1_value, "
+ "c1_1.id AS c1_1_id, c1_1.b_id AS c1_1_b_id, "
+ "c1_1.value AS c1_1_value, c2_1.id AS c2_1_id, "
+ "c2_1.b_id AS c2_1_b_id, c2_1.value AS c2_1_value, "
+ "b_1.id AS b_1_id, b_1.a_id AS b_1_a_id, "
+ "b_1.value AS b_1_value "
+ "FROM a LEFT OUTER JOIN "
+ "(b AS b_1 JOIN c2 AS c2_1 ON b_1.id = c2_1.b_id "
+ "JOIN c1 AS c1_1 ON b_1.id = c1_1.b_id) ON a.id = b_1.a_id "
+ "LEFT OUTER JOIN d1 AS d1_1 ON c1_1.id = d1_1.c1_id "
+ "ORDER BY c1_1.id, d1_1.id, c2_1.id"
+ )
+ self._assert_result(q)
+
+ def test_multiple_splice_points(self):
+ A = self.classes.A
+
+ s = Session()
+
+ q = s.query(A).options(
+ joinedload('bs', innerjoin=False),
+ joinedload('bs.c1s', innerjoin=True),
+ joinedload('bs.c2s', innerjoin=True),
+ joinedload('bs.c1s.d1s', innerjoin=False),
+ joinedload('bs.c2s.d2s'),
+ joinedload('bs.c1s.d1s.e1s', innerjoin=True)
+ )
+
+ self.assert_compile(
+ q,
+ "SELECT a.id AS a_id, e1_1.id AS e1_1_id, "
+ "e1_1.d1_id AS e1_1_d1_id, e1_1.value AS e1_1_value, "
+ "d1_1.id AS d1_1_id, d1_1.c1_id AS d1_1_c1_id, "
+ "d1_1.value AS d1_1_value, c1_1.id AS c1_1_id, "
+ "c1_1.b_id AS c1_1_b_id, c1_1.value AS c1_1_value, "
+ "d2_1.id AS d2_1_id, d2_1.c2_id AS d2_1_c2_id, "
+ "d2_1.value AS d2_1_value, c2_1.id AS c2_1_id, "
+ "c2_1.b_id AS c2_1_b_id, c2_1.value AS c2_1_value, "
+ "b_1.id AS b_1_id, b_1.a_id AS b_1_a_id, b_1.value AS b_1_value "
+ "FROM a LEFT OUTER JOIN "
+ "(b AS b_1 JOIN c2 AS c2_1 ON b_1.id = c2_1.b_id "
+ "JOIN c1 AS c1_1 ON b_1.id = c1_1.b_id) ON a.id = b_1.a_id "
+ "LEFT OUTER JOIN ("
+ "d1 AS d1_1 JOIN e1 AS e1_1 ON d1_1.id = e1_1.d1_id) "
+ "ON c1_1.id = d1_1.c1_id "
+ "LEFT OUTER JOIN d2 AS d2_1 ON c2_1.id = d2_1.c2_id "
+ "ORDER BY c1_1.id, d1_1.id, e1_1.id, c2_1.id, d2_1.id"
+ )
+ self._assert_result(q)
+
+ def test_splice_onto_np_mapper(self):
+ A = self.classes.A
+ B = self.classes.B
+ C1 = self.classes.C1
+ b_table = self.tables.b
+ c1_table = self.tables.c1
+
+ from sqlalchemy import inspect
+
+ weird_selectable = b_table.outerjoin(c1_table)
+
+ b_np = mapper(
+ B, weird_selectable, non_primary=True, properties=odict([
+ # note we need to make this fixed with lazy=False until
+ # [ticket:3348] is resolved
+ ('c1s', relationship(C1, lazy=False, innerjoin=True)),
+ ('c_id', c1_table.c.id),
+ ('b_value', b_table.c.value),
+ ])
+ )
+
+ a_mapper = inspect(A)
+ a_mapper.add_property(
+ "bs_np", relationship(b_np)
+ )
+
+ s = Session()
+
+ q = s.query(A).options(
+ joinedload('bs_np', innerjoin=False)
+ )
+ self.assert_compile(
+ q,
+ "SELECT a.id AS a_id, c1_1.id AS c1_1_id, c1_1.b_id AS c1_1_b_id, "
+ "c1_1.value AS c1_1_value, c1_2.id AS c1_2_id, "
+ "b_1.value AS b_1_value, b_1.id AS b_1_id, "
+ "b_1.a_id AS b_1_a_id, c1_2.b_id AS c1_2_b_id, "
+ "c1_2.value AS c1_2_value "
+ "FROM a LEFT OUTER JOIN "
+ "(b AS b_1 LEFT OUTER JOIN c1 AS c1_2 ON b_1.id = c1_2.b_id "
+ "JOIN c1 AS c1_1 ON b_1.id = c1_1.b_id) ON a.id = b_1.a_id"
+ )
+
+
+class InnerJoinSplicingWSecondaryTest(
+ fixtures.MappedTest, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+ __backend__ = True # exercise hardcore join nesting on backends
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', ForeignKey('b.id'))
+ )
+
+ Table(
+ 'b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('cid', ForeignKey('c.id'))
+ )
+
+ Table(
+ 'c', metadata,
+ Column('id', Integer, primary_key=True),
+ )
+
+ Table('ctod', metadata,
+ Column('cid', ForeignKey('c.id'), primary_key=True),
+ Column('did', ForeignKey('d.id'), primary_key=True),
+ )
+ Table('d', metadata,
+ Column('id', Integer, primary_key=True),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+
+ class A(cls.Comparable):
+ pass
+
+ class B(cls.Comparable):
+ pass
+
+ class C(cls.Comparable):
+ pass
+
+ class D(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A, B, C, D = (
+ cls.classes.A, cls.classes.B, cls.classes.C,
+ cls.classes.D)
+ mapper(A, cls.tables.a, properties={
+ 'b': relationship(B)
+ })
+ mapper(B, cls.tables.b, properties=odict([
+ ('c', relationship(C)),
+ ]))
+ mapper(C, cls.tables.c, properties=odict([
+ ('ds', relationship(D, secondary=cls.tables.ctod,
+ order_by=cls.tables.d.c.id)),
+ ]))
+ mapper(D, cls.tables.d)
+
+ @classmethod
+ def _fixture_data(cls):
+ A, B, C, D = (
+ cls.classes.A, cls.classes.B, cls.classes.C,
+ cls.classes.D)
+
+ d1, d2, d3 = D(id=1), D(id=2), D(id=3)
+ return [
+ A(
+ id=1,
+ b=B(
+ id=1,
+ c=C(
+ id=1,
+ ds=[d1, d2]
+ )
+ )
+ ),
+ A(
+ id=2,
+ b=B(
+ id=2,
+ c=C(
+ id=2,
+ ds=[d2, d3]
+ )
+ )
+ )
+ ]
+
+ @classmethod
+ def insert_data(cls):
+ s = Session(testing.db)
+ s.add_all(cls._fixture_data())
+ s.commit()
+
+ def _assert_result(self, query):
+ def go():
+ eq_(
+ query.all(),
+ self._fixture_data()
+ )
+
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 1
+ )
+
+ def test_joined_across(self):
+ A = self.classes.A
+
+ s = Session()
+ q = s.query(A) \
+ .options(
+ joinedload('b').
+ joinedload('c', innerjoin=True).
+ joinedload('ds', innerjoin=True))
+ self.assert_compile(
+ q,
+ "SELECT a.id AS a_id, a.bid AS a_bid, d_1.id AS d_1_id, "
+ "c_1.id AS c_1_id, b_1.id AS b_1_id, b_1.cid AS b_1_cid "
+ "FROM a LEFT OUTER JOIN "
+ "(b AS b_1 JOIN "
+ "(c AS c_1 JOIN ctod AS ctod_1 ON c_1.id = ctod_1.cid) "
+ "ON c_1.id = b_1.cid "
+ "JOIN d AS d_1 ON d_1.id = ctod_1.did) ON b_1.id = a.bid "
+ "ORDER BY d_1.id"
+ )
+ self._assert_result(q)
+
+
class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
"""test #2188"""
__dialect__ = 'default'
+ run_create_tables = None
@classmethod
def define_tables(cls, metadata):
@@ -3555,6 +4014,7 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
__dialect__ = 'default'
+ run_create_tables = None
@classmethod
def setup_classes(cls):
@@ -3609,3 +4069,257 @@ class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest,
"director_1.id = persistent_1.id) "
"ON director.other_id = persistent_1.id"
)
+
+
+class EnsureColumnsAddedTest(
+ fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+ run_create_tables = None
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ arb = Column(Integer, unique=True)
+ data = Column(Integer)
+ o2mchild = relationship("O2MChild")
+ m2mchild = relationship("M2MChild", secondary=Table(
+ 'parent_to_m2m', Base.metadata,
+ Column('parent_id', ForeignKey('parent.arb')),
+ Column('child_id', ForeignKey('m2mchild.id'))
+ ))
+
+ class O2MChild(Base):
+ __tablename__ = 'o2mchild'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ parent_id = Column(ForeignKey('parent.arb'))
+
+ class M2MChild(Base):
+ __tablename__ = 'm2mchild'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+
+ def test_joinedload_defered_pk_limit_o2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.o2mchild)).limit(10),
+ "SELECT anon_1.parent_id AS anon_1_parent_id, "
+ "anon_1.parent_data AS anon_1_parent_data, "
+ "anon_1.parent_arb AS anon_1_parent_arb, "
+ "o2mchild_1.id AS o2mchild_1_id, "
+ "o2mchild_1.parent_id AS o2mchild_1_parent_id "
+ "FROM (SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb FROM parent LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN o2mchild AS o2mchild_1 "
+ "ON anon_1.parent_arb = o2mchild_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_limit_m2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.m2mchild)).limit(10),
+ "SELECT anon_1.parent_id AS anon_1_parent_id, "
+ "anon_1.parent_data AS anon_1_parent_data, "
+ "anon_1.parent_arb AS anon_1_parent_arb, "
+ "m2mchild_1.id AS m2mchild_1_id "
+ "FROM (SELECT parent.id AS parent_id, "
+ "parent.data AS parent_data, parent.arb AS parent_arb "
+ "FROM parent LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN (parent_to_m2m AS parent_to_m2m_1 "
+ "JOIN m2mchild AS m2mchild_1 "
+ "ON m2mchild_1.id = parent_to_m2m_1.child_id) "
+ "ON anon_1.parent_arb = parent_to_m2m_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_o2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.o2mchild)),
+ "SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb, o2mchild_1.id AS o2mchild_1_id, "
+ "o2mchild_1.parent_id AS o2mchild_1_parent_id "
+ "FROM parent LEFT OUTER JOIN o2mchild AS o2mchild_1 "
+ "ON parent.arb = o2mchild_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_m2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.m2mchild)),
+ "SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb, m2mchild_1.id AS m2mchild_1_id "
+ "FROM parent LEFT OUTER JOIN (parent_to_m2m AS parent_to_m2m_1 "
+ "JOIN m2mchild AS m2mchild_1 "
+ "ON m2mchild_1.id = parent_to_m2m_1.child_id) "
+ "ON parent.arb = parent_to_m2m_1.parent_id"
+ )
+
+
+class EntityViaMultiplePathTestOne(fixtures.DeclarativeMappedTest):
+ """test for [ticket:3431]"""
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ b_id = Column(ForeignKey('b.id'))
+ c_id = Column(ForeignKey('c.id'))
+
+ b = relationship("B")
+ c = relationship("C")
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+ c_id = Column(ForeignKey('c.id'))
+
+ c = relationship("C")
+
+ class C(Base):
+ __tablename__ = 'c'
+ id = Column(Integer, primary_key=True)
+ d_id = Column(ForeignKey('d.id'))
+ d = relationship("D")
+
+ class D(Base):
+ __tablename__ = 'd'
+ id = Column(Integer, primary_key=True)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', ForeignKey('b.id'))
+ )
+
+ def test_multi_path_load(self):
+ A, B, C, D = self.classes('A', 'B', 'C', 'D')
+
+ s = Session()
+
+ c = C(d=D())
+
+ s.add(
+ A(b=B(c=c), c=c)
+ )
+ s.commit()
+
+ c_alias_1 = aliased(C)
+ c_alias_2 = aliased(C)
+
+ q = s.query(A)
+ q = q.join(A.b).join(c_alias_1, B.c).join(c_alias_1.d)
+ q = q.options(
+ contains_eager(A.b).
+ contains_eager(B.c, alias=c_alias_1).
+ contains_eager(C.d))
+ q = q.join(c_alias_2, A.c)
+ q = q.options(contains_eager(A.c, alias=c_alias_2))
+
+ a1 = q.all()[0]
+
+ # ensure 'd' key was populated in dict. Varies based on
+ # PYTHONHASHSEED
+ in_('d', a1.c.__dict__)
+
+
+class EntityViaMultiplePathTestTwo(fixtures.DeclarativeMappedTest):
+ """test for [ticket:3431]"""
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class User(Base):
+ __tablename__ = 'cs_user'
+
+ id = Column(Integer, primary_key=True)
+ data = Column(Integer)
+
+ class LD(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_ld'
+
+ id = Column(Integer, primary_key=True)
+ user_id = Column(Integer, ForeignKey('cs_user.id'))
+ user = relationship(User, primaryjoin=user_id == User.id)
+
+ class A(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_a'
+
+ id = Column(Integer, primary_key=True)
+ ld_id = Column(Integer, ForeignKey('cs_ld.id'))
+ ld = relationship(LD, primaryjoin=ld_id == LD.id)
+
+ class LDA(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_lda'
+
+ id = Column(Integer, primary_key=True)
+ ld_id = Column(Integer, ForeignKey('cs_ld.id'))
+ a_id = Column(Integer, ForeignKey('cs_a.id'))
+ a = relationship(A, primaryjoin=a_id == A.id)
+ ld = relationship(LD, primaryjoin=ld_id == LD.id)
+
+ def test_multi_path_load(self):
+ User, LD, A, LDA = self.classes('User', 'LD', 'A', 'LDA')
+
+ s = Session()
+
+ u0 = User(data=42)
+ l0 = LD(user=u0)
+ z0 = A(ld=l0)
+ lz0 = LDA(ld=l0, a=z0)
+ s.add_all([
+ u0, l0, z0, lz0
+ ])
+ s.commit()
+
+ l_ac = aliased(LD)
+ u_ac = aliased(User)
+
+ lz_test = (s.query(LDA)
+ .join('ld')
+ .options(contains_eager('ld'))
+ .join('a', (l_ac, 'ld'), (u_ac, 'user'))
+ .options(contains_eager('a')
+ .contains_eager('ld', alias=l_ac)
+ .contains_eager('user', alias=u_ac))
+ .first())
+
+ in_(
+ 'user', lz_test.a.ld.__dict__
+ )
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index 904293102..ab61077ae 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -5,44 +5,56 @@ from sqlalchemy import Integer, String
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, \
create_session, class_mapper, \
- Mapper, column_property, \
+ Mapper, column_property, query, \
Session, sessionmaker, attributes, configure_mappers
from sqlalchemy.orm.instrumentation import ClassManager
from sqlalchemy.orm import instrumentation, events
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing.util import gc_collect
from test.orm import _fixtures
from sqlalchemy import event
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock, call, ANY
+
class _RemoveListeners(object):
+
def teardown(self):
events.MapperEvents._clear()
events.InstanceEvents._clear()
events.SessionEvents._clear()
events.InstrumentationEvents._clear()
+ events.QueryEvents._clear()
super(_RemoveListeners, self).teardown()
class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
run_inserts = None
+ @classmethod
+ def define_tables(cls, metadata):
+ super(MapperEventsTest, cls).define_tables(metadata)
+ metadata.tables['users'].append_column(
+ Column('extra', Integer, default=5, onupdate=10)
+ )
+
def test_instance_event_listen(self):
"""test listen targets for instance events"""
users, addresses = self.tables.users, self.tables.addresses
-
canary = []
+
class A(object):
pass
+
class B(A):
pass
mapper(A, users)
mapper(B, addresses, inherits=A,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
def init_a(target, args, kwargs):
canary.append(('init_a', target))
@@ -67,16 +79,16 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
a = A()
eq_(canary, [('init_a', a), ('init_b', a),
- ('init_c', a), ('init_d', a), ('init_e', a)])
+ ('init_c', a), ('init_d', a), ('init_e', a)])
# test propagate flag
canary[:] = []
b = B()
eq_(canary, [('init_a', b), ('init_b', b), ('init_e', b)])
-
def listen_all(self, mapper, **kw):
canary = []
+
def evt(meth):
def go(*args, **kwargs):
canary.append(meth)
@@ -87,6 +99,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
'init_failure',
'load',
'refresh',
+ 'refresh_flush',
'expire',
'before_insert',
'after_insert',
@@ -98,6 +111,43 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
event.listen(mapper, meth, evt(meth), **kw)
return canary
+ def test_init_allow_kw_modify(self):
+ User, users = self.classes.User, self.tables.users
+ mapper(User, users)
+
+ @event.listens_for(User, 'init')
+ def add_name(obj, args, kwargs):
+ kwargs['name'] = 'ed'
+
+ u1 = User()
+ eq_(u1.name, 'ed')
+
+ def test_init_failure_hook(self):
+ users = self.tables.users
+
+ class Thing(object):
+ def __init__(self, **kw):
+ if kw.get('fail'):
+ raise Exception("failure")
+
+ mapper(Thing, users)
+
+ canary = Mock()
+ event.listen(Thing, 'init_failure', canary)
+
+ Thing()
+ eq_(canary.mock_calls, [])
+
+ assert_raises_message(
+ Exception,
+ "failure",
+ Thing, fail=True
+ )
+ eq_(
+ canary.mock_calls,
+ [call(ANY, (), {'fail': True})]
+ )
+
def test_listen_doesnt_force_compile(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users, properties={
@@ -109,7 +159,6 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_basic(self):
User, users = self.classes.User, self.tables.users
-
mapper(User, users)
canary = self.listen_all(User)
named_canary = self.listen_all(User, named=True)
@@ -128,10 +177,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
expected = [
'init', 'before_insert',
+ 'refresh_flush',
'after_insert', 'expire',
'refresh',
'load',
- 'before_update', 'after_update', 'before_delete',
+ 'before_update', 'refresh_flush', 'after_update', 'before_delete',
'after_delete']
eq_(canary, expected)
eq_(named_canary, expected)
@@ -187,13 +237,13 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
]
)
-
def test_merge(self):
users, User = self.tables.users, self.classes.User
mapper(User, users)
canary = []
+
def load(obj, ctx):
canary.append('load')
event.listen(mapper, 'load', load)
@@ -205,22 +255,22 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
s = Session()
u2 = s.merge(u)
s = Session()
- u2 = s.merge(User(name='u2'))
+ u2 = s.merge(User(name='u2')) # noqa
s.commit()
s.query(User).order_by(User.id).first()
eq_(canary, ['load', 'load', 'load'])
def test_inheritance(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
class AdminUser(User):
pass
mapper(User, users)
mapper(AdminUser, addresses, inherits=User,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
canary1 = self.listen_all(User, propagate=True)
canary2 = self.listen_all(User)
@@ -237,22 +287,23 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
sess.delete(am)
sess.flush()
- eq_(canary1, ['init', 'before_insert', 'after_insert',
- 'refresh', 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
+ eq_(canary1, ['init', 'before_insert', 'refresh_flush', 'after_insert',
+ 'refresh', 'load',
+ 'before_update', 'refresh_flush',
+ 'after_update', 'before_delete',
+ 'after_delete'])
eq_(canary2, [])
- eq_(canary3, ['init', 'before_insert', 'after_insert',
- 'refresh',
- 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
+ eq_(canary3, ['init', 'before_insert', 'refresh_flush', 'after_insert',
+ 'refresh',
+ 'load',
+ 'before_update', 'refresh_flush',
+ 'after_update', 'before_delete',
+ 'after_delete'])
def test_inheritance_subclass_deferred(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
@@ -262,7 +313,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
class AdminUser(User):
pass
mapper(AdminUser, addresses, inherits=User,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
canary3 = self.listen_all(AdminUser)
sess = create_session()
@@ -276,16 +327,17 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
sess.delete(am)
sess.flush()
- eq_(canary1, ['init', 'before_insert', 'after_insert',
- 'refresh', 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
+ eq_(canary1, ['init', 'before_insert', 'refresh_flush', 'after_insert',
+ 'refresh', 'load',
+ 'before_update', 'refresh_flush',
+ 'after_update', 'before_delete',
+ 'after_delete'])
eq_(canary2, [])
- eq_(canary3, ['init', 'before_insert', 'after_insert',
- 'refresh', 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
-
+ eq_(canary3, ['init', 'before_insert', 'refresh_flush', 'after_insert',
+ 'refresh', 'load',
+ 'before_update', 'refresh_flush',
+ 'after_update', 'before_delete',
+ 'after_delete'])
def test_before_after_only_collection(self):
"""before_update is called on parent for collection modifications,
@@ -294,12 +346,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
keywords, items, item_keywords, Keyword, Item = (
- self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
-
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Item, items, properties={
'keywords': relationship(Keyword, secondary=item_keywords)})
@@ -316,13 +367,13 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
eq_(canary1,
['init',
- 'before_insert', 'after_insert'])
+ 'before_insert', 'after_insert'])
eq_(canary2,
['init',
- 'before_insert', 'after_insert'])
+ 'before_insert', 'after_insert'])
- canary1[:]= []
- canary2[:]= []
+ canary1[:] = []
+ canary2[:] = []
i1.keywords.append(k1)
sess.flush()
@@ -338,14 +389,16 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
assert_raises_message(
sa.exc.SAWarning,
"before_configured' and 'after_configured' ORM events only "
- "invoke with the mapper\(\) function or Mapper class as the target.",
+ "invoke with the mapper\(\) function or Mapper class as "
+ "the target.",
event.listen, User, 'before_configured', m1
)
assert_raises_message(
sa.exc.SAWarning,
"before_configured' and 'after_configured' ORM events only "
- "invoke with the mapper\(\) function or Mapper class as the target.",
+ "invoke with the mapper\(\) function or Mapper class as "
+ "the target.",
event.listen, User, 'after_configured', m1
)
@@ -368,11 +421,12 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_instrument_event(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
canary = []
+
def instrument_class(mapper, cls):
canary.append(cls)
@@ -383,7 +437,45 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
mapper(Address, addresses)
eq_(canary, [User, Address])
-class DeclarativeEventListenTest(_RemoveListeners, fixtures.DeclarativeMappedTest):
+ def test_instrument_class_precedes_class_instrumentation(self):
+ users = self.tables.users
+
+ class MyClass(object):
+ pass
+
+ canary = Mock()
+
+ def my_init(self):
+ canary.init()
+
+ # mapper level event
+ @event.listens_for(mapper, "instrument_class")
+ def instrument_class(mp, class_):
+ canary.instrument_class(class_)
+ class_.__init__ = my_init
+
+ # instrumentationmanager event
+ @event.listens_for(object, "class_instrument")
+ def class_instrument(class_):
+ canary.class_instrument(class_)
+
+ mapper(MyClass, users)
+
+ m1 = MyClass()
+ assert attributes.instance_state(m1)
+
+ eq_(
+ [
+ call.instrument_class(MyClass),
+ call.class_instrument(MyClass),
+ call.init()
+ ],
+ canary.mock_calls
+ )
+
+
+class DeclarativeEventListenTest(_RemoveListeners,
+ fixtures.DeclarativeMappedTest):
run_setup_classes = "each"
run_deletes = None
@@ -419,6 +511,7 @@ class DeclarativeEventListenTest(_RemoveListeners, fixtures.DeclarativeMappedTes
class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
+
""""test event listeners against unmapped classes.
This incurs special logic. Note if we ever do the "remove" case,
@@ -435,9 +528,10 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
canary = []
+
def evt(x, y, z):
canary.append(x)
event.listen(User, "before_insert", evt, raw=True)
@@ -454,7 +548,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -463,6 +557,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
pass
canary = Mock()
+
def evt(x, y, z):
canary.append(x)
event.listen(User, "before_insert", canary, propagate=True, raw=True)
@@ -470,14 +565,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m = mapper(SubUser, users)
m.dispatch.before_insert(5, 6, 7)
eq_(canary.mock_calls,
- [call(5, 6, 7)])
+ [call(5, 6, 7)])
m2 = mapper(SubSubUser, users)
m2.dispatch.before_insert(8, 9, 10)
eq_(canary.mock_calls,
- [call(5, 6, 7), call(8, 9, 10)])
-
+ [call(5, 6, 7), call(8, 9, 10)])
def test_deferred_map_event_subclass_no_propagate(self):
"""
@@ -487,12 +581,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
canary = []
+
def evt(x, y, z):
canary.append(x)
event.listen(User, "before_insert", evt, propagate=False)
@@ -509,7 +604,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -517,6 +612,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m = mapper(SubUser, users)
canary = []
+
def evt(x, y, z):
canary.append(x)
event.listen(User, "before_insert", evt, propagate=True, raw=True)
@@ -532,7 +628,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -561,7 +657,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -569,6 +665,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m = mapper(SubUser, users)
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "load", evt, propagate=True, raw=True)
@@ -576,7 +673,6 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m.class_manager.dispatch.load(5)
eq_(canary, [5])
-
def test_deferred_instance_event_plain(self):
"""
1. instance event listen on class, w/o propagate
@@ -585,9 +681,10 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "load", evt, raw=True)
@@ -604,7 +701,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -613,6 +710,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
pass
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "load", evt, propagate=True, raw=True)
@@ -629,14 +727,15 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_deferred_instance_event_subclass_propagate_baseclass(self):
"""
1. instance event listen on class, w propagate
- 2. map one subclass of class, map base class, leave 2nd subclass unmapped
+ 2. map one subclass of class, map base class, leave 2nd subclass
+ unmapped
3. event fire on sub should receive one and only one event
4. event fire on base should receive one and only one event
5. map 2nd subclass
6. event fire on 2nd subclass should receive one and only one event
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
@@ -662,7 +761,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m3 = mapper(SubUser2, users)
m3.class_manager.dispatch.load(instance)
eq_(canary.mock_calls, [call(instance.obj()),
- call(instance.obj()), call(instance.obj())])
+ call(instance.obj()), call(instance.obj())])
def test_deferred_instance_event_subclass_no_propagate(self):
"""
@@ -671,12 +770,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
3. event fire on subclass should not receive event
"""
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
class SubUser(User):
pass
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "load", evt, propagate=False)
@@ -686,29 +786,32 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
eq_(canary, [])
def test_deferred_instrument_event(self):
- users, User = (self.tables.users,
- self.classes.User)
+ User = self.classes.User
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "attribute_instrument", evt)
- instrumentation._instrumentation_factory.dispatch.attribute_instrument(User)
+ instrumentation._instrumentation_factory.\
+ dispatch.attribute_instrument(User)
eq_(canary, [User])
def test_isolation_instrument_event(self):
- users, User = (self.tables.users,
- self.classes.User)
+ User = self.classes.User
+
class Bar(object):
pass
canary = []
+
def evt(x):
canary.append(x)
event.listen(Bar, "attribute_instrument", evt)
- instrumentation._instrumentation_factory.dispatch.attribute_instrument(User)
+ instrumentation._instrumentation_factory.dispatch.\
+ attribute_instrument(User)
eq_(canary, [])
@testing.requires.predictable_gc
@@ -728,35 +831,38 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
assert not dispatch.attribute_instrument
-
def test_deferred_instrument_event_subclass_propagate(self):
- users, User = (self.tables.users,
- self.classes.User)
+ User = self.classes.User
+
class SubUser(User):
pass
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "attribute_instrument", evt, propagate=True)
instrumentation._instrumentation_factory.dispatch.\
- attribute_instrument(SubUser)
+ attribute_instrument(SubUser)
eq_(canary, [SubUser])
def test_deferred_instrument_event_subclass_no_propagate(self):
users, User = (self.tables.users,
- self.classes.User)
+ self.classes.User)
+
class SubUser(User):
pass
canary = []
+
def evt(x):
canary.append(x)
event.listen(User, "attribute_instrument", evt, propagate=False)
mapper(SubUser, users)
- instrumentation._instrumentation_factory.dispatch.attribute_instrument(5)
+ instrumentation._instrumentation_factory.dispatch.\
+ attribute_instrument(5)
eq_(canary, [])
@@ -773,8 +879,10 @@ class LoadTest(_fixtures.FixtureTest):
User = self.classes.User
canary = []
+
def load(target, ctx):
canary.append("load")
+
def refresh(target, ctx, attrs):
canary.append(("refresh", attrs))
@@ -816,20 +924,19 @@ class LoadTest(_fixtures.FixtureTest):
class RemovalTest(_fixtures.FixtureTest):
run_inserts = None
-
def test_attr_propagated(self):
User = self.classes.User
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
class AdminUser(User):
pass
mapper(User, users)
mapper(AdminUser, addresses, inherits=User,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
fn = Mock()
event.listen(User.name, "set", fn, propagate=True)
@@ -899,7 +1006,6 @@ class RemovalTest(_fixtures.FixtureTest):
eq_(fn.mock_calls, [call(u1, "u1")])
-
class RefreshTest(_fixtures.FixtureTest):
run_inserts = None
@@ -913,8 +1019,10 @@ class RefreshTest(_fixtures.FixtureTest):
User = self.classes.User
canary = []
+
def load(target, ctx):
canary.append("load")
+
def refresh(target, ctx, attrs):
canary.append(("refresh", attrs))
@@ -977,9 +1085,6 @@ class RefreshTest(_fixtures.FixtureTest):
assert "name" not in attributes.instance_state(u1).committed_state
assert u1 in sess.dirty
-
-
-
def test_repeated_rows(self):
User = self.classes.User
@@ -992,7 +1097,7 @@ class RefreshTest(_fixtures.FixtureTest):
sess.commit()
sess.query(User).union_all(sess.query(User)).all()
- eq_(canary, [('refresh', set(['id','name']))])
+ eq_(canary, [('refresh', set(['id', 'name']))])
def test_via_refresh_state(self):
User = self.classes.User
@@ -1006,7 +1111,7 @@ class RefreshTest(_fixtures.FixtureTest):
sess.commit()
u1.name
- eq_(canary, [('refresh', set(['id','name']))])
+ eq_(canary, [('refresh', set(['id', 'name']))])
def test_was_expired(self):
User = self.classes.User
@@ -1021,7 +1126,7 @@ class RefreshTest(_fixtures.FixtureTest):
sess.expire(u1)
sess.query(User).first()
- eq_(canary, [('refresh', set(['id','name']))])
+ eq_(canary, [('refresh', set(['id', 'name']))])
def test_was_expired_via_commit(self):
User = self.classes.User
@@ -1035,7 +1140,7 @@ class RefreshTest(_fixtures.FixtureTest):
sess.commit()
sess.query(User).first()
- eq_(canary, [('refresh', set(['id','name']))])
+ eq_(canary, [('refresh', set(['id', 'name']))])
def test_was_expired_attrs(self):
User = self.classes.User
@@ -1085,6 +1190,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def my_listener_one(*arg, **kw):
pass
+
def my_listener_two(*arg, **kw):
pass
@@ -1108,7 +1214,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def my_listener_one(*arg, **kw):
pass
- scope = scoped_session(lambda:Session())
+ scope = scoped_session(lambda: Session())
assert_raises_message(
sa.exc.ArgumentError,
@@ -1124,6 +1230,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
pass
class NotASession(object):
+
def __call__(self):
return Session()
@@ -1149,6 +1256,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def _listener_fixture(self, **kw):
canary = []
+
def listener(name):
def go(*arg, **kw):
canary.append(name)
@@ -1179,24 +1287,23 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_flush_autocommit_hook(self):
User, users = self.classes.User, self.tables.users
-
mapper(User, users)
- sess, canary = self._listener_fixture(autoflush=False,
- autocommit=True, expire_on_commit=False)
+ sess, canary = self._listener_fixture(
+ autoflush=False,
+ autocommit=True, expire_on_commit=False)
u = User(name='u1')
sess.add(u)
sess.flush()
eq_(
canary,
- [ 'before_attach', 'after_attach', 'before_flush',
- 'after_transaction_create', 'after_begin',
- 'after_flush', 'after_flush_postexec',
- 'before_commit', 'after_commit','after_transaction_end']
+ ['before_attach', 'after_attach', 'before_flush',
+ 'after_transaction_create', 'after_begin',
+ 'after_flush', 'after_flush_postexec',
+ 'before_commit', 'after_commit', 'after_transaction_end']
)
-
def test_rollback_hook(self):
User, users = self.classes.User, self.tables.users
sess, canary = self._listener_fixture()
@@ -1215,15 +1322,17 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.rollback()
eq_(canary,
- ['before_attach', 'after_attach', 'before_commit', 'before_flush',
- 'after_transaction_create', 'after_begin', 'after_flush',
- 'after_flush_postexec', 'after_transaction_end', 'after_commit',
- 'after_transaction_end', 'after_transaction_create',
- 'before_attach', 'after_attach', 'before_commit',
- 'before_flush', 'after_transaction_create', 'after_begin', 'after_rollback',
- 'after_transaction_end',
- 'after_soft_rollback', 'after_transaction_end','after_transaction_create',
- 'after_soft_rollback'])
+ ['before_attach', 'after_attach', 'before_commit', 'before_flush',
+ 'after_transaction_create', 'after_begin', 'after_flush',
+ 'after_flush_postexec', 'after_transaction_end', 'after_commit',
+ 'after_transaction_end', 'after_transaction_create',
+ 'before_attach', 'after_attach', 'before_commit',
+ 'before_flush', 'after_transaction_create', 'after_begin',
+ 'after_rollback',
+ 'after_transaction_end',
+ 'after_soft_rollback', 'after_transaction_end',
+ 'after_transaction_create',
+ 'after_soft_rollback'])
def test_can_use_session_in_outer_rollback_hook(self):
User, users = self.classes.User, self.tables.users
@@ -1232,6 +1341,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess = Session()
assertions = []
+
@event.listens_for(sess, "after_soft_rollback")
def do_something(session, previous_transaction):
if session.is_active:
@@ -1251,7 +1361,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.rollback()
eq_(assertions, [True, True])
-
def test_flush_noautocommit_hook(self):
User, users = self.classes.User, self.tables.users
@@ -1263,9 +1372,9 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.add(u)
sess.flush()
eq_(canary, ['before_attach', 'after_attach', 'before_flush',
- 'after_transaction_create', 'after_begin',
- 'after_flush', 'after_flush_postexec',
- 'after_transaction_end'])
+ 'after_transaction_create', 'after_begin',
+ 'after_flush', 'after_flush_postexec',
+ 'after_transaction_end'])
def test_flush_in_commit_hook(self):
User, users = self.classes.User, self.tables.users
@@ -1280,11 +1389,12 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
u.name = 'ed'
sess.commit()
- eq_(canary, ['before_commit', 'before_flush', 'after_transaction_create', 'after_flush',
- 'after_flush_postexec',
- 'after_transaction_end',
- 'after_commit',
- 'after_transaction_end', 'after_transaction_create',])
+ eq_(canary, ['before_commit', 'before_flush',
+ 'after_transaction_create', 'after_flush',
+ 'after_flush_postexec',
+ 'after_transaction_end',
+ 'after_commit',
+ 'after_transaction_end', 'after_transaction_create', ])
def test_state_before_attach(self):
User, users = self.classes.User, self.tables.users
@@ -1299,7 +1409,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
assert inst not in session.new
mapper(User, users)
- u= User(name='u1')
+ u = User(name='u1')
sess.add(u)
sess.flush()
sess.expunge(u)
@@ -1318,7 +1428,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
assert inst in session.new
mapper(User, users)
- u= User(name='u1')
+ u = User(name='u1')
sess.add(u)
sess.flush()
sess.expunge(u)
@@ -1328,8 +1438,8 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess, canary = self._listener_fixture()
sess.commit()
eq_(canary, ['before_commit', 'after_commit',
- 'after_transaction_end',
- 'after_transaction_create'])
+ 'after_transaction_end',
+ 'after_transaction_create'])
def test_on_bulk_update_hook(self):
User, users = self.classes.User, self.tables.users
@@ -1367,7 +1477,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
[call(sess, upd.query, upd.context, upd.result)]
)
-
def test_on_bulk_delete_hook(self):
User, users = self.classes.User, self.tables.users
@@ -1406,13 +1515,13 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_connection_emits_after_begin(self):
sess, canary = self._listener_fixture(bind=testing.db)
- conn = sess.connection()
+ sess.connection()
eq_(canary, ['after_begin'])
+ sess.close()
def test_reentrant_flush(self):
users, User = self.tables.users, self.classes.User
-
mapper(User, users)
def before_flush(session, flush_context, objects):
@@ -1427,7 +1536,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
def test_before_flush_affects_flush_plan(self):
users, User = self.tables.users, self.classes.User
-
mapper(User, users)
def before_flush(session, flush_context, objects):
@@ -1436,8 +1544,8 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
session.add(User(name='another %s' % obj.name))
for obj in list(session.deleted):
if isinstance(obj, User):
- x = session.query(User).filter(User.name
- == 'another %s' % obj.name).one()
+ x = session.query(User).filter(
+ User.name == 'another %s' % obj.name).one()
session.delete(x)
sess = Session()
@@ -1450,7 +1558,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
[
User(name='another u1'),
User(name='u1')
- ]
+ ]
)
sess.flush()
@@ -1458,17 +1566,17 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
[
User(name='another u1'),
User(name='u1')
- ]
+ ]
)
- u.name='u2'
+ u.name = 'u2'
sess.flush()
eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
User(name='another u2'),
User(name='u2')
- ]
+ ]
)
sess.delete(u)
@@ -1476,7 +1584,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
- ]
+ ]
)
def test_before_flush_affects_dirty(self):
@@ -1496,7 +1604,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
eq_(sess.query(User).order_by(User.name).all(),
[User(name='u1')]
- )
+ )
sess.add(User(name='u2'))
sess.flush()
@@ -1505,12 +1613,512 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
[
User(name='u1 modified'),
User(name='u2')
+ ]
+ )
+
+
+class SessionLifecycleEventsTest(_RemoveListeners, _fixtures.FixtureTest):
+ run_inserts = None
+
+ def _fixture(self, include_address=False):
+ users, User = self.tables.users, self.classes.User
+
+ if include_address:
+ addresses, Address = self.tables.addresses, self.classes.Address
+ mapper(User, users, properties={
+ "addresses": relationship(
+ Address, cascade="all, delete-orphan")
+ })
+ mapper(Address, addresses)
+ else:
+ mapper(User, users)
+
+ listener = Mock()
+
+ sess = Session()
+
+ def start_events():
+ event.listen(
+ sess, "transient_to_pending", listener.transient_to_pending)
+ event.listen(
+ sess, "pending_to_transient", listener.pending_to_transient)
+ event.listen(
+ sess, "persistent_to_transient",
+ listener.persistent_to_transient)
+ event.listen(
+ sess, "pending_to_persistent", listener.pending_to_persistent)
+ event.listen(
+ sess, "detached_to_persistent",
+ listener.detached_to_persistent)
+ event.listen(
+ sess, "loaded_as_persistent", listener.loaded_as_persistent)
+
+ event.listen(
+ sess, "persistent_to_detached",
+ listener.persistent_to_detached)
+ event.listen(
+ sess, "deleted_to_detached", listener.deleted_to_detached)
+
+ event.listen(
+ sess, "persistent_to_deleted", listener.persistent_to_deleted)
+ event.listen(
+ sess, "deleted_to_persistent", listener.deleted_to_persistent)
+ return listener
+
+ if include_address:
+ return sess, User, Address, start_events
+ else:
+ return sess, User, start_events
+
+ def test_transient_to_pending(self):
+ sess, User, start_events = self._fixture()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "transient_to_pending")
+ def trans_to_pending(session, instance):
+ assert instance in session
+ listener.flag_checked(instance)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.transient_to_pending(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.rollback()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_persistent")
+ def test_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ listener.flag_checked(instance)
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ sess.expunge(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ listener.flag_checked()
+
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked()
+ ]
+ )
+
+ def test_loaded_as_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "loaded_as_persistent")
+ def test_identity_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ u1 = sess.query(User).filter_by(name='u1').one()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.loaded_as_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent_via_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag_persistent(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.dtp_flag_checked(instance)
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag_detached(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ listener.ptd_flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1)
+ ]
+ )
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1),
+ call.persistent_to_deleted(sess, u1),
+ call.ptd_flag_checked(u1),
+ ]
+ )
+
+ def test_detached_to_persistent_via_cascaded_delete(self):
+ sess, User, Address, start_events = self._fixture(include_address=True)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ a1 = Address(email_address='e1')
+ u1.addresses.append(a1)
+ sess.commit()
+ u1.addresses # ensure u1.addresses refers to a1 before detachment
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+ assert a1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked(u1),
+ call.detached_to_persistent(sess, a1),
+ call.flag_checked(a1),
+ ]
+ )
+
+ sess.flush()
+
+ def test_persistent_to_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ []
+ )
+
+ sess.flush()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_deleted(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge_all(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge_all()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.transient
+ listener.flag_checked(instance)
+
+ sess.rollback()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_deleted_to_persistent_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.rollback()
+
+ assert u1 in sess
+ assert u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_persistent(sess, u1),
+ call.flag_checked(u1)
]
)
+ def test_deleted_to_detached_via_commit(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_detached")
+ def test_detached_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.commit()
+
+ assert u1 not in sess
+ assert not u1._sa_instance_state.deleted
+ assert u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
class MapperExtensionTest(_fixtures.FixtureTest):
+
"""Superseded by MapperEventsTest - test backwards
compatibility of MapperExtension."""
@@ -1520,15 +2128,18 @@ class MapperExtensionTest(_fixtures.FixtureTest):
methods = []
class Ext(sa.orm.MapperExtension):
+
def instrument_class(self, mapper, cls):
methods.append('instrument_class')
return sa.orm.EXT_CONTINUE
- def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
+ def init_instance(
+ self, mapper, class_, oldinit, instance, args, kwargs):
methods.append('init_instance')
return sa.orm.EXT_CONTINUE
- def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
+ def init_failed(
+ self, mapper, class_, oldinit, instance, args, kwargs):
methods.append('init_failed')
return sa.orm.EXT_CONTINUE
@@ -1589,8 +2200,8 @@ class MapperExtensionTest(_fixtures.FixtureTest):
def test_inheritance(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
Ext, methods = self.extension()
@@ -1599,7 +2210,7 @@ class MapperExtensionTest(_fixtures.FixtureTest):
mapper(User, users, extension=Ext())
mapper(AdminUser, addresses, inherits=User,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
sess = create_session()
am = AdminUser(name='au1', email_address='au1@e1')
@@ -1625,17 +2236,17 @@ class MapperExtensionTest(_fixtures.FixtureTest):
"""
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
-
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
Ext1, methods1 = self.extension()
Ext2, methods2 = self.extension()
- mapper(Item, items, extension=Ext1() , properties={
+ mapper(Item, items, extension=Ext1(), properties={
'keywords': relationship(Keyword, secondary=item_keywords)})
mapper(Keyword, keywords, extension=Ext2())
@@ -1647,10 +2258,10 @@ class MapperExtensionTest(_fixtures.FixtureTest):
sess.flush()
eq_(methods1,
['instrument_class', 'init_instance',
- 'before_insert', 'after_insert'])
+ 'before_insert', 'after_insert'])
eq_(methods2,
['instrument_class', 'init_instance',
- 'before_insert', 'after_insert'])
+ 'before_insert', 'after_insert'])
del methods1[:]
del methods2[:]
@@ -1659,13 +2270,12 @@ class MapperExtensionTest(_fixtures.FixtureTest):
eq_(methods1, ['before_update', 'after_update'])
eq_(methods2, [])
-
def test_inheritance_with_dupes(self):
"""Inheritance with the same extension instance on both mappers."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
Ext, methods = self.extension()
@@ -1675,7 +2285,7 @@ class MapperExtensionTest(_fixtures.FixtureTest):
ext = Ext()
mapper(User, users, extension=ext)
mapper(AdminUser, addresses, inherits=User, extension=ext,
- properties={'address_id': addresses.c.id})
+ properties={'address_id': addresses.c.id})
sess = create_session()
am = AdminUser(name="au1", email_address="au1@e1")
@@ -1695,11 +2305,11 @@ class MapperExtensionTest(_fixtures.FixtureTest):
'before_update', 'after_update', 'before_delete',
'after_delete'])
-
def test_unnecessary_methods_not_evented(self):
users = self.tables.users
class MyExtension(sa.orm.MapperExtension):
+
def before_insert(self, mapper, connection, instance):
pass
@@ -1712,15 +2322,16 @@ class MapperExtensionTest(_fixtures.FixtureTest):
class AttributeExtensionTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('t1',
- metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(40)),
- Column('data', String(50))
+ metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(40)),
+ Column('data', String(50))
- )
+ )
def test_cascading_extensions(self):
t1 = self.tables.t1
@@ -1728,28 +2339,35 @@ class AttributeExtensionTest(fixtures.MappedTest):
ext_msg = []
class Ex1(sa.orm.AttributeExtension):
+
def set(self, state, value, oldvalue, initiator):
ext_msg.append("Ex1 %r" % value)
return "ex1" + value
class Ex2(sa.orm.AttributeExtension):
+
def set(self, state, value, oldvalue, initiator):
ext_msg.append("Ex2 %r" % value)
return "ex2" + value
class A(fixtures.BasicEntity):
pass
+
class B(A):
pass
+
class C(B):
pass
- mapper(A, t1, polymorphic_on=t1.c.type, polymorphic_identity='a', properties={
- 'data':column_property(t1.c.data, extension=Ex1())
- })
+ mapper(
+ A, t1, polymorphic_on=t1.c.type, polymorphic_identity='a',
+ properties={
+ 'data': column_property(t1.c.data, extension=Ex1())
+ }
+ )
mapper(B, polymorphic_identity='b', inherits=A)
- mc = mapper(C, polymorphic_identity='c', inherits=B, properties={
- 'data':column_property(t1.c.data, extension=Ex2())
+ mapper(C, polymorphic_identity='c', inherits=B, properties={
+ 'data': column_property(t1.c.data, extension=Ex2())
})
a1 = A(data='a1')
@@ -1761,15 +2379,14 @@ class AttributeExtensionTest(fixtures.MappedTest):
eq_(c1.data, 'ex2c1')
a1.data = 'a2'
- b1.data='b2'
+ b1.data = 'b2'
c1.data = 'c2'
eq_(a1.data, 'ex1a2')
eq_(b1.data, 'ex1b2')
eq_(c1.data, 'ex2c2')
eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'",
- "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"])
-
+ "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"])
class SessionExtensionTest(_fixtures.FixtureTest):
@@ -1780,36 +2397,46 @@ class SessionExtensionTest(_fixtures.FixtureTest):
mapper(User, users)
log = []
+
class MyExt(sa.orm.session.SessionExtension):
+
def before_commit(self, session):
log.append('before_commit')
+
def after_commit(self, session):
log.append('after_commit')
+
def after_rollback(self, session):
log.append('after_rollback')
+
def before_flush(self, session, flush_context, objects):
log.append('before_flush')
+
def after_flush(self, session, flush_context):
log.append('after_flush')
+
def after_flush_postexec(self, session, flush_context):
log.append('after_flush_postexec')
+
def after_begin(self, session, transaction, connection):
log.append('after_begin')
+
def after_attach(self, session, instance):
log.append('after_attach')
+
def after_bulk_update(
self,
session, query, query_context, result
- ):
+ ):
log.append('after_bulk_update')
def after_bulk_delete(
self,
session, query, query_context, result
- ):
+ ):
log.append('after_bulk_delete')
- sess = create_session(extension = MyExt())
+ sess = create_session(extension=MyExt())
u = User(name='u1')
sess.add(u)
sess.flush()
@@ -1821,7 +2448,7 @@ class SessionExtensionTest(_fixtures.FixtureTest):
'after_flush_postexec',
'before_commit',
'after_commit',
- ]
+ ]
log = []
sess = create_session(autocommit=False, extension=MyExt())
u = User(name='u1')
@@ -1846,34 +2473,38 @@ class SessionExtensionTest(_fixtures.FixtureTest):
log = []
sess = create_session(autocommit=False, extension=MyExt(),
bind=testing.db)
- conn = sess.connection()
+ sess.connection()
assert log == ['after_begin']
+ sess.close()
def test_multiple_extensions(self):
User, users = self.classes.User, self.tables.users
log = []
+
class MyExt1(sa.orm.session.SessionExtension):
+
def before_commit(self, session):
log.append('before_commit_one')
-
class MyExt2(sa.orm.session.SessionExtension):
+
def before_commit(self, session):
log.append('before_commit_two')
mapper(User, users)
- sess = create_session(extension = [MyExt1(), MyExt2()])
+ sess = create_session(extension=[MyExt1(), MyExt2()])
u = User(name='u1')
sess.add(u)
sess.flush()
assert log == [
'before_commit_one',
'before_commit_two',
- ]
+ ]
def test_unnecessary_methods_not_evented(self):
class MyExtension(sa.orm.session.SessionExtension):
+
def before_commit(self, session):
pass
@@ -1881,3 +2512,119 @@ class SessionExtensionTest(_fixtures.FixtureTest):
assert not s.dispatch.after_commit
assert len(s.dispatch.before_commit) == 1
+
+class QueryEventsTest(
+ _RemoveListeners, _fixtures.FixtureTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+ users = cls.tables.users
+
+ mapper(User, users)
+
+ def test_before_compile(self):
+ @event.listens_for(query.Query, "before_compile", retval=True)
+ def no_deleted(query):
+ for desc in query.column_descriptions:
+ if desc['type'] is User:
+ entity = desc['expr']
+ query = query.filter(entity.id != 10)
+ return query
+
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User).filter_by(id=7)
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users "
+ "WHERE users.id = :id_1 AND users.id != :id_2",
+ checkparams={'id_2': 10, 'id_1': 7}
+ )
+
+ def test_alters_entities(self):
+ User = self.classes.User
+
+ @event.listens_for(query.Query, "before_compile", retval=True)
+ def fn(query):
+ return query.add_columns(User.name)
+
+ s = Session()
+
+ q = s.query(User.id, ).filter_by(id=7)
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users "
+ "WHERE users.id = :id_1",
+ checkparams={'id_1': 7}
+ )
+ eq_(
+ q.all(),
+ [(7, 'jack')]
+ )
+
+
+class RefreshFlushInReturningTest(fixtures.MappedTest):
+ """test [ticket:3427].
+
+ this is a rework of the test for [ticket:3167] stated
+ in test_unitofworkv2, which tests that returning doesn't trigger
+ attribute events; the test here is *reversed* so that we test that
+ it *does* trigger the new refresh_flush event.
+
+ """
+
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('prefetch_val', Integer, default=5),
+ Column('returning_val', Integer, server_default="5")
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test, eager_defaults=True)
+
+ def test_no_attr_events_flush(self):
+ Thing = self.classes.Thing
+ mock = Mock()
+ event.listen(Thing, "refresh_flush", mock)
+ t1 = Thing()
+ s = Session()
+ s.add(t1)
+ s.flush()
+
+ if testing.requires.returning.enabled:
+ # ordering is deterministic in this test b.c. the routine
+ # appends the "returning" params before the "prefetch"
+ # ones. if there were more than one attribute in each category,
+ # then we'd have hash order issues.
+ eq_(
+ mock.mock_calls,
+ [call(t1, ANY, ['returning_val', 'prefetch_val'])]
+ )
+ else:
+ eq_(
+ mock.mock_calls,
+ [call(t1, ANY, ['prefetch_val'])]
+ )
+
+ eq_(t1.id, 1)
+ eq_(t1.prefetch_val, 5)
+ eq_(t1.returning_val, 5)
diff --git a/test/orm/test_hasparent.py b/test/orm/test_hasparent.py
index fd246b527..df4b05980 100644
--- a/test/orm/test_hasparent.py
+++ b/test/orm/test_hasparent.py
@@ -116,7 +116,7 @@ class ParentRemovalTest(fixtures.MappedTest):
User = self.classes.User
s, u1, a1 = self._fixture()
- s._expunge_state(attributes.instance_state(u1))
+ s._expunge_states([attributes.instance_state(u1)])
del u1
gc_collect()
@@ -178,7 +178,7 @@ class ParentRemovalTest(fixtures.MappedTest):
u2 = User(addresses=[a1])
s.add(u2)
s.flush()
- s._expunge_state(attributes.instance_state(u2))
+ s._expunge_states([attributes.instance_state(u2)])
del u2
gc_collect()
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 23d220dcc..540056dae 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -750,6 +750,17 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
filter_by(id=3).outerjoin('orders','address').filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
+ def test_raises_on_dupe_target_rel(self):
+ User = self.classes.User
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "Pathed join target Order.items has already been joined to; "
+ "skipping",
+ lambda: create_session().query(User).outerjoin('orders', 'items').\
+ outerjoin('orders', 'items')
+ )
+
def test_from_joinpoint(self):
Item, User, Order = (self.classes.Item,
self.classes.User,
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index e99e22725..f2e1db2da 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -9,6 +9,7 @@ from sqlalchemy import Integer, String, ForeignKey, SmallInteger, Boolean
from sqlalchemy.types import TypeDecorator
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
+from sqlalchemy import orm
from sqlalchemy.orm import mapper, relationship, create_session, Session
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
@@ -559,7 +560,60 @@ class GetterStateTest(_fixtures.FixtureTest):
run_inserts = None
- def _u_ad_fixture(self, populate_user):
+ def _unhashable_fixture(self, metadata, load_on_pending=False):
+ class MyHashType(sa.TypeDecorator):
+ impl = sa.String(100)
+
+ def process_bind_param(self, value, dialect):
+ return ";".join(
+ "%s=%s" % (k, v)
+ for k, v in
+ sorted(value.items(), key=lambda key: key[0]))
+
+ def process_result_value(self, value, dialect):
+ return dict(elem.split("=", 1) for elem in value.split(";"))
+
+ category = Table(
+ 'category', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', MyHashType())
+ )
+ article = Table(
+ 'article', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', MyHashType())
+ )
+
+ class Category(fixtures.ComparableEntity):
+ pass
+
+ class Article(fixtures.ComparableEntity):
+ pass
+
+ mapper(Category, category)
+ mapper(Article, article, properties={
+ "category": relationship(
+ Category,
+ primaryjoin=orm.foreign(article.c.data) == category.c.data,
+ load_on_pending=load_on_pending
+ )
+ })
+
+ metadata.create_all()
+ sess = Session(autoflush=False)
+ data = {"im": "unhashable"}
+ a1 = Article(id=1, data=data)
+ c1 = Category(id=1, data=data)
+ if load_on_pending:
+ sess.add(c1)
+ else:
+ sess.add_all([c1, a1])
+ sess.flush()
+ if load_on_pending:
+ sess.add(a1)
+ return Category, Article, sess, a1, c1
+
+ def _u_ad_fixture(self, populate_user, dont_use_get=False):
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
@@ -567,9 +621,17 @@ class GetterStateTest(_fixtures.FixtureTest):
self.classes.User)
mapper(User, users, properties={
- 'addresses': relationship(Address, backref='user')
+ 'addresses': relationship(Address, back_populates='user')
+ })
+ mapper(Address, addresses, properties={
+ 'user': relationship(
+ User,
+ primaryjoin=and_(
+ users.c.id == addresses.c.user_id, users.c.id != 27)
+ if dont_use_get else None,
+ back_populates='addresses'
+ )
})
- mapper(Address, addresses)
sess = create_session()
a1 = Address(email_address='a1')
@@ -581,6 +643,47 @@ class GetterStateTest(_fixtures.FixtureTest):
sess.expire_all()
return User, Address, sess, a1
+ def test_no_use_get_params_missing(self):
+ User, Address, sess, a1 = self._u_ad_fixture(False, True)
+
+ def go():
+ eq_(a1.user, None)
+
+ # doesn't emit SQL
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 0
+ )
+
+ @testing.provide_metadata
+ def test_no_use_get_params_not_hashable(self):
+ Category, Article, sess, a1, c1 = \
+ self._unhashable_fixture(self.metadata)
+
+ def go():
+ eq_(a1.category, c1)
+
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 1
+ )
+
+ @testing.provide_metadata
+ def test_no_use_get_params_not_hashable_on_pending(self):
+ Category, Article, sess, a1, c1 = \
+ self._unhashable_fixture(self.metadata, load_on_pending=True)
+
+ def go():
+ eq_(a1.category, c1)
+
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 1
+ )
+
def test_get_empty_passive_return_never_set(self):
User, Address, sess, a1 = self._u_ad_fixture(False)
eq_(
@@ -970,3 +1073,78 @@ class RefersToSelfLazyLoadInterferenceTest(fixtures.MappedTest):
session.query(B).options(
sa.orm.joinedload('parent').joinedload('zc')).all()
+
+class TypeCoerceTest(fixtures.MappedTest, testing.AssertsExecutionResults,):
+ """ORM-level test for [ticket:3531]"""
+
+ # mysql is having a recursion issue in the bind_expression
+ __only_on__ = ('sqlite', 'postgresql')
+
+ class StringAsInt(TypeDecorator):
+ impl = String(50)
+
+ def column_expression(self, col):
+ return sa.cast(col, Integer)
+
+ def bind_expression(self, col):
+ return sa.cast(col, String)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'person', metadata,
+ Column("id", cls.StringAsInt, primary_key=True),
+ )
+ Table(
+ "pets", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("person_id", Integer),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Person(cls.Basic):
+ pass
+
+ class Pet(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Person, cls.tables.person, properties=dict(
+ pets=relationship(
+ cls.classes.Pet, primaryjoin=(
+ orm.foreign(cls.tables.pets.c.person_id) ==
+ sa.cast(
+ sa.type_coerce(cls.tables.person.c.id, Integer),
+ Integer
+ )
+ )
+ )
+ ))
+
+ mapper(cls.classes.Pet, cls.tables.pets)
+
+ def test_lazyload_singlecast(self):
+ Person = self.classes.Person
+ Pet = self.classes.Pet
+
+ s = Session()
+ s.add_all([
+ Person(id=5), Pet(id=1, person_id=5)
+ ])
+ s.commit()
+
+ p1 = s.query(Person).first()
+
+ with self.sql_execution_asserter() as asserter:
+ p1.pets
+
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT pets.id AS pets_id, pets.person_id "
+ "AS pets_person_id FROM pets "
+ "WHERE pets.person_id = CAST(:param_1 AS INTEGER)",
+ [{'param_1': 5}]
+ )
+ )
diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py
index 813d8d17a..471c8665a 100644
--- a/test/orm/test_load_on_fks.py
+++ b/test/orm/test_load_on_fks.py
@@ -301,7 +301,8 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
c2 = Child()
if attach:
- sess._attach(instance_state(c2))
+ state = instance_state(c2)
+ state.session_id = sess.hash_key
if enable_relationship_rel:
sess.enable_relationship_loading(c2)
diff --git a/test/orm/test_loading.py b/test/orm/test_loading.py
index f86477ec2..6f3f6a016 100644
--- a/test/orm/test_loading.py
+++ b/test/orm/test_loading.py
@@ -1,8 +1,11 @@
from . import _fixtures
from sqlalchemy.orm import loading, Session, aliased
-from sqlalchemy.testing.assertions import eq_, assert_raises
+from sqlalchemy.testing.assertions import eq_, \
+ assert_raises, assert_raises_message
from sqlalchemy.util import KeyedTuple
from sqlalchemy.testing import mock
+from sqlalchemy import select
+from sqlalchemy import exc
# class GetFromIdentityTest(_fixtures.FixtureTest):
# class LoadOnIdentTest(_fixtures.FixtureTest):
# class InstanceProcessorTest(_fixture.FixtureTest):
@@ -34,6 +37,19 @@ class InstancesTest(_fixtures.FixtureTest):
)
assert cursor.close.called, "Cursor wasn't closed"
+ def test_row_proc_not_created(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id, User.name)
+ stmt = select([User.id])
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'users.name'",
+ q.from_statement(stmt).all
+ )
+
class MergeResultTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 264b386d4..6845ababb 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.engine import default
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, class_mapper, configure_mappers, reconstructor, \
- validates, aliased, defer, deferred, synonym, attributes, \
+ aliased, deferred, synonym, attributes, \
column_property, composite, dynamic_loader, \
comparable_property, Session
from sqlalchemy.orm.persistence import _sort_states
@@ -19,6 +19,7 @@ from sqlalchemy.testing.assertsql import CompiledSQL
import logging
import logging.handlers
+
class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -26,33 +27,34 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""A backref name may not shadow an existing property name."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users,
- properties={
- 'addresses':relationship(Address, backref='email_address')
- })
+ properties={
+ 'addresses': relationship(Address, backref='email_address')
+ })
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_update_attr_keys(self):
- """test that update()/insert() use the correct key when given InstrumentedAttributes."""
+ """test that update()/insert() use the correct key when given
+ InstrumentedAttributes."""
User, users = self.classes.User, self.tables.users
-
mapper(User, users, properties={
- 'foobar':users.c.name
+ 'foobar': users.c.name
})
- users.insert().values({User.foobar:'name1'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1').execute().fetchall(), [('name1',)])
+ users.insert().values({User.foobar: 'name1'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1').
+ execute().fetchall(), [('name1',)])
- users.update().values({User.foobar:User.foobar + 'foo'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1foo').execute().fetchall(), [('name1foo',)])
+ users.update().values({User.foobar: User.foobar + 'foo'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1foo').
+ execute().fetchall(), [('name1foo',)])
def test_utils(self):
users = self.tables.users
@@ -63,12 +65,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
-
- m = mapper(Foo, users, properties={"addresses":relationship(Address)})
+ m = mapper(Foo, users, properties={"addresses": relationship(Address)})
mapper(Address, addresses)
a1 = aliased(Foo)
@@ -100,14 +102,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
m = mapper(Foo, users)
a1 = aliased(Foo)
- f = Foo()
-
for arg, key, ret in [
(m, "x", Foo.x),
(Foo, "x", Foo.x),
@@ -122,7 +123,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def boom():
raise Exception("it broke")
mapper(User, users, properties={
- 'addresses':relationship(boom)
+ 'addresses': relationship(boom)
})
# test that QueryableAttribute.__str__() doesn't
@@ -137,12 +138,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
Address, addresses, User = (self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User)
+ 'user': relationship(User)
})
try:
@@ -156,8 +156,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"initialize - can't proceed with "
"initialization of other mappers. "
"Original exception was: Class "
- "'test.orm._fixtures.User' is not mapped$"
- , configure_mappers)
+ "'test.orm._fixtures.User' is not mapped$",
+ configure_mappers)
def test_column_prefix(self):
users, User = self.tables.users, self.classes.User
@@ -169,7 +169,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
s = create_session()
u = s.query(User).get(7)
eq_(u._name, 'jack')
- eq_(u._id,7)
+ eq_(u._id, 7)
u2 = s.query(User).filter_by(user_name='jack').one()
assert u is u2
@@ -190,16 +190,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
still triggers a check against all mappers."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
sa.orm.configure_mappers()
assert sa.orm.mapperlib.Mapper._new_mappers is False
m = mapper(Address, addresses, properties={
- 'user': relationship(User, backref="addresses")})
+ 'user': relationship(User, backref="addresses")})
assert m.configured is False
assert sa.orm.mapperlib.Mapper._new_mappers is True
@@ -232,13 +232,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_column_not_present(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
assert_raises_message(sa.exc.ArgumentError,
"not represented in the mapper's table",
- mapper, User, users, properties={'foo'
- : addresses.c.user_id})
+ mapper, User, users,
+ properties={'foo': addresses.c.user_id})
def test_constructor_exc(self):
"""TypeError is raised for illegal constructor args,
@@ -246,10 +246,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, addresses = self.tables.users, self.tables.addresses
-
class Foo(object):
+
def __init__(self):
pass
+
class Bar(object):
pass
@@ -266,13 +267,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
class Foo(object):
+
def __init__(self, id):
self.id = id
m = MetaData()
foo_t = Table('foo', m,
- Column('id', String, primary_key=True)
- )
+ Column('id', String, primary_key=True)
+ )
m = mapper(Foo, foo_t)
+
class DontCompareMeToString(int):
if util.py2k:
def __lt__(self, other):
@@ -292,24 +295,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
[states[4], states[3], states[0], states[1], states[2]]
)
-
def test_props(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m = mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ m = mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
assert User.addresses.property is m.get_property('addresses')
def test_unicode_relationship_backref_names(self):
# test [ticket:2901]
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
@@ -322,56 +324,62 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_prop_1(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- User.addresses.any(Address.email_address=='foo@bar.com')
+ User.addresses.any(Address.email_address == 'foo@bar.com')
def test_configure_on_prop_2(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- eq_(str(User.id == 3), str(users.c.id==3))
+ eq_(str(User.id == 3), str(users.c.id == 3))
def test_configure_on_prop_3(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_deferred_subclass_attribute_instrument(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
configure_mappers()
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_check_descriptor_as_method(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users)
+
class MyClass(User):
+
def foo(self):
pass
m._is_userland_descriptor(MyClass.foo)
@@ -379,7 +387,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_1(self):
User, users = self.classes.User, self.tables.users
- m =mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert list(m.iterate_properties)
assert m.configured
@@ -387,29 +395,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_2(self):
User, users = self.classes.User, self.tables.users
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert m.get_property('name')
assert m.configured
def test_configure_on_get_props_3(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
configure_mappers()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref='addresses')
- })
+ 'user': relationship(User, backref='addresses')
+ })
assert m.get_property('addresses')
def test_info(self):
users = self.tables.users
Address = self.classes.Address
+
class MyComposite(object):
pass
for constructor, args in [
@@ -434,17 +443,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# create specific tables here as we don't want
# users.c.id.info to be pre-initialized
users = Table('u', m, Column('id', Integer, primary_key=True),
- Column('name', String))
+ Column('name', String))
addresses = Table('a', m, Column('id', Integer, primary_key=True),
- Column('name', String),
- Column('user_id', Integer, ForeignKey('u.id')))
+ Column('name', String),
+ Column('user_id', Integer, ForeignKey('u.id')))
Address = self.classes.Address
User = self.classes.User
mapper(User, users, properties={
- "name_lower": column_property(func.lower(users.c.name)),
- "addresses": relationship(Address)
- })
+ "name_lower": column_property(func.lower(users.c.name)),
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
# attr.info goes down to the original Column object
@@ -460,18 +469,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# same for relationships
is_(User.addresses.info, User.addresses.property.info)
-
def test_add_property(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
class User(fixtures.ComparableEntity):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
@@ -503,7 +513,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
m.add_property('addresses', relationship(Address))
m.add_property('uc_name', sa.orm.comparable_property(UCComparator))
m.add_property('uc_name2', sa.orm.comparable_property(
- UCComparator, User.uc_name2))
+ UCComparator, User.uc_name2))
sess = create_session(autocommit=False)
assert sess.query(User).get(7)
@@ -534,7 +544,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref="addresses")
+ 'user': relationship(User, backref="addresses")
})
# configure mappers takes place when User is generated
User()
@@ -545,7 +555,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, User = self.tables.users, self.classes.User
m = mapper(User, users)
- m.add_property('_name',users.c.name)
+ m.add_property('_name', users.c.name)
m.add_property('name', synonym('_name'))
sess = create_session()
@@ -572,8 +582,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
addresses, Address = self.tables.addresses, self.classes.Address
m = mapper(User, users, properties={
- "addresses": relationship(Address)
- })
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
assert_raises_message(
@@ -588,14 +598,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_add_column_prop_deannotate(self):
User, users = self.classes.User, self.tables.users
Address, addresses = self.classes.Address, self.tables.addresses
+
class SubUser(User):
pass
m = mapper(User, users)
m2 = mapper(SubUser, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
m3 = mapper(Address, addresses, properties={
- 'foo':relationship(m2)
+ 'foo': relationship(m2)
})
# add property using annotated User.name,
# needs to be deannotated
@@ -612,7 +623,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"addresses_1.email_address AS "
"addresses_1_email_address, "
"users_1.name || :name_1 AS anon_1 "
- "FROM addresses JOIN (users AS users_1 JOIN addresses AS addresses_1 ON users_1.id = "
+ "FROM addresses JOIN (users AS users_1 JOIN addresses "
+ "AS addresses_1 ON users_1.id = "
"addresses_1.user_id) ON "
"users_1.id = addresses.user_id"
)
@@ -638,20 +650,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert User.y.property.columns[0] is not expr2
assert User.y.property.columns[0].element.\
- _raw_columns[0] is users.c.name
+ _raw_columns[0] is users.c.name
assert User.y.property.columns[0].element.\
- _raw_columns[1] is users.c.id
+ _raw_columns[1] is users.c.id
def test_synonym_replaces_backref(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
assert_calls = []
+
class Address(object):
+
def _get_user(self):
assert_calls.append("get")
return self._user
+
def _set_user(self, user):
assert_calls.append("set")
self._user = user
@@ -659,20 +674,20 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# synonym is created against nonexistent prop
mapper(Address, addresses, properties={
- 'user':synonym('_user')
+ 'user': synonym('_user')
})
sa.orm.configure_mappers()
# later, backref sets up the prop
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='_user')
+ 'addresses': relationship(Address, backref='_user')
})
sess = create_session()
u1 = sess.query(User).get(7)
u2 = sess.query(User).get(8)
# comparaison ops need to work
- a1 = sess.query(Address).filter(Address.user==u1).one()
+ a1 = sess.query(Address).filter(Address.user == u1).one()
eq_(a1.id, 1)
a1.user = u2
assert a1.user is u2
@@ -680,16 +695,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_self_ref_synonym(self):
t = Table('nodes', MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')))
class Node(object):
pass
mapper(Node, t, properties={
- '_children':relationship(Node, backref=backref('_parent', remote_side=t.c.id)),
- 'children':synonym('_children'),
- 'parent':synonym('_parent')
+ '_children': relationship(
+ Node, backref=backref('_parent', remote_side=t.c.id)),
+ 'children': synonym('_children'),
+ 'parent': synonym('_parent')
})
n1 = Node()
@@ -702,13 +720,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_non_primary_identity_class(self):
User = self.classes.User
users, addresses = self.tables.users, self.tables.addresses
+
class AddressUser(User):
pass
m1 = mapper(User, users, polymorphic_identity='user')
m2 = mapper(AddressUser, addresses, inherits=User,
- polymorphic_identity='address', properties={
- 'address_id': addresses.c.id
- })
+ polymorphic_identity='address', properties={
+ 'address_id': addresses.c.id
+ })
m3 = mapper(AddressUser, addresses, non_primary=True)
assert m3._identity_class is m2._identity_class
eq_(
@@ -719,6 +738,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_reassign_polymorphic_identity_warns(self):
User = self.classes.User
users = self.tables.users
+
class MyUser(User):
pass
m1 = mapper(User, users, polymorphic_on=users.c.name,
@@ -730,17 +750,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
MyUser, users, inherits=User, polymorphic_identity='user'
)
-
def test_illegal_non_primary(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses)
mapper(User, users, non_primary=True, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
sa.exc.ArgumentError,
@@ -762,62 +781,90 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, users)
assert_raises_message(sa.exc.InvalidRequestError,
- "Configure a primary mapper first",
- mapper, Sub, addresses, non_primary=True
- )
+ "Configure a primary mapper first",
+ mapper, Sub, addresses, non_primary=True
+ )
def test_prop_filters(self):
t = Table('person', MetaData(),
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('type', String(128)),
Column('name', String(128)),
Column('employee_number', Integer),
Column('boss_id', Integer, ForeignKey('person.id')),
Column('vendor_id', Integer))
- class Person(object): pass
- class Vendor(Person): pass
- class Employee(Person): pass
- class Manager(Employee): pass
- class Hoho(object): pass
- class Lala(object): pass
- class Fub(object):pass
- class Frob(object):pass
+ class Person(object):
+ pass
+
+ class Vendor(Person):
+ pass
+
+ class Employee(Person):
+ pass
+
+ class Manager(Employee):
+ pass
+
+ class Hoho(object):
+ pass
+
+ class Lala(object):
+ pass
+
+ class Fub(object):
+ pass
+
+ class Frob(object):
+ pass
+
class HasDef(object):
+
def name(self):
pass
- class Empty(object):pass
- empty = mapper(Empty, t, properties={'empty_id' : t.c.id},
- include_properties=[])
+ class Empty(object):
+ pass
+
+ mapper(
+ Empty, t, properties={'empty_id': t.c.id},
+ include_properties=[])
p_m = mapper(Person, t, polymorphic_on=t.c.type,
include_properties=('id', 'type', 'name'))
e_m = mapper(Employee, inherits=p_m,
- polymorphic_identity='employee', properties={'boss'
- : relationship(Manager, backref=backref('peon'),
- remote_side=t.c.id)},
+ polymorphic_identity='employee',
+ properties={
+ 'boss': relationship(
+ Manager, backref=backref('peon'),
+ remote_side=t.c.id)},
exclude_properties=('vendor_id', ))
- m_m = mapper(Manager, inherits=e_m, polymorphic_identity='manager',
- include_properties=('id', 'type'))
+ mapper(
+ Manager, inherits=e_m, polymorphic_identity='manager',
+ include_properties=('id', 'type'))
- v_m = mapper(Vendor, inherits=p_m, polymorphic_identity='vendor',
- exclude_properties=('boss_id', 'employee_number'))
- h_m = mapper(Hoho, t, include_properties=('id', 'type', 'name'))
- l_m = mapper(Lala, t, exclude_properties=('vendor_id', 'boss_id'),
- column_prefix="p_")
+ mapper(
+ Vendor, inherits=p_m, polymorphic_identity='vendor',
+ exclude_properties=('boss_id', 'employee_number'))
+ mapper(Hoho, t, include_properties=('id', 'type', 'name'))
+ mapper(
+ Lala, t, exclude_properties=('vendor_id', 'boss_id'),
+ column_prefix="p_")
- hd_m = mapper(HasDef, t, column_prefix="h_")
+ mapper(HasDef, t, column_prefix="h_")
- fb_m = mapper(Fub, t, include_properties=(t.c.id, t.c.type))
- frb_m = mapper(Frob, t, column_prefix='f_',
- exclude_properties=(t.c.boss_id,
- 'employee_number', t.c.vendor_id))
+ mapper(Fub, t, include_properties=(t.c.id, t.c.type))
+ mapper(
+ Frob, t, column_prefix='f_',
+ exclude_properties=(
+ t.c.boss_id,
+ 'employee_number', t.c.vendor_id))
configure_mappers()
@@ -832,13 +879,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(have, want)
assert_props(HasDef, ['h_boss_id', 'h_employee_number', 'h_id',
- 'name', 'h_name', 'h_vendor_id', 'h_type'])
+ 'name', 'h_name', 'h_vendor_id', 'h_type'])
assert_props(Person, ['id', 'name', 'type'])
assert_instrumented(Person, ['id', 'name', 'type'])
assert_props(Employee, ['boss', 'boss_id', 'employee_number',
'id', 'name', 'type'])
- assert_instrumented(Employee,['boss', 'boss_id', 'employee_number',
- 'id', 'name', 'type'])
+ assert_instrumented(Employee, ['boss', 'boss_id', 'employee_number',
+ 'id', 'name', 'type'])
assert_props(Manager, ['boss', 'boss_id', 'employee_number', 'peon',
'id', 'name', 'type'])
@@ -851,7 +898,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_props(Fub, ['id', 'type'])
assert_props(Frob, ['f_id', 'f_type', 'f_name', ])
-
# putting the discriminator column in exclude_properties,
# very weird. As of 0.7.4 this re-maps it.
class Foo(Person):
@@ -869,10 +915,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_prop_filters_defaults(self):
metadata = self.metadata
t = Table('t', metadata,
- Column('id', Integer(), primary_key=True, test_needs_autoincrement=True),
- Column('x', Integer(), nullable=False, server_default='0')
- )
+ Column(
+ 'id', Integer(), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer(), nullable=False, server_default='0')
+ )
t.create()
+
class A(object):
pass
mapper(A, t, include_properties=['id'])
@@ -882,6 +931,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_bool(self):
class NoBoolAllowed(object):
+
def __bool__(self):
raise Exception("nope")
mapper(NoBoolAllowed, self.tables.users)
@@ -894,6 +944,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_eq(self):
class NoEqAllowed(object):
+
def __eq__(self, other):
raise Exception("nope")
@@ -901,7 +952,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(NoEqAllowed, users, properties={
- 'addresses':relationship(Address, backref='user')
+ 'addresses': relationship(Address, backref='user')
})
mapper(Address, addresses)
@@ -919,9 +970,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Test implicit merging of two cols raises."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
usersaddresses = sa.join(users, addresses,
users.c.id == addresses.c.user_id)
@@ -935,14 +985,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
mapper(User, usersaddresses, primary_key=[users.c.id],
- properties={'add_id':addresses.c.id}
+ properties={'add_id': addresses.c.id}
)
l = create_session().query(User).order_by(users.c.id).all()
eq_(l, self.static.user_result[:3])
@@ -951,9 +1000,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
@@ -965,13 +1013,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_mapping_to_join_no_pk(self):
email_bounces, addresses, Address = (self.tables.email_bounces,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
m = mapper(Address,
- addresses.join(email_bounces),
- properties={'id':[addresses.c.id, email_bounces.c.id]}
- )
+ addresses.join(email_bounces),
+ properties={'id': [addresses.c.id, email_bounces.c.id]}
+ )
configure_mappers()
assert addresses in m._pks_by_table
assert email_bounces not in m._pks_by_table
@@ -988,10 +1036,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to an outer join with a nullable composite primary key."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
primary_key=[users.c.id, addresses.c.id],
@@ -1013,13 +1059,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""test the allow_partial_pks=False flag."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
- allow_partial_pks=False,
+ allow_partial_pks=False,
primary_key=[users.c.id, addresses.c.id],
properties=dict(
address_id=addresses.c.id))
@@ -1037,11 +1081,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_scalar_pk_arg(self):
users, Keyword, items, Item, User, keywords = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.classes.Item,
- self.classes.User,
- self.tables.keywords)
+ self.classes.Keyword,
+ self.tables.items,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.keywords)
m1 = mapper(Item, items, primary_key=[items.c.id])
m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
@@ -1051,18 +1095,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert m2.primary_key[0] is keywords.c.id
assert m3.primary_key[0] is users.c.id
-
def test_custom_join(self):
"""select_from totally replace the FROM parameters."""
- users, items, order_items, orders, Item, User, Order = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
+ users, items, order_items, orders, Item, User, Order = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
mapper(Item, items)
@@ -1086,18 +1129,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, order_by=users.c.name.desc())
- assert "order by users.name desc" in str(create_session().query(User).statement).lower()
- assert "order by" not in str(create_session().query(User).order_by(None).statement).lower()
- assert "order by users.name asc" in str(create_session().query(User).order_by(User.name.asc()).statement).lower()
+ assert "order by users.name desc" in \
+ str(create_session().query(User).statement).lower()
+ assert "order by" not in \
+ str(create_session().query(User).order_by(None).statement).lower()
+ assert "order by users.name asc" in \
+ str(create_session().query(User).order_by(
+ User.name.asc()).statement).lower()
eq_(
create_session().query(User).all(),
- [User(id=7, name='jack'), User(id=9, name='fred'), User(id=8, name='ed'), User(id=10, name='chuck')]
+ [User(id=7, name='jack'), User(id=9, name='fred'),
+ User(id=8, name='ed'), User(id=10, name='chuck')]
)
eq_(
create_session().query(User).order_by(User.name).all(),
- [User(id=10, name='chuck'), User(id=8, name='ed'), User(id=9, name='fred'), User(id=7, name='jack')]
+ [User(id=10, name='chuck'), User(id=8, name='ed'),
+ User(id=9, name='fred'), User(id=7, name='jack')]
)
# 'Raises a "expression evaluation not supported" error at prepare time
@@ -1106,9 +1155,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a SELECT statement that has functions in it."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
s = sa.select([users,
(users.c.id * 2).label('concat'),
@@ -1129,29 +1177,29 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User, users = self.classes.User, self.tables.users
-
mapper(User, users)
session = create_session()
q = session.query(User)
eq_(q.count(), 4)
- eq_(q.filter(User.id.in_([8,9])).count(), 2)
- eq_(q.filter(users.c.id.in_([8,9])).count(), 2)
+ eq_(q.filter(User.id.in_([8, 9])).count(), 2)
+ eq_(q.filter(users.c.id.in_([8, 9])).count(), 2)
eq_(session.query(User.id).count(), 4)
eq_(session.query(User.id).filter(User.id.in_((8, 9))).count(), 2)
def test_many_to_many_count(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy='select')))
+ keywords=relationship(Keyword, item_keywords, lazy='select')))
session = create_session()
q = (session.query(Item).
@@ -1164,9 +1212,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Overriding a column raises an error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
def go():
mapper(User, users,
@@ -1179,10 +1227,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""exclude_properties cancels the error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
exclude_properties=['name'],
@@ -1195,9 +1242,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""The column being named elsewhere also cancels the error,"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
properties=dict(
@@ -1206,28 +1253,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_synonym(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
-
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
+
class extendedproperty(property):
attribute = 123
class User(object):
+
def _get_name(self):
assert_col.append(('get', self.name))
return self.name
+
def _set_name(self, name):
assert_col.append(('set', name))
self.name = name
uname = extendedproperty(_get_name, _set_name)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select'),
- uname = synonym('name'),
- adlist = synonym('addresses'),
- adname = synonym('addresses')
+ addresses=relationship(mapper(Address, addresses), lazy='select'),
+ uname=synonym('name'),
+ adlist=synonym('addresses'),
+ adname=synonym('addresses')
))
# ensure the synonym can get at the proxied comparators without
@@ -1251,7 +1300,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
row = sess.query(User.id, User.uname).first()
assert row.uname == row[1]
- u = sess.query(User).filter(User.uname=='jack').one()
+ u = sess.query(User).filter(User.uname == 'jack').one()
fixture = self.static.user_address_result[0].addresses
eq_(u.adlist, fixture)
@@ -1274,25 +1323,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(User.uname.attribute, 123)
def test_synonym_of_synonym(self):
- users, User = (self.tables.users,
- self.classes.User)
+ users, User = (self.tables.users,
+ self.classes.User)
mapper(User, users, properties={
- 'x':synonym('id'),
- 'y':synonym('x')
+ 'x': synonym('id'),
+ 'y': synonym('x')
})
s = Session()
- u = s.query(User).filter(User.y==8).one()
+ u = s.query(User).filter(User.y == 8).one()
eq_(u.y, 8)
-
def test_synonym_column_location(self):
users, User = self.tables.users, self.classes.User
def go():
mapper(User, users, properties={
- 'not_name':synonym('_name', map_column=True)})
+ 'not_name': synonym('_name', map_column=True)})
assert_raises_message(
sa.exc.ArgumentError,
@@ -1301,28 +1349,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
go)
def test_column_synonyms(self):
- """Synonyms which automatically instrument properties, set up aliased column, etc."""
+ """Synonyms which automatically instrument properties,
+ set up aliased column, etc."""
addresses, users, Address = (self.tables.addresses,
- self.tables.users,
- self.classes.Address)
-
-
+ self.tables.users,
+ self.classes.Address)
assert_col = []
+
class User(object):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
name = property(_get_name, _set_name)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'addresses':relationship(Address, lazy='select'),
- 'name':synonym('_name', map_column=True)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address, lazy='select'),
+ 'name': synonym('_name', map_column=True)
})
# test compile
@@ -1369,6 +1419,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
return "method1"
from sqlalchemy.orm.properties import ColumnProperty
+
class UCComparator(ColumnProperty.Comparator):
__hash__ = None
@@ -1388,6 +1439,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def map_(with_explicit_property):
class User(object):
+
@extendedproperty
def uc_name(self):
if self.name is None:
@@ -1398,7 +1450,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
else:
args = (UCComparator,)
mapper(User, users, properties=dict(
- uc_name = sa.orm.comparable_property(*args)))
+ uc_name=sa.orm.comparable_property(*args)))
return User
for User in (map_(True), map_(False)):
@@ -1415,12 +1467,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_raises_message(
AttributeError,
"Neither 'extendedproperty' object nor 'UCComparator' "
- "object associated with User.uc_name has an attribute 'nonexistent'",
+ "object associated with User.uc_name has an attribute "
+ "'nonexistent'",
getattr, User.uc_name, 'nonexistent')
# test compile
assert not isinstance(User.uc_name == 'jack', bool)
- u = q.filter(User.uc_name=='JACK').one()
+ u = q.filter(User.uc_name == 'JACK').one()
assert u.uc_name == "JACK"
assert u not in sess.dirty
@@ -1447,10 +1500,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
# lower case comparison
return func.lower(self.__clause_element__()
- ) == func.lower(other)
+ ) == func.lower(other)
def intersects(self, other):
# non-standard comparator
@@ -1458,7 +1512,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, properties={
'name': sa.orm.column_property(users.c.name,
- comparator_factory=MyComparator)
+ comparator_factory=MyComparator)
})
assert_raises_message(
@@ -1470,39 +1524,41 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
str((User.name == 'ed').compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"lower(users.name) = lower(:lower_1)")
eq_(
str((User.name.intersects('ed')).compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"users.name &= :name_1")
-
def test_reentrant_compile(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
sa.orm.clear_mappers()
+
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
@@ -1513,6 +1569,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
recon = []
class User(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1528,19 +1585,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class A(object):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, A)
recon.append('A')
class B(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, B)
recon.append('B')
class C(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, C)
@@ -1566,7 +1627,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class Base(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1584,15 +1647,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_unmapped_error(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
sa.orm.clear_mappers()
mapper(User, users, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
@@ -1621,9 +1684,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
- })
+ "addresses": relationship(
+ Address,
+ primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
AttributeError,
@@ -1638,10 +1702,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id ==
- addresses.__dict__['wrong'].user_id)
- })
+ "addresses": relationship(Address,
+ primaryjoin=lambda: users.c.id ==
+ addresses.__dict__['wrong'].user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
KeyError,
@@ -1654,6 +1718,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
@@ -1671,7 +1736,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_unmapped_subclass_error_premap(self):
users = self.tables.users
@@ -1697,13 +1762,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_oldstyle_mixin(self):
users = self.tables.users
class OldStyle:
pass
+
class NewStyle(object):
pass
@@ -1717,22 +1783,26 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(B, users)
+
class DocumentTest(fixtures.TestBase):
def test_doc_propagate(self):
metadata = MetaData()
t1 = Table('t1', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', String, doc="data col 2"),
- Column('col4', String, doc="data col 3"),
- Column('col5', String),
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', String, doc="data col 2"),
+ Column('col4', String, doc="data col 3"),
+ Column('col5', String),
+ )
t2 = Table('t2', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', Integer, ForeignKey('t1.col1'), doc="foreign key to t1.col1")
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', Integer, ForeignKey('t1.col1'),
+ doc="foreign key to t1.col1")
+ )
class Foo(object):
pass
@@ -1741,12 +1811,12 @@ class DocumentTest(fixtures.TestBase):
pass
mapper(Foo, t1, properties={
- 'bars':relationship(Bar,
- doc="bar relationship",
- backref=backref('foo',doc='foo relationship')
- ),
- 'foober':column_property(t1.c.col3, doc='alternate data col'),
- 'hoho':synonym("col4", doc="syn of col4")
+ 'bars': relationship(Bar,
+ doc="bar relationship",
+ backref=backref('foo', doc='foo relationship')
+ ),
+ 'foober': column_property(t1.c.col3, doc='alternate data col'),
+ 'hoho': synonym("col4", doc="syn of col4")
})
mapper(Bar, t2)
configure_mappers()
@@ -1759,7 +1829,9 @@ class DocumentTest(fixtures.TestBase):
eq_(Bar.col1.__doc__, "primary key column")
eq_(Bar.foo.__doc__, "foo relationship")
+
class ORMLoggingTest(_fixtures.FixtureTest):
+
def setup(self):
self.buf = logging.handlers.BufferingHandler(100)
for log in [
@@ -1787,18 +1859,19 @@ class ORMLoggingTest(_fixtures.FixtureTest):
for msg in self._current_messages():
assert msg.startswith('(User|%%(%d anon)s) ' % id(tb))
+
class OptionsTest(_fixtures.FixtureTest):
def test_synonym_options(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select',
- order_by=addresses.c.id),
- adlist = synonym('addresses')))
+ addresses=relationship(mapper(Address, addresses), lazy='select',
+ order_by=addresses.c.id),
+ adlist=synonym('addresses')))
def go():
sess = create_session()
@@ -1814,13 +1887,13 @@ class OptionsTest(_fixtures.FixtureTest):
"""A lazy relationship can be upgraded to an eager relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ order_by=addresses.c.id)))
sess = create_session()
l = (sess.query(User).
@@ -1833,9 +1906,9 @@ class OptionsTest(_fixtures.FixtureTest):
def test_eager_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
addresses=relationship(mapper(Address, addresses), lazy='select')))
@@ -1858,12 +1931,12 @@ class OptionsTest(_fixtures.FixtureTest):
def test_lazy_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')))
+ addresses=relationship(mapper(Address, addresses), lazy='joined')))
sess = create_session()
u = (sess.query(User).
@@ -1880,16 +1953,17 @@ class OptionsTest(_fixtures.FixtureTest):
if eager columns are not available"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- lazy='joined', order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ lazy='joined', order_by=addresses.c.id)))
sess = create_session()
# first test straight eager load, 1 statement
+
def go():
l = sess.query(User).order_by(User.id).all()
eq_(l, self.static.user_address_result)
@@ -1902,24 +1976,27 @@ class OptionsTest(_fixtures.FixtureTest):
# (previous users in session fell out of scope and were removed from
# session's identity map)
r = users.select().order_by(users.c.id).execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_address_result)
self.sql_count_(4, go)
def test_eager_degrade_deep(self):
- users, Keyword, items, order_items, orders, Item, User, Address, keywords, item_keywords, Order, addresses = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Address,
- self.tables.keywords,
- self.tables.item_keywords,
- self.classes.Order,
- self.tables.addresses)
+ users, Keyword, items, order_items, orders, \
+ Item, User, Address, keywords, item_keywords, Order, addresses = (
+ self.tables.users,
+ self.classes.Keyword,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Address,
+ self.tables.keywords,
+ self.tables.item_keywords,
+ self.classes.Order,
+ self.tables.addresses)
# test with a deeper set of eager loads. when we first load the three
# users, they will have no addresses or orders. the number of lazy
@@ -1931,18 +2008,18 @@ class OptionsTest(_fixtures.FixtureTest):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy='joined',
- order_by=item_keywords.c.keyword_id)))
+ lazy='joined',
+ order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy='joined',
- order_by=order_items.c.item_id)))
+ order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined',
- order_by=addresses.c.id),
+ order_by=addresses.c.id),
orders=relationship(Order, lazy='joined',
- order_by=orders.c.id)))
+ order_by=orders.c.id)))
sess = create_session()
@@ -1957,6 +2034,7 @@ class OptionsTest(_fixtures.FixtureTest):
# then select just from users. run it into instances.
# then assert the data, which will launch 6 more lazy loads
r = users.select().execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_all_result)
@@ -1966,12 +2044,12 @@ class OptionsTest(_fixtures.FixtureTest):
"""An eager relationship can be upgraded to a lazy relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')
+ addresses=relationship(mapper(Address, addresses), lazy='joined')
))
sess = create_session()
@@ -1984,19 +2062,20 @@ class OptionsTest(_fixtures.FixtureTest):
self.sql_count_(4, go)
def test_option_propagate(self):
- users, items, order_items, Order, Item, User, orders = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.orders)
+ users, items, order_items, Order, Item, User, orders = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
mapper(User, users, properties=dict(
- orders = relationship(Order)
+ orders=relationship(Order)
))
mapper(Order, orders, properties=dict(
- items = relationship(Item, secondary=order_items)
+ items=relationship(Item, secondary=order_items)
))
mapper(Item, items)
@@ -2005,35 +2084,39 @@ class OptionsTest(_fixtures.FixtureTest):
oalias = aliased(Order)
opt1 = sa.orm.joinedload(User.orders, Order.items)
opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
- u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2).first()
+ u1 = sess.query(User).join(oalias, User.orders).\
+ options(opt1, opt2).first()
ustate = attributes.instance_state(u1)
assert opt1 in ustate.load_options
assert opt2 not in ustate.load_options
class DeepOptionsTest(_fixtures.FixtureTest):
+
@classmethod
def setup_mappers(cls):
- users, Keyword, items, order_items, Order, Item, User, keywords, item_keywords, orders = (cls.tables.users,
- cls.classes.Keyword,
- cls.tables.items,
- cls.tables.order_items,
- cls.classes.Order,
- cls.classes.Item,
- cls.classes.User,
- cls.tables.keywords,
- cls.tables.item_keywords,
- cls.tables.orders)
+ users, Keyword, items, order_items, Order, Item, User, \
+ keywords, item_keywords, orders = (
+ cls.tables.users,
+ cls.classes.Keyword,
+ cls.tables.items,
+ cls.tables.order_items,
+ cls.classes.Order,
+ cls.classes.Item,
+ cls.classes.User,
+ cls.tables.keywords,
+ cls.tables.item_keywords,
+ cls.tables.orders)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, item_keywords,
- order_by=item_keywords.c.item_id)))
+ order_by=item_keywords.c.item_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, order_items,
- order_by=items.c.id)))
+ order_by=items.c.id)))
mapper(User, users, order_by=users.c.id, properties=dict(
orders=relationship(Order, order_by=orders.c.id)))
@@ -2045,8 +2128,9 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload nothing.
u = sess.query(User).all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.assert_sql_count(testing.db, go, 3)
def test_deep_options_2(self):
@@ -2054,24 +2138,24 @@ class DeepOptionsTest(_fixtures.FixtureTest):
User = self.classes.User
-
sess = create_session()
l = (sess.query(User).
- options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+ options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
sess = create_session()
l = (sess.query(User).
- options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+ options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
-
def test_deep_options_3(self):
User = self.classes.User
@@ -2083,14 +2167,15 @@ class DeepOptionsTest(_fixtures.FixtureTest):
options(sa.orm.joinedload('orders.items')).
options(sa.orm.joinedload('orders.items.keywords')))
u = q2.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
def test_deep_options_4(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
@@ -2103,25 +2188,31 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload "keywords" on items. it will lazy load "orders", then
# lazy load the "items" on the order, but on "items" it will eager
# load the "keywords"
- q3 = sess.query(User).options(sa.orm.joinedload('orders.items.keywords'))
+ q3 = sess.query(User).options(
+ sa.orm.joinedload('orders.items.keywords'))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
sess = create_session()
q3 = sess.query(User).options(
- sa.orm.joinedload(User.orders, Order.items, Item.keywords))
+ sa.orm.joinedload(User.orders, Order.items, Item.keywords))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
+
class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+
def test_kwarg_accepted(self):
users, Address = self.tables.users, self.classes.Address
class DummyComposite(object):
+
def __init__(self, x, y):
pass
@@ -2151,41 +2242,56 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
- return func.foobar(self.__clause_element__()) == func.foobar(other)
- mapper(User, users, properties={'name':column_property(users.c.name, comparator_factory=MyFactory)})
- self.assert_compile(User.name == 'ed', "foobar(users.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
- self.assert_compile(aliased(User).name == 'ed', "foobar(users_1.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
+ return func.foobar(self.__clause_element__()) == \
+ func.foobar(other)
+ mapper(
+ User, users,
+ properties={
+ 'name': column_property(
+ users.c.name, comparator_factory=MyFactory)})
+ self.assert_compile(
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect()
+ )
+ self.assert_compile(
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_synonym(self):
users, User = self.tables.users, self.classes.User
from sqlalchemy.orm.properties import ColumnProperty
+
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self.__clause_element__()) ==\
- func.foobar(other)
+ func.foobar(other)
mapper(User, users, properties={
- 'name':synonym('_name', map_column=True,
- comparator_factory=MyFactory)
- })
+ 'name': synonym('_name', map_column=True,
+ comparator_factory=MyFactory)
+ })
self.assert_compile(
- User.name == 'ed',
- "foobar(users.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).name == 'ed',
- "foobar(users_1.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_relationship(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
from sqlalchemy.orm.properties import RelationshipProperty
@@ -2194,46 +2300,50 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# primaryjoin/secondaryjoin
class MyFactory(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.user_id) == \
func.foobar(other.id)
class MyFactory2(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.id) == \
func.foobar(other.user_id)
mapper(User, users)
mapper(Address, addresses, properties={
- 'user': relationship(User, comparator_factory=MyFactory,
+ 'user': relationship(
+ User, comparator_factory=MyFactory,
backref=backref("addresses", comparator_factory=MyFactory2)
)
- }
+ }
)
# these are kind of nonsensical tests.
self.assert_compile(Address.user == User(id=5),
- "foobar(addresses.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(addresses.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(User.addresses == Address(id=5, user_id=7),
- "foobar(users.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(users.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(Address).user == User(id=5),
- "foobar(addresses_1.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(Address).user == User(id=5),
+ "foobar(addresses_1.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).addresses == Address(id=5, user_id=7),
- "foobar(users_1.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
-
+ aliased(User).addresses == Address(id=5, user_id=7),
+ "foobar(users_1.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
class SecondaryOptionsTest(fixtures.MappedTest):
- """test that the contains_eager() option doesn't bleed into a secondary load."""
+
+ """test that the contains_eager() option doesn't bleed
+ into a secondary load."""
run_inserts = 'once'
@@ -2242,80 +2352,84 @@ class SecondaryOptionsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("base", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50), nullable=False)
- )
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50), nullable=False)
+ )
Table("child1", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('child2id', Integer, ForeignKey('child2.id'), nullable=False)
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column(
+ 'child2id', Integer, ForeignKey('child2.id'), nullable=False)
+ )
Table("child2", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
Table('related', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
class Base(cls.Comparable):
pass
+
class Child1(Base):
pass
+
class Child2(Base):
pass
+
class Related(cls.Comparable):
pass
mapper(Base, base, polymorphic_on=base.c.type, properties={
- 'related':relationship(Related, uselist=False)
+ 'related': relationship(Related, uselist=False)
})
mapper(Child1, child1, inherits=Base,
- polymorphic_identity='child1',
- properties={
- 'child2':relationship(Child2,
- primaryjoin=child1.c.child2id==base.c.id,
- foreign_keys=child1.c.child2id)
- })
+ polymorphic_identity='child1',
+ properties={
+ 'child2': relationship(Child2,
+ primaryjoin=child1.c.child2id == base.c.id,
+ foreign_keys=child1.c.child2id)
+ })
mapper(Child2, child2, inherits=Base, polymorphic_identity='child2')
mapper(Related, related)
@classmethod
def insert_data(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
base.insert().execute([
- {'id':1, 'type':'child1'},
- {'id':2, 'type':'child1'},
- {'id':3, 'type':'child1'},
- {'id':4, 'type':'child2'},
- {'id':5, 'type':'child2'},
- {'id':6, 'type':'child2'},
+ {'id': 1, 'type': 'child1'},
+ {'id': 2, 'type': 'child1'},
+ {'id': 3, 'type': 'child1'},
+ {'id': 4, 'type': 'child2'},
+ {'id': 5, 'type': 'child2'},
+ {'id': 6, 'type': 'child2'},
])
child2.insert().execute([
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
child1.insert().execute([
- {'id':1, 'child2id':4},
- {'id':2, 'child2id':5},
- {'id':3, 'child2id':6},
+ {'id': 1, 'child2id': 4},
+ {'id': 2, 'child2id': 5},
+ {'id': 3, 'child2id': 6},
])
related.insert().execute([
- {'id':1},
- {'id':2},
- {'id':3},
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 1},
+ {'id': 2},
+ {'id': 3},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
def test_contains_eager(self):
@@ -2324,9 +2438,9 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
child1s = sess.query(Child1).\
- join(Child1.related).\
- options(sa.orm.contains_eager(Child1.related)).\
- order_by(Child1.id)
+ join(Child1.related).\
+ options(sa.orm.contains_eager(Child1.related)).\
+ order_by(Child1.id)
def go():
eq_(
@@ -2345,10 +2459,11 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
"FROM base JOIN child2 ON base.id = child2.id "
"WHERE base.id = :param_1",
- {'param_1':4}
+ {'param_1': 4}
)
)
@@ -2357,12 +2472,15 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.related)).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 1)
@@ -2372,30 +2490,32 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
- "FROM base JOIN child2 ON base.id = child2.id WHERE base.id = :param_1",
-
-# joinedload- this shouldn't happen
-# "SELECT base.id AS base_id, child2.id AS child2_id, base.type AS base_type, "
-# "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
-# "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
+ "FROM base JOIN child2 ON base.id = child2.id "
+ "WHERE base.id = :param_1",
+
+ {'param_1': 4}
)
)
def test_joinedload_on_same(self):
Child1, Child2, Related = (self.classes.Child1,
- self.classes.Child2,
- self.classes.Related)
+ self.classes.Child2,
+ self.classes.Related)
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.child2, Child2.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.child2, Child2.related)
+ ).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 4)
@@ -2406,32 +2526,43 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type, "
- "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
- "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type, "
+ "related_1.id AS related_1_id FROM base JOIN child2 "
+ "ON base.id = child2.id "
+ "LEFT OUTER JOIN related AS related_1 "
+ "ON base.id = related_1.id WHERE base.id = :param_1",
+ {'param_1': 4}
)
)
class DeferredPopulationTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("thing", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(20)))
Table("human", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("thing_id", Integer, ForeignKey("thing.id")),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("thing_id", Integer, ForeignKey("thing.id")),
+ Column("name", String(20)))
@classmethod
def setup_mappers(cls):
thing, human = cls.tables.thing, cls.tables.human
- class Human(cls.Basic): pass
- class Thing(cls.Basic): pass
+ class Human(cls.Basic):
+ pass
+
+ class Thing(cls.Basic):
+ pass
mapper(Human, human, properties={"thing": relationship(Thing)})
mapper(Thing, thing, properties={"name": deferred(thing.c.name)})
@@ -2462,7 +2593,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2471,7 +2602,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2479,7 +2610,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2488,7 +2620,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2496,7 +2629,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2505,88 +2639,119 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
-
-
class NoLoadTest(_fixtures.FixtureTest):
run_inserts = 'once'
run_deletes = None
- def test_basic(self):
- """A basic one-to-many lazy load"""
+ def test_o2m_noload(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m)
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.assert_sql_count(testing.db, go, 1)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [])},
- )
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [])},
+ )
- def test_options(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ def test_upgrade_o2m_noload_lazyload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m).options(sa.orm.lazyload('addresses'))
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.sql_count_(2, go)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [{'id' : 1}])},
- )
-
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [{'id': 1}])},
+ )
+ def test_m2o_noload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+ mapper(Address, addresses, properties={
+ 'user': relationship(User)
+ })
+ mapper(User, users)
+ s = Session()
+ a1 = s.query(Address).filter_by(id=1).options(
+ sa.orm.noload('user')).first()
+ def go():
+ eq_(a1.user, None)
+ self.sql_count_(0, go)
class RequirementsTest(fixtures.MappedTest):
+
"""Tests the contract for user classes."""
@classmethod
def define_tables(cls, metadata):
Table('ht1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('ht1_id', Integer, ForeignKey('ht1.id')),
Column('value', String(10)))
Table('ht3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht4', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht3_id', Integer, ForeignKey('ht3.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht3_id', Integer, ForeignKey('ht3.id'),
+ primary_key=True))
Table('ht5', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True))
Table('ht6', metadata,
- Column('ht1a_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht1b_id', Integer, ForeignKey('ht1.id'), primary_key=True),
+ Column('ht1a_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht1b_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
Column('value', String(10)))
if util.py2k:
@@ -2604,16 +2769,21 @@ class RequirementsTest(fixtures.MappedTest):
pass
# TODO: is weakref support detectable without an instance?
- #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
+ # self.assertRaises(
+ # sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
class _ValueBase(object):
+
def __init__(self, value='abc', id=None):
self.id = id
self.value = value
+
def __bool__(self):
return False
+
def __hash__(self):
return hash(self.value)
+
def __eq__(self, other):
if isinstance(other, type(self)):
return self.value == other.value
@@ -2630,19 +2800,21 @@ class RequirementsTest(fixtures.MappedTest):
"""
ht6, ht5, ht4, ht3, ht2, ht1 = (self.tables.ht6,
- self.tables.ht5,
- self.tables.ht4,
- self.tables.ht3,
- self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht5,
+ self.tables.ht4,
+ self.tables.ht3,
+ self.tables.ht2,
+ self.tables.ht1)
class H1(self._ValueBase):
pass
+
class H2(self._ValueBase):
pass
+
class H3(self._ValueBase):
pass
+
class H6(self._ValueBase):
pass
@@ -2651,10 +2823,10 @@ class RequirementsTest(fixtures.MappedTest):
'h3s': relationship(H3, secondary=ht4, backref='h1s'),
'h1s': relationship(H1, secondary=ht5, backref='parent_h1'),
't6a': relationship(H6, backref='h1a',
- primaryjoin=ht1.c.id==ht6.c.ht1a_id),
+ primaryjoin=ht1.c.id == ht6.c.ht1a_id),
't6b': relationship(H6, backref='h1b',
- primaryjoin=ht1.c.id==ht6.c.ht1b_id),
- })
+ primaryjoin=ht1.c.id == ht6.c.ht1b_id),
+ })
mapper(H2, ht2)
mapper(H3, ht3)
mapper(H6, ht6)
@@ -2709,18 +2881,19 @@ class RequirementsTest(fixtures.MappedTest):
sa.orm.joinedload_all('h3s.h1s')).all()
eq_(len(h1s), 5)
-
def test_composite_results(self):
ht2, ht1 = (self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht1)
class H1(self._ValueBase):
+
def __init__(self, value, id, h2s):
self.value = value
self.id = id
self.h2s = h2s
+
class H2(self._ValueBase):
+
def __init__(self, value, id):
self.value = value
self.id = id
@@ -2745,8 +2918,8 @@ class RequirementsTest(fixtures.MappedTest):
s.commit()
eq_(
[(h1.value, h1.id, h2.value, h2.id)
- for h1, h2 in
- s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
+ for h1, h2 in
+ s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
[
('abc', 1, 'abc', 1),
('abc', 1, 'def', 2),
@@ -2761,6 +2934,7 @@ class RequirementsTest(fixtures.MappedTest):
ht1 = self.tables.ht1
class H1(object):
+
def __len__(self):
return len(self.get_value())
@@ -2769,6 +2943,7 @@ class RequirementsTest(fixtures.MappedTest):
return self.value
class H2(object):
+
def __bool__(self):
return bool(self.get_value())
@@ -2781,19 +2956,21 @@ class RequirementsTest(fixtures.MappedTest):
h1 = H1()
h1.value = "Asdf"
- h1.value = "asdf asdf" # ding
+ h1.value = "asdf asdf" # ding
h2 = H2()
h2.value = "Asdf"
- h2.value = "asdf asdf" # ding
+ h2.value = "asdf asdf" # ding
+
class IsUserlandTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, primary_key=True),
- Column('someprop', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('someprop', Integer)
+ )
def _test(self, value, instancelevel=None):
class Foo(object):
@@ -2842,17 +3019,20 @@ class IsUserlandTest(fixtures.MappedTest):
return "hi"
self._test(property(somefunc), "hi")
+
class MagicNamesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('cartographers', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('alias', String(50)),
Column('quip', String(100)))
Table('maps', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('cart_id', Integer,
ForeignKey('cartographers.id')),
Column('state', String(2)),
@@ -2868,9 +3048,9 @@ class MagicNamesTest(fixtures.MappedTest):
def test_mappish(self):
maps, Cartographer, cartographers, Map = (self.tables.maps,
- self.classes.Cartographer,
- self.tables.cartographers,
- self.classes.Map)
+ self.classes.Cartographer,
+ self.tables.cartographers,
+ self.classes.Map)
mapper(Cartographer, cartographers, properties=dict(
query=cartographers.c.quip))
@@ -2879,7 +3059,7 @@ class MagicNamesTest(fixtures.MappedTest):
c = Cartographer(name='Lenny', alias='The Dude',
query='Where be dragons?')
- m = Map(state='AK', mapper=c)
+ Map(state='AK', mapper=c)
sess = create_session()
sess.add(c)
@@ -2889,16 +3069,18 @@ class MagicNamesTest(fixtures.MappedTest):
for C, M in ((Cartographer, Map),
(sa.orm.aliased(Cartographer), sa.orm.aliased(Map))):
c1 = (sess.query(C).
- filter(C.alias=='The Dude').
- filter(C.query=='Where be dragons?')).one()
- m1 = sess.query(M).filter(M.mapper==c1).one()
+ filter(C.alias == 'The Dude').
+ filter(C.query == 'Where be dragons?')).one()
+ sess.query(M).filter(M.mapper == c1).one()
def test_direct_stateish(self):
for reserved in (sa.orm.instrumentation.ClassManager.STATE_ATTR,
sa.orm.instrumentation.ClassManager.MANAGER_ATTR):
t = Table('t', sa.MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column(reserved, Integer))
+
class T(object):
pass
assert_raises_message(
@@ -2920,6 +3102,4 @@ class MagicNamesTest(fixtures.MappedTest):
('requested attribute name conflicts with '
'instrumentation attribute of the same name'),
mapper, M, maps, properties={
- reserved: maps.c.state})
-
-
+ reserved: maps.c.state})
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index a52274896..f69b07fe8 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -6,7 +6,7 @@ from sqlalchemy import testing
from sqlalchemy.util import OrderedSet
from sqlalchemy.orm import mapper, relationship, create_session, \
PropComparator, synonym, comparable_property, sessionmaker, \
- attributes, Session, backref, configure_mappers
+ attributes, Session, backref, configure_mappers, foreign
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.orm.interfaces import MapperOption
from sqlalchemy.testing import eq_, ne_
@@ -451,6 +451,55 @@ class MergeTest(_fixtures.FixtureTest):
eq_(u2.addresses[1].email_address, 'afafds')
eq_(load.called, 21)
+ def test_dont_send_neverset_to_get(self):
+ # test issue #3647
+ CompositePk, composite_pk_table = (
+ self.classes.CompositePk, self.tables.composite_pk_table
+ )
+ mapper(CompositePk, composite_pk_table)
+ cp1 = CompositePk(j=1, k=1)
+
+ sess = Session()
+
+ rec = []
+
+ def go():
+ rec.append(sess.merge(cp1))
+ self.assert_sql_count(testing.db, go, 0)
+ rec[0].i = 5
+ sess.commit()
+ eq_(rec[0].i, 5)
+
+ def test_dont_send_neverset_to_get_w_relationship(self):
+ # test issue #3647
+ CompositePk, composite_pk_table = (
+ self.classes.CompositePk, self.tables.composite_pk_table
+ )
+ User, users = (
+ self.classes.User, self.tables.users
+ )
+ mapper(User, users, properties={
+ 'elements': relationship(
+ CompositePk,
+ primaryjoin=users.c.id == foreign(composite_pk_table.c.i))
+ })
+ mapper(CompositePk, composite_pk_table)
+
+ u1 = User(id=5, name='some user')
+ cp1 = CompositePk(j=1, k=1)
+ u1.elements.append(cp1)
+ sess = Session()
+
+ rec = []
+
+ def go():
+ rec.append(sess.merge(u1))
+ self.assert_sql_count(testing.db, go, 1)
+ u2 = rec[0]
+ sess.commit()
+ eq_(u2.elements[0].i, 5)
+ eq_(u2.id, 5)
+
def test_no_relationship_cascade(self):
"""test that merge doesn't interfere with a relationship()
target that specifically doesn't include 'merge' cascade.
@@ -1102,6 +1151,101 @@ class MergeTest(_fixtures.FixtureTest):
eq_(ustate.load_path.path, (umapper, ))
eq_(ustate.load_options, set([opt2]))
+ def test_resolve_conflicts_pending_doesnt_interfere_no_ident(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(email_address='a')),
+ Order(description='o2', address=Address(email_address='b')),
+ Order(description='o3', address=Address(email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address.email_address).order_by(
+ Address.email_address).all(),
+ [('a', ), ('b', ), ('c', )]
+ )
+
+ def test_resolve_conflicts_pending(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(id=1, email_address='a')),
+ Order(description='o2', address=Address(id=1, email_address='b')),
+ Order(description='o3', address=Address(id=1, email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address).one(),
+ Address(id=1, email_address='c')
+ )
+
+ def test_resolve_conflicts_persistent(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ sess = Session()
+ sess.add(Address(id=1, email_address='z'))
+ sess.commit()
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(id=1, email_address='a')),
+ Order(description='o2', address=Address(id=1, email_address='b')),
+ Order(description='o3', address=Address(id=1, email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address).one(),
+ Address(id=1, email_address='c')
+ )
+
class M2ONoUseGetLoadingTest(fixtures.MappedTest):
"""Merge a one-to-many. The many-to-one on the other side is set up
diff --git a/test/orm/test_options.py b/test/orm/test_options.py
index 1c1a797a6..e7b750cf4 100644
--- a/test/orm/test_options.py
+++ b/test/orm/test_options.py
@@ -2,12 +2,15 @@ from sqlalchemy import inspect
from sqlalchemy.orm import attributes, mapper, relationship, backref, \
configure_mappers, create_session, synonym, Session, class_mapper, \
aliased, column_property, joinedload_all, joinedload, Query,\
- util as orm_util, Load
+ util as orm_util, Load, defer
+from sqlalchemy.orm.query import QueryContext
+from sqlalchemy.orm import strategy_options
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing.assertions import eq_, assert_raises_message
from test.orm import _fixtures
+
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -17,6 +20,7 @@ class QueryTest(_fixtures.FixtureTest):
def setup_mappers(cls):
cls._setup_stock_mapping()
+
class PathTest(object):
def _make_path(self, path):
r = []
@@ -46,8 +50,18 @@ class PathTest(object):
set([self._make_path(p) for p in paths])
)
+
class LoadTest(PathTest, QueryTest):
+ def test_str(self):
+ User = self.classes.User
+ l = Load(User)
+ l.strategy = (('deferred', False), ('instrument', True))
+ eq_(
+ str(l),
+ "Load(strategy=(('deferred', False), ('instrument', True)))"
+ )
+
def test_gen_path_attr_entity(self):
User = self.classes.User
Address = self.classes.Address
@@ -150,11 +164,11 @@ class LoadTest(PathTest, QueryTest):
)
+
+
class OptionsTest(PathTest, QueryTest):
def _option_fixture(self, *arg):
- from sqlalchemy.orm import strategy_options
-
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.joinedload, arg, True, {})
@@ -758,3 +772,121 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
create_session().query(column).options,
joinedload(eager_option))
+
+class LocalOptsTest(PathTest, QueryTest):
+ @classmethod
+ def setup_class(cls):
+ super(LocalOptsTest, cls).setup_class()
+
+ @strategy_options.loader_option()
+ def some_col_opt_only(loadopt, key, opts):
+ return loadopt.set_column_strategy(
+ (key, ),
+ None,
+ opts,
+ opts_only=True
+ )
+
+ @strategy_options.loader_option()
+ def some_col_opt_strategy(loadopt, key, opts):
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": True, "instrument": True},
+ opts
+ )
+
+ cls.some_col_opt_only = some_col_opt_only
+ cls.some_col_opt_strategy = some_col_opt_strategy
+
+ def _assert_attrs(self, opts, expected):
+ User = self.classes.User
+
+ query = create_session().query(User)
+ attr = {}
+
+ for opt in opts:
+ if isinstance(opt, strategy_options._UnboundLoad):
+ for tb in opt._to_bind:
+ tb._bind_loader(query, attr, False)
+ else:
+ attr.update(opt.context)
+
+ key = (
+ 'loader',
+ tuple(inspect(User)._path_registry[User.name.property]))
+ eq_(
+ attr[key].local_opts,
+ expected
+ )
+
+ def test_single_opt_only(self):
+ opt = strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ )
+ self._assert_attrs([opt], {"foo": "bar"})
+
+ def test_unbound_multiple_opt_only(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_multiple_opt_only(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ).some_col_opt_only(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_strat_opt_recvs_from_optonly(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ).some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_unbound_strat_opt_recvs_from_optonly(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_unbound_opt_only_adds_to_strat(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_opt_only_adds_to_strat(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ ).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 84ebf393e..cdc4ac2c2 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1,7 +1,7 @@
from sqlalchemy import (
testing, null, exists, text, union, literal, literal_column, func, between,
Unicode, desc, and_, bindparam, select, distinct, or_, collate, insert,
- Integer, String, Boolean, exc as sa_exc, util, cast)
+ Integer, String, Boolean, exc as sa_exc, util, cast, MetaData)
from sqlalchemy.sql import operators, expression
from sqlalchemy import column, table
from sqlalchemy.engine import default
@@ -13,7 +13,8 @@ from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as sa
from sqlalchemy.testing.assertions import (
- eq_, assert_raises, assert_raises_message, expect_warnings)
+ eq_, assert_raises, assert_raises_message, expect_warnings,
+ eq_ignore_whitespace)
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, assert_warnings
from test.orm import _fixtures
from sqlalchemy.orm.util import join, with_parent
@@ -69,27 +70,29 @@ class RowTupleTest(QueryTest):
mapper(Address, addresses)
sess = create_session()
user_alias = aliased(User)
+ user_alias_id_label = user_alias.id.label('foo')
address_alias = aliased(Address, name='aalias')
fn = func.count(User.id)
name_label = User.name.label('uname')
bundle = Bundle('b1', User.id, User.name)
+ cte = sess.query(User.id).cte()
for q, asserted in [
(
sess.query(User),
[
{
'name': 'User', 'type': User, 'aliased': False,
- 'expr': User}]
+ 'expr': User, 'entity': User}]
),
(
sess.query(User.id, User),
[
{
'name': 'id', 'type': users.c.id.type,
- 'aliased': False, 'expr': User.id},
+ 'aliased': False, 'expr': User.id, 'entity': User},
{
'name': 'User', 'type': User, 'aliased': False,
- 'expr': User}
+ 'expr': User, 'entity': User}
]
),
(
@@ -97,10 +100,28 @@ class RowTupleTest(QueryTest):
[
{
'name': 'id', 'type': users.c.id.type,
- 'aliased': False, 'expr': User.id},
+ 'aliased': False, 'expr': User.id, 'entity': User},
{
'name': None, 'type': User, 'aliased': True,
- 'expr': user_alias}
+ 'expr': user_alias, 'entity': user_alias}
+ ]
+ ),
+ (
+ sess.query(user_alias.id),
+ [
+ {
+ 'name': 'id', 'type': users.c.id.type,
+ 'aliased': True, 'expr': user_alias.id,
+ 'entity': user_alias},
+ ]
+ ),
+ (
+ sess.query(user_alias_id_label),
+ [
+ {
+ 'name': 'foo', 'type': users.c.id.type,
+ 'aliased': True, 'expr': user_alias_id_label,
+ 'entity': user_alias},
]
),
(
@@ -108,7 +129,7 @@ class RowTupleTest(QueryTest):
[
{
'name': 'aalias', 'type': Address, 'aliased': True,
- 'expr': address_alias}
+ 'expr': address_alias, 'entity': address_alias}
]
),
(
@@ -116,19 +137,48 @@ class RowTupleTest(QueryTest):
[
{
'name': 'uname', 'type': users.c.name.type,
- 'aliased': False, 'expr': name_label},
+ 'aliased': False, 'expr': name_label, 'entity': User},
{
'name': None, 'type': fn.type, 'aliased': False,
- 'expr': fn},
+ 'expr': fn, 'entity': User},
]
),
(
- sess.query(bundle),
+ sess.query(cte),
+ [
+ {
+ 'aliased': False,
+ 'expr': cte.c.id, 'type': cte.c.id.type,
+ 'name': 'id', 'entity': None
+ }]
+ ),
+ (
+ sess.query(users),
[
{'aliased': False,
- 'expr': bundle,
- 'type': Bundle,
- 'name': 'b1'}
+ 'expr': users.c.id, 'type': users.c.id.type,
+ 'name': 'id', 'entity': None},
+ {'aliased': False,
+ 'expr': users.c.name, 'type': users.c.name.type,
+ 'name': 'name', 'entity': None}
+ ]
+ ),
+ (
+ sess.query(users.c.name),
+ [{
+ "name": "name", "type": users.c.name.type,
+ "aliased": False, "expr": users.c.name, "entity": None
+ }]
+ ),
+ (
+ sess.query(bundle),
+ [
+ {
+ 'aliased': False,
+ 'expr': bundle,
+ 'type': Bundle,
+ 'name': 'b1', 'entity': User
+ }
]
)
]:
@@ -161,6 +211,69 @@ class RowTupleTest(QueryTest):
)
+class BindSensitiveStringifyTest(fixtures.TestBase):
+ def _fixture(self, bind_to=None):
+ # building a totally separate metadata /mapping here
+ # because we need to control if the MetaData is bound or not
+
+ class User(object):
+ pass
+
+ m = MetaData(bind=bind_to)
+ user_table = Table(
+ 'users', m,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)))
+
+ mapper(User, user_table)
+ return User
+
+ def _dialect_fixture(self):
+ class MyDialect(default.DefaultDialect):
+ default_paramstyle = 'qmark'
+
+ from sqlalchemy.engine import base
+ return base.Engine(mock.Mock(), MyDialect(), mock.Mock())
+
+ def _test(
+ self, bound_metadata, bound_session,
+ session_present, expect_bound):
+ if bound_metadata or bound_session:
+ eng = self._dialect_fixture()
+ else:
+ eng = None
+
+ User = self._fixture(bind_to=eng if bound_metadata else None)
+
+ s = Session(eng if bound_session else None)
+ q = s.query(User).filter(User.id == 7)
+ if not session_present:
+ q = q.with_session(None)
+
+ eq_ignore_whitespace(
+ str(q),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = ?" if expect_bound else
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :id_1"
+ )
+
+ def test_query_unbound_metadata_bound_session(self):
+ self._test(False, True, True, True)
+
+ def test_query_bound_metadata_unbound_session(self):
+ self._test(True, False, True, True)
+
+ def test_query_unbound_metadata_no_session(self):
+ self._test(False, False, False, False)
+
+ def test_query_unbound_metadata_unbound_session(self):
+ self._test(False, False, True, False)
+
+ def test_query_bound_metadata_bound_session(self):
+ self._test(True, True, True, True)
+
+
class RawSelectTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -371,7 +484,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
self.assert_compile(
select([Foo]).where(Foo.foob == 'somename').order_by(Foo.foob),
"SELECT users.id, users.name FROM users "
- "WHERE coalesce(users.name) = :coalesce_1 "
+ "WHERE coalesce(users.name) = :param_1 "
"ORDER BY coalesce(users.name)"
)
@@ -530,8 +643,7 @@ class GetTest(QueryTest):
table = Table(
'unicode_data', metadata,
Column(
- 'id', Unicode(40), primary_key=True,
- test_needs_autoincrement=True),
+ 'id', Unicode(40), primary_key=True),
Column('data', Unicode(40)))
metadata.create_all()
ustring = util.b('petit voix m\xe2\x80\x99a').decode('utf-8')
@@ -693,39 +805,6 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
text("select * from table"))
assert_raises(sa_exc.InvalidRequestError, q.with_polymorphic, User)
- def test_cancel_order_by(self):
- User = self.classes.User
-
- s = create_session()
-
- q = s.query(User).order_by(User.id)
- self.assert_compile(
- q,
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users ORDER BY users.id",
- use_default_dialect=True)
-
- assert_raises(
- sa_exc.InvalidRequestError, q._no_select_modifiers, "foo")
-
- q = q.order_by(None)
- self.assert_compile(
- q,
- "SELECT users.id AS users_id, users.name AS users_name FROM users",
- use_default_dialect=True)
-
- assert_raises(
- sa_exc.InvalidRequestError, q._no_select_modifiers, "foo")
-
- q = q.order_by(False)
- self.assert_compile(
- q,
- "SELECT users.id AS users_id, users.name AS users_name FROM users",
- use_default_dialect=True)
-
- # after False was set, this should pass
- q._no_select_modifiers("foo")
-
def test_mapper_zero(self):
User, Address = self.classes.User, self.classes.Address
@@ -760,13 +839,49 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
meth, q, *arg, **kw
)
+ def test_illegal_coercions(self):
+ User = self.classes.User
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, User
+ )
+
+ ua = aliased(User)
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, ua
+ )
+
+ s = Session()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == User)
+ )
+
+ u1 = User()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, u1
+ )
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == u1)
+ )
+
class OperatorTest(QueryTest, AssertsCompiledSQL):
"""test sql.Comparator implementation for MapperProperties"""
__dialect__ = 'default'
- def _test(self, clause, expected, entity=None):
+ def _test(self, clause, expected, entity=None, checkparams=None):
dialect = default.DefaultDialect()
if entity is not None:
# specify a lead entity, so that when we are testing
@@ -778,9 +893,11 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
lead = context.statement.compile(dialect=dialect)
expected = (str(lead) + " WHERE " + expected).replace("\n", "")
clause = sess.query(entity).filter(clause)
- self.assert_compile(clause, expected)
+ self.assert_compile(clause, expected, checkparams=checkparams)
- def _test_filter_aliases(self, clause, expected, from_, onclause):
+ def _test_filter_aliases(
+ self,
+ clause, expected, from_, onclause, checkparams=None):
dialect = default.DefaultDialect()
sess = Session()
lead = sess.query(from_).join(onclause, aliased=True)
@@ -790,7 +907,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
lead = context.statement.compile(dialect=dialect)
expected = (str(lead) + " WHERE " + expected).replace("\n", "")
- self.assert_compile(full, expected)
+ self.assert_compile(full, expected, checkparams=checkparams)
def test_arithmetic(self):
User = self.classes.User
@@ -957,65 +1074,126 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
def test_m2o_compare_instance(self):
User, Address = self.classes.User, self.classes.Address
- u7 = User(id=7)
+ u7 = User(id=5)
attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
+ u7.id = 7
self._test(Address.user == u7, ":param_1 = addresses.user_id")
def test_m2o_compare_instance_negated(self):
User, Address = self.classes.User, self.classes.Address
- u7 = User(id=7)
+ u7 = User(id=5)
attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
+ u7.id = 7
self._test(
Address.user != u7,
- "addresses.user_id != :user_id_1 OR addresses.user_id IS NULL")
+ "addresses.user_id != :user_id_1 OR addresses.user_id IS NULL",
+ checkparams={'user_id_1': 7})
def test_m2o_compare_instance_orm_adapt(self):
User, Address = self.classes.User, self.classes.Address
- u7 = User(id=7)
+ u7 = User(id=5)
attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
+ u7.id = 7
self._test_filter_aliases(
Address.user == u7,
- ":param_1 = addresses_1.user_id", User, User.addresses
+ ":param_1 = addresses_1.user_id", User, User.addresses,
+ checkparams={'param_1': 7}
)
+ def test_m2o_compare_instance_negated_warn_on_none(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ u7_transient = User(id=None)
+
+ with expect_warnings("Got None for value of column users.id; "):
+ self._test_filter_aliases(
+ Address.user != u7_transient,
+ "addresses_1.user_id != :user_id_1 "
+ "OR addresses_1.user_id IS NULL",
+ User, User.addresses,
+ checkparams={'user_id_1': None}
+ )
+
def test_m2o_compare_instance_negated_orm_adapt(self):
User, Address = self.classes.User, self.classes.Address
- u7 = User(id=7)
+ u7 = User(id=5)
attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
+ u7.id = 7
+
+ u7_transient = User(id=7)
self._test_filter_aliases(
Address.user != u7,
"addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL",
- User, User.addresses
+ User, User.addresses,
+ checkparams={'user_id_1': 7}
)
self._test_filter_aliases(
~(Address.user == u7), ":param_1 != addresses_1.user_id",
- User, User.addresses
+ User, User.addresses,
+ checkparams={'param_1': 7}
)
self._test_filter_aliases(
~(Address.user != u7),
"NOT (addresses_1.user_id != :user_id_1 "
- "OR addresses_1.user_id IS NULL)", User, User.addresses
+ "OR addresses_1.user_id IS NULL)", User, User.addresses,
+ checkparams={'user_id_1': 7}
+ )
+
+ self._test_filter_aliases(
+ Address.user != u7_transient,
+ "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL",
+ User, User.addresses,
+ checkparams={'user_id_1': 7}
+ )
+
+ self._test_filter_aliases(
+ ~(Address.user == u7_transient), ":param_1 != addresses_1.user_id",
+ User, User.addresses,
+ checkparams={'param_1': 7}
+ )
+
+ self._test_filter_aliases(
+ ~(Address.user != u7_transient),
+ "NOT (addresses_1.user_id != :user_id_1 "
+ "OR addresses_1.user_id IS NULL)", User, User.addresses,
+ checkparams={'user_id_1': 7}
)
def test_m2o_compare_instance_aliased(self):
User, Address = self.classes.User, self.classes.Address
- u7 = User(id=7)
+ u7 = User(id=5)
attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7))
+ u7.id = 7
+
+ u7_transient = User(id=7)
a1 = aliased(Address)
self._test(
a1.user == u7,
- ":param_1 = addresses_1.user_id")
+ ":param_1 = addresses_1.user_id",
+ checkparams={'param_1': 7})
self._test(
a1.user != u7,
- "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL")
+ "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL",
+ checkparams={'user_id_1': 7})
+
+ a1 = aliased(Address)
+ self._test(
+ a1.user == u7_transient,
+ ":param_1 = addresses_1.user_id",
+ checkparams={'param_1': 7})
+
+ self._test(
+ a1.user != u7_transient,
+ "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL",
+ checkparams={'user_id_1': 7})
def test_selfref_relationship(self):
@@ -1028,7 +1206,8 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
Node.children.any(Node.data == 'n1'),
"EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE "
"nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)",
- entity=Node
+ entity=Node,
+ checkparams={'data_1': 'n1'}
)
# needs autoaliasing
@@ -1036,36 +1215,43 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
Node.children == None,
"NOT (EXISTS (SELECT 1 FROM nodes AS nodes_1 "
"WHERE nodes.id = nodes_1.parent_id))",
- entity=Node
+ entity=Node,
+ checkparams={}
)
self._test(
Node.parent == None,
- "nodes.parent_id IS NULL"
+ "nodes.parent_id IS NULL",
+ checkparams={}
)
self._test(
nalias.parent == None,
- "nodes_1.parent_id IS NULL"
+ "nodes_1.parent_id IS NULL",
+ checkparams={}
)
self._test(
nalias.parent != None,
- "nodes_1.parent_id IS NOT NULL"
+ "nodes_1.parent_id IS NOT NULL",
+ checkparams={}
)
self._test(
nalias.children == None,
"NOT (EXISTS ("
"SELECT 1 FROM nodes WHERE nodes_1.id = nodes.parent_id))",
- entity=nalias
+ entity=nalias,
+ checkparams={}
)
self._test(
nalias.children.any(Node.data == 'some data'),
"EXISTS (SELECT 1 FROM nodes WHERE "
"nodes_1.id = nodes.parent_id AND nodes.data = :data_1)",
- entity=nalias)
+ entity=nalias,
+ checkparams={'data_1': 'some data'}
+ )
# this fails because self-referential any() is auto-aliasing;
# the fact that we use "nalias" here means we get two aliases.
@@ -1080,33 +1266,48 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
nalias.parent.has(Node.data == 'some data'),
"EXISTS (SELECT 1 FROM nodes WHERE nodes.id = nodes_1.parent_id "
"AND nodes.data = :data_1)",
- entity=nalias
+ entity=nalias,
+ checkparams={'data_1': 'some data'}
)
self._test(
Node.parent.has(Node.data == 'some data'),
"EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE "
"nodes_1.id = nodes.parent_id AND nodes_1.data = :data_1)",
- entity=Node
+ entity=Node,
+ checkparams={'data_1': 'some data'}
)
self._test(
Node.parent == Node(id=7),
- ":param_1 = nodes.parent_id"
+ ":param_1 = nodes.parent_id",
+ checkparams={"param_1": 7}
)
self._test(
nalias.parent == Node(id=7),
- ":param_1 = nodes_1.parent_id"
+ ":param_1 = nodes_1.parent_id",
+ checkparams={"param_1": 7}
+ )
+
+ self._test(
+ nalias.parent != Node(id=7),
+ 'nodes_1.parent_id != :parent_id_1 '
+ 'OR nodes_1.parent_id IS NULL',
+ checkparams={"parent_id_1": 7}
)
self._test(
nalias.parent != Node(id=7),
- 'nodes_1.parent_id != :parent_id_1 OR nodes_1.parent_id IS NULL'
+ 'nodes_1.parent_id != :parent_id_1 '
+ 'OR nodes_1.parent_id IS NULL',
+ checkparams={"parent_id_1": 7}
)
self._test(
- nalias.children.contains(Node(id=7)), "nodes_1.id = :param_1"
+ nalias.children.contains(Node(id=7, parent_id=12)),
+ "nodes_1.id = :param_1",
+ checkparams={"param_1": 12}
)
def test_multilevel_any(self):
@@ -1392,6 +1593,63 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
}, with_polymorphic="*" if polymorphic else None)
mapper(Address, addresses)
+ def _func_fixture(self, label=False):
+ User = self.classes.User
+ users = self.tables.users
+
+ if label:
+ mapper(User, users, properties={
+ "foobar": column_property(
+ func.foob(users.c.name).label(None)
+ )
+ })
+ else:
+ mapper(User, users, properties={
+ "foobar": column_property(
+ func.foob(users.c.name)
+ )
+ })
+
+ def test_anon_label_function_auto(self):
+ self._func_fixture()
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar, u1.foobar),
+ "SELECT foob(users.name) AS foob_1, foob(users_1.name) AS foob_2 "
+ "FROM users, users AS users_1"
+ )
+
+ def test_anon_label_function_manual(self):
+ self._func_fixture(label=True)
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar, u1.foobar),
+ "SELECT foob(users.name) AS foob_1, foob(users_1.name) AS foob_2 "
+ "FROM users, users AS users_1"
+ )
+
+ def test_anon_label_ad_hoc_labeling(self):
+ self._func_fixture()
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar.label('x'), u1.foobar.label('y')),
+ "SELECT foob(users.name) AS x, foob(users_1.name) AS y "
+ "FROM users, users AS users_1"
+ )
+
+
def test_order_by_column_prop_string(self):
User, Address = self.classes("User", "Address")
self._fixture(label=True)
@@ -1616,6 +1874,25 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
)
+class ComparatorTest(QueryTest):
+ def test_clause_element_query_resolve(self):
+ from sqlalchemy.orm.properties import ColumnProperty
+ User = self.classes.User
+
+ class Comparator(ColumnProperty.Comparator):
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __clause_element__(self):
+ return self.expr
+
+ sess = Session()
+ eq_(
+ sess.query(Comparator(User.id)).order_by(Comparator(User.id)).all(),
+ [(7, ), (8, ), (9, ), (10, )]
+ )
+
+
# more slice tests are available in test/orm/generative.py
class SliceTest(QueryTest):
def test_first(self):
@@ -1839,13 +2116,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User). \
filter(User.addresses.any(email_address='fred@fred.com')).all()
- # test that any() doesn't overcorrelate
- assert [User(id=7), User(id=8)] == \
- sess.query(User).join("addresses"). \
- filter(
- ~User.addresses.any(
- Address.email_address == 'fred@fred.com')).all()
-
# test that the contents are not adapted by the aliased join
assert [User(id=7), User(id=8)] == \
sess.query(User).join("addresses", aliased=True). \
@@ -1857,6 +2127,18 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User).outerjoin("addresses", aliased=True). \
filter(~User.addresses.any()).all()
+ def test_any_doesnt_overcorrelate(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ # test that any() doesn't overcorrelate
+ assert [User(id=7), User(id=8)] == \
+ sess.query(User).join("addresses"). \
+ filter(
+ ~User.addresses.any(
+ Address.email_address == 'fred@fred.com')).all()
+
def test_has(self):
Dingaling, User, Address = (
self.classes.Dingaling, self.classes.User, self.classes.Address)
@@ -2069,6 +2351,42 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
)
+class HasMapperEntitiesTest(QueryTest):
+ def test_entity(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ assert q._has_mapper_entities
+
+ def test_cols(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ assert not q._has_mapper_entities
+
+ def test_cols_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ q._set_entities(User)
+ assert q._has_mapper_entities
+
+ def test_entity_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ q._set_entities(User.id)
+ assert not q._has_mapper_entities
+
+
class SetOpsTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -2434,7 +2752,9 @@ class CountTest(QueryTest):
eq_(q.distinct().count(), 3)
-class DistinctTest(QueryTest):
+class DistinctTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def test_basic(self):
User = self.classes.User
@@ -2448,19 +2768,22 @@ class DistinctTest(QueryTest):
order_by(desc(User.name)).all()
)
- def test_joined(self):
- """test that orderbys from a joined table get placed into the columns
- clause when DISTINCT is used"""
-
+ def test_columns_augmented_roundtrip_one(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User).join('addresses').distinct(). \
order_by(desc(Address.email_address))
- assert [User(id=7), User(id=9), User(id=8)] == q.all()
+ eq_(
+ [User(id=7), User(id=9), User(id=8)],
+ q.all()
+ )
- sess.expunge_all()
+ def test_columns_augmented_roundtrip_two(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
# test that it works on embedded joinedload/LIMIT subquery
q = sess.query(User).join('addresses').distinct(). \
@@ -2478,6 +2801,131 @@ class DistinctTest(QueryTest):
] == q.all()
self.assert_sql_count(testing.db, go, 1)
+ def test_columns_augmented_roundtrip_three(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ filter(User.name == 'jack').\
+ distinct().\
+ order_by(User.id, User.name, Address.email_address)
+
+ # even though columns are added, they aren't in the result
+ eq_(
+ q.all(),
+ [(7, 'jack', 3), (7, 'jack', 4), (7, 'jack', 2),
+ (7, 'jack', 5), (7, 'jack', 1)]
+ )
+ for row in q:
+ eq_(row.keys(), ['id', 'foo', 'id'])
+
+ def test_columns_augmented_sql_one(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ distinct().\
+ order_by(User.id, User.name, Address.email_address)
+
+ # Address.email_address is added because of DISTINCT,
+ # however User.id, User.name are not b.c. they're already there,
+ # even though User.name is labeled
+ self.assert_compile(
+ q,
+ "SELECT DISTINCT users.id AS users_id, users.name AS foo, "
+ "addresses.id AS addresses_id, "
+ "addresses.email_address AS addresses_email_address FROM users, "
+ "addresses ORDER BY users.id, users.name, addresses.email_address"
+ )
+
+ def test_columns_augmented_sql_two(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User).\
+ options(joinedload(User.addresses)).\
+ distinct().\
+ order_by(User.name, Address.email_address).\
+ limit(5)
+
+ # addresses.email_address is added to inner query so that
+ # it is available in ORDER BY
+ self.assert_compile(
+ q,
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "anon_1.addresses_email_address AS "
+ "anon_1_addresses_email_address, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT DISTINCT users.id AS users_id, "
+ "users.name AS users_name, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM users, addresses "
+ "ORDER BY users.name, addresses.email_address "
+ "LIMIT :param_1) AS anon_1 LEFT OUTER JOIN "
+ "addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY anon_1.users_name, "
+ "anon_1.addresses_email_address, addresses_1.id"
+ )
+
+ def test_columns_augmented_sql_three(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ distinct(User.name).\
+ order_by(User.id, User.name, Address.email_address)
+
+ # no columns are added when DISTINCT ON is used
+ self.assert_compile(
+ q,
+ "SELECT DISTINCT ON (users.name) users.id AS users_id, "
+ "users.name AS foo, addresses.id AS addresses_id FROM users, "
+ "addresses ORDER BY users.id, users.name, addresses.email_address",
+ dialect='postgresql'
+ )
+
+ def test_columns_augmented_sql_four(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User).join('addresses').\
+ distinct(Address.email_address). \
+ options(joinedload('addresses')).\
+ order_by(desc(Address.email_address)).limit(2)
+
+ # but for the subquery / eager load case, we still need to make
+ # the inner columns available for the ORDER BY even though its
+ # a DISTINCT ON
+ self.assert_compile(
+ q,
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "anon_1.addresses_email_address AS "
+ "anon_1_addresses_email_address, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT DISTINCT ON (addresses.email_address) "
+ "users.id AS users_id, users.name AS users_name, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM users JOIN addresses ON users.id = addresses.user_id "
+ "ORDER BY addresses.email_address DESC "
+ "LIMIT %(param_1)s) AS anon_1 "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY anon_1.addresses_email_address DESC, addresses_1.id",
+ dialect='postgresql'
+ )
+
class PrefixWithTest(QueryTest, AssertsCompiledSQL):
@@ -2554,10 +3002,12 @@ class YieldTest(_fixtures.FixtureTest):
User = self.classes.User
sess = create_session()
- q = sess.query(User).yield_per(1)
+ q = sess.query(User).yield_per(15)
q = q.execution_options(foo='bar')
assert q._yield_per
- eq_(q._execution_options, {"stream_results": True, "foo": "bar"})
+ eq_(
+ q._execution_options,
+ {"stream_results": True, "foo": "bar", "max_row_buffer": 15})
def test_no_joinedload_opt(self):
self._eagerload_mappings()
@@ -2798,44 +3248,143 @@ class TextTest(QueryTest, AssertsCompiledSQL):
[User(id=7), User(id=8), User(id=9), User(id=10)]
)
- def test_order_by_w_eager(self):
+ def test_order_by_w_eager_one(self):
+ User = self.classes.User
+ s = create_session()
+
+ # from 1.0.0 thru 1.0.2, the "name" symbol here was considered
+ # to be part of the things we need to ORDER BY and it was being
+ # placed into the inner query's columns clause, as part of
+ # query._compound_eager_statement where we add unwrap_order_by()
+ # to the columns clause. However, as #3392 illustrates, unlocatable
+ # string expressions like "name desc" will only fail in this scenario,
+ # so in general the changing of the query structure with string labels
+ # is dangerous.
+ #
+ # the queries here are again "invalid" from a SQL perspective, as the
+ # "name" field isn't matched up to anything.
+ #
+ with expect_warnings("Can't resolve label reference 'name';"):
+ self.assert_compile(
+ s.query(User).options(joinedload("addresses")).
+ order_by(desc("name")).limit(1),
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users ORDER BY users.name "
+ "DESC LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY name DESC, addresses_1.id"
+ )
+
+ def test_order_by_w_eager_two(self):
+ User = self.classes.User
+ s = create_session()
+
+ with expect_warnings("Can't resolve label reference 'name';"):
+ self.assert_compile(
+ s.query(User).options(joinedload("addresses")).
+ order_by("name").limit(1),
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users ORDER BY users.name "
+ "LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY name, addresses_1.id"
+ )
+
+ def test_order_by_w_eager_three(self):
+ User = self.classes.User
+ s = create_session()
+
+ self.assert_compile(
+ s.query(User).options(joinedload("addresses")).
+ order_by("users_name").limit(1),
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users ORDER BY users.name "
+ "LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY anon_1.users_name, addresses_1.id"
+ )
+
+ # however! this works (again?)
+ eq_(
+ s.query(User).options(joinedload("addresses")).
+ order_by("users_name").first(),
+ User(name='chuck', addresses=[])
+ )
+
+ def test_order_by_w_eager_four(self):
User = self.classes.User
Address = self.classes.Address
s = create_session()
- # here, we are seeing how Query has to take the order by expressions
- # of the query and then add them to the columns list, so that the
- # outer subquery can order by that same label. With the anonymous
- # label, our column gets sucked up and restated again in the
- # inner columns list!
- # we could try to play games with making this "smarter" but it
- # would add permanent overhead to Select._columns_plus_names,
- # since that's where references would need to be resolved.
- # so as it is, this query takes the _label_reference and makes a
- # full blown proxy and all the rest of it.
self.assert_compile(
s.query(User).options(joinedload("addresses")).
- order_by(desc("name")).limit(1),
+ order_by(desc("users_name")).limit(1),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name, "
- "anon_1.anon_2 AS anon_1_anon_2, "
"addresses_1.id AS addresses_1_id, "
"addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address "
- "FROM (SELECT users.id AS users_id, users.name AS users_name, "
- "users.name AS anon_2 FROM users ORDER BY users.name "
- "DESC LIMIT :param_1) AS anon_1 "
+ "FROM (SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users ORDER BY users.name DESC "
+ "LIMIT :param_1) AS anon_1 "
"LEFT OUTER JOIN addresses AS addresses_1 "
"ON anon_1.users_id = addresses_1.user_id "
- "ORDER BY anon_1.anon_2 DESC, addresses_1.id"
+ "ORDER BY anon_1.users_name DESC, addresses_1.id"
)
+ # however! this works (again?)
eq_(
s.query(User).options(joinedload("addresses")).
- order_by(desc("name")).first(),
+ order_by(desc("users_name")).first(),
User(name='jack', addresses=[Address()])
)
+ def test_order_by_w_eager_five(self):
+ """essentially the same as test_eager_relations -> test_limit_3,
+ but test for textual label elements that are freeform.
+ this is again #3392."""
+
+ User = self.classes.User
+ Address = self.classes.Address
+ Order = self.classes.Order
+
+ sess = create_session()
+
+ q = sess.query(User, Address.email_address.label('email_address'))
+
+ l = q.join('addresses').options(joinedload(User.orders)).\
+ order_by(
+ "email_address desc").limit(1).offset(0)
+ with expect_warnings(
+ "Can't resolve label reference 'email_address desc'"):
+ eq_(
+ [
+ (User(
+ id=7,
+ orders=[Order(id=1), Order(id=3), Order(id=5)],
+ addresses=[Address(id=1)]
+ ), 'jack@bean.com')
+ ],
+ l.all())
+
class TextWarningTest(QueryTest, AssertsCompiledSQL):
def _test(self, fn, arg, offending_clause, expected):
@@ -2918,6 +3467,39 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
# sess.query(Order).with_parent(None, property='addresses').all()
# == [Order(description="order 5")]
+ def test_select_from(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+ u1 = sess.query(User).get(7)
+ q = sess.query(Address).select_from(Address).with_parent(u1)
+ self.assert_compile(
+ q,
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id",
+ {'param_1': 7}
+ )
+
+ @testing.fails("issue #3607")
+ def test_select_from_alias(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+ u1 = sess.query(User).get(7)
+ a1 = aliased(Address)
+ q = sess.query(a1).with_parent(u1)
+ self.assert_compile(
+ q,
+ "SELECT addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM addresses AS addresses_1 "
+ "WHERE :param_1 = addresses_1.user_id",
+ {'param_1': 7}
+ )
+
def test_noparent(self):
Item, User = self.classes.Item, self.classes.User
@@ -2968,6 +3550,7 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
o.all()
)
+
def test_with_pending_autoflush(self):
Order, User = self.classes.Order, self.classes.User
@@ -3042,7 +3625,133 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
)
-class SynonymTest(QueryTest):
+class WithTransientOnNone(_fixtures.FixtureTest, AssertsCompiledSQL):
+ run_inserts = None
+ __dialect__ = 'default'
+
+ def _fixture1(self):
+ User, Address = self.classes.User, self.classes.Address
+ users, addresses = self.tables.users, self.tables.addresses
+
+ mapper(User, users)
+ mapper(Address, addresses, properties={
+ 'user': relationship(User),
+ 'special_user': relationship(
+ User, primaryjoin=and_(
+ users.c.id == addresses.c.user_id,
+ users.c.name == addresses.c.email_address))
+ })
+
+ def test_filter_with_transient_assume_pk(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+
+ q = sess.query(Address).filter(Address.user == User())
+ with expect_warnings("Got None for value of column "):
+ self.assert_compile(
+ q,
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id",
+ checkparams={'param_1': None}
+ )
+
+ def test_filter_with_transient_warn_for_none_against_non_pk(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ s = Session()
+ q = s.query(Address).filter(Address.special_user == User())
+ with expect_warnings("Got None for value of column"):
+
+ self.assert_compile(
+ q,
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id "
+ "AND :param_2 = addresses.email_address",
+ checkparams={"param_1": None, "param_2": None}
+ )
+
+ def test_with_parent_with_transient_assume_pk(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+
+ q = sess.query(User).with_parent(Address(), "user")
+ with expect_warnings("Got None for value of column"):
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :param_1",
+ checkparams={'param_1': None}
+ )
+
+ def test_with_parent_with_transient_warn_for_none_against_non_pk(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ s = Session()
+ q = s.query(User).with_parent(Address(), "special_user")
+ with expect_warnings("Got None for value of column"):
+
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :param_1 "
+ "AND users.name = :param_2",
+ checkparams={"param_1": None, "param_2": None}
+ )
+
+ def test_negated_contains_or_equals_plain_m2o(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ s = Session()
+ q = s.query(Address).filter(Address.user != User())
+ with expect_warnings("Got None for value of column"):
+ self.assert_compile(
+ q,
+
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses "
+ "WHERE addresses.user_id != :user_id_1 "
+ "OR addresses.user_id IS NULL",
+ checkparams={'user_id_1': None}
+ )
+
+ def test_negated_contains_or_equals_complex_rel(self):
+ self._fixture1()
+ User, Address = self.classes.User, self.classes.Address
+
+ s = Session()
+
+ # this one does *not* warn because we do the criteria
+ # without deferral
+ q = s.query(Address).filter(Address.special_user != User())
+ self.assert_compile(
+ q,
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses "
+ "WHERE NOT (EXISTS (SELECT 1 "
+ "FROM users "
+ "WHERE users.id = addresses.user_id AND "
+ "users.name = addresses.email_address AND users.id IS NULL))",
+ checkparams={}
+ )
+
+
+class SynonymTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
@classmethod
def setup_mappers(cls):
@@ -3162,6 +3871,20 @@ class SynonymTest(QueryTest):
Order(description="order 1"), Order(description="order 3"),
Order(description="order 5")] == o
+ def test_froms_aliased_col(self):
+ Address, User = self.classes.Address, self.classes.User
+
+ sess = create_session()
+ ua = aliased(User)
+
+ q = sess.query(ua.name_syn).join(
+ Address, ua.id == Address.user_id)
+ self.assert_compile(
+ q,
+ "SELECT users_1.name AS users_1_name FROM "
+ "users AS users_1 JOIN addresses ON users_1.id = addresses.user_id"
+ )
+
class ImmediateTest(_fixtures.FixtureTest):
run_inserts = 'once'
@@ -3184,13 +3907,17 @@ class ImmediateTest(_fixtures.FixtureTest):
sess = create_session()
- assert_raises(
+ assert_raises_message(
sa.orm.exc.NoResultFound,
+ "No row was found for one\(\)",
sess.query(User).filter(User.id == 99).one)
eq_(sess.query(User).filter(User.id == 7).one().id, 7)
- assert_raises(sa.orm.exc.MultipleResultsFound, sess.query(User).one)
+ assert_raises_message(
+ sa.orm.exc.MultipleResultsFound,
+ "Multiple rows were found for one\(\)",
+ sess.query(User).one)
assert_raises(
sa.orm.exc.NoResultFound,
@@ -3235,6 +3962,60 @@ class ImmediateTest(_fixtures.FixtureTest):
sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
order_by(User.id).one)
+ def test_one_or_none(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ eq_(sess.query(User).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User).filter(User.id == 7).one_or_none().id, 7)
+
+ assert_raises_message(
+ sa.orm.exc.MultipleResultsFound,
+ "Multiple rows were found for one_or_none\(\)",
+ sess.query(User).one_or_none)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 7).one_or_none(),
+ (7, 'jack'))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id, User.name).one_or_none)
+
+ eq_(
+ (sess.query(User, Address).join(User.addresses).
+ filter(Address.id == 99)).one_or_none(), None)
+
+ eq_((sess.query(User, Address).
+ join(User.addresses).
+ filter(Address.id == 4)).one_or_none(),
+ (User(id=8), Address(id=4)))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User, Address).join(User.addresses).one_or_none)
+
+ # this result returns multiple rows, the first
+ # two rows being the same. but uniquing is
+ # not applied for a column based result.
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id).join(User.addresses).
+ filter(User.id.in_([8, 9])).order_by(User.id).one_or_none)
+
+ # test that a join which ultimately returns
+ # multiple identities across many rows still
+ # raises, even though the first two rows are of
+ # the same identity and unique filtering
+ # is applied ([ticket:1688])
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
+ order_by(User.id).one_or_none)
+
@testing.future
def test_getslice(self):
assert False
diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py
index 230f3b18a..8f15c4c39 100644
--- a/test/orm/test_rel_fn.py
+++ b/test/orm/test_rel_fn.py
@@ -3,9 +3,9 @@ from sqlalchemy.testing import assert_raises_message, eq_, \
from sqlalchemy.testing import fixtures
from sqlalchemy.orm import relationships, foreign, remote
from sqlalchemy import MetaData, Table, Column, ForeignKey, Integer, \
- select, ForeignKeyConstraint, exc, func, and_, String
+ select, ForeignKeyConstraint, exc, func, and_, String, Boolean
from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
-
+from sqlalchemy.testing import mock
class _JoinFixtures(object):
@classmethod
@@ -71,6 +71,7 @@ class _JoinFixtures(object):
)
cls.base = Table('base', m,
Column('id', Integer, primary_key=True),
+ Column('flag', Boolean)
)
cls.sub = Table('sub', m,
Column('id', Integer, ForeignKey('base.id'),
@@ -504,6 +505,31 @@ class _JoinFixtures(object):
foreign(remote(self.selfref.c.sid)))
)
+ def _join_fixture_inh_selfref_w_entity(self, **kw):
+ fake_logger = mock.Mock(info=lambda *arg, **kw: None)
+ prop = mock.Mock(
+ parent=mock.Mock(),
+ mapper=mock.Mock(),
+ logger=fake_logger
+ )
+ local_selectable = self.base.join(self.sub)
+ remote_selectable = self.base.join(self.sub_w_sub_rel)
+
+ sub_w_sub_rel__sub_id = self.sub_w_sub_rel.c.sub_id._annotate(
+ {'parentmapper': prop.mapper})
+ sub__id = self.sub.c.id._annotate({'parentmapper': prop.parent})
+ sub_w_sub_rel__flag = self.base.c.flag._annotate(
+ {"parentmapper": prop.mapper})
+ return relationships.JoinCondition(
+ local_selectable, remote_selectable,
+ local_selectable, remote_selectable,
+ primaryjoin=and_(
+ sub_w_sub_rel__sub_id == sub__id,
+ sub_w_sub_rel__flag == True
+ ),
+ prop=prop
+ )
+
def _assert_non_simple_warning(self, fn):
assert_raises_message(
exc.SAWarning,
@@ -904,6 +930,17 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase,
[(self.purely_single_col.c.path, self.purely_single_col.c.path)]
)
+ def test_determine_local_remote_pairs_inh_selfref_w_entities(self):
+ joincond = self._join_fixture_inh_selfref_w_entity()
+ eq_(
+ joincond.local_remote_pairs,
+ [(self.sub.c.id, self.sub_w_sub_rel.c.sub_id)]
+ )
+ eq_(
+ joincond.remote_columns,
+ set([self.base.c.flag, self.sub_w_sub_rel.c.sub_id])
+ )
+
class DirectionTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
def test_determine_direction_compound_2(self):
joincond = self._join_fixture_compound_expression_2(
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 9e4b38a90..061187330 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -931,14 +931,12 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True),
Column("foo", Integer,),
test_needs_fk=True)
Table("tableB", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True),
Column("_a_id", Integer, key='a_id', primary_key=True),
test_needs_fk=True)
@@ -1093,7 +1091,7 @@ class FKsAsPksTest(fixtures.MappedTest):
'tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('tableA.id'),
- primary_key=True, autoincrement=False, nullable=True))
+ primary_key=True, nullable=True))
tableC.create()
class C(fixtures.BasicEntity):
@@ -2703,8 +2701,7 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
- test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True),
Column('data', String(50)))
Table('t2', metadata,
Column('id', Integer, primary_key=True,
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 2aa0cd3eb..caeb08530 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -17,6 +17,7 @@ from sqlalchemy.util import pypy
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import event, ForeignKey
+from sqlalchemy.util.compat import inspect_getargspec
class ExecutionTest(_fixtures.FixtureTest):
@@ -493,8 +494,10 @@ class SessionStateTest(_fixtures.FixtureTest):
'is already attached to session',
s2.delete, user)
u2 = s2.query(User).get(user.id)
- assert_raises_message(sa.exc.InvalidRequestError,
- 'another instance with key', s.delete, u2)
+ s2.expunge(u2)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ 'another instance .* is already present', s.delete, u2)
s.expire(user)
s.expunge(user)
assert user not in s
@@ -505,6 +508,25 @@ class SessionStateTest(_fixtures.FixtureTest):
assert user not in s
assert s.query(User).count() == 0
+ def test_already_attached(self):
+ User = self.classes.User
+ users = self.tables.users
+ mapper(User, users)
+
+ s1 = Session()
+ s2 = Session()
+
+ u1 = User(id=1, name='u1')
+ make_transient_to_detached(u1) # shorthand for actually persisting it
+ s1.add(u1)
+
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Object '<User.*?>' is already attached to session",
+ s2.add, u1
+ )
+ assert u1 not in s2
+ assert not s2.identity_map.keys()
@testing.uses_deprecated()
def test_identity_conflict(self):
@@ -524,8 +546,14 @@ class SessionStateTest(_fixtures.FixtureTest):
s.expunge(u2)
s.identity_map.add(sa.orm.attributes.instance_state(u1))
- assert_raises(AssertionError, s.identity_map.add,
- sa.orm.attributes.instance_state(u2))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ s.identity_map.add,
+ sa.orm.attributes.instance_state(u2)
+ )
def test_pickled_update(self):
users, User = self.tables.users, pickleable.User
@@ -562,7 +590,13 @@ class SessionStateTest(_fixtures.FixtureTest):
assert u2 is not None and u2 is not u1
assert u2 in sess
- assert_raises(Exception, lambda: sess.add(u1))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ sess.add, u1
+ )
sess.expunge(u2)
assert u2 not in sess
@@ -1105,11 +1139,56 @@ class WeakIdentityMapTest(_fixtures.FixtureTest):
class StrongIdentityMapTest(_fixtures.FixtureTest):
run_inserts = None
+ def _strong_ident_fixture(self):
+ sess = create_session(weak_identity_map=False)
+ return sess, sess.prune
+
+ def _event_fixture(self):
+ session = create_session()
+
+ @event.listens_for(session, "pending_to_persistent")
+ @event.listens_for(session, "deleted_to_persistent")
+ @event.listens_for(session, "detached_to_persistent")
+ @event.listens_for(session, "loaded_as_persistent")
+ def strong_ref_object(sess, instance):
+ if 'refs' not in sess.info:
+ sess.info['refs'] = refs = set()
+ else:
+ refs = sess.info['refs']
+
+ refs.add(instance)
+
+ @event.listens_for(session, "persistent_to_detached")
+ @event.listens_for(session, "persistent_to_deleted")
+ @event.listens_for(session, "persistent_to_transient")
+ def deref_object(sess, instance):
+ sess.info['refs'].discard(instance)
+
+ def prune():
+ if 'refs' not in session.info:
+ return 0
+
+ sess_size = len(session.identity_map)
+ session.info['refs'].clear()
+ gc_collect()
+ session.info['refs'] = set(
+ s.obj() for s in session.identity_map.all_states())
+ return sess_size - len(session.identity_map)
+
+ return session, prune
+
@testing.uses_deprecated()
- def test_strong_ref(self):
+ def test_strong_ref_imap(self):
+ self._test_strong_ref(self._strong_ident_fixture)
+
+ def test_strong_ref_events(self):
+ self._test_strong_ref(self._event_fixture)
+
+ def _test_strong_ref(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
# save user
@@ -1129,12 +1208,19 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
eq_(users.select().execute().fetchall(), [(user.id, 'u2')])
@testing.uses_deprecated()
+ def test_prune_imap(self):
+ self._test_prune(self._strong_ident_fixture)
+
+ def test_prune_events(self):
+ self._test_prune(self._event_fixture)
+
@testing.fails_if(lambda: pypy, "pypy has a real GC")
@testing.fails_on('+zxjdbc', 'http://www.sqlalchemy.org/trac/ticket/1473')
- def test_prune(self):
+ def _test_prune(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
for o in [User(name='u%s' % x) for x in range(10)]:
@@ -1142,43 +1228,44 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
# o is still live after this loop...
self.assert_(len(s.identity_map) == 0)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
s.flush()
gc_collect()
- self.assert_(s.prune() == 9)
+ eq_(prune(), 9)
+ # o is still in local scope here, so still present
self.assert_(len(s.identity_map) == 1)
id = o.id
del o
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
u.name = 'squiznart'
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
s.add(User(name='x'))
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
s.flush()
self.assert_(len(s.identity_map) == 1)
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
s.delete(u)
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
@@ -1397,7 +1484,7 @@ class SessionInterface(fixtures.TestBase):
for meth in Session.public_methods:
if meth in blacklist:
continue
- spec = inspect.getargspec(getattr(Session, meth))
+ spec = inspect_getargspec(getattr(Session, meth))
if len(spec[0]) > 1 or spec[1]:
ok.add(meth)
return ok
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 6bea5cc7b..c1662c9d1 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -9,7 +9,7 @@ from sqlalchemy.orm import (
relationship, attributes)
from sqlalchemy.testing.util import gc_collect
from test.orm._fixtures import FixtureTest
-
+from sqlalchemy import inspect
class SessionTransactionTest(FixtureTest):
run_inserts = None
@@ -143,6 +143,34 @@ class SessionTransactionTest(FixtureTest):
assert session.connection().execute(
'select count(1) from users').scalar() == 2
+ @testing.requires.savepoints
+ def test_dirty_state_transferred_deep_nesting(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+
+ s = Session(testing.db)
+ u1 = User(name='u1')
+ s.add(u1)
+ s.commit()
+
+ nt1 = s.begin_nested()
+ nt2 = s.begin_nested()
+ u1.name = 'u2'
+ assert attributes.instance_state(u1) not in nt2._dirty
+ assert attributes.instance_state(u1) not in nt1._dirty
+ s.flush()
+ assert attributes.instance_state(u1) in nt2._dirty
+ assert attributes.instance_state(u1) not in nt1._dirty
+
+ s.commit()
+ assert attributes.instance_state(u1) in nt2._dirty
+ assert attributes.instance_state(u1) in nt1._dirty
+
+ s.rollback()
+ assert attributes.instance_state(u1).expired
+ eq_(u1.name, 'u1')
+
@testing.requires.independent_connections
def test_transactions_isolated(self):
User, users = self.classes.User, self.tables.users
@@ -629,6 +657,34 @@ class SessionTransactionTest(FixtureTest):
assert session.transaction is not None, \
'autocommit=False should start a new transaction'
+ @testing.requires.python2
+ @testing.requires.savepoints_w_release
+ def test_report_primary_error_when_rollback_fails(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+
+ session = Session(testing.db)
+
+ with expect_warnings(".*during handling of a previous exception.*"):
+ session.begin_nested()
+ savepoint = session.\
+ connection()._Connection__transaction._savepoint
+
+ # force the savepoint to disappear
+ session.connection().dialect.do_release_savepoint(
+ session.connection(), savepoint
+ )
+
+ # now do a broken flush
+ session.add_all([User(id=1), User(id=1)])
+
+ assert_raises_message(
+ sa_exc.DBAPIError,
+ "ROLLBACK TO SAVEPOINT ",
+ session.flush
+ )
+
class _LocalFixture(FixtureTest):
run_setup_mappers = 'once'
@@ -867,7 +923,13 @@ class AutoExpireTest(_LocalFixture):
assert u1_state.obj() is None
s.rollback()
- assert u1_state in s.identity_map.all_states()
+ # new in 1.1, not in identity map if the object was
+ # gc'ed and we restore snapshot; we've changed update_impl
+ # to just skip this object
+ assert u1_state not in s.identity_map.all_states()
+
+ # in any version, the state is replaced by the query
+ # because the identity map would switch it
u1 = s.query(User).filter_by(name='ed').one()
assert u1_state not in s.identity_map.all_states()
assert s.scalar(users.count()) == 1
@@ -1456,6 +1518,30 @@ class NaturalPKRollbackTest(fixtures.MappedTest):
session.rollback()
+ def test_reloaded_deleted_checked_for_expiry(self):
+ """test issue #3677"""
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+
+ u1 = User(name='u1')
+
+ s = Session()
+ s.add(u1)
+ s.flush()
+ del u1
+ gc_collect()
+
+ u1 = s.query(User).first() # noqa
+
+ s.rollback()
+
+ u2 = User(name='u1')
+ s.add(u2)
+ s.commit()
+
+ assert inspect(u2).persistent
+
def test_key_replaced_by_update(self):
users, User = self.tables.users, self.classes.User
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index ae5a8ef60..2f67943f1 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -7,7 +7,8 @@ from sqlalchemy.orm import mapper as orm_mapper
import sqlalchemy as sa
from sqlalchemy.util import u, ue, b
-from sqlalchemy import Integer, String, ForeignKey, literal_column, event
+from sqlalchemy import Integer, String, ForeignKey, \
+ literal_column, event, Boolean
from sqlalchemy.testing import engines
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table
@@ -18,6 +19,7 @@ from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
+
class UnitOfWorkTest(object):
pass
@@ -258,7 +260,7 @@ class PKTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('multipk1', metadata,
Column('multi_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=not testing.against('sqlite')),
Column('multi_rev', Integer, primary_key=True),
Column('name', String(50), nullable=False),
Column('value', String(100)))
@@ -383,16 +385,26 @@ class ClauseAttributesTest(fixtures.MappedTest):
Column('name', String(30)),
Column('counter', Integer, default=1))
+ Table('boolean_t', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('value', Boolean),
+ )
+
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+ class HasBoolean(cls.Comparable):
+ pass
+
@classmethod
def setup_mappers(cls):
User, users_t = cls.classes.User, cls.tables.users_t
-
+ HasBoolean, boolean_t = cls.classes.HasBoolean, cls.tables.boolean_t
mapper(User, users_t)
+ mapper(HasBoolean, boolean_t)
def test_update(self):
User = self.classes.User
@@ -446,6 +458,30 @@ class ClauseAttributesTest(fixtures.MappedTest):
assert (u.counter == 5) is True
+ def test_update_special_comparator(self):
+ HasBoolean = self.classes.HasBoolean
+
+ # make sure the comparison we're shooting
+ # for is invalid, otherwise we need to
+ # test something else here
+ assert_raises_message(
+ TypeError,
+ "Boolean value of this clause is not defined",
+ bool, None == sa.false()
+ )
+ s = create_session()
+ hb = HasBoolean(value=None)
+ s.add(hb)
+ s.flush()
+
+ hb.value = sa.false()
+
+ s.flush()
+
+ # needs to be refreshed
+ assert 'value' not in hb.__dict__
+ eq_(hb.value, False)
+
class PassiveDeletesTest(fixtures.MappedTest):
__requires__ = ('foreign_keys',)
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index cef71370d..c8ce13c91 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -5,7 +5,8 @@ from sqlalchemy.testing.schema import Table, Column
from test.orm import _fixtures
from sqlalchemy import exc, util
from sqlalchemy.testing import fixtures, config
-from sqlalchemy import Integer, String, ForeignKey, func, literal
+from sqlalchemy import Integer, String, ForeignKey, func, \
+ literal, FetchedValue, text
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, unitofwork, attributes,\
Session, exc as orm_exc
@@ -1800,7 +1801,13 @@ class LoadersUsingCommittedTest(UOWTest):
class NoAttrEventInFlushTest(fixtures.MappedTest):
- """test [ticket:3167]"""
+ """test [ticket:3167].
+
+ See also RefreshFlushInReturningTest in test/orm/test_events.py which
+ tests the positive case for the refresh_flush event, added in
+ [ticket:3427].
+
+ """
__backend__ = True
@@ -1840,3 +1847,767 @@ class NoAttrEventInFlushTest(fixtures.MappedTest):
eq_(t1.id, 1)
eq_(t1.prefetch_val, 5)
eq_(t1.returning_val, 5)
+
+
+class EagerDefaultsTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, server_default="3")
+ )
+
+ Table(
+ 'test2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer),
+ Column('bar', Integer, server_onupdate=FetchedValue())
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ class Thing2(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test, eager_defaults=True)
+
+ Thing2 = cls.classes.Thing2
+
+ mapper(Thing2, cls.tables.test2, eager_defaults=True)
+
+ def test_insert_defaults_present(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1, foo=5),
+ Thing(id=2, foo=10)
+ )
+
+ s.add_all([t1, t2])
+
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, :foo)",
+ [{'foo': 5, 'id': 1}, {'foo': 10, 'id': 2}]
+ ),
+ )
+
+ def go():
+ eq_(t1.foo, 5)
+ eq_(t2.foo, 10)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_present_as_expr(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1, foo=text("2 + 5")),
+ Thing(id=2, foo=text("5 + 5"))
+ )
+
+ s.add_all([t1, t2])
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (%(id)s, 2 + 5) "
+ "RETURNING test.foo",
+ [{'id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (%(id)s, 5 + 5) "
+ "RETURNING test.foo",
+ [{'id': 2}],
+ dialect='postgresql'
+ )
+ )
+
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, 2 + 5)",
+ [{'id': 1}]
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, 5 + 5)",
+ [{'id': 2}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 2}]
+ ),
+ )
+
+ def go():
+ eq_(t1.foo, 7)
+ eq_(t2.foo, 10)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_nonpresent(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1),
+ Thing(id=2)
+ )
+
+ s.add_all([t1, t2])
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
+ [{'id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
+ [{'id': 2}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (:id)",
+ [{'id': 1}, {'id': 2}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 2}]
+ )
+ )
+
+ def test_update_defaults_nonpresent(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ t1.foo = 5
+ t2.foo = 6
+ t2.bar = 10
+ t3.foo = 7
+ t4.foo = 8
+ t4.bar = 12
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 5, 'test2_id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 7, 'test2_id': 3}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 8, 'bar': 12, 'test2_id': 4}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 8, 'bar': 12, 'test2_id': 4}],
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 3}]
+ )
+ )
+
+ def go():
+ eq_(t1.bar, 2)
+ eq_(t2.bar, 10)
+ eq_(t3.bar, 4)
+ eq_(t4.bar, 12)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_update_defaults_present_as_expr(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ t1.foo = 5
+ t1.bar = text("1 + 1")
+ t2.foo = 6
+ t2.bar = 10
+ t3.foo = 7
+ t4.foo = 8
+ t4.bar = text("5 + 7")
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=1 + 1 "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 5, 'test2_id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 7, 'test2_id': 3}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=5 + 7 "
+ "WHERE test2.id = %(test2_id)s RETURNING test2.bar",
+ [{'foo': 8, 'test2_id': 4}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=1 + 1 "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=5 + 7 "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 8, 'test2_id': 4}],
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 3}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 4}]
+ )
+ )
+
+ def go():
+ eq_(t1.bar, 2)
+ eq_(t2.bar, 10)
+ eq_(t3.bar, 4)
+ eq_(t4.bar, 12)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_bulk_insert(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ mappings = [
+ {"id": 1},
+ {"id": 2}
+ ]
+
+ self.assert_sql_execution(
+ testing.db,
+ lambda: s.bulk_insert_mappings(Thing, mappings),
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (:id)",
+ [{'id': 1}, {'id': 2}]
+ )
+ )
+
+ def test_update_defaults_bulk_update(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ mappings = [
+ {"id": 1, "foo": 5},
+ {"id": 2, "foo": 6, "bar": 10},
+ {"id": 3, "foo": 7},
+ {"id": 4, "foo": 8}
+ ]
+
+ self.assert_sql_execution(
+ testing.db,
+ lambda: s.bulk_update_mappings(Thing2, mappings),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}, {'foo': 8, 'test2_id': 4}]
+ )
+ )
+
+ def test_update_defaults_present(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3)
+ )
+
+ s.add_all([t1, t2])
+ s.flush()
+
+ t1.bar = 5
+ t2.bar = 10
+
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "UPDATE test2 SET bar=%(bar)s WHERE test2.id = %(test2_id)s",
+ [{'bar': 5, 'test2_id': 1}, {'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ )
+ )
+
+class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
+ """test support for custom datatypes that return a non-__bool__ value
+ when compared via __eq__(), eg. ticket 3469"""
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy import TypeDecorator
+
+ class NoBool(object):
+ def __nonzero__(self):
+ raise NotImplementedError("not supported")
+
+ class MyWidget(object):
+ def __init__(self, text):
+ self.text = text
+
+ def __eq__(self, other):
+ return NoBool()
+
+ cls.MyWidget = MyWidget
+
+ class MyType(TypeDecorator):
+ impl = String(50)
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = value.text
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value = MyWidget(value)
+ return value
+
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('value', MyType),
+ Column('unrelated', String(50))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test)
+
+ def test_update_against_none(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo")))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.value = None
+ s.commit()
+
+ eq_(
+ s.query(Thing.value).scalar(), None
+ )
+
+ def test_update_against_something_else(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo")))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.value = self.MyWidget("bar")
+ s.commit()
+
+ eq_(
+ s.query(Thing.value).scalar().text, "bar"
+ )
+
+ def test_no_update_no_change(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo"), unrelated='unrelated'))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.unrelated = 'something else'
+
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "UPDATE test SET unrelated=:unrelated "
+ "WHERE test.id = :test_id",
+ [{'test_id': 1, 'unrelated': 'something else'}]
+ ),
+ )
+
+ eq_(
+ s.query(Thing.value).scalar().text, "foo"
+ )
+
+
+class NullEvaluatingTest(fixtures.MappedTest, testing.AssertsExecutionResults):
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy import TypeDecorator
+
+ class EvalsNull(TypeDecorator):
+ impl = String(50)
+
+ should_evaluate_none = True
+
+ def process_bind_param(self, value, dialect):
+ if value is None:
+ value = 'nothing'
+ return value
+
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('evals_null_no_default', EvalsNull()),
+ Column('evals_null_default', EvalsNull(), default='default_val'),
+ Column('no_eval_null_no_default', String(50)),
+ Column('no_eval_null_default', String(50), default='default_val'),
+ Column(
+ 'builtin_evals_null_no_default', String(50).evaluates_none()),
+ Column(
+ 'builtin_evals_null_default',
+ String(50).evaluates_none(), default='default_val'),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test)
+
+ def _assert_col(self, name, value):
+ Thing = self.classes.Thing
+ s = Session()
+
+ col = getattr(Thing, name)
+ obj = s.query(col).filter(col == value).one()
+ eq_(obj[0], value)
+
+ def _test_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing(**{attr: None})
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{attr: None}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing()
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def test_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_default_insert(self):
+ self._test_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_default_insert(self):
+ self._test_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_default_insert(self):
+ self._test_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index a3ad37e60..593714a06 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -1,10 +1,11 @@
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, is_
from sqlalchemy.testing import fixtures
from sqlalchemy import Integer, String, ForeignKey, or_, exc, \
select, func, Boolean, case, text, column
from sqlalchemy.orm import mapper, relationship, backref, Session, \
- joinedload, synonym
+ joinedload, synonym, query
from sqlalchemy import testing
+from sqlalchemy.testing import mock
from sqlalchemy.testing.schema import Table, Column
@@ -19,12 +20,20 @@ class UpdateDeleteTest(fixtures.MappedTest):
test_needs_autoincrement=True),
Column('name', String(32)),
Column('age_int', Integer))
+ Table(
+ "addresses", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', ForeignKey('users.id'))
+ )
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+ class Address(cls.Comparable):
+ pass
+
@classmethod
def insert_data(cls):
users = cls.tables.users
@@ -41,9 +50,14 @@ class UpdateDeleteTest(fixtures.MappedTest):
User = cls.classes.User
users = cls.tables.users
+ Address = cls.classes.Address
+ addresses = cls.tables.addresses
+
mapper(User, users, properties={
- 'age': users.c.age_int
+ 'age': users.c.age_int,
+ 'addresses': relationship(Address)
})
+ mapper(Address, addresses)
def test_illegal_eval(self):
User = self.classes.User
@@ -59,27 +73,36 @@ class UpdateDeleteTest(fixtures.MappedTest):
def test_illegal_operations(self):
User = self.classes.User
+ Address = self.classes.Address
s = Session()
for q, mname in (
- (s.query(User).limit(2), "limit"),
- (s.query(User).offset(2), "offset"),
- (s.query(User).limit(2).offset(2), "limit"),
- (s.query(User).order_by(User.id), "order_by"),
- (s.query(User).group_by(User.id), "group_by"),
- (s.query(User).distinct(), "distinct")
+ (s.query(User).limit(2), r"limit\(\)"),
+ (s.query(User).offset(2), r"offset\(\)"),
+ (s.query(User).limit(2).offset(2), r"limit\(\)"),
+ (s.query(User).order_by(User.id), r"order_by\(\)"),
+ (s.query(User).group_by(User.id), r"group_by\(\)"),
+ (s.query(User).distinct(), r"distinct\(\)"),
+ (s.query(User).join(User.addresses),
+ r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
+ (s.query(User).outerjoin(User.addresses),
+ r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
+ (s.query(User).select_from(Address),
+ r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
+ (s.query(User).from_self(),
+ r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
):
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.update\(\) when "
- "%s\(\) has been called" % mname,
+ r"Can't call Query.update\(\) or Query.delete\(\) when "
+ "%s has been called" % mname,
q.update,
{'name': 'ed'})
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.delete\(\) when "
- "%s\(\) has been called" % mname,
+ r"Can't call Query.update\(\) or Query.delete\(\) when "
+ "%s has been called" % mname,
q.delete)
def test_evaluate_clauseelement(self):
@@ -587,6 +610,42 @@ class UpdateDeleteTest(fixtures.MappedTest):
synchronize_session='fetch')
assert john not in sess
+ def test_update_unordered_dict(self):
+ User = self.classes.User
+ session = Session()
+
+ # Do an update using unordered dict and check that the parameters used
+ # are ordered in table order
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ {'name': 'foob', 'id': 123})
+ # Confirm that parameters are a dict instead of tuple or list
+ params_type = type(exec_.mock_calls[0][1][0].parameters)
+ is_(params_type, dict)
+
+ def test_update_preserve_parameter_order(self):
+ User = self.classes.User
+ session = Session()
+
+ # Do update using a tuple and check that order is preserved
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ (('id', 123), ('name', 'foob')),
+ update_args={"preserve_parameter_order": True})
+ cols = [c.key
+ for c in exec_.mock_calls[0][1][0]._parameter_ordering]
+ eq_(['id', 'name'], cols)
+
+ # Now invert the order and use a list instead, and check that order is
+ # also preserved
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ [('name', 'foob'), ('id', 123)],
+ update_args={"preserve_parameter_order": True})
+ cols = [c.key
+ for c in exec_.mock_calls[0][1][0]._parameter_ordering]
+ eq_(['name', 'id'], cols)
+
class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
@@ -885,6 +944,18 @@ class ExpressionUpdateTest(fixtures.MappedTest):
eq_(d1.cnt, 2)
sess.close()
+ def test_update_args(self):
+ Data = self.classes.Data
+ session = testing.mock.Mock(wraps=Session())
+ update_args = {"mysql_limit": 1}
+ query.Query(Data, session).update({Data.cnt: Data.cnt + 1},
+ update_args=update_args)
+ eq_(session.execute.call_count, 1)
+ args, kwargs = session.execute.call_args
+ eq_(len(args), 1)
+ update_stmt = args[0]
+ eq_(update_stmt.dialect_kwargs, update_args)
+
class InheritTest(fixtures.DeclarativeMappedTest):
diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py
index ae225ad92..168cee19c 100644
--- a/test/orm/test_utils.py
+++ b/test/orm/test_utils.py
@@ -222,6 +222,56 @@ class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE point_1.x > point.x"
)
+ def test_parententity_vs_parentmapper(self):
+ class Point(object):
+ pass
+
+ self._fixture(Point, properties={
+ 'x_syn': synonym("x")
+ })
+ pa = aliased(Point)
+
+ is_(Point.x_syn._parententity, inspect(Point))
+ is_(Point.x._parententity, inspect(Point))
+ is_(Point.x_syn._parentmapper, inspect(Point))
+ is_(Point.x._parentmapper, inspect(Point))
+
+ is_(
+ Point.x_syn.__clause_element__()._annotations['parententity'],
+ inspect(Point))
+ is_(
+ Point.x.__clause_element__()._annotations['parententity'],
+ inspect(Point))
+ is_(
+ Point.x_syn.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+ is_(
+ Point.x.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+
+ pa = aliased(Point)
+
+ is_(pa.x_syn._parententity, inspect(pa))
+ is_(pa.x._parententity, inspect(pa))
+ is_(pa.x_syn._parentmapper, inspect(Point))
+ is_(pa.x._parentmapper, inspect(Point))
+
+ is_(
+ pa.x_syn.__clause_element__()._annotations['parententity'],
+ inspect(pa)
+ )
+ is_(
+ pa.x.__clause_element__()._annotations['parententity'],
+ inspect(pa)
+ )
+ is_(
+ pa.x_syn.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+ is_(
+ pa.x.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+
+
class IdentityKeyTest(_fixtures.FixtureTest):
run_inserts = None
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index 8348cb588..07b090c60 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -112,6 +112,61 @@ class VersioningTest(fixtures.MappedTest):
else:
s1.commit()
+ def test_multiple_updates(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ f1 = Foo(value='f1')
+ f2 = Foo(value='f2')
+ s1.add_all((f1, f2))
+ s1.commit()
+
+ f1.value = 'f1rev2'
+ f2.value = 'f2rev2'
+ s1.commit()
+
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(f1.id, 'f1rev2', 2), (f2.id, 'f2rev2', 2)]
+ )
+
+ def test_bulk_insert(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ s1.bulk_insert_mappings(
+ Foo,
+ [{"id": 1, "value": "f1"}, {"id": 2, "value": "f2"}]
+ )
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(1, 'f1', 1), (2, 'f2', 1)]
+ )
+
+ def test_bulk_update(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ f1 = Foo(value='f1')
+ f2 = Foo(value='f2')
+ s1.add_all((f1, f2))
+ s1.commit()
+
+ s1.bulk_update_mappings(
+ Foo,
+ [
+ {"id": f1.id, "value": "f1rev2", "version_id": 1},
+ {"id": f2.id, "value": "f2rev2", "version_id": 1},
+
+ ]
+ )
+ s1.commit()
+
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(f1.id, 'f1rev2', 2), (f2.id, 'f2rev2', 2)]
+ )
+
@testing.emits_warning_on(
'+zxjdbc', r'.*does not support (update|delete)d rowcount')
def test_bump_version(self):
@@ -355,6 +410,97 @@ class VersioningTest(fixtures.MappedTest):
)
+class NoBumpOnRelationshipTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer)
+ )
+ Table(
+ 'b', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('a_id', ForeignKey('a.id'))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(cls.Basic):
+ pass
+
+ def _run_test(self, auto_version_counter=True):
+ A, B = self.classes('A', 'B')
+ s = Session()
+ if auto_version_counter:
+ a1 = A()
+ else:
+ a1 = A(version_id=1)
+ s.add(a1)
+ s.commit()
+ eq_(a1.version_id, 1)
+
+ b1 = B()
+ b1.a = a1
+ s.add(b1)
+ s.commit()
+
+ eq_(a1.version_id, 1)
+
+ def test_plain_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ )
+ mapper(B, b)
+
+ self._run_test()
+
+ def test_functional_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ version_id_generator=lambda num: (num or 0) + 1
+ )
+ mapper(B, b)
+
+ self._run_test()
+
+ def test_no_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ version_id_generator=False
+ )
+ mapper(B, b)
+
+ self._run_test(False)
+
+
class ColumnTypeTest(fixtures.MappedTest):
__backend__ = True
@@ -587,6 +733,53 @@ class AlternateGeneratorTest(fixtures.MappedTest):
sess2.commit
+class PlainInheritanceTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'base', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer, nullable=True),
+ Column('data', String(50))
+ )
+ Table(
+ 'sub', metadata,
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column('sub_data', String(50))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+
+ class Base(cls.Basic):
+ pass
+
+ class Sub(Base):
+ pass
+
+ def test_update_child_table_only(self):
+ Base, sub, base, Sub = (
+ self.classes.Base, self.tables.sub, self.tables.base,
+ self.classes.Sub)
+
+ mapper(Base, base, version_id_col=base.c.version_id)
+ mapper(Sub, sub, inherits=Base)
+
+ s = Session()
+ s1 = Sub(data='b', sub_data='s')
+ s.add(s1)
+ s.commit()
+
+ s1.sub_data = 's2'
+ s.commit()
+
+ eq_(s1.version_id, 2)
+
+
class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
"""Test versioning where both parent/child table have a
versioning column.
@@ -738,19 +931,26 @@ class ServerVersioningTest(fixtures.MappedTest):
class Bar(cls.Basic):
pass
- def _fixture(self, expire_on_commit=True):
+ def _fixture(self, expire_on_commit=True, eager_defaults=False):
Foo, version_table = self.classes.Foo, self.tables.version_table
mapper(
Foo, version_table, version_id_col=version_table.c.version_id,
version_id_generator=False,
+ eager_defaults=eager_defaults
)
s1 = Session(expire_on_commit=expire_on_commit)
return s1
def test_insert_col(self):
- sess = self._fixture()
+ self._test_insert_col()
+
+ def test_insert_col_eager_defaults(self):
+ self._test_insert_col(eager_defaults=True)
+
+ def _test_insert_col(self, **kw):
+ sess = self._fixture(**kw)
f1 = self.classes.Foo(value='f1')
sess.add(f1)
@@ -779,7 +979,13 @@ class ServerVersioningTest(fixtures.MappedTest):
self.assert_sql_execution(testing.db, sess.flush, *statements)
def test_update_col(self):
- sess = self._fixture()
+ self._test_update_col()
+
+ def test_update_col_eager_defaults(self):
+ self._test_update_col(eager_defaults=True)
+
+ def _test_update_col(self, **kw):
+ sess = self._fixture(**kw)
f1 = self.classes.Foo(value='f1')
sess.add(f1)
@@ -814,6 +1020,76 @@ class ServerVersioningTest(fixtures.MappedTest):
)
self.assert_sql_execution(testing.db, sess.flush, *statements)
+ def test_multi_update(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ f2 = self.classes.Foo(value='f2')
+ f3 = self.classes.Foo(value='f3')
+ sess.add_all([f1, f2, f3])
+ sess.flush()
+
+ f1.value = 'f1a'
+ f2.value = 'f2a'
+ f3.value = 'f3a'
+
+ statements = [
+ # note that the assertsql tests the rule against
+ # "default" - on a "returning" backend, the statement
+ # includes "RETURNING"
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 1,
+ "version_table_version_id": 1, "value": "f1a"}]
+ ),
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 2,
+ "version_table_version_id": 1, "value": "f2a"}]
+ ),
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 3,
+ "version_table_version_id": 1, "value": "f3a"}]
+ )
+ ]
+ if not testing.db.dialect.implicit_returning:
+ # DBs without implicit returning, we must immediately
+ # SELECT for the new version id
+ statements.extend([
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 1}]
+ ),
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 2}]
+ ),
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 3}]
+ )
+ ])
+ self.assert_sql_execution(testing.db, sess.flush, *statements)
+
def test_delete_col(self):
sess = self._fixture()
diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py
index 633315f24..27fcd4b35 100644
--- a/test/perf/orm2010.py
+++ b/test/perf/orm2010.py
@@ -62,7 +62,7 @@ Base.metadata.create_all(engine)
sess = Session(engine)
-def runit(status, factor=1):
+def runit(status, factor=1, query_runs=5):
num_bosses = 100 * factor
num_grunts = num_bosses * 100
@@ -104,7 +104,7 @@ def runit(status, factor=1):
status("Associated grunts w/ bosses and committed")
# do some heavier reading
- for i in range(int(round(factor / 2.0))):
+ for i in range(query_runs):
status("Heavy query run #%d" % (i + 1))
report = []
diff --git a/test/profiles.txt b/test/profiles.txt
index 7b2890894..519259e75 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,422 +1,542 @@
-# /Users/classic/dev/sqlalchemy/test/profiles.txt
+# /home/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_pymysql_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_pymysql_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 76
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 154
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 154
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_nocextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqldb_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqldb_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_mysqldb_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_mysqldb_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_pymysql_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_pymysql_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_sqlite_pysqlite_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 169
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 189
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 189
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_nocextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqldb_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqldb_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_mysqldb_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_mysqldb_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_pymysql_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_pymysql_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_sqlite_pysqlite_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 202
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_nocextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqldb_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqldb_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_dbapiunicode_cextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_dbapiunicode_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_mysqldb_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_mysqldb_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_pymysql_dbapiunicode_cextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_pymysql_dbapiunicode_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_sqlite_pysqlite_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 77
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 147
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_cextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_nocextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_nocextensions 4267
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_mysqldb_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_mysqldb_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_mysql_mysqldb_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_mysql_mysqldb_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_sqlite_pysqlite_dbapiunicode_cextensions 4256
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 4256
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_nocextensions 6630
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_mysqldb_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_mysqldb_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_mysql_mysqldb_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_mysql_mysqldb_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_sqlite_pysqlite_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 6426
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_cextensions 16236
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_nocextensions 25253
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 28219
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 16386
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 25403
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_cextensions 17219
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 26222
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 17408
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 26411
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 17219
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 26222
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 17408
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 26411
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_dbapiunicode_cextensions 41218
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_dbapiunicode_nocextensions 50221
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_dbapiunicode_cextensions 29199
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 38202
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17164
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 26167
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_mysqldb_dbapiunicode_cextensions 30236
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_mysqldb_dbapiunicode_nocextensions 39239
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_dbapiunicode_cextensions 18213
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 27216
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_dbapiunicode_cextensions 18187
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 27190
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_mysql_mysqldb_dbapiunicode_cextensions 30236
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_mysql_mysqldb_dbapiunicode_nocextensions 39239
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_sqlite_pysqlite_dbapiunicode_cextensions 18173
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 27176
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_cextensions 22227
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_nocextensions 25232
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 22198
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 25203
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 24293
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 25298
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_cextensions 23212
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 26215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 23323
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 26326
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 23212
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 26215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 23323
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 28326
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_dbapiunicode_cextensions 23235
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_dbapiunicode_nocextensions 26238
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_dbapiunicode_cextensions 23204
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 26207
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 23181
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 26184
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_mysqldb_dbapiunicode_cextensions 24260
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_mysqldb_dbapiunicode_nocextensions 27263
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_dbapiunicode_cextensions 24225
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 27228
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_dbapiunicode_cextensions 24211
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 27214
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_mysql_mysqldb_dbapiunicode_cextensions 24260
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_mysql_mysqldb_dbapiunicode_nocextensions 27263
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_sqlite_pysqlite_dbapiunicode_cextensions 24211
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 27219
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_mysqldb_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_mysqldb_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_mysql_mysqldb_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_mysql_mysqldb_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_sqlite_pysqlite_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 18988
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 127099
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 129849
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 119849
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 164551
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_cextensions 126351
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 123602
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 125352
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 170351
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 174099
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_dbapiunicode_cextensions 132554
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_dbapiunicode_nocextensions 134304
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_dbapiunicode_cextensions 124101
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 125851
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 121304
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 122852
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_mysqldb_dbapiunicode_cextensions 139356
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_mysqldb_dbapiunicode_nocextensions 141106
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_dbapiunicode_cextensions 129306
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 130857
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_dbapiunicode_cextensions 127556
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 129107
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_mysql_mysqldb_dbapiunicode_cextensions 139356
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_mysql_mysqldb_dbapiunicode_nocextensions 141106
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_sqlite_pysqlite_dbapiunicode_cextensions 127357
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 129306
# TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19441
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19747
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 18959
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21852
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_cextensions 19423
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19228
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19480
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22354
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22597
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_dbapiunicode_cextensions 19504
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_dbapiunicode_nocextensions 19702
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_dbapiunicode_cextensions 18956
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 19160
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_cextensions 18820
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 18970
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_mysqldb_dbapiunicode_cextensions 20153
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_mysqldb_dbapiunicode_nocextensions 20307
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_dbapiunicode_cextensions 19503
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 19707
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_dbapiunicode_cextensions 19363
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 19623
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_mysql_mysqldb_dbapiunicode_cextensions 20153
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_mysql_mysqldb_dbapiunicode_nocextensions 20357
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_sqlite_pysqlite_dbapiunicode_cextensions 19368
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 19572
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1411
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1436
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1323
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1603
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_cextensions 1354
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1340
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1355
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1641
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1658
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_dbapiunicode_cextensions 1419
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_dbapiunicode_nocextensions 1433
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_dbapiunicode_cextensions 1316
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 1331
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1193
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1207
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_mysqldb_dbapiunicode_cextensions 1475
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_mysqldb_dbapiunicode_nocextensions 1490
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_dbapiunicode_cextensions 1345
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 1360
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_dbapiunicode_cextensions 1239
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 1253
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_mysql_mysqldb_dbapiunicode_cextensions 1475
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_mysql_mysqldb_dbapiunicode_nocextensions 1490
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_sqlite_pysqlite_dbapiunicode_cextensions 1238
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 1253
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_cextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 91,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_cextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 94,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 96,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_mysqldb_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_mysqldb_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_mysql_mysqldb_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_mysql_mysqldb_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_sqlite_pysqlite_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 94,20
+
+# TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols
+
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_dbapiunicode_cextensions 7860
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_dbapiunicode_nocextensions 8390
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6810
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 7340
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 5864
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6394
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_mysqldb_dbapiunicode_cextensions 7274
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_mysqldb_dbapiunicode_nocextensions 7804
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6184
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6714
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_dbapiunicode_cextensions 6056
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 6586
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_mysql_mysqldb_dbapiunicode_cextensions 7274
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_mysql_mysqldb_dbapiunicode_nocextensions 7804
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_cextensions 6054
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 6584
# TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_cextensions 1138
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_nocextensions 1142
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1160
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_nocextensions 1144
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_cextensions 1135
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_nocextensions 1152
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2_cextensions 1257
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2_nocextensions 1255
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_cextensions 1250
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_nocextensions 1253
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1260
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1257
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1249
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_nocextensions 1231
-
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_dbapiunicode_cextensions 1140
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_dbapiunicode_nocextensions 1155
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_dbapiunicode_cextensions 1158
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 1159
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1133
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1161
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_mysqldb_dbapiunicode_cextensions 1254
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_mysqldb_dbapiunicode_nocextensions 1255
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_dbapiunicode_cextensions 1247
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 1253
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_dbapiunicode_cextensions 1247
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 1256
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_mysql_mysqldb_dbapiunicode_cextensions 1248
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_mysql_mysqldb_dbapiunicode_nocextensions 1259
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_sqlite_pysqlite_dbapiunicode_cextensions 1269
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 1271
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_cextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_cextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_cextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_nocextensions 82
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 83
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 24
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 9
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqldb_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqldb_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_mysqldb_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_mysqldb_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_pymysql_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_pymysql_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_sqlite_pysqlite_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 52
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_nocextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqldb_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqldb_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_mysqldb_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_mysqldb_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_pymysql_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_pymysql_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_sqlite_pysqlite_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 91
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqldb_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqldb_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_mysqldb_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_mysqldb_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_pymysql_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_pymysql_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_sqlite_pysqlite_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 16
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35528
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15481
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_nocextensions 14462
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_cextensions 40510
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_nocextensions 55510
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_cextensions 117410
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 132410
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 20497
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 35497
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 439
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15439
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_dbapiunicode_cextensions 501
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 14501
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 14460
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35528
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15481
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14489
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_nocextensions 14462
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_cextensions 40510
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_nocextensions 55510
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 117410
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 132410
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 20497
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 35497
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 439
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15439
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_dbapiunicode_cextensions 501
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 14501
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 14460
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5892,292,3697,11893,1106,1968,2433
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5936,295,3985,13782,1255,2064,2759
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5497,274,3609,11647,1097,1921,2486
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5519,274,3705,12819,1191,1928,2678
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5497,273,3577,11529,1077,1883,2439
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5519,273,3697,12796,1187,1923,2653
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 5834,294,3729,11963,1149,2023,2486
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 5856,294,3833,13221,1256,2030,2707
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_cextensions 5597,276,3721,11963,1149,1974,2548
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 5619,276,3817,13135,1243,1981,2740
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6389,407,6826,18499,1134,2661
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6379,412,7054,19930,1258,2718
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6268,394,6860,18613,1107,2679
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6361,399,6964,19640,1193,2708
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6275,394,6860,18613,1107,2679
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6360,399,6964,19640,1193,2708
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6367,405,6777,17707,1162,2636
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6452,410,6881,18734,1260,2665
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6339,396,6917,18130,1164,2686
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6424,401,7021,19157,1250,2715
diff --git a/test/requirements.py b/test/requirements.py
index 4d5869226..abc8ad5c2 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -130,7 +130,7 @@ class DefaultRequirements(SuiteRequirements):
def temporary_tables(self):
"""target database supports temporary tables"""
return skip_if(
- ["mssql"], "sql server has some other syntax?"
+ ["mssql", "firebird"], "not supported (?)"
)
@property
@@ -286,6 +286,10 @@ class DefaultRequirements(SuiteRequirements):
("mysql", "<", (5, 0, 3)),
], "savepoints not supported")
+ @property
+ def savepoints_w_release(self):
+ return self.savepoints + skip_if(
+ "oracle", "oracle doesn't support release of savepoint")
@property
def schemas(self):
@@ -293,7 +297,6 @@ class DefaultRequirements(SuiteRequirements):
named 'test_schema'."""
return skip_if([
- "sqlite",
"firebird"
], "no schema support")
@@ -362,6 +365,32 @@ class DefaultRequirements(SuiteRequirements):
], 'no support for EXCEPT')
@property
+ def parens_in_union_contained_select_w_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when LIMIT/OFFSET is specifically present.
+
+ E.g. (SELECT ...) UNION (SELECT ..)
+
+ This is known to fail on SQLite.
+
+ """
+ return fails_if('sqlite')
+
+ @property
+ def parens_in_union_contained_select_wo_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when OFFSET/LIMIT is specifically not present.
+
+ E.g. (SELECT ... LIMIT ..) UNION (SELECT .. OFFSET ..)
+
+ This is known to fail on SQLite. It also fails on Oracle
+ because without LIMIT/OFFSET, there is currently no step that
+ creates an additional subquery.
+
+ """
+ return fails_if(['sqlite', 'oracle'])
+
+ @property
def offset(self):
"""Target database must support some method of adding OFFSET or
equivalent to a result set."""
@@ -372,7 +401,7 @@ class DefaultRequirements(SuiteRequirements):
@property
def window_functions(self):
return only_if([
- "postgresql", "mssql", "oracle"
+ "postgresql>=8.4", "mssql", "oracle"
], "Backend does not support window functions")
@property
@@ -506,6 +535,18 @@ class DefaultRequirements(SuiteRequirements):
'sybase')
@property
+ def json_type(self):
+ return only_on([
+ lambda config: against(config, "mysql >= 5.7") and
+ not config.db.dialect._is_mariadb,
+ "postgresql >= 9.3"
+ ])
+
+ @property
+ def json_array_indexes(self):
+ return self.json_type + fails_if("+pg8000")
+
+ @property
def datetime_literals(self):
"""target dialect supports rendering of a date, time, or datetime as a
literal string, e.g. via the TypeEngine.literal_processor() method.
@@ -669,6 +710,10 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def duplicate_key_raises_integrity_error(self):
+ return fails_on("postgresql+pg8000")
+
+ @property
def python2(self):
return skip_if(
lambda: sys.version_info >= (3,),
@@ -723,12 +768,12 @@ class DefaultRequirements(SuiteRequirements):
@property
def range_types(self):
def check_range_types(config):
- if not against(config, "postgresql+psycopg2"):
+ if not against(
+ config,
+ ["postgresql+psycopg2", "postgresql+psycopg2cffi"]):
return False
try:
- config.db.execute("select '[1,2)'::int4range;")
- # only supported in psycopg 2.5+
- from psycopg2.extras import NumericRange
+ config.db.scalar("select '[1,2)'::int4range;")
return True
except:
return False
@@ -753,6 +798,35 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def postgresql_jsonb(self):
+ return only_on("postgresql >= 9.4") + skip_if(
+ lambda config:
+ config.db.dialect.driver == "pg8000" and
+ config.db.dialect._dbapi_version <= (1, 10, 1)
+ )
+
+ @property
+ def psycopg2_native_json(self):
+ return self.psycopg2_compatibility
+
+ @property
+ def psycopg2_native_hstore(self):
+ return self.psycopg2_compatibility
+
+ @property
+ def psycopg2_compatibility(self):
+ return only_on(
+ ["postgresql+psycopg2", "postgresql+psycopg2cffi"]
+ )
+
+ @property
+ def psycopg2_or_pg8000_compatibility(self):
+ return only_on(
+ ["postgresql+psycopg2", "postgresql+psycopg2cffi",
+ "postgresql+pg8000"]
+ )
+
+ @property
def percent_schema_names(self):
return skip_if(
[
@@ -799,10 +873,17 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def no_mssql_freetds(self):
+ return self.mssql_freetds.not_()
+
+ @property
def selectone(self):
"""target driver must support the literal statement 'select 1'"""
return skip_if(["oracle", "firebird"], "non-standard SELECT scalar syntax")
+ @property
+ def mysql_fsp(self):
+ return only_if('mysql >= 5.6.4')
@property
def mysql_fully_case_sensitive(self):
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 428fc8986..8e75638a2 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -10,7 +10,8 @@ styling and coherent test organization.
"""
-from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, is_, assert_raises, \
+ assert_raises_message, eq_ignore_whitespace
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
@@ -18,7 +19,7 @@ from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
literal, and_, null, type_coerce, alias, or_, literal_column,\
Float, TIMESTAMP, Numeric, Date, Text, union, except_,\
intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\
- over, subquery, case, true
+ over, subquery, case, true, CheckConstraint
import decimal
from sqlalchemy.util import u
from sqlalchemy import exc, sql, util, types, schema
@@ -260,16 +261,16 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
class MyCompiler(compiler.SQLCompiler):
- def get_select_precolumns(self, select):
+ def get_select_precolumns(self, select, **kw):
result = ""
if select._limit:
result += "FIRST %s " % self.process(
literal(
- select._limit))
+ select._limit), **kw)
if select._offset:
result += "SKIP %s " % self.process(
literal(
- select._offset))
+ select._offset), **kw)
return result
def limit_clause(self, select, **kw):
@@ -380,7 +381,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
# this is native_boolean=False for default dialect
self.assert_compile(
select([not_(True)], use_labels=True),
- "SELECT :param_1 = 0"
+ "SELECT :param_1 = 0 AS anon_1"
)
self.assert_compile(
@@ -561,13 +562,13 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(exists([table1.c.myid], table1.c.myid
== 5).select(),
'SELECT EXISTS (SELECT mytable.myid FROM '
- 'mytable WHERE mytable.myid = :myid_1)',
+ 'mytable WHERE mytable.myid = :myid_1) AS anon_1',
params={'mytable_myid': 5})
self.assert_compile(select([table1, exists([1],
from_obj=table2)]),
'SELECT mytable.myid, mytable.name, '
'mytable.description, EXISTS (SELECT 1 '
- 'FROM myothertable) FROM mytable',
+ 'FROM myothertable) AS anon_1 FROM mytable',
params={})
self.assert_compile(select([table1,
exists([1],
@@ -961,6 +962,19 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect
)
+ def test_no_group_by_labels(self):
+ lab1 = (table1.c.myid + 12).label('foo')
+ lab2 = func.somefunc(table1.c.name).label('bar')
+ dialect = default.DefaultDialect()
+
+ self.assert_compile(
+ select([lab1, lab2]).group_by(lab1, lab2),
+ "SELECT mytable.myid + :myid_1 AS foo, somefunc(mytable.name) "
+ "AS bar FROM mytable GROUP BY mytable.myid + :myid_1, "
+ "somefunc(mytable.name)",
+ dialect=dialect
+ )
+
def test_conjunctions(self):
a, b, c = text('a'), text('b'), text('c')
x = and_(a, b, c)
@@ -1630,14 +1644,12 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
s = select([column('foo'), column('bar')])
- # ORDER BY's even though not supported by
- # all DB's, are rendered if requested
self.assert_compile(
union(
s.order_by("foo"),
s.order_by("bar")),
- "SELECT foo, bar ORDER BY foo UNION SELECT foo, bar ORDER BY bar")
- # self_group() is honored
+ "(SELECT foo, bar ORDER BY foo) UNION "
+ "(SELECT foo, bar ORDER BY bar)")
self.assert_compile(
union(s.order_by("foo").self_group(),
s.order_by("bar").limit(10).self_group()),
@@ -1746,6 +1758,67 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT foo, bar FROM bat)"
)
+ # tests for [ticket:2528]
+ # sqlite hates all of these.
+ self.assert_compile(
+ union(
+ s.limit(1),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.order_by(column('bar')),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat ORDER BY bar) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_1)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).alias('a'),
+ s.limit(2).alias('b')
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).self_group(),
+ s.limit(2).self_group()
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(s.limit(1), s.limit(2).offset(3)).alias().select(),
+ "SELECT anon_1.foo, anon_1.bar FROM "
+ "((SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2 OFFSET :param_3)) "
+ "AS anon_1"
+ )
+
+ # this version works for SQLite
+ self.assert_compile(
+ union(
+ s.limit(1).alias().select(),
+ s.offset(2).alias().select(),
+ ),
+ "SELECT anon_1.foo, anon_1.bar "
+ "FROM (SELECT foo, bar FROM bat"
+ " LIMIT :param_1) AS anon_1 "
+ "UNION SELECT anon_2.foo, anon_2.bar "
+ "FROM (SELECT foo, bar "
+ "FROM bat"
+ " LIMIT -1 OFFSET :param_2) AS anon_2"
+ )
+
def test_binds(self):
for (
stmt,
@@ -2027,6 +2100,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'Incorrect number of expected results')
eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[0])
+ eq_(str(tbl.c.v1.cast(Numeric).compile(dialect=dialect)),
+ 'CAST(casttest.v1 AS %s)' % expected_results[0])
eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[1])
eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)),
@@ -2488,7 +2563,7 @@ class UnsupportedTest(fixtures.TestBase):
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <class '.*SomeElement'>",
SomeElement().compile
)
@@ -2504,7 +2579,7 @@ class UnsupportedTest(fixtures.TestBase):
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <class '.*SomeElement'>",
SomeElement().compile
)
@@ -2517,12 +2592,76 @@ class UnsupportedTest(fixtures.TestBase):
binary = BinaryExpression(column("foo"), column("bar"), myop)
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <function.*",
binary.compile
)
+class StringifySpecialTest(fixtures.TestBase):
+ def test_basic(self):
+ stmt = select([table1]).where(table1.c.myid == 10)
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1"
+ )
+
+ def test_cte(self):
+ # stringify of these was supported anyway by defaultdialect.
+ stmt = select([table1.c.myid]).cte()
+ stmt = select([stmt])
+ eq_ignore_whitespace(
+ str(stmt),
+ "WITH anon_1 AS (SELECT mytable.myid AS myid FROM mytable) "
+ "SELECT anon_1.myid FROM anon_1"
+ )
+
+ def test_returning(self):
+ stmt = table1.insert().returning(table1.c.myid)
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "INSERT INTO mytable (myid, name, description) "
+ "VALUES (:myid, :name, :description) RETURNING mytable.myid"
+ )
+
+ def test_array_index(self):
+ stmt = select([column('foo', types.ARRAY(Integer))[5]])
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT foo[:foo_1] AS anon_1"
+ )
+
+ def test_unknown_type(self):
+ class MyType(types.TypeEngine):
+ __visit_name__ = 'mytype'
+
+ stmt = select([cast(table1.c.myid, MyType)])
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT CAST(mytable.myid AS MyType) AS anon_1 FROM mytable"
+ )
+
+ def test_within_group(self):
+ # stringify of these was supported anyway by defaultdialect.
+ from sqlalchemy import within_group
+ stmt = select([
+ table1.c.myid,
+ within_group(
+ func.percentile_cont(0.5),
+ table1.c.name.desc()
+ )
+ ])
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) AS anon_1 FROM mytable"
+ )
+
+
class KwargPropagationTest(fixtures.TestBase):
@classmethod
@@ -2575,6 +2714,31 @@ class KwargPropagationTest(fixtures.TestBase):
class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
+ def test_insert_literal_binds(self):
+ stmt = table1.insert().values(myid=3, name='jack')
+
+ self.assert_compile(
+ stmt,
+ "INSERT INTO mytable (myid, name) VALUES (3, 'jack')",
+ literal_binds=True)
+
+ def test_update_literal_binds(self):
+ stmt = table1.update().values(name='jack').\
+ where(table1.c.name == 'jill')
+
+ self.assert_compile(
+ stmt,
+ "UPDATE mytable SET name='jack' WHERE mytable.name = 'jill'",
+ literal_binds=True)
+
+ def test_delete_literal_binds(self):
+ stmt = table1.delete().where(table1.c.name == 'jill')
+
+ self.assert_compile(
+ stmt,
+ "DELETE FROM mytable WHERE mytable.name = 'jill'",
+ literal_binds=True)
+
def test_correlated_update(self):
# test against a straight text subquery
u = update(
@@ -2734,48 +2898,6 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
'x2': 1,
'y': 2})
- def test_unconsumed_names(self):
- t = table("t", column("x"), column("y"))
- t2 = table("t2", column("q"), column("z"))
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: z",
- t.insert().values(x=5, z=5).compile,
- )
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: z",
- t.update().values(x=5, z=5).compile,
- )
-
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: j",
- t.update().values(x=5, j=7).values({t2.c.z: 5}).
- where(t.c.x == t2.c.q).compile,
- )
-
- # bindparam names don't get counted
- i = t.insert().values(x=3 + bindparam('x2'))
- self.assert_compile(
- i,
- "INSERT INTO t (x) VALUES ((:param_1 + :x2))"
- )
-
- # even if in the params list
- i = t.insert().values(x=3 + bindparam('x2'))
- self.assert_compile(
- i,
- "INSERT INTO t (x) VALUES ((:param_1 + :x2))",
- params={"x2": 1}
- )
-
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: j",
- t.update().values(x=5, j=7).compile,
- column_keys=['j']
- )
def test_labels_no_collision(self):
@@ -2842,6 +2964,96 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE TABLE t (x INTEGER, z INTEGER)"
)
+ def test_composite_pk_constraint_autoinc_first(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column('a', Integer, primary_key=True),
+ Column('b', Integer, primary_key=True, autoincrement=True)
+ )
+ self.assert_compile(
+ schema.CreateTable(t),
+ "CREATE TABLE t ("
+ "a INTEGER NOT NULL, "
+ "b INTEGER NOT NULL, "
+ "PRIMARY KEY (b, a))"
+ )
+
+ def test_table_no_cols(self):
+ m = MetaData()
+ t1 = Table('t1', m)
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 ()"
+ )
+
+ def test_table_no_cols_w_constraint(self):
+ m = MetaData()
+ t1 = Table('t1', m, CheckConstraint('a = 1'))
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 (CHECK (a = 1))"
+ )
+
+ def test_table_one_col_w_constraint(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('q', Integer), CheckConstraint('a = 1'))
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 (q INTEGER, CHECK (a = 1))"
+ )
+
+ def test_schema_translate_map_table(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('q', Integer))
+ t2 = Table('t2', m, Column('q', Integer), schema='foo')
+ t3 = Table('t3', m, Column('q', Integer), schema='bar')
+
+ schema_translate_map = {None: "z", "bar": None, "foo": "bat"}
+
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE z.t1 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t2),
+ "CREATE TABLE bat.t2 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t3),
+ "CREATE TABLE t3 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ def test_schema_translate_map_sequence(self):
+ s1 = schema.Sequence('s1')
+ s2 = schema.Sequence('s2', schema='foo')
+ s3 = schema.Sequence('s3', schema='bar')
+
+ schema_translate_map = {None: "z", "bar": None, "foo": "bat"}
+
+ self.assert_compile(
+ schema.CreateSequence(s1),
+ "CREATE SEQUENCE z.s1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateSequence(s2),
+ "CREATE SEQUENCE bat.s2",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateSequence(s3),
+ "CREATE SEQUENCE s3",
+ schema_translate_map=schema_translate_map
+ )
+
class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -2936,6 +3148,82 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
' "dbo.remote_owner".remotetable'
)
+ def test_schema_translate_select(self):
+ schema_translate_map = {"remote_owner": "foob", None: 'bar'}
+
+ self.assert_compile(
+ table1.select().where(table1.c.name == 'hi'),
+ "SELECT bar.mytable.myid, bar.mytable.name, "
+ "bar.mytable.description FROM bar.mytable "
+ "WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.select().where(table4.c.value == 'hi'),
+ "SELECT foob.remotetable.rem_id, foob.remotetable.datatype_id, "
+ "foob.remotetable.value FROM foob.remotetable "
+ "WHERE foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ schema_translate_map = {"remote_owner": "foob"}
+ self.assert_compile(
+ select([
+ table1, table4
+ ]).select_from(
+ join(table1, table4, table1.c.myid == table4.c.rem_id)
+ ),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "foob.remotetable.rem_id, foob.remotetable.datatype_id, "
+ "foob.remotetable.value FROM mytable JOIN foob.remotetable "
+ "ON foob.remotetable.rem_id = mytable.myid",
+ schema_translate_map=schema_translate_map
+ )
+
+ def test_schema_translate_crud(self):
+ schema_translate_map = {"remote_owner": "foob", None: 'bar'}
+
+ self.assert_compile(
+ table1.insert().values(description='foo'),
+ "INSERT INTO bar.mytable (description) VALUES (:description)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table1.update().where(table1.c.name == 'hi').
+ values(description='foo'),
+ "UPDATE bar.mytable SET description=:description "
+ "WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+ self.assert_compile(
+ table1.delete().where(table1.c.name == 'hi'),
+ "DELETE FROM bar.mytable WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.insert().values(value='there'),
+ "INSERT INTO foob.remotetable (value) VALUES (:value)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.update().where(table4.c.value == 'hi').
+ values(value='there'),
+ "UPDATE foob.remotetable SET value=:value "
+ "WHERE foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.delete().where(table4.c.value == 'hi'),
+ "DELETE FROM foob.remotetable WHERE "
+ "foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
def test_alias(self):
a = alias(table4, 'remtable')
self.assert_compile(a.select(a.c.datatype_id == 7),
@@ -3380,7 +3668,7 @@ class ResultMapTest(fixtures.TestBase):
stmt = select([t]).union(select([t]))
comp = stmt.compile()
eq_(
- comp.result_map,
+ comp._create_result_map(),
{'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type),
'b': ('b', (t.c.b, 'b', 'b'), t.c.b.type)}
)
@@ -3391,7 +3679,7 @@ class ResultMapTest(fixtures.TestBase):
stmt = select([t.c.a]).select_from(t.join(subq, t.c.a == subq.c.a))
comp = stmt.compile()
eq_(
- comp.result_map,
+ comp._create_result_map(),
{'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type)}
)
@@ -3400,7 +3688,7 @@ class ResultMapTest(fixtures.TestBase):
stmt = select([t.c.a]).union(select([t.c.b]))
comp = stmt.compile()
eq_(
- comp.result_map,
+ comp._create_result_map(),
{'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type)},
)
@@ -3410,13 +3698,15 @@ class ResultMapTest(fixtures.TestBase):
tc = type_coerce(t.c.a, String)
stmt = select([t.c.a, l1, tc])
comp = stmt.compile()
- tc_anon_label = comp.result_map['a_1'][1][0]
+ tc_anon_label = comp._create_result_map()['anon_1'][1][0]
eq_(
- comp.result_map,
+ comp._create_result_map(),
{
'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type),
'bar': ('bar', (l1, 'bar'), l1.type),
- 'a_1': ('%%(%d a)s' % id(tc), (tc_anon_label, 'a_1'), tc.type),
+ 'anon_1': (
+ '%%(%d anon)s' % id(tc),
+ (tc_anon_label, 'anon_1', tc), tc.type),
},
)
@@ -3431,11 +3721,11 @@ class ResultMapTest(fixtures.TestBase):
t1.join(union, t1.c.a == union.c.t1_a)).apply_labels()
comp = stmt.compile()
eq_(
- set(comp.result_map),
+ set(comp._create_result_map()),
set(['t1_1_b', 't1_1_a', 't1_a', 't1_b'])
)
is_(
- comp.result_map['t1_a'][1][2], t1.c.a
+ comp._create_result_map()['t1_a'][1][2], t1.c.a
)
def test_insert_with_select_values(self):
@@ -3448,7 +3738,7 @@ class ResultMapTest(fixtures.TestBase):
stmt = t2.insert().values(a=select([astring])).returning(aint)
comp = stmt.compile(dialect=postgresql.dialect())
eq_(
- comp.result_map,
+ comp._create_result_map(),
{'a': ('a', (aint, 'a', 'a'), aint.type)}
)
@@ -3463,6 +3753,126 @@ class ResultMapTest(fixtures.TestBase):
returning(aint)
comp = stmt.compile(dialect=postgresql.dialect())
eq_(
- comp.result_map,
+ comp._create_result_map(),
{'a': ('a', (aint, 'a', 'a'), aint.type)}
)
+
+ def test_nested_api(self):
+ from sqlalchemy.engine.result import ResultMetaData
+ stmt2 = select([table2])
+
+ stmt1 = select([table1]).select_from(stmt2)
+
+ contexts = {}
+
+ int_ = Integer()
+
+ class MyCompiler(compiler.SQLCompiler):
+ def visit_select(self, stmt, *arg, **kw):
+
+ if stmt is stmt2:
+ with self._nested_result() as nested:
+ contexts[stmt2] = nested
+ text = super(MyCompiler, self).visit_select(stmt2)
+ self._add_to_result_map("k1", "k1", (1, 2, 3), int_)
+ else:
+ text = super(MyCompiler, self).visit_select(
+ stmt, *arg, **kw)
+ self._add_to_result_map("k2", "k2", (3, 4, 5), int_)
+ return text
+
+ comp = MyCompiler(default.DefaultDialect(), stmt1)
+
+ eq_(
+ ResultMetaData._create_result_map(contexts[stmt2][0]),
+ {
+ 'otherid': (
+ 'otherid',
+ (table2.c.otherid, 'otherid', 'otherid'),
+ table2.c.otherid.type),
+ 'othername': (
+ 'othername',
+ (table2.c.othername, 'othername', 'othername'),
+ table2.c.othername.type),
+ 'k1': ('k1', (1, 2, 3), int_)
+ }
+ )
+ eq_(
+ comp._create_result_map(),
+ {
+ 'myid': (
+ 'myid',
+ (table1.c.myid, 'myid', 'myid'), table1.c.myid.type
+ ),
+ 'k2': ('k2', (3, 4, 5), int_),
+ 'name': (
+ 'name', (table1.c.name, 'name', 'name'),
+ table1.c.name.type),
+ 'description': (
+ 'description',
+ (table1.c.description, 'description', 'description'),
+ table1.c.description.type)}
+ )
+
+ def test_select_wraps_for_translate_ambiguity(self):
+ # test for issue #3657
+ t = table('a', column('x'), column('y'), column('z'))
+
+ l1, l2, l3 = t.c.z.label('a'), t.c.x.label('b'), t.c.x.label('c')
+ orig = [t.c.x, t.c.y, l1, l2, l3]
+ stmt = select(orig)
+ wrapped = stmt._generate()
+ wrapped = wrapped.column(
+ func.ROW_NUMBER().over(order_by=t.c.z)).alias()
+
+ wrapped_again = select([c for c in wrapped.c])
+
+ compiled = wrapped_again.compile(
+ compile_kwargs={'select_wraps_for': stmt})
+
+ proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
+ for orig_obj, proxied_obj in zip(
+ orig,
+ proxied
+ ):
+ is_(orig_obj, proxied_obj)
+
+ def test_select_wraps_for_translate_ambiguity_dupe_cols(self):
+ # test for issue #3657
+ t = table('a', column('x'), column('y'), column('z'))
+
+ l1, l2, l3 = t.c.z.label('a'), t.c.x.label('b'), t.c.x.label('c')
+ orig = [t.c.x, t.c.y, l1, l2, l3]
+
+ # create the statement with some duplicate columns. right now
+ # the behavior is that these redundant columns are deduped.
+ stmt = select([t.c.x, t.c.y, l1, t.c.y, l2, t.c.x, l3])
+
+ # so the statement has 7 inner columns...
+ eq_(len(list(stmt.inner_columns)), 7)
+
+ # but only exposes 5 of them, the other two are dupes of x and y
+ eq_(len(stmt.c), 5)
+
+ # and when it generates a SELECT it will also render only 5
+ eq_(len(stmt._columns_plus_names), 5)
+
+ wrapped = stmt._generate()
+ wrapped = wrapped.column(
+ func.ROW_NUMBER().over(order_by=t.c.z)).alias()
+
+ # so when we wrap here we're going to have only 5 columns
+ wrapped_again = select([c for c in wrapped.c])
+
+ # so the compiler logic that matches up the "wrapper" to the
+ # "select_wraps_for" can't use inner_columns to match because
+ # these collections are not the same
+ compiled = wrapped_again.compile(
+ compile_kwargs={'select_wraps_for': stmt})
+
+ proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
+ for orig_obj, proxied_obj in zip(
+ orig,
+ proxied
+ ):
+ is_(orig_obj, proxied_obj)
diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py
index eb558fc95..3e8021ebe 100644
--- a/test/sql/test_constraints.py
+++ b/test/sql/test_constraints.py
@@ -8,8 +8,9 @@ from sqlalchemy.testing import fixtures, AssertsExecutionResults, \
from sqlalchemy import testing
from sqlalchemy.engine import default
from sqlalchemy.testing import engines
+from sqlalchemy.testing.assertions import expect_warnings
from sqlalchemy.testing import eq_
-from sqlalchemy.testing.assertsql import AllOf, RegexSQL, CompiledSQL
+from sqlalchemy.testing.assertsql import AllOf, RegexSQL, CompiledSQL, DialectSQL
from sqlalchemy.sql import table, column
@@ -84,9 +85,11 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
metadata.drop_all, testing.db
)
else:
-
- with self.sql_execution_asserter() as asserter:
- metadata.drop_all(testing.db, checkfirst=False)
+ with expect_warnings(
+ "Can't sort tables for DROP; an unresolvable "
+ "foreign key dependency "):
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(
AllOf(
@@ -109,10 +112,11 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
Column('id', Integer, primary_key=True),
Column("aid", Integer),
ForeignKeyConstraint(["aid"], ["a.id"], name="bfk"))
- self._assert_cyclic_constraint(metadata, auto=True)
+ self._assert_cyclic_constraint(
+ metadata, auto=True, sqlite_warning=True)
@testing.provide_metadata
- def test_fk_column_auto_alter_constraint_create(self):
+ def test_fk_column_auto_alter_inline_constraint_create(self):
metadata = self.metadata
Table("a", metadata,
@@ -125,7 +129,24 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
ForeignKey("a.id", name="bfk")
),
)
- self._assert_cyclic_constraint(metadata, auto=True)
+ self._assert_cyclic_constraint(
+ metadata, auto=True, sqlite_warning=True)
+
+ @testing.provide_metadata
+ def test_fk_column_use_alter_inline_constraint_create(self):
+ metadata = self.metadata
+
+ Table("a", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', Integer, ForeignKey("b.id")),
+ )
+ Table("b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column("aid", Integer,
+ ForeignKey("a.id", name="bfk", use_alter=True)
+ ),
+ )
+ self._assert_cyclic_constraint(metadata, auto=False)
@testing.provide_metadata
def test_fk_table_use_alter_constraint_create(self):
@@ -137,9 +158,10 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
ForeignKeyConstraint(["bid"], ["b.id"])
)
Table(
- "b", metadata, Column(
- 'id', Integer, primary_key=True), Column(
- "aid", Integer), ForeignKeyConstraint(
+ "b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column("aid", Integer),
+ ForeignKeyConstraint(
["aid"], ["a.id"], use_alter=True, name="bfk"))
self._assert_cyclic_constraint(metadata)
@@ -157,63 +179,42 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
ForeignKey("a.id", use_alter=True, name="bfk")
),
)
- self._assert_cyclic_constraint(metadata)
+ self._assert_cyclic_constraint(metadata, auto=False)
+
+ def _assert_cyclic_constraint(
+ self, metadata, auto=False, sqlite_warning=False):
+ if testing.db.dialect.supports_alter:
+ self._assert_cyclic_constraint_supports_alter(metadata, auto=auto)
+ else:
+ self._assert_cyclic_constraint_no_alter(
+ metadata, auto=auto, sqlite_warning=sqlite_warning)
- def _assert_cyclic_constraint(self, metadata, auto=False):
+ def _assert_cyclic_constraint_supports_alter(self, metadata, auto=False):
table_assertions = []
if auto:
- if testing.db.dialect.supports_alter:
- table_assertions.append(
- CompiledSQL('CREATE TABLE b ('
- 'id INTEGER NOT NULL, '
- 'aid INTEGER, '
- 'PRIMARY KEY (id)'
- ')'
- )
- )
- else:
- table_assertions.append(
- CompiledSQL(
- 'CREATE TABLE b ('
- 'id INTEGER NOT NULL, '
- 'aid INTEGER, '
- 'PRIMARY KEY (id), '
- 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)'
- ')'
- )
- )
-
- if testing.db.dialect.supports_alter:
- table_assertions.append(
- CompiledSQL(
- 'CREATE TABLE a ('
- 'id INTEGER NOT NULL, '
- 'bid INTEGER, '
- 'PRIMARY KEY (id)'
- ')'
- )
- )
- else:
- table_assertions.append(
- CompiledSQL(
- 'CREATE TABLE a ('
- 'id INTEGER NOT NULL, '
- 'bid INTEGER, '
- 'PRIMARY KEY (id), '
- 'FOREIGN KEY(bid) REFERENCES b (id)'
- ')'
- )
+ table_assertions = [
+ CompiledSQL('CREATE TABLE b ('
+ 'id INTEGER NOT NULL, '
+ 'aid INTEGER, '
+ 'PRIMARY KEY (id)'
+ ')'
+ ),
+ CompiledSQL(
+ 'CREATE TABLE a ('
+ 'id INTEGER NOT NULL, '
+ 'bid INTEGER, '
+ 'PRIMARY KEY (id)'
+ ')'
)
+ ]
else:
- table_assertions.append(
+ table_assertions = [
CompiledSQL('CREATE TABLE b ('
'id INTEGER NOT NULL, '
'aid INTEGER, '
'PRIMARY KEY (id)'
')'
- )
- )
- table_assertions.append(
+ ),
CompiledSQL(
'CREATE TABLE a ('
'id INTEGER NOT NULL, '
@@ -222,41 +223,238 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
'FOREIGN KEY(bid) REFERENCES b (id)'
')'
)
- )
+ ]
assertions = [AllOf(*table_assertions)]
- if testing.db.dialect.supports_alter:
- fk_assertions = []
+ fk_assertions = []
+ fk_assertions.append(
+ CompiledSQL('ALTER TABLE b ADD CONSTRAINT bfk '
+ 'FOREIGN KEY(aid) REFERENCES a (id)')
+ )
+ if auto:
fk_assertions.append(
- CompiledSQL('ALTER TABLE b ADD CONSTRAINT bfk '
- 'FOREIGN KEY(aid) REFERENCES a (id)')
+ CompiledSQL('ALTER TABLE a ADD '
+ 'FOREIGN KEY(bid) REFERENCES b (id)')
)
- if auto:
- fk_assertions.append(
- CompiledSQL('ALTER TABLE a ADD '
- 'FOREIGN KEY(bid) REFERENCES b (id)')
+ assertions.append(AllOf(*fk_assertions))
+
+ with self.sql_execution_asserter() as asserter:
+ metadata.create_all(checkfirst=False)
+ asserter.assert_(*assertions)
+
+ assertions = [
+ CompiledSQL('ALTER TABLE b DROP CONSTRAINT bfk'),
+ CompiledSQL("DROP TABLE a"),
+ CompiledSQL("DROP TABLE b")
+ ]
+
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False),
+ asserter.assert_(*assertions)
+
+ def _assert_cyclic_constraint_no_alter(
+ self, metadata, auto=False, sqlite_warning=False):
+ table_assertions = []
+ if auto:
+ table_assertions.append(
+ DialectSQL(
+ 'CREATE TABLE b ('
+ 'id INTEGER NOT NULL, '
+ 'aid INTEGER, '
+ 'PRIMARY KEY (id), '
+ 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)'
+ ')'
+ )
+ )
+ table_assertions.append(
+ DialectSQL(
+ 'CREATE TABLE a ('
+ 'id INTEGER NOT NULL, '
+ 'bid INTEGER, '
+ 'PRIMARY KEY (id), '
+ 'FOREIGN KEY(bid) REFERENCES b (id)'
+ ')'
)
- assertions.append(AllOf(*fk_assertions))
+ )
+ else:
+ table_assertions.append(
+ DialectSQL(
+ 'CREATE TABLE b ('
+ 'id INTEGER NOT NULL, '
+ 'aid INTEGER, '
+ 'PRIMARY KEY (id), '
+ 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)'
+ ')'
+ )
+ )
+
+ table_assertions.append(
+ DialectSQL(
+ 'CREATE TABLE a ('
+ 'id INTEGER NOT NULL, '
+ 'bid INTEGER, '
+ 'PRIMARY KEY (id), '
+ 'FOREIGN KEY(bid) REFERENCES b (id)'
+ ')'
+ )
+ )
+
+ assertions = [AllOf(*table_assertions)]
with self.sql_execution_asserter() as asserter:
metadata.create_all(checkfirst=False)
asserter.assert_(*assertions)
+ assertions = [AllOf(
+ CompiledSQL("DROP TABLE a"),
+ CompiledSQL("DROP TABLE b")
+ )]
+
+ if sqlite_warning:
+ with expect_warnings("Can't sort tables for DROP; "):
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False),
+ else:
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False),
+ asserter.assert_(*assertions)
+
+ @testing.force_drop_names("a", "b")
+ def test_cycle_unnamed_fks(self):
+ metadata = MetaData(testing.db)
+
+ Table("a", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', Integer, ForeignKey("b.id")),
+ )
+
+ Table("b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column("aid", Integer, ForeignKey("a.id")),
+ )
+
+ assertions = [
+ AllOf(
+ CompiledSQL(
+ 'CREATE TABLE b ('
+ 'id INTEGER NOT NULL, '
+ 'aid INTEGER, '
+ 'PRIMARY KEY (id)'
+ ')'
+ ),
+ CompiledSQL(
+ 'CREATE TABLE a ('
+ 'id INTEGER NOT NULL, '
+ 'bid INTEGER, '
+ 'PRIMARY KEY (id)'
+ ')'
+ )
+ ),
+ AllOf(
+ CompiledSQL('ALTER TABLE b ADD '
+ 'FOREIGN KEY(aid) REFERENCES a (id)'),
+ CompiledSQL('ALTER TABLE a ADD '
+ 'FOREIGN KEY(bid) REFERENCES b (id)')
+ ),
+ ]
+ with self.sql_execution_asserter() as asserter:
+ metadata.create_all(checkfirst=False)
+
if testing.db.dialect.supports_alter:
- assertions = [
- CompiledSQL('ALTER TABLE b DROP CONSTRAINT bfk'),
- CompiledSQL("DROP TABLE a"),
- CompiledSQL("DROP TABLE b")
- ]
+ asserter.assert_(*assertions)
+
+ assert_raises_message(
+ exc.CircularDependencyError,
+ "Can't sort tables for DROP; an unresolvable foreign key "
+ "dependency exists between tables: a, b. "
+ "Please ensure that the "
+ "ForeignKey and ForeignKeyConstraint objects involved in the "
+ "cycle have names so that they can be dropped using "
+ "DROP CONSTRAINT.",
+ metadata.drop_all, checkfirst=False
+ )
else:
- assertions = [AllOf(
- CompiledSQL("DROP TABLE a"),
- CompiledSQL("DROP TABLE b")
- )]
+ with expect_warnings(
+ "Can't sort tables for DROP; an unresolvable "
+ "foreign key dependency exists between tables"):
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False)
+
+ asserter.assert_(
+ AllOf(
+ CompiledSQL("DROP TABLE b"),
+ CompiledSQL("DROP TABLE a"),
+ )
+ )
+
+ @testing.force_drop_names("a", "b")
+ def test_cycle_named_fks(self):
+ metadata = MetaData(testing.db)
+ Table("a", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', Integer, ForeignKey("b.id")),
+ )
+
+ Table("b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column(
+ "aid", Integer,
+ ForeignKey("a.id", use_alter=True, name='aidfk')),
+ )
+
+ assertions = [
+ AllOf(
+ CompiledSQL(
+ 'CREATE TABLE b ('
+ 'id INTEGER NOT NULL, '
+ 'aid INTEGER, '
+ 'PRIMARY KEY (id)'
+ ')'
+ ),
+ CompiledSQL(
+ 'CREATE TABLE a ('
+ 'id INTEGER NOT NULL, '
+ 'bid INTEGER, '
+ 'PRIMARY KEY (id), '
+ 'FOREIGN KEY(bid) REFERENCES b (id)'
+ ')'
+ )
+ ),
+ CompiledSQL('ALTER TABLE b ADD CONSTRAINT aidfk '
+ 'FOREIGN KEY(aid) REFERENCES a (id)'),
+ ]
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False),
- asserter.assert_(*assertions)
+ metadata.create_all(checkfirst=False)
+
+ if testing.db.dialect.supports_alter:
+ asserter.assert_(*assertions)
+
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False)
+
+ asserter.assert_(
+ CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"),
+ AllOf(
+ CompiledSQL("DROP TABLE b"),
+ CompiledSQL("DROP TABLE a"),
+ )
+ )
+ else:
+ with self.sql_execution_asserter() as asserter:
+ metadata.drop_all(checkfirst=False)
+
+ asserter.assert_(
+ AllOf(
+ CompiledSQL("DROP TABLE b"),
+ CompiledSQL("DROP TABLE a"),
+ ),
+ )
+
+
+
+
+
@testing.requires.check_constraints
@testing.provide_metadata
@@ -1052,6 +1250,162 @@ class ConstraintAPITest(fixtures.TestBase):
assert c not in t.constraints
assert c not in t2.constraints
+ def test_auto_append_ck_on_col_attach_one(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ ck = CheckConstraint(a > b)
+
+ t = Table('tbl', m, a, b)
+ assert ck in t.constraints
+
+ def test_auto_append_ck_on_col_attach_two(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+ ck = CheckConstraint(a > b + c)
+
+ t = Table('tbl', m, a)
+ assert ck not in t.constraints
+
+ t.append_column(b)
+ assert ck not in t.constraints
+
+ t.append_column(c)
+ assert ck in t.constraints
+
+ def test_auto_append_ck_on_col_attach_three(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+ ck = CheckConstraint(a > b + c)
+
+ t = Table('tbl', m, a)
+ assert ck not in t.constraints
+
+ t.append_column(b)
+ assert ck not in t.constraints
+
+ t2 = Table('t2', m)
+ t2.append_column(c)
+
+ # two different tables, so CheckConstraint does nothing.
+ assert ck not in t.constraints
+
+ def test_auto_append_uq_on_col_attach_one(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ uq = UniqueConstraint(a, b)
+
+ t = Table('tbl', m, a, b)
+ assert uq in t.constraints
+
+ def test_auto_append_uq_on_col_attach_two(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+ uq = UniqueConstraint(a, b, c)
+
+ t = Table('tbl', m, a)
+ assert uq not in t.constraints
+
+ t.append_column(b)
+ assert uq not in t.constraints
+
+ t.append_column(c)
+ assert uq in t.constraints
+
+ def test_auto_append_uq_on_col_attach_three(self):
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+ uq = UniqueConstraint(a, b, c)
+
+ t = Table('tbl', m, a)
+ assert uq not in t.constraints
+
+ t.append_column(b)
+ assert uq not in t.constraints
+
+ t2 = Table('t2', m)
+
+ # two different tables, so UniqueConstraint raises
+ assert_raises_message(
+ exc.ArgumentError,
+ r"Column\(s\) 't2\.c' are not part of table 'tbl'\.",
+ t2.append_column, c
+ )
+
+ def test_auto_append_uq_on_col_attach_four(self):
+ """Test that a uniqueconstraint that names Column and string names
+ won't autoattach using deferred column attachment.
+
+ """
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+ uq = UniqueConstraint(a, 'b', 'c')
+
+ t = Table('tbl', m, a)
+ assert uq not in t.constraints
+
+ t.append_column(b)
+ assert uq not in t.constraints
+
+ t.append_column(c)
+
+ # we don't track events for previously unknown columns
+ # named 'c' to be attached
+ assert uq not in t.constraints
+
+ t.append_constraint(uq)
+
+ assert uq in t.constraints
+
+ eq_(
+ [cn for cn in t.constraints if isinstance(cn, UniqueConstraint)],
+ [uq]
+ )
+
+ def test_auto_append_uq_on_col_attach_five(self):
+ """Test that a uniqueconstraint that names Column and string names
+ *will* autoattach if the table has all those names up front.
+
+ """
+ m = MetaData()
+
+ a = Column('a', Integer)
+ b = Column('b', Integer)
+ c = Column('c', Integer)
+
+ t = Table('tbl', m, a, c, b)
+
+ uq = UniqueConstraint(a, 'b', 'c')
+
+ assert uq in t.constraints
+
+ t.append_constraint(uq)
+
+ assert uq in t.constraints
+
+ eq_(
+ [cn for cn in t.constraints if isinstance(cn, UniqueConstraint)],
+ [uq]
+ )
+
def test_index_asserts_cols_standalone(self):
metadata = MetaData()
diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py
index c7906dcb7..aa674403e 100644
--- a/test/sql/test_cte.py
+++ b/test/sql/test_cte.py
@@ -1,6 +1,6 @@
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import AssertsCompiledSQL, assert_raises_message
-from sqlalchemy.sql import table, column, select, func, literal
+from sqlalchemy.sql import table, column, select, func, literal, exists, and_
from sqlalchemy.dialects import mssql
from sqlalchemy.engine import default
from sqlalchemy.exc import CompileError
@@ -8,7 +8,7 @@ from sqlalchemy.exc import CompileError
class CTETest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = 'default'
+ __dialect__ = 'default_enhanced'
def test_nonrecursive(self):
orders = table('orders',
@@ -491,4 +491,152 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
'FROM "order") pg suffix SELECT "order"."order" FROM "order", '
'regional_sales WHERE "order"."order" > regional_sales."order"',
dialect='postgresql'
- ) \ No newline at end of file
+ )
+
+ def test_upsert_from_select(self):
+ orders = table(
+ 'orders',
+ column('region'),
+ column('amount'),
+ column('product'),
+ column('quantity')
+ )
+
+ upsert = (
+ orders.update()
+ .where(orders.c.region == 'Region1')
+ .values(amount=1.0, product='Product1', quantity=1)
+ .returning(*(orders.c._all_columns)).cte('upsert'))
+
+ insert = orders.insert().from_select(
+ orders.c.keys(),
+ select([
+ literal('Region1'), literal(1.0),
+ literal('Product1'), literal(1)
+ ]).where(~exists(upsert.select()))
+ )
+
+ self.assert_compile(
+ insert,
+ "WITH upsert AS (UPDATE orders SET amount=:amount, "
+ "product=:product, quantity=:quantity "
+ "WHERE orders.region = :region_1 "
+ "RETURNING orders.region, orders.amount, "
+ "orders.product, orders.quantity) "
+ "INSERT INTO orders (region, amount, product, quantity) "
+ "SELECT :param_1 AS anon_1, :param_2 AS anon_2, "
+ ":param_3 AS anon_3, :param_4 AS anon_4 WHERE NOT (EXISTS "
+ "(SELECT upsert.region, upsert.amount, upsert.product, "
+ "upsert.quantity FROM upsert))"
+ )
+
+ def test_pg_example_one(self):
+ products = table('products', column('id'), column('date'))
+ products_log = table('products_log', column('id'), column('date'))
+
+ moved_rows = products.delete().where(and_(
+ products.c.date >= 'dateone',
+ products.c.date < 'datetwo')).returning(*products.c).\
+ cte('moved_rows')
+
+ stmt = products_log.insert().from_select(
+ products_log.c, moved_rows.select())
+ self.assert_compile(
+ stmt,
+ "WITH moved_rows AS "
+ "(DELETE FROM products WHERE products.date >= :date_1 "
+ "AND products.date < :date_2 "
+ "RETURNING products.id, products.date) "
+ "INSERT INTO products_log (id, date) "
+ "SELECT moved_rows.id, moved_rows.date FROM moved_rows"
+ )
+
+ def test_pg_example_two(self):
+ products = table('products', column('id'), column('price'))
+
+ t = products.update().values(price='someprice').\
+ returning(*products.c).cte('t')
+ stmt = t.select()
+
+ self.assert_compile(
+ stmt,
+ "WITH t AS "
+ "(UPDATE products SET price=:price "
+ "RETURNING products.id, products.price) "
+ "SELECT t.id, t.price "
+ "FROM t"
+ )
+
+ def test_pg_example_three(self):
+
+ parts = table(
+ 'parts',
+ column('part'),
+ column('sub_part'),
+ )
+
+ included_parts = select([
+ parts.c.sub_part,
+ parts.c.part]).\
+ where(parts.c.part == 'our part').\
+ cte("included_parts", recursive=True)
+
+ pr = included_parts.alias('pr')
+ p = parts.alias('p')
+ included_parts = included_parts.union_all(
+ select([
+ p.c.sub_part,
+ p.c.part]).
+ where(p.c.part == pr.c.sub_part)
+ )
+ stmt = parts.delete().where(
+ parts.c.part.in_(select([included_parts.c.part]))).returning(
+ parts.c.part)
+
+ # the outer RETURNING is a bonus over what PG's docs have
+ self.assert_compile(
+ stmt,
+ "WITH RECURSIVE included_parts(sub_part, part) AS "
+ "(SELECT parts.sub_part AS sub_part, parts.part AS part "
+ "FROM parts "
+ "WHERE parts.part = :part_1 "
+ "UNION ALL SELECT p.sub_part AS sub_part, p.part AS part "
+ "FROM parts AS p, included_parts AS pr "
+ "WHERE p.part = pr.sub_part) "
+ "DELETE FROM parts WHERE parts.part IN "
+ "(SELECT included_parts.part FROM included_parts) "
+ "RETURNING parts.part"
+ )
+
+ def test_insert_in_the_cte(self):
+ products = table('products', column('id'), column('price'))
+
+ cte = products.insert().values(id=1, price=27.0).\
+ returning(*products.c).cte('pd')
+
+ stmt = select([cte])
+
+ self.assert_compile(
+ stmt,
+ "WITH pd AS "
+ "(INSERT INTO products (id, price) VALUES (:id, :price) "
+ "RETURNING products.id, products.price) "
+ "SELECT pd.id, pd.price "
+ "FROM pd"
+ )
+
+ def test_update_pulls_from_cte(self):
+ products = table('products', column('id'), column('price'))
+
+ cte = products.select().cte('pd')
+
+ stmt = products.update().where(products.c.price == cte.c.price)
+
+ self.assert_compile(
+ stmt,
+ "WITH pd AS "
+ "(SELECT products.id AS id, products.price AS price "
+ "FROM products) "
+ "UPDATE products SET id=:id, price=:price FROM pd "
+ "WHERE products.price = pd.price"
+ )
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index ecf3dcc4d..e21b21ab2 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -123,6 +123,14 @@ class DefaultTest(fixtures.TestBase):
def gen_default(cls, ctx):
return "hi"
+ class MyType(TypeDecorator):
+ impl = String(50)
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = "BIND" + value
+ return value
+
# select "count(1)" returns different results on different DBs also
# correct for "current_date" compatible as column default, value
# differences
@@ -133,7 +141,7 @@ class DefaultTest(fixtures.TestBase):
[
func.trunc(
func.sysdate(), sa.literal_column("'DAY'"),
- type_=sa.Date).label('today')]))
+ type_=sa.Date)]))
assert isinstance(ts, datetime.date) and not isinstance(
ts, datetime.datetime)
f = sa.select([func.length('abcdef')], bind=db).scalar()
@@ -211,7 +219,10 @@ class DefaultTest(fixtures.TestBase):
server_default='ddl'),
# python method w/ context
- Column('col10', String(20), default=MyClass.gen_default)
+ Column('col10', String(20), default=MyClass.gen_default),
+
+ # fixed default w/ type that has bound processor
+ Column('col11', MyType(), default='foo')
)
t.create()
@@ -290,6 +301,7 @@ class DefaultTest(fixtures.TestBase):
c = sa.ColumnDefault(fn)
c.arg("context")
+
@testing.fails_on('firebird', 'Data type unknown')
def test_standalone(self):
c = testing.db.engine.contextual_connect()
@@ -391,7 +403,7 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(), [
(x, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')
+ 12, today, 'py', 'hi', 'BINDfoo')
for x in range(51, 54)])
t.insert().execute(col9=None)
@@ -401,7 +413,7 @@ class DefaultTest(fixtures.TestBase):
eq_(t.select(t.c.col1 == 54).execute().fetchall(),
[(54, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, None, 'hi')])
+ 12, today, None, 'hi', 'BINDfoo')])
def test_insertmany(self):
t.insert().execute({}, {}, {})
@@ -411,11 +423,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
@testing.requires.multivalues_inserts
def test_insert_multivalues(self):
@@ -427,11 +439,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
def test_no_embed_in_sql(self):
"""Using a DefaultGenerator, Sequence, DefaultClause
@@ -498,11 +510,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'im the update', f2, ts, ts, ctexec, False, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(52, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(53, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi')])
+ 13, today, 'py', 'hi', 'BINDfoo')])
@testing.fails_on('firebird', 'Data type unknown')
def test_update(self):
@@ -514,7 +526,7 @@ class DefaultTest(fixtures.TestBase):
l = l.first()
eq_(l,
(pk, 'im the update', f2, None, None, ctexec, True, False,
- 13, datetime.date.today(), 'py', 'hi'))
+ 13, datetime.date.today(), 'py', 'hi', 'BINDfoo'))
eq_(11, f2)
@testing.fails_on('firebird', 'Data type unknown')
@@ -721,7 +733,6 @@ class AutoIncrementTest(fixtures.TablesTest):
)
assert x._autoincrement_column is None
- @testing.fails_on('sqlite', 'FIXME: unknown')
def test_non_autoincrement(self):
# sqlite INT primary keys can be non-unique! (only for ints)
nonai = Table(
@@ -735,8 +746,9 @@ class AutoIncrementTest(fixtures.TablesTest):
# mysql in legacy mode fails on second row
nonai.insert().execute(data='row 1')
nonai.insert().execute(data='row 2')
- assert_raises(
- sa.exc.DBAPIError,
+ assert_raises_message(
+ sa.exc.CompileError,
+ ".*has no Python-side or server-side default.*",
go
)
@@ -793,6 +805,36 @@ class SequenceDDLTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, minvalue=0)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 MINVALUE 0",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, maxvalue=5)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, nomaxvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 NO MAXVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, nominvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 NO MINVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', start=1, maxvalue=10, cycle=True)),
+ "CREATE SEQUENCE foo_seq START WITH 1 MAXVALUE 10 CYCLE",
+ )
+
+ self.assert_compile(
DropSequence(Sequence('foo_seq')),
"DROP SEQUENCE foo_seq",
)
@@ -1039,6 +1081,23 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
assert not self._has_sequence('s1')
assert not self._has_sequence('s2')
+ @testing.requires.returning
+ @testing.provide_metadata
+ def test_freestanding_sequence_via_autoinc(self):
+ t = Table(
+ 'some_table', self.metadata,
+ Column(
+ 'id', Integer,
+ autoincrement=True,
+ primary_key=True,
+ default=Sequence(
+ 'my_sequence', metadata=self.metadata).next_value())
+ )
+ self.metadata.create_all(testing.db)
+
+ result = testing.db.execute(t.insert())
+ eq_(result.inserted_primary_key, [1])
+
cartitems = sometable = metadata = None
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ec8d9b5c0..0074d789b 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,20 +1,20 @@
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
Column, Sequence, MetaData, extract, Date, String, bindparam, \
- literal_column
+ literal_column, ARRAY, Numeric
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
from sqlalchemy.testing.engines import all_dialects
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import functions
-from sqlalchemy.sql.functions import GenericFunction
+from sqlalchemy.sql.functions import GenericFunction, FunctionElement
import decimal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
-
+from sqlalchemy.testing import assert_raises_message
table1 = table('mytable',
column('myid', Integer),
@@ -52,7 +52,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
fake_func('foo'),
"fake_func(%s)" %
- bindtemplate % {'name': 'param_1', 'position': 1},
+ bindtemplate % {'name': 'fake_func_1', 'position': 1},
dialect=dialect)
def test_use_labels(self):
@@ -89,7 +89,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_generic_annotation(self):
fn = func.coalesce('x', 'y')._annotate({"foo": "bar"})
self.assert_compile(
- fn, "coalesce(:param_1, :param_2)"
+ fn, "coalesce(:coalesce_1, :coalesce_2)"
)
def test_custom_default_namespace(self):
@@ -140,7 +140,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
func.my_func(1, 2),
- "my_func(:param_1, :param_2, :param_3)"
+ "my_func(:my_func_1, :my_func_2, :my_func_3)"
)
def test_custom_registered_identifier(self):
@@ -178,7 +178,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
myfunc(1, 2, 3),
- "myfunc(:param_1, :param_2, :param_3)"
+ "myfunc(:myfunc_1, :myfunc_2, :myfunc_3)"
)
def test_namespacing_conflicts(self):
@@ -188,7 +188,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
assert isinstance(func.count().type, sqltypes.Integer)
self.assert_compile(func.count(), 'count(*)')
- self.assert_compile(func.count(1), 'count(:param_1)')
+ self.assert_compile(func.count(1), 'count(:count_1)')
c = column('abc')
self.assert_compile(func.count(c), 'count(abc)')
@@ -378,7 +378,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_funcfilter_empty(self):
self.assert_compile(
func.count(1).filter(),
- "count(:param_1)"
+ "count(:count_1)"
)
def test_funcfilter_criterion(self):
@@ -386,7 +386,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
func.count(1).filter(
table1.c.name != None
),
- "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ "count(:count_1) FILTER (WHERE mytable.name IS NOT NULL)"
)
def test_funcfilter_compound_criterion(self):
@@ -395,7 +395,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == None,
table1.c.myid > 0
),
- "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "count(:count_1) FILTER (WHERE mytable.name IS NULL AND "
"mytable.myid > :myid_1)"
)
@@ -404,7 +404,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
select([func.count(1).filter(
table1.c.description != None
).label('foo')]),
- "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "SELECT count(:count_1) FILTER (WHERE mytable.description "
"IS NOT NULL) AS foo FROM mytable"
)
@@ -429,7 +429,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == 'name'
)
]),
- "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "SELECT count(:count_1) FILTER (WHERE mytable.name = :name_1) "
"AS anon_1 FROM mytable"
)
@@ -443,7 +443,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.description == 'description'
)
]),
- "SELECT count(:param_1) FILTER (WHERE "
+ "SELECT count(:count_1) FILTER (WHERE "
"mytable.name = :name_1 AND mytable.description = :description_1) "
"AS anon_1 FROM mytable"
)
@@ -477,6 +477,121 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"AS anon_1 FROM mytable"
)
+ def test_funcfilter_within_group(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_multi(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name, table1.c.description
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name, mytable.description) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_desc(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_w_over(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ ).over(partition_by=table1.c.description)
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "OVER (PARTITION BY mytable.description) AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_incorrect_none_type(self):
+ class MissingType(FunctionElement):
+ name = 'mt'
+ type = None
+
+ assert_raises_message(
+ TypeError,
+ "Object None associated with '.type' attribute is "
+ "not a TypeEngine class or object",
+ MissingType().compile
+ )
+
+
+class ReturnTypeTest(fixtures.TestBase):
+
+ def test_array_agg(self):
+ expr = func.array_agg(column('data', Integer))
+ is_(expr.type._type_affinity, ARRAY)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_mode(self):
+ expr = func.mode(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont(self):
+ expr = func.percentile_cont(0.5).within_group(column('data', Integer))
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont_array(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, ARRAY)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_percentile_cont_array_desc(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, ARRAY)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_cume_dist(self):
+ expr = func.cume_dist(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Numeric)
+
+ def test_percent_rank(self):
+ expr = func.percent_rank(0.5).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, Numeric)
+
class ExecuteTest(fixtures.TestBase):
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 6b86614e6..9cf1ef612 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -454,6 +454,27 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
str(f1), str(f2)
)
+ def test_labeled_expression_adapt(self):
+ lbl_x = (t3.c.col1 == 1).label('x')
+ t3_alias = t3.alias()
+
+ adapter = sql_util.ColumnAdapter(t3_alias)
+
+ lblx_adapted = adapter.traverse(lbl_x)
+ is_not_(lblx_adapted._element, lbl_x._element)
+
+ lblx_adapted = adapter.traverse(lbl_x)
+ self.assert_compile(
+ select([lblx_adapted.self_group()]),
+ "SELECT (table3_1.col1 = :col1_1) AS x FROM table3 AS table3_1"
+ )
+
+ self.assert_compile(
+ select([lblx_adapted.is_(True)]),
+ "SELECT (table3_1.col1 = :col1_1) IS 1 AS anon_1 "
+ "FROM table3 AS table3_1"
+ )
+
def test_text(self):
clause = text(
"select * from table where foo=:bar",
@@ -878,7 +899,6 @@ class ColumnAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
a2_to_a1.columns[t2.c.col2], stmt2.c.col2
)
-
def test_wrapping_multiple(self):
"""illustrate that wrapping runs both adapters"""
@@ -1531,7 +1551,6 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
eq_(l3._allow_label_resolve, False)
-
class SpliceJoinsTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 8a41d4be7..f2515c4eb 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -1,11 +1,11 @@
#! coding:utf-8
from sqlalchemy import Column, Integer, MetaData, String, Table,\
- bindparam, exc, func, insert, select, column, text
+ bindparam, exc, func, insert, select, column, text, table
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL,\
- assert_raises_message, fixtures
+ assert_raises_message, fixtures, eq_
from sqlalchemy.sql import crud
class _InsertTestBase(object):
@@ -54,6 +54,69 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'INSERT INTO mytable (myid, name) VALUES (:myid, :name)',
checkparams=checkparams)
+ def test_unconsumed_names_kwargs(self):
+ t = table("t", column("x"), column("y"))
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: z",
+ t.insert().values(x=5, z=5).compile,
+ )
+
+ def test_bindparam_name_no_consume_error(self):
+ t = table("t", column("x"), column("y"))
+ # bindparam names don't get counted
+ i = t.insert().values(x=3 + bindparam('x2'))
+ self.assert_compile(
+ i,
+ "INSERT INTO t (x) VALUES ((:param_1 + :x2))"
+ )
+
+ # even if in the params list
+ i = t.insert().values(x=3 + bindparam('x2'))
+ self.assert_compile(
+ i,
+ "INSERT INTO t (x) VALUES ((:param_1 + :x2))",
+ params={"x2": 1}
+ )
+
+ def test_unconsumed_names_values_dict(self):
+ table1 = self.tables.mytable
+
+ checkparams = {
+ 'myid': 3,
+ 'name': 'jack',
+ 'unknowncol': 'oops'
+ }
+
+ stmt = insert(table1, values=checkparams)
+ assert_raises_message(
+ exc.CompileError,
+ 'Unconsumed column names: unknowncol',
+ stmt.compile,
+ dialect=postgresql.dialect()
+ )
+
+ def test_unconsumed_names_multi_values_dict(self):
+ table1 = self.tables.mytable
+
+ checkparams = [{
+ 'myid': 3,
+ 'name': 'jack',
+ 'unknowncol': 'oops'
+ }, {
+ 'myid': 4,
+ 'name': 'someone',
+ 'unknowncol': 'oops'
+ }]
+
+ stmt = insert(table1, values=checkparams)
+ assert_raises_message(
+ exc.CompileError,
+ 'Unconsumed column names: unknowncol',
+ stmt.compile,
+ dialect=postgresql.dialect()
+ )
+
def test_insert_with_values_tuple(self):
table1 = self.tables.mytable
@@ -175,6 +238,42 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_insert_from_select_cte_one(self):
+ table1 = self.tables.mytable
+
+ cte = select([table1.c.name]).where(table1.c.name == 'bar').cte()
+
+ sel = select([table1.c.myid, table1.c.name]).where(
+ table1.c.name == cte.c.name)
+
+ ins = self.tables.myothertable.insert().\
+ from_select(("otherid", "othername"), sel)
+ self.assert_compile(
+ ins,
+ "WITH anon_1 AS "
+ "(SELECT mytable.name AS name FROM mytable "
+ "WHERE mytable.name = :name_1) "
+ "INSERT INTO myothertable (otherid, othername) "
+ "SELECT mytable.myid, mytable.name FROM mytable, anon_1 "
+ "WHERE mytable.name = anon_1.name",
+ checkparams={"name_1": "bar"}
+ )
+
+ def test_insert_from_select_cte_two(self):
+ table1 = self.tables.mytable
+
+ cte = table1.select().cte("c")
+ stmt = cte.select()
+ ins = table1.insert().from_select(table1.c, stmt)
+
+ self.assert_compile(
+ ins,
+ "WITH c AS (SELECT mytable.myid AS myid, mytable.name AS name, "
+ "mytable.description AS description FROM mytable) "
+ "INSERT INTO mytable (myid, name, description) "
+ "SELECT c.myid, c.name, c.description FROM c"
+ )
+
def test_insert_from_select_select_alt_ordering(self):
table1 = self.tables.mytable
sel = select([table1.c.name, table1.c.myid]).where(
@@ -283,6 +382,32 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo", "foo": None}
)
+ def test_insert_from_select_dont_mutate_raw_columns(self):
+ # test [ticket:3603]
+ from sqlalchemy import table
+ table_ = table(
+ 'mytable',
+ Column('foo', String),
+ Column('bar', String, default='baz'),
+ )
+
+ stmt = select([table_.c.foo])
+ insert = table_.insert().from_select(['foo'], stmt)
+
+ self.assert_compile(stmt, "SELECT mytable.foo FROM mytable")
+ self.assert_compile(
+ insert,
+ "INSERT INTO mytable (foo, bar) "
+ "SELECT mytable.foo, :bar AS anon_1 FROM mytable"
+ )
+ self.assert_compile(stmt, "SELECT mytable.foo FROM mytable")
+ self.assert_compile(
+ insert,
+ "INSERT INTO mytable (foo, bar) "
+ "SELECT mytable.foo, :bar AS anon_1 FROM mytable"
+ )
+
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
@@ -354,6 +479,106 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_anticipate_no_pk_composite_pk(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, column_keys=['x']
+
+ )
+
+ def test_anticipate_no_pk_composite_pk_implicit_returning(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = True
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, dialect=d, column_keys=['x']
+
+ )
+
+ def test_anticipate_no_pk_composite_pk_prefetch(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = False
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, dialect=d, column_keys=['x']
+
+ )
+
+ def test_anticipate_nullable_composite_pk(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True, nullable=True)
+ )
+ self.assert_compile(
+ t.insert(),
+ "INSERT INTO t (x) VALUES (:x)",
+ params={'x': 5},
+ )
+
+ def test_anticipate_no_pk_non_composite_pk(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, column_keys=['q']
+
+ )
+
+ def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = True
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, dialect=d, column_keys=['q']
+
+ )
+
+ def test_anticipate_no_pk_non_composite_pk_prefetch(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = False
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, dialect=d, column_keys=['q']
+
+ )
+
class InsertImplicitReturningTest(
_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
@@ -658,8 +883,21 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None # evaluated later
}
+ stmt = table.insert().values(values)
+
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
'INSERT INTO sometable (id, data, foo) VALUES '
'(%(id_0)s, %(data_0)s, %(foo)s), '
'(%(id_1)s, %(data_1)s, %(foo_1)s), '
@@ -692,8 +930,20 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None, # evaluated later
}
+ stmt = table.insert().values(values)
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
"INSERT INTO sometable (id, data, foo) VALUES "
"(%(id_0)s, %(data_0)s, %(foo)s), "
"(%(id_1)s, %(data_1)s, %(foo_1)s), "
diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py
new file mode 100644
index 000000000..c49947425
--- /dev/null
+++ b/test/sql/test_insert_exec.py
@@ -0,0 +1,445 @@
+from sqlalchemy.testing import eq_, assert_raises_message, is_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import (
+ exc, sql, String, Integer, MetaData, and_, ForeignKey,
+ VARCHAR, INT, Sequence, func)
+from sqlalchemy.testing.schema import Table, Column
+
+
+class InsertExecTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ @testing.requires.multivalues_inserts
+ def test_multivalues_insert(self):
+ users = self.tables.users
+ users.insert(
+ values=[
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'}]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[0], (7, 'jack'))
+ eq_(rows[1], (8, 'ed'))
+ users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[2], (9, 'jack'))
+ eq_(rows[3], (10, 'ed'))
+
+ def test_insert_heterogeneous_params(self):
+ """test that executemany parameters are asserted to match the
+ parameter set of the first."""
+ users = self.tables.users
+
+ assert_raises_message(
+ exc.StatementError,
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
+ "parameter group 2 "
+ r"\[SQL: u?'INSERT INTO users",
+ users.insert().execute,
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ # this succeeds however. We aren't yet doing
+ # a length check on all subsequent parameters.
+ users.insert().execute(
+ {'user_id': 7},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ def _test_lastrow_accessor(self, table_, values, assertvalues):
+ """Tests the inserted_primary_key and lastrow_has_defaults() functions."""
+
+ def insert_values(engine, table_, values):
+ """
+ Inserts a row into a table, returns the full list of values
+ INSERTed including defaults that fired off on the DB side and
+ detects rows that had defaults and post-fetches.
+ """
+
+ # verify implicit_returning is working
+ if engine.dialect.implicit_returning:
+ ins = table_.insert()
+ comp = ins.compile(engine, column_keys=list(values))
+ if not set(values).issuperset(
+ c.key for c in table_.primary_key):
+ is_(bool(comp.returning), True)
+
+ result = engine.execute(table_.insert(), **values)
+ ret = values.copy()
+
+ for col, id in zip(
+ table_.primary_key, result.inserted_primary_key):
+ ret[col.key] = id
+
+ if result.lastrow_has_defaults():
+ criterion = and_(
+ *[
+ col == id for col, id in
+ zip(table_.primary_key, result.inserted_primary_key)])
+ row = engine.execute(table_.select(criterion)).first()
+ for c in table_.c:
+ ret[c.key] = row[c]
+ return ret
+
+ if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
+ assert testing.db.dialect.implicit_returning
+
+ if testing.db.dialect.implicit_returning:
+ test_engines = [
+ engines.testing_engine(options={'implicit_returning': False}),
+ engines.testing_engine(options={'implicit_returning': True}),
+ ]
+ else:
+ test_engines = [testing.db]
+
+ for engine in test_engines:
+ try:
+ table_.create(bind=engine, checkfirst=True)
+ i = insert_values(engine, table_, values)
+ eq_(i, assertvalues)
+ finally:
+ table_.drop(bind=engine)
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_one(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t1", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True)),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi'}
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_two(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t2", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_three(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t3", metadata,
+ Column("id", String(40), primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column("bar", String(30))
+ ),
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
+ )
+
+ def test_lastrow_accessor_four(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t4", metadata,
+ Column(
+ 'id', Integer,
+ Sequence('t4_id_seq', optional=True),
+ primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi', 'id': 1},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_five(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t5", metadata,
+ Column('id', String(10), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'id': 'id1'},
+ {'id': 'id1', 'bar': 'hi'},
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_six(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t6", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bar', Integer, primary_key=True)
+ ),
+ {'bar': 0},
+ {'id': 1, 'bar': 0},
+ )
+
+ # TODO: why not in the sqlite suite?
+ @testing.only_on('sqlite+pysqlite')
+ @testing.provide_metadata
+ def test_lastrowid_zero(self):
+ from sqlalchemy.dialects import sqlite
+ eng = engines.testing_engine()
+
+ class ExcCtx(sqlite.base.SQLiteExecutionContext):
+
+ def get_lastrowid(self):
+ return 0
+ eng.dialect.execution_ctx_cls = ExcCtx
+ t = Table(
+ 't', self.metadata, Column('x', Integer, primary_key=True),
+ Column('y', Integer))
+ t.create(eng)
+ r = eng.execute(t.insert().values(y=5))
+ eq_(r.inserted_primary_key, [0])
+
+ @testing.fails_on(
+ 'sqlite', "sqlite autoincrement doesn't work with composite pks")
+ @testing.provide_metadata
+ def test_misordered_lastrow(self):
+ metadata = self.metadata
+
+ related = Table(
+ 'related', metadata,
+ Column('id', Integer, primary_key=True),
+ mysql_engine='MyISAM'
+ )
+ t6 = Table(
+ "t6", metadata,
+ Column(
+ 'manual_id', Integer, ForeignKey('related.id'),
+ primary_key=True),
+ Column(
+ 'auto_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ mysql_engine='MyISAM'
+ )
+
+ metadata.create_all()
+ r = related.insert().values(id=12).execute()
+ id_ = r.inserted_primary_key[0]
+ eq_(id_, 12)
+
+ r = t6.insert().values(manual_id=id_).execute()
+ eq_(r.inserted_primary_key, [12, 1])
+
+ def test_implicit_id_insert_select_columns(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ (users.c.user_id, users.c.user_name),
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ def test_implicit_id_insert_select_keys(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ ["user_id", "user_name"],
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ @testing.requires.empty_inserts
+ @testing.requires.returning
+ def test_no_inserted_pk_on_returning(self):
+ users = self.tables.users
+ result = testing.db.execute(users.insert().returning(
+ users.c.user_id, users.c.user_name))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ r"Can't call inserted_primary_key when returning\(\) is used.",
+ getattr, result, 'inserted_primary_key'
+ )
+
+
+class TableInsertTest(fixtures.TablesTest):
+
+ """test for consistent insert behavior across dialects
+ regarding the inline=True flag, lower-case 't' tables.
+
+ """
+ run_create_tables = 'each'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'foo', metadata,
+ Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
+ Column('data', String(50)),
+ Column('x', Integer)
+ )
+
+ def _fixture(self, types=True):
+ if types:
+ t = sql.table(
+ 'foo', sql.column('id', Integer),
+ sql.column('data', String),
+ sql.column('x', Integer))
+ else:
+ t = sql.table(
+ 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
+ return t
+
+ def _test(self, stmt, row, returning=None, inserted_primary_key=False):
+ r = testing.db.execute(stmt)
+
+ if returning:
+ returned = r.first()
+ eq_(returned, returning)
+ elif inserted_primary_key is not False:
+ eq_(r.inserted_primary_key, inserted_primary_key)
+
+ eq_(testing.db.execute(self.tables.foo.select()).first(), row)
+
+ def _test_multi(self, stmt, rows, data):
+ testing.db.execute(stmt, rows)
+ eq_(
+ testing.db.execute(
+ self.tables.foo.select().
+ order_by(self.tables.foo.c.id)).fetchall(),
+ data)
+
+ @testing.requires.sequences
+ def test_expicit_sequence(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(
+ id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
+ (1, 'data', 5)
+ )
+
+ def test_uppercase(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_inline(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.crashes(
+ "mssql+pyodbc",
+ "Pyodbc + SQL Server + Py3K, some decimal handling issue")
+ def test_uppercase_inline_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[None]
+ )
+
+ def test_uppercase_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_direct_params(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.requires.returning
+ def test_uppercase_direct_params_returning(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ def test_direct_params(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ @testing.requires.returning
+ def test_direct_params_returning(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_multi_rows(self):
+ t = self._fixture()
+ self._test_multi(
+ t.insert(),
+ [
+ {'data': 'd1', 'x': 5},
+ {'data': 'd2', 'x': 6},
+ {'data': 'd3', 'x': 7},
+ ],
+ [
+ (1, 'd1', 5),
+ (2, 'd2', 6),
+ (3, 'd3', 7)
+ ],
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_inline(self):
+ t = self._fixture()
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py
index f99dfda4e..c699a5c97 100644
--- a/test/sql/test_join_rewriting.py
+++ b/test/sql/test_join_rewriting.py
@@ -1,3 +1,8 @@
+"""These tests are all about the "join rewriting" feature built
+to support SQLite's lack of right-nested joins. SQlite as of
+version 3.7.16 no longer has this limitation.
+
+"""
from sqlalchemy import Table, Column, Integer, MetaData, ForeignKey, \
select, exists, union
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
@@ -80,7 +85,7 @@ class _JoinRewriteTestBase(AssertsCompiledSQL):
# .key in SQL
for key, col in zip([c.name for c in s.c], s.inner_columns):
key = key % compiled.anon_map
- assert col in compiled.result_map[key][1]
+ assert col in compiled._create_result_map()[key][1]
_a_bkeyselect_bkey = ""
diff --git a/test/sql/test_labels.py b/test/sql/test_labels.py
index 4aa923080..7f548eb49 100644
--- a/test/sql/test_labels.py
+++ b/test/sql/test_labels.py
@@ -90,7 +90,7 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
table1 = self.table1
compiled = s.compile(dialect=self._length_fixture())
- assert set(compiled.result_map['some_large_named_table__2'][1]).\
+ assert set(compiled._create_result_map()['some_large_named_table__2'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_data_column',
@@ -99,7 +99,7 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
]
)
- assert set(compiled.result_map['some_large_named_table__1'][1]).\
+ assert set(compiled._create_result_map()['some_large_named_table__1'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_primarykey_column',
@@ -134,12 +134,13 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
s2 = select([s])
compiled = s2.compile(dialect=self._length_fixture())
assert \
- set(compiled.result_map['this_is_the_data_column'][1]).\
+ set(compiled._create_result_map()['this_is_the_data_column'][1]).\
issuperset(['this_is_the_data_column',
s.c.this_is_the_data_column])
assert \
- set(compiled.result_map['this_is_the_primarykey_column'][1]).\
+ set(compiled._create_result_map()['this_is_the_primarykey__1'][1]).\
issuperset(['this_is_the_primarykey_column',
+ 'this_is_the_primarykey__1',
s.c.this_is_the_primarykey_column])
def test_result_map_anon_alias(self):
@@ -150,34 +151,33 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
s = select([q]).apply_labels()
self.assert_compile(
- s, 'SELECT '
- 'anon_1.this_is_the_primarykey_column '
- 'AS anon_1_this_is_the_prim_1, '
- 'anon_1.this_is_the_data_column '
- 'AS anon_1_this_is_the_data_2 '
- 'FROM ('
- 'SELECT '
- 'some_large_named_table.'
- 'this_is_the_primarykey_column '
- 'AS this_is_the_primarykey_column, '
- 'some_large_named_table.this_is_the_data_column '
- 'AS this_is_the_data_column '
- 'FROM '
- 'some_large_named_table '
- 'WHERE '
- 'some_large_named_table.this_is_the_primarykey_column '
- '= :this_is_the_primarykey__1'
- ') '
- 'AS anon_1', dialect=dialect)
+ s,
+ "SELECT "
+ "anon_1.this_is_the_primarykey__2 AS anon_1_this_is_the_prim_1, "
+ "anon_1.this_is_the_data_column AS anon_1_this_is_the_data_3 "
+ "FROM ("
+ "SELECT "
+ "some_large_named_table."
+ "this_is_the_primarykey_column AS this_is_the_primarykey__2, "
+ "some_large_named_table."
+ "this_is_the_data_column AS this_is_the_data_column "
+ "FROM "
+ "some_large_named_table "
+ "WHERE "
+ "some_large_named_table.this_is_the_primarykey_column "
+ "= :this_is_the_primarykey__1"
+ ") "
+ "AS anon_1", dialect=dialect)
+
compiled = s.compile(dialect=dialect)
- assert set(compiled.result_map['anon_1_this_is_the_data_2'][1]).\
+ assert set(compiled._create_result_map()['anon_1_this_is_the_data_3'][1]).\
issuperset([
- 'anon_1_this_is_the_data_2',
+ 'anon_1_this_is_the_data_3',
q.corresponding_column(
table1.c.this_is_the_data_column)
])
- assert set(compiled.result_map['anon_1_this_is_the_prim_1'][1]).\
+ assert set(compiled._create_result_map()['anon_1_this_is_the_prim_1'][1]).\
issuperset([
'anon_1_this_is_the_prim_1',
q.corresponding_column(
@@ -437,13 +437,13 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
dialect = default.DefaultDialect(label_length=10)
compiled = q.compile(dialect=dialect)
- assert set(compiled.result_map['some_2'][1]).issuperset([
+ assert set(compiled._create_result_map()['some_2'][1]).issuperset([
table1.c.this_is_the_data_column,
'some_large_named_table_this_is_the_data_column',
'some_2'
])
- assert set(compiled.result_map['some_1'][1]).issuperset([
+ assert set(compiled._create_result_map()['some_1'][1]).issuperset([
table1.c.this_is_the_primarykey_column,
'some_large_named_table_this_is_the_primarykey_column',
'some_1'
@@ -459,12 +459,12 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
dialect = default.DefaultDialect(label_length=10)
compiled = x.compile(dialect=dialect)
- assert set(compiled.result_map['this_2'][1]).issuperset([
+ assert set(compiled._create_result_map()['this_2'][1]).issuperset([
q.corresponding_column(table1.c.this_is_the_data_column),
'this_is_the_data_column',
'this_2'])
- assert set(compiled.result_map['this_1'][1]).issuperset([
+ assert set(compiled._create_result_map()['this_1'][1]).issuperset([
q.corresponding_column(table1.c.this_is_the_primarykey_column),
'this_is_the_primarykey_column',
'this_1'])
@@ -531,7 +531,7 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
'SELECT asdf.abcde FROM a AS asdf',
dialect=dialect)
compiled = s.compile(dialect=dialect)
- assert set(compiled.result_map['abcde'][1]).issuperset([
+ assert set(compiled._create_result_map()['abcde'][1]).issuperset([
'abcde', a1.c.abcde, 'abcde'])
# column still there, but short label
@@ -540,5 +540,28 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
'SELECT asdf.abcde AS _1 FROM a AS asdf',
dialect=dialect)
compiled = s.compile(dialect=dialect)
- assert set(compiled.result_map['_1'][1]).issuperset([
+ assert set(compiled._create_result_map()['_1'][1]).issuperset([
'asdf_abcde', a1.c.abcde, '_1'])
+
+ def test_label_overlap_unlabeled(self):
+ """test that an anon col can't overlap with a fixed name, #3396"""
+
+ table1 = table(
+ "tablename", column('columnname_one'), column('columnn_1'))
+
+ stmt = select([table1]).apply_labels()
+
+ dialect = default.DefaultDialect(label_length=23)
+ self.assert_compile(
+ stmt,
+ "SELECT tablename.columnname_one AS tablename_columnn_1, "
+ "tablename.columnn_1 AS tablename_columnn_2 FROM tablename",
+ dialect=dialect
+ )
+ compiled = stmt.compile(dialect=dialect)
+ eq_(
+ set(compiled._create_result_map()),
+ set(['tablename_columnn_1', 'tablename_columnn_2'])
+ )
+
+
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 1eec502e7..050929d3d 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -7,8 +7,9 @@ from sqlalchemy import Integer, String, UniqueConstraint, \
CheckConstraint, ForeignKey, MetaData, Sequence, \
ForeignKeyConstraint, PrimaryKeyConstraint, ColumnDefault, Index, event,\
events, Unicode, types as sqltypes, bindparam, \
- Table, Column, Boolean, Enum, func, text
+ Table, Column, Boolean, Enum, func, text, TypeDecorator
from sqlalchemy import schema, exc
+from sqlalchemy.engine import default
from sqlalchemy.sql import elements, naming
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
@@ -18,6 +19,7 @@ from sqlalchemy.testing import eq_, is_, mock
from contextlib import contextmanager
from sqlalchemy import util
+
class MetaDataTest(fixtures.TestBase, ComparesTables):
def test_metadata_connect(self):
@@ -393,7 +395,6 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
assert t.c.x.default is s2
assert m1._sequences['x_seq'] is s2
-
def test_sequence_attach_to_table(self):
m1 = MetaData()
s1 = Sequence("s")
@@ -492,6 +493,21 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
[d, b, a, c, e]
)
+ def test_deterministic_order(self):
+ meta = MetaData()
+ a = Table('a', meta, Column('foo', Integer))
+ b = Table('b', meta, Column('foo', Integer))
+ c = Table('c', meta, Column('foo', Integer))
+ d = Table('d', meta, Column('foo', Integer))
+ e = Table('e', meta, Column('foo', Integer))
+
+ e.add_is_dependent_on(c)
+ a.add_is_dependent_on(b)
+ eq_(
+ meta.sorted_tables,
+ [b, c, d, a, e]
+ )
+
def test_nonexistent(self):
assert_raises(tsa.exc.NoSuchTableError, Table,
'fake_table',
@@ -1242,6 +1258,25 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
assign2
)
+ def test_c_mutate_after_unpickle(self):
+ m = MetaData()
+
+ y = Column('y', Integer)
+ t1 = Table('t', m, Column('x', Integer), y)
+
+ t2 = pickle.loads(pickle.dumps(t1))
+ z = Column('z', Integer)
+ g = Column('g', Integer)
+ t2.append_column(z)
+
+ is_(t1.c.contains_column(y), True)
+ is_(t2.c.contains_column(y), False)
+ y2 = t2.c.y
+ is_(t2.c.contains_column(y2), True)
+
+ is_(t2.c.contains_column(z), True)
+ is_(t2.c.contains_column(g), False)
+
def test_autoincrement_replace(self):
m = MetaData()
@@ -1346,6 +1381,123 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
assert not t1.c.x.nullable
+class PKAutoIncrementTest(fixtures.TestBase):
+ def test_multi_integer_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer),
+ Column('b', Integer)
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, None)
+
+ def test_multi_integer_multi_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer, autoincrement=True),
+ Column('b', Integer, autoincrement=True)
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Only one Column may be marked",
+ lambda: pk._autoincrement_column
+ )
+
+ def test_single_integer_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer),
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, pk.columns['a'])
+
+ def test_single_string_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', String),
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, None)
+
+ def test_single_string_illegal_autoinc(self):
+ t = Table('t', MetaData(), Column('a', String, autoincrement=True))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Column type VARCHAR on column 't.a'",
+ lambda: pk._autoincrement_column
+ )
+
+ def test_single_integer_default(self):
+ t = Table(
+ 't', MetaData(),
+ Column('a', Integer, autoincrement=True, default=lambda: 1))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, t.c.a)
+
+ def test_single_integer_server_default(self):
+ # new as of 1.1; now that we have three states for autoincrement,
+ # if the user puts autoincrement=True with a server_default, trust
+ # them on it
+ t = Table(
+ 't', MetaData(),
+ Column('a', Integer,
+ autoincrement=True, server_default=func.magic()))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, t.c.a)
+
+ def test_implicit_autoinc_but_fks(self):
+ m = MetaData()
+ Table('t1', m, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', MetaData(),
+ Column('a', Integer, ForeignKey('t1.id')))
+ pk = PrimaryKeyConstraint(
+ t2.c.a
+ )
+ t2.append_constraint(pk)
+ is_(pk._autoincrement_column, None)
+
+ def test_explicit_autoinc_but_fks(self):
+ m = MetaData()
+ Table('t1', m, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', MetaData(),
+ Column('a', Integer, ForeignKey('t1.id'), autoincrement=True))
+ pk = PrimaryKeyConstraint(
+ t2.c.a
+ )
+ t2.append_constraint(pk)
+ is_(pk._autoincrement_column, t2.c.a)
+
+ t3 = Table(
+ 't3', MetaData(),
+ Column('a', Integer,
+ ForeignKey('t1.id'), autoincrement='ignore_fk'))
+ pk = PrimaryKeyConstraint(
+ t3.c.a
+ )
+ t3.append_constraint(pk)
+ is_(pk._autoincrement_column, t3.c.a)
+
+
class SchemaTypeTest(fixtures.TestBase):
class MyType(sqltypes.SchemaType, sqltypes.TypeEngine):
@@ -1415,6 +1567,20 @@ class SchemaTypeTest(fixtures.TestBase):
# our test type sets table, though
is_(t2.c.y.type.table, t2)
+ def test_tometadata_copy_decorated(self):
+
+ class MyDecorated(TypeDecorator):
+ impl = self.MyType
+
+ m1 = MetaData()
+
+ type_ = MyDecorated(schema="z")
+ t1 = Table('x', m1, Column("y", type_))
+
+ m2 = MetaData()
+ t2 = t1.tometadata(m2)
+ eq_(t2.c.y.type.schema, "z")
+
def test_tometadata_independent_schema(self):
m1 = MetaData()
@@ -1922,6 +2088,13 @@ class IndexTest(fixtures.TestBase):
t.append_constraint, idx
)
+ def test_column_associated_w_lowercase_table(self):
+ from sqlalchemy import table
+ c = Column('x', Integer)
+ table('foo', c)
+ idx = Index('q', c)
+ is_(idx.table, None) # lower-case-T table doesn't have indexes
+
class ConstraintTest(fixtures.TestBase):
@@ -3529,7 +3702,7 @@ class NamingConventionTest(fixtures.TestBase, AssertsCompiledSQL):
exc.InvalidRequestError,
"Naming convention including \%\(constraint_name\)s token "
"requires that constraint is explicitly named.",
- schema.CreateTable(u1).compile
+ schema.CreateTable(u1).compile, dialect=default.DefaultDialect()
)
def test_schematype_no_ck_name_boolean_no_name(self):
@@ -3560,3 +3733,16 @@ class NamingConventionTest(fixtures.TestBase, AssertsCompiledSQL):
u1.append_constraint(ck1)
eq_(ck1.name, "ck_user_foo")
+
+ def test_pickle_metadata(self):
+ m = MetaData(naming_convention={"pk": "%(table_name)s_pk"})
+
+ m2 = pickle.loads(pickle.dumps(m))
+
+ eq_(m2.naming_convention, {"pk": "%(table_name)s_pk"})
+
+ t2a = Table('t2', m, Column('id', Integer, primary_key=True))
+ t2b = Table('t2', m2, Column('id', Integer, primary_key=True))
+
+ eq_(t2a.primary_key.name, t2b.primary_key.name)
+ eq_(t2b.primary_key.name, "t2_pk")
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 0985020d1..86286a9a3 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
+from sqlalchemy.sql import column, desc, asc, literal, collate, null, \
+ true, false, any_, all_
from sqlalchemy.sql.expression import BinaryExpression, \
ClauseList, Grouping, \
UnaryExpression, select, union, func, tuple_
@@ -12,8 +13,10 @@ from sqlalchemy import exc
from sqlalchemy.engine import default
from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
+from sqlalchemy.sql import compiler
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
- Boolean, NullType, MatchType
+ Boolean, NullType, MatchType, Indexable, Concatenable, ARRAY, JSON, \
+ DateTime
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -21,7 +24,6 @@ import datetime
import collections
from sqlalchemy import text, literal_column
from sqlalchemy import and_, not_, between, or_
-from sqlalchemy.sql import true, false, null
class LoopOperate(operators.ColumnOperators):
@@ -210,6 +212,72 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
def test_concat(self):
self._do_operate_test(operators.concat_op)
+ def test_default_adapt(self):
+ class TypeOne(TypeEngine):
+ pass
+
+ class TypeTwo(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+
+ def test_concatenable_adapt(self):
+ class TypeOne(Concatenable, TypeEngine):
+ pass
+
+ class TypeTwo(Concatenable, TypeEngine):
+ pass
+
+ class TypeThree(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operators.concat_op
+ )
+
+ expr = column('x', TypeOne()) - column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.add
+ )
+
+ def test_contains_override_raises(self):
+ for col in [
+ Column('x', String),
+ Column('x', Integer),
+ Column('x', DateTime)
+ ]:
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
class CustomUnaryOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -577,6 +645,310 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
+class JSONIndexOpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ def setUp(self):
+ class MyTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_mytype(self, type, **kw):
+ return "MYTYPE"
+
+ def visit_myothertype(self, type, **kw):
+ return "MYOTHERTYPE"
+
+ class MyCompiler(compiler.SQLCompiler):
+
+ def visit_json_getitem_op_binary(self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " -> ", **kw
+ )
+
+ def visit_json_path_getitem_op_binary(
+ self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " #> ", **kw
+ )
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ raise NotImplementedError()
+
+ class MyDialect(default.DefaultDialect):
+ statement_compiler = MyCompiler
+ type_compiler = MyTypeCompiler
+
+ class MyType(JSON):
+ __visit_name__ = 'mytype'
+
+ pass
+
+ self.MyType = MyType
+ self.__dialect__ = MyDialect()
+
+ def test_setup_getitem(self):
+ col = Column('x', self.MyType())
+
+ is_(
+ col[5].type._type_affinity, JSON
+ )
+ is_(
+ col[5]['foo'].type._type_affinity, JSON
+ )
+ is_(
+ col[('a', 'b', 'c')].type._type_affinity, JSON
+ )
+
+ def test_getindex_literal_integer(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5],
+ "x -> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+ def test_getindex_literal_string(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col['foo'],
+ "x -> :x_1",
+ checkparams={'x_1': 'foo'}
+ )
+
+ def test_path_getindex_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[('a', 'b', 3, 4, 'd')],
+ "x #> :x_1",
+ checkparams={'x_1': ('a', 'b', 3, 4, 'd')}
+ )
+
+ def test_getindex_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x -> y",
+ checkparams={}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x -> (y + :y_1)",
+ checkparams={'y_1': 8}
+ )
+
+ def test_override_operators(self):
+ special_index_op = operators.custom_op('$$>')
+
+ class MyOtherType(JSON, TypeEngine):
+ __visit_name__ = 'myothertype'
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _adapt_expression(self, op, other_comparator):
+ return special_index_op, MyOtherType()
+
+ comparator_factory = Comparator
+
+ col = Column('x', MyOtherType())
+ self.assert_compile(
+ col[5],
+ "x $$> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+
+class ArrayIndexOpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ def setUp(self):
+ class MyTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_mytype(self, type, **kw):
+ return "MYTYPE"
+
+ def visit_myothertype(self, type, **kw):
+ return "MYOTHERTYPE"
+
+ class MyCompiler(compiler.SQLCompiler):
+ def visit_slice(self, element, **kw):
+ return "%s:%s" % (
+ self.process(element.start, **kw),
+ self.process(element.stop, **kw),
+ )
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ return "%s[%s]" % (
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
+
+ class MyDialect(default.DefaultDialect):
+ statement_compiler = MyCompiler
+ type_compiler = MyTypeCompiler
+
+ class MyType(ARRAY):
+ __visit_name__ = 'mytype'
+
+ def __init__(self, zero_indexes=False, dimensions=1):
+ if zero_indexes:
+ self.zero_indexes = zero_indexes
+ self.dimensions = dimensions
+ self.item_type = Integer()
+
+ self.MyType = MyType
+ self.__dialect__ = MyDialect()
+
+ def test_setup_getitem_w_dims(self):
+ """test the behavior of the _setup_getitem() method given a simple
+ 'dimensions' scheme - this is identical to postgresql.ARRAY."""
+
+ col = Column('x', self.MyType(dimensions=3))
+
+ is_(
+ col[5].type._type_affinity, ARRAY
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, ARRAY
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_getindex_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 5}
+ )
+
+ def test_contains_override_raises(self):
+ col = Column('x', self.MyType())
+
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
+ def test_getindex_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[y]",
+ checkparams={}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 8}
+ )
+
+ def test_getslice_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 5, 'x_2': 6}
+ )
+
+ def test_getslice_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y:y + :y_1]",
+ checkparams={'y_1': 5}
+ )
+
+ def test_getindex_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 6}
+ )
+
+ def test_getindex_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 1}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1 + :param_1)]",
+ checkparams={'y_1': 8, 'param_1': 1}
+ )
+
+ def test_getslice_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 6, 'x_2': 7}
+ )
+
+ def test_getslice_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y + :y_1:y + :y_2 + :param_1]",
+ checkparams={'y_1': 1, 'y_2': 5, 'param_1': 1}
+ )
+
+ def test_override_operators(self):
+ special_index_op = operators.custom_op('->')
+
+ class MyOtherType(Indexable, TypeEngine):
+ __visit_name__ = 'myothertype'
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _adapt_expression(self, op, other_comparator):
+ return special_index_op, MyOtherType()
+
+ comparator_factory = Comparator
+
+ col = Column('x', MyOtherType())
+ self.assert_compile(
+ col[5],
+ "x -> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+
class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"""test standalone booleans being wrapped in an AsBoolean, as well
@@ -825,6 +1197,64 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT false AS anon_1, false AS anon_2"
)
+ def test_is_true_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(True),
+ "x IS true"
+ )
+
+ def test_is_false_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(False),
+ "x IS false"
+ )
+
+ def test_and_false_literal_leading(self):
+ self.assert_compile(
+ and_(False, True),
+ "false"
+ )
+
+ self.assert_compile(
+ and_(False, False),
+ "false"
+ )
+
+ def test_and_true_literal_leading(self):
+ self.assert_compile(
+ and_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ and_(True, False),
+ "false"
+ )
+
+ def test_or_false_literal_leading(self):
+ self.assert_compile(
+ or_(False, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(False, False),
+ "false"
+ )
+
+ def test_or_true_literal_leading(self):
+ self.assert_compile(
+ or_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(True, False),
+ "true"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1327,6 +1757,9 @@ class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
else:
self._test_math_op(operator.div, '/')
+ def test_math_op_mod(self):
+ self._test_math_op(operator.mod, '%')
+
class ComparisonOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1953,3 +2386,154 @@ class TupleTypingTest(fixtures.TestBase):
eq_(len(expr.right.clauses), 2)
for elem in expr.right.clauses:
self._assert_types(elem)
+
+
+class AnyAllTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def _fixture(self):
+ m = MetaData()
+
+ t = Table(
+ 'tab1', m,
+ Column('arrval', ARRAY(Integer)),
+ Column('data', Integer)
+ )
+ return t
+
+ def test_any_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval),
+ ":param_1 = ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval),
+ ":param_1 = ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > any_(t.c.arrval),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > all_(t.c.arrval),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > any_(t.c.arrval),
+ "tab1.data > ANY (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_all_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > all_(t.c.arrval),
+ "tab1.data > ALL (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_illegal_ops(self):
+ t = self._fixture()
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Only comparison operators may be used with ANY/ALL",
+ lambda: 5 + all_(t.c.arrval)
+ )
+
+ # TODO:
+ # this is invalid but doesn't raise an error,
+ # as the left-hand side just does its thing. Types
+ # would need to reject their right-hand side.
+ self.assert_compile(
+ t.c.data + all_(t.c.arrval),
+ "tab1.data + ALL (tab1.arrval)"
+ )
+
+ def test_any_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.any(5, operator.gt),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.all(5, operator.gt),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ANY (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_all_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ALL (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_any_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ANY (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
+ def test_all_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ALL (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 2f13486eb..aca933fc9 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -1,13 +1,13 @@
-from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, is_
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ is_, in_, not_in_
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines
-from sqlalchemy import util
from sqlalchemy import (
exc, sql, func, select, String, Integer, MetaData, and_, ForeignKey,
- union, intersect, except_, union_all, VARCHAR, INT, CHAR, text, Sequence,
- bindparam, literal, not_, type_coerce, literal_column, desc, asc,
- TypeDecorator, or_, cast, table, column)
-from sqlalchemy.engine import default, result as _result
+ union, intersect, except_, union_all, VARCHAR, INT, text,
+ bindparam, literal, not_, literal_column, desc, asc,
+ TypeDecorator, or_, cast)
+from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
# ongoing - these are old tests. those which are of general use
@@ -61,253 +61,6 @@ class QueryTest(fixtures.TestBase):
def teardown_class(cls):
metadata.drop_all()
- @testing.requires.multivalues_inserts
- def test_multivalues_insert(self):
- users.insert(
- values=[
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'}]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[0] == (7, 'jack'))
- self.assert_(rows[1] == (8, 'ed'))
- users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[2] == (9, 'jack'))
- self.assert_(rows[3] == (10, 'ed'))
-
- def test_insert_heterogeneous_params(self):
- """test that executemany parameters are asserted to match the
- parameter set of the first."""
-
- assert_raises_message(
- exc.StatementError,
- r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
- "bind parameter 'user_name', in "
- "parameter group 2 "
- r"\[SQL: u?'INSERT INTO query_users",
- users.insert().execute,
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- # this succeeds however. We aren't yet doing
- # a length check on all subsequent parameters.
- users.insert().execute(
- {'user_id': 7},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- def test_lastrow_accessor(self):
- """Tests the inserted_primary_key and lastrow_has_id() functions."""
-
- def insert_values(engine, table, values):
- """
- Inserts a row into a table, returns the full list of values
- INSERTed including defaults that fired off on the DB side and
- detects rows that had defaults and post-fetches.
- """
-
- # verify implicit_returning is working
- if engine.dialect.implicit_returning:
- ins = table.insert()
- comp = ins.compile(engine, column_keys=list(values))
- if not set(values).issuperset(
- c.key for c in table.primary_key):
- assert comp.returning
-
- result = engine.execute(table.insert(), **values)
- ret = values.copy()
-
- for col, id in zip(table.primary_key, result.inserted_primary_key):
- ret[col.key] = id
-
- if result.lastrow_has_defaults():
- criterion = and_(
- *[
- col == id for col, id in
- zip(table.primary_key, result.inserted_primary_key)])
- row = engine.execute(table.select(criterion)).first()
- for c in table.c:
- ret[c.key] = row[c]
- return ret
-
- if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
- assert testing.db.dialect.implicit_returning
-
- if testing.db.dialect.implicit_returning:
- test_engines = [
- engines.testing_engine(options={'implicit_returning': False}),
- engines.testing_engine(options={'implicit_returning': True}),
- ]
- else:
- test_engines = [testing.db]
-
- for engine in test_engines:
- metadata = MetaData()
- for supported, table, values, assertvalues in [
- (
- {'unsupported': ['sqlite']},
- Table(
- "t1", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True)),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi'}
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t2", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t3", metadata,
- Column("id", String(40), primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column("bar", String(30))
- ),
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
- ),
- (
- {'unsupported': []},
- Table(
- "t4", metadata,
- Column(
- 'id', Integer,
- Sequence('t4_id_seq', optional=True),
- primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi', 'id': 1},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t5", metadata,
- Column('id', String(10), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'id': 'id1'},
- {'id': 'id1', 'bar': 'hi'},
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t6", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('bar', Integer, primary_key=True)
- ),
- {'bar': 0},
- {'id': 1, 'bar': 0},
- ),
- ]:
- if testing.db.name in supported['unsupported']:
- continue
- try:
- table.create(bind=engine, checkfirst=True)
- i = insert_values(engine, table, values)
- assert i == assertvalues, "tablename: %s %r %r" % \
- (table.name, repr(i), repr(assertvalues))
- finally:
- table.drop(bind=engine)
-
- # TODO: why not in the sqlite suite?
- @testing.only_on('sqlite+pysqlite')
- @testing.provide_metadata
- def test_lastrowid_zero(self):
- from sqlalchemy.dialects import sqlite
- eng = engines.testing_engine()
-
- class ExcCtx(sqlite.base.SQLiteExecutionContext):
-
- def get_lastrowid(self):
- return 0
- eng.dialect.execution_ctx_cls = ExcCtx
- t = Table(
- 't', self.metadata, Column('x', Integer, primary_key=True),
- Column('y', Integer))
- t.create(eng)
- r = eng.execute(t.insert().values(y=5))
- eq_(r.inserted_primary_key, [0])
-
- @testing.fails_on(
- 'sqlite', "sqlite autoincremnt doesn't work with composite pks")
- def test_misordered_lastrow(self):
- related = Table(
- 'related', metadata,
- Column('id', Integer, primary_key=True),
- mysql_engine='MyISAM'
- )
- t6 = Table(
- "t6", metadata,
- Column(
- 'manual_id', Integer, ForeignKey('related.id'),
- primary_key=True),
- Column(
- 'auto_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- mysql_engine='MyISAM'
- )
-
- metadata.create_all()
- r = related.insert().values(id=12).execute()
- id = r.inserted_primary_key[0]
- assert id == 12
-
- r = t6.insert().values(manual_id=id).execute()
- eq_(r.inserted_primary_key, [12, 1])
-
- def test_implicit_id_insert_select(self):
- stmt = users.insert().from_select(
- (users.c.user_id, users.c.user_name),
- users.select().where(users.c.user_id == 20))
-
- testing.db.execute(stmt)
-
- def test_row_iteration(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
- r = users.select().execute()
- l = []
- for row in r:
- l.append(row)
- self.assert_(len(l) == 3)
-
- @testing.requires.subqueries
- def test_anonymous_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
- as_scalar()
- for row in select([sel + 1, sel + 3], bind=users.bind).execute():
- assert row['anon_1'] == 8
- assert row['anon_2'] == 10
-
@testing.fails_on(
'firebird', "kinterbasdb doesn't send full type information")
def test_order_by_label(self):
@@ -357,154 +110,6 @@ class QueryTest(fixtures.TestBase):
[("test: ed",), ("test: fred",), ("test: jack",)]
)
- def test_row_comparison(self):
- users.insert().execute(user_id=7, user_name='jack')
- rp = users.select().execute().first()
-
- self.assert_(rp == rp)
- self.assert_(not(rp != rp))
-
- equal = (7, 'jack')
-
- self.assert_(rp == equal)
- self.assert_(equal == rp)
- self.assert_(not (rp != equal))
- self.assert_(not (equal != equal))
-
- def endless():
- while True:
- yield 1
- self.assert_(rp != endless())
- self.assert_(endless() != rp)
-
- # test that everything compares the same
- # as it would against a tuple
- import operator
- for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
- for op in [
- operator.eq, operator.ne, operator.gt,
- operator.lt, operator.ge, operator.le
- ]:
-
- try:
- control = op(equal, compare)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, rp, compare)
- else:
- eq_(control, op(rp, compare))
-
- try:
- control = op(compare, equal)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, compare, rp)
- else:
- eq_(control, op(compare, rp))
-
- @testing.provide_metadata
- def test_column_label_overlap_fallback(self):
- content = Table(
- 'content', self.metadata,
- Column('type', String(30)),
- )
- bar = Table(
- 'bar', self.metadata,
- Column('content_type', String(30))
- )
- self.metadata.create_all(testing.db)
- testing.db.execute(content.insert().values(type="t1"))
-
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- def test_pickled_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- for pickle in False, True:
- for use_labels in False, True:
- result = users.select(use_labels=use_labels).order_by(
- users.c.user_id).execute().fetchall()
-
- if pickle:
- result = util.pickle.loads(util.pickle.dumps(result))
-
- eq_(
- result,
- [(7, "jack"), (8, "ed"), (9, "fred")]
- )
- if use_labels:
- eq_(result[0]['query_users_user_id'], 7)
- eq_(
- list(result[0].keys()),
- ["query_users_user_id", "query_users_user_name"])
- else:
- eq_(result[0]['user_id'], 7)
- eq_(list(result[0].keys()), ["user_id", "user_name"])
-
- eq_(result[0][0], 7)
- eq_(result[0][users.c.user_id], 7)
- eq_(result[0][users.c.user_name], 'jack')
-
- if not pickle or use_labels:
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.user_id])
- else:
- # test with a different table. name resolution is
- # causing 'user_id' to match when use_labels wasn't used.
- eq_(result[0][addresses.c.user_id], 7)
-
- assert_raises(
- exc.NoSuchColumnError, lambda: result[0]['fake key'])
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.address_id])
-
- def test_column_error_printing(self):
- row = testing.db.execute(select([1])).first()
-
- class unprintable(object):
-
- def __str__(self):
- raise ValueError("nope")
-
- msg = r"Could not locate column in row for column '%s'"
-
- for accessor, repl in [
- ("x", "x"),
- (Column("q", Integer), "q"),
- (Column("q", Integer) + 12, r"q \+ :q_1"),
- (unprintable(), "unprintable element.*"),
- ]:
- assert_raises_message(
- exc.NoSuchColumnError,
- msg % repl,
- lambda: row[accessor]
- )
-
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -531,16 +136,6 @@ class QueryTest(fixtures.TestBase):
assert row.x == True # noqa
assert row.y == False # noqa
- def test_fetchmany(self):
- users.insert().execute(user_id=7, user_name='jack')
- users.insert().execute(user_id=8, user_name='ed')
- users.insert().execute(user_id=9, user_name='fred')
- r = users.select().execute()
- l = []
- for row in r.fetchmany(size=2):
- l.append(row)
- self.assert_(len(l) == 2, "fetchmany(size=2) got %s rows" % len(l))
-
def test_like_ops(self):
users.insert().execute(
{'user_id': 1, 'user_name': 'apples'},
@@ -809,476 +404,6 @@ class QueryTest(fixtures.TestBase):
use_labels=labels),
[(3, 'a'), (2, 'b'), (1, None)])
- def test_column_slices(self):
- users.insert().execute(user_id=1, user_name='john')
- users.insert().execute(user_id=2, user_name='jack')
- addresses.insert().execute(
- address_id=1, user_id=2, address='foo@bar.com')
-
- r = text(
- "select * from query_addresses", bind=testing.db).execute().first()
- self.assert_(r[0:1] == (1,))
- self.assert_(r[1:] == (2, 'foo@bar.com'))
- self.assert_(r[:-1] == (1, 2))
-
- def test_column_accessor_basic_compiled(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
-
- r = users.select(users.c.user_id == 2).execute().first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_basic_text(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- r = testing.db.execute(
- text("select * from query_users where user_id=2")).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_textual_select(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- # this will create column() objects inside
- # the select(), these need to match on name anyway
- r = testing.db.execute(
- select([
- column('user_id'), column('user_name')
- ]).select_from(table('query_users')).
- where(text('user_id=2'))
- ).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_dotted_union(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # test a little sqlite weirdness - with the UNION,
- # cols come back as "query_users.user_id" in cursor.description
- r = testing.db.execute(
- text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users"
- )
- ).first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_raw(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execution_options(sqlite_raw_colnames=True). \
- execute().first()
- assert 'user_id' not in r
- assert 'user_name' not in r
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_translated(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execute().first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- def test_column_accessor_labels_w_dots(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
- # test using literal tablename.colname
- r = text(
- 'select query_users.user_id AS "query_users.user_id", '
- 'query_users.user_name AS "query_users.user_name" '
- 'from query_users', bind=testing.db).\
- execution_options(sqlite_raw_colnames=True).execute().first()
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- assert "user_name" not in r
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- def test_column_accessor_unary(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # unary experssions
- r = select([users.c.user_name.distinct()]).order_by(
- users.c.user_name).execute().first()
- eq_(r[users.c.user_name], 'john')
- eq_(r.user_name, 'john')
-
- def test_column_accessor_err(self):
- r = testing.db.execute(select([1])).first()
- assert_raises_message(
- AttributeError,
- "Could not locate column in row for column 'foo'",
- getattr, r, "foo"
- )
- assert_raises_message(
- KeyError,
- "Could not locate column in row for column 'foo'",
- lambda: r['foo']
- )
-
- def test_graceful_fetch_on_non_rows(self):
- """test that calling fetchone() etc. on a result that doesn't
- return rows fails gracefully.
-
- """
-
- # these proxies don't work with no cursor.description present.
- # so they don't apply to this test at the moment.
- # result.FullyBufferedResultProxy,
- # result.BufferedRowResultProxy,
- # result.BufferedColumnResultProxy
-
- conn = testing.db.connect()
- for meth in ('fetchone', 'fetchall', 'first', 'scalar', 'fetchmany'):
- trans = conn.begin()
- result = conn.execute(users.insert(), user_id=1)
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object does not return rows. "
- "It has been closed automatically.",
- getattr(result, meth),
- )
- trans.rollback()
-
- @testing.requires.empty_inserts
- @testing.requires.returning
- def test_no_inserted_pk_on_returning(self):
- result = testing.db.execute(users.insert().returning(
- users.c.user_id, users.c.user_name))
- assert_raises_message(
- exc.InvalidRequestError,
- r"Can't call inserted_primary_key when returning\(\) is used.",
- getattr, result, 'inserted_primary_key'
- )
-
- def test_fetchone_til_end(self):
- result = testing.db.execute("select * from query_users")
- eq_(result.fetchone(), None)
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object is closed.",
- result.fetchone
- )
-
- def test_row_case_sensitive(self):
- row = testing.db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
-
- assert_raises(
- KeyError,
- lambda: row["Case_insensitive"]
- )
- assert_raises(
- KeyError,
- lambda: row["casesensitive"]
- )
-
- def test_row_case_insensitive(self):
- ins_db = engines.testing_engine(options={"case_sensitive": False})
- row = ins_db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
- eq_(row["Case_insensitive"], 1)
- eq_(row["casesensitive"], 2)
-
- def test_row_as_args(self):
- users.insert().execute(user_id=1, user_name='john')
- r = users.select(users.c.user_id == 1).execute().first()
- users.delete().execute()
- users.insert().execute(r)
- eq_(users.select().execute().fetchall(), [(1, 'john')])
-
- def test_result_as_args(self):
- users.insert().execute([
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='ed')])
- r = users.select().execute()
- users2.insert().execute(list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- users2.delete().execute()
- r = users.select().execute()
- users2.insert().execute(*list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column(self):
- users.insert().execute(user_id=1, user_name='john')
- result = users.outerjoin(addresses).select().execute()
- r = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[addresses.c.user_id]
- )
-
- # try to trick it - fake_table isn't in the result!
- # we get the correct error
- fake_table = Table('fake', MetaData(), Column('user_id', Integer))
- assert_raises_message(
- exc.InvalidRequestError,
- "Could not locate column in row for column 'fake.user_id'",
- lambda: r[fake_table.c.user_id]
- )
-
- r = util.pickle.loads(util.pickle.dumps(r))
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- result = users.outerjoin(addresses).select().execute()
- result = _result.BufferedColumnResultProxy(result.context)
- r = result.first()
- assert isinstance(r, _result.BufferedColumnRow)
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_by_col(self):
- users.insert().execute(user_id=1, user_name='john')
- ua = users.alias()
- u2 = users.alias()
- result = select([users.c.user_id, ua.c.user_id]).execute()
- row = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[ua.c.user_id]
- )
-
- # Unfortunately, this fails -
- # we'd like
- # "Could not locate column in row"
- # to be raised here, but the check for
- # "common column" in _compare_name_for_result()
- # has other requirements to be more liberal.
- # Ultimately the
- # expression system would need a way to determine
- # if given two columns in a "proxy" relationship, if they
- # refer to a different parent table
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[u2.c.user_id]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_contains(self):
- # ticket 2702. in 0.7 we'd get True, False.
- # in 0.8, both columns are present so it's True;
- # but when they're fetched you'll get the ambiguous error.
- users.insert().execute(user_id=1, user_name='john')
- result = select([users.c.user_id, addresses.c.user_id]).\
- select_from(users.outerjoin(addresses)).execute()
- row = result.first()
-
- eq_(
- set([users.c.user_id in row, addresses.c.user_id in row]),
- set([True])
- )
-
- def test_ambiguous_column_by_col_plus_label(self):
- users.insert().execute(user_id=1, user_name='john')
- result = select(
- [users.c.user_id,
- type_coerce(users.c.user_id, Integer).label('foo')]).execute()
- row = result.first()
- eq_(
- row[users.c.user_id], 1
- )
- eq_(
- row[1], 1
- )
-
- @testing.requires.subqueries
- def test_column_label_targeting(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- for s in (
- users.select().alias('foo'),
- users.select().alias(users.name),
- ):
- row = s.select(use_labels=True).execute().first()
- assert row[s.c.user_id] == 7
- assert row[s.c.user_name] == 'ed'
-
- def test_keys(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- r = r.first()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
-
- def test_items(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(
- [(x[0].lower(), x[1]) for x in list(r.items())],
- [('user_id', 1), ('user_name', 'foo')])
-
- def test_len(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(len(r), 2)
-
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(len(r), 2)
- r = testing.db.execute('select user_name from query_users').first()
- eq_(len(r), 1)
-
- def test_sorting_in_python(self):
- users.insert().execute(
- dict(user_id=1, user_name='foo'),
- dict(user_id=2, user_name='bar'),
- dict(user_id=3, user_name='def'),
- )
-
- rows = users.select().order_by(users.c.user_name).execute().fetchall()
-
- eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
-
- eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
-
- def test_column_order_with_simple_query(self):
- # should return values in column definition order
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select(users.c.user_id == 1).execute().first()
- eq_(r[0], 1)
- eq_(r[1], 'foo')
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- eq_(list(r.values()), [1, 'foo'])
-
- def test_column_order_with_text_query(self):
- # should return values in query order
- users.insert().execute(user_id=1, user_name='foo')
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(r[0], 'foo')
- eq_(r[1], 1)
- eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
- eq_(list(r.values()), ['foo', 1])
-
- @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
- @testing.crashes('firebird', 'An identifier must begin with a letter')
- def test_column_accessor_shadow(self):
- meta = MetaData(testing.db)
- shadowed = Table(
- 'test_shadowed', meta,
- Column('shadow_id', INT, primary_key=True),
- Column('shadow_name', VARCHAR(20)),
- Column('parent', VARCHAR(20)),
- Column('row', VARCHAR(40)),
- Column('_parent', VARCHAR(20)),
- Column('_row', VARCHAR(20)),
- )
- shadowed.create(checkfirst=True)
- try:
- shadowed.insert().execute(
- shadow_id=1, shadow_name='The Shadow', parent='The Light',
- row='Without light there is no shadow',
- _parent='Hidden parent', _row='Hidden row')
- r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
- self.assert_(
- r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
- self.assert_(
- r.shadow_name == r['shadow_name'] ==
- r[shadowed.c.shadow_name] == 'The Shadow')
- self.assert_(
- r.parent == r['parent'] == r[shadowed.c.parent] == 'The Light')
- self.assert_(
- r.row == r['row'] == r[shadowed.c.row] ==
- 'Without light there is no shadow')
- self.assert_(r['_parent'] == 'Hidden parent')
- self.assert_(r['_row'] == 'Hidden row')
- finally:
- shadowed.drop(checkfirst=True)
-
@testing.emits_warning('.*empty sequence.*')
def test_in_filtering(self):
"""test the behavior of the in_() function."""
@@ -1428,393 +553,6 @@ class RequiredBindTest(fixtures.TablesTest):
is_(bindparam('foo', callable_=c, required=False).required, False)
-class TableInsertTest(fixtures.TablesTest):
-
- """test for consistent insert behavior across dialects
- regarding the inline=True flag, lower-case 't' tables.
-
- """
- run_create_tables = 'each'
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'foo', metadata,
- Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
- Column('data', String(50)),
- Column('x', Integer)
- )
-
- def _fixture(self, types=True):
- if types:
- t = sql.table(
- 'foo', sql.column('id', Integer),
- sql.column('data', String),
- sql.column('x', Integer))
- else:
- t = sql.table(
- 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
- return t
-
- def _test(self, stmt, row, returning=None, inserted_primary_key=False):
- r = testing.db.execute(stmt)
-
- if returning:
- returned = r.first()
- eq_(returned, returning)
- elif inserted_primary_key is not False:
- eq_(r.inserted_primary_key, inserted_primary_key)
-
- eq_(testing.db.execute(self.tables.foo.select()).first(), row)
-
- def _test_multi(self, stmt, rows, data):
- testing.db.execute(stmt, rows)
- eq_(
- testing.db.execute(
- self.tables.foo.select().
- order_by(self.tables.foo.c.id)).fetchall(),
- data)
-
- @testing.requires.sequences
- def test_expicit_sequence(self):
- t = self._fixture()
- self._test(
- t.insert().values(
- id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
- (1, 'data', 5)
- )
-
- def test_uppercase(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_inline(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.crashes(
- "mssql+pyodbc",
- "Pyodbc + SQL Server + Py3K, some decimal handling issue")
- def test_uppercase_inline_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[None]
- )
-
- def test_uppercase_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_direct_params(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.requires.returning
- def test_uppercase_direct_params_returning(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- def test_direct_params(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- @testing.requires.returning
- def test_direct_params_returning(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk(self):
- t = self._fixture()
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_multi_rows(self):
- t = self._fixture()
- self._test_multi(
- t.insert(),
- [
- {'data': 'd1', 'x': 5},
- {'data': 'd2', 'x': 6},
- {'data': 'd3', 'x': 7},
- ],
- [
- (1, 'd1', 5),
- (2, 'd2', 6),
- (3, 'd3', 7)
- ],
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_inline(self):
- t = self._fixture()
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
-
-class KeyTargetingTest(fixtures.TablesTest):
- run_inserts = 'once'
- run_deletes = None
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'keyed1', metadata, Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q")
- )
- Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
- Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
- Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
- Table('content', metadata, Column('t', String(30), key="type"))
- Table('bar', metadata, Column('ctype', String(30), key="content_type"))
-
- if testing.requires.schemas.enabled:
- Table(
- 'wschema', metadata,
- Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q"),
- schema=testing.config.test_schema
- )
-
- @classmethod
- def insert_data(cls):
- cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
- cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
- cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
- cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
- cls.tables.content.insert().execute(type="t1")
-
- if testing.requires.schemas.enabled:
- cls.tables['%s.wschema' % testing.config.test_schema].insert().execute(
- dict(b="a1", q="c1"))
-
- @testing.requires.schemas
- def test_keyed_accessor_wschema(self):
- keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single_labeled(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select().apply_labels()).first()
-
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_conflict_2(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2])).first()
- # row.b is unambiguous
- eq_(row.b, "b2")
- # row.a is ambiguous
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambig",
- getattr, row, "a"
- )
-
- def test_keyed_accessor_composite_names_precedent(self):
- keyed1 = self.tables.keyed1
- keyed4 = self.tables.keyed4
-
- row = testing.db.execute(select([keyed1, keyed4])).first()
- eq_(row.b, "b4")
- eq_(row.q, "q4")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_keys_precedent(self):
- keyed1 = self.tables.keyed1
- keyed3 = self.tables.keyed3
-
- row = testing.db.execute(select([keyed1, keyed3])).first()
- eq_(row.q, "c1")
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'b'",
- getattr, row, "b"
- )
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'a'",
- getattr, row, "a"
- )
- eq_(row.d, "d3")
-
- def test_keyed_accessor_composite_labeled(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
- first()
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_c, "c1")
- eq_(row.keyed2_a, "a2")
- eq_(row.keyed2_b, "b2")
- assert_raises(KeyError, lambda: row['keyed2_c'])
- assert_raises(KeyError, lambda: row['keyed2_q'])
-
- def test_column_label_overlap_fallback(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- def test_column_label_overlap_fallback_2(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') not in row
-
- def test_columnclause_schema_column_one(self):
- keyed2 = self.tables.keyed2
-
- # this is addressed by [ticket:2932]
- # ColumnClause._compare_name_for_result allows the
- # columns which the statement is against to be lightweight
- # cols, which results in a more liberal comparison scheme
- a, b = sql.column('a'), sql.column('b')
- stmt = select([a, b]).select_from(table("keyed2"))
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_two(self):
- keyed2 = self.tables.keyed2
-
- a, b = sql.column('a'), sql.column('b')
- stmt = select([keyed2.c.a, keyed2.c.b])
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_three(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('a'), sql.column('b')
- stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.a in row
- assert stmt.c.b in row
-
- def test_columnclause_schema_column_four(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- a, b)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
- def test_columnclause_schema_column_five(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- keyed2_a=CHAR, keyed2_b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
-
class LimitTest(fixtures.TestBase):
__backend__ = True
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
new file mode 100644
index 000000000..aaeb82fa4
--- /dev/null
+++ b/test/sql/test_resultset.py
@@ -0,0 +1,1319 @@
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ in_, not_in_, is_, ne_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import util
+from sqlalchemy import (
+ exc, sql, func, select, String, Integer, MetaData, ForeignKey,
+ VARCHAR, INT, CHAR, text, type_coerce, literal_column,
+ TypeDecorator, table, column)
+from sqlalchemy.engine import result as _result
+from sqlalchemy.testing.schema import Table, Column
+import operator
+from sqlalchemy.testing import assertions
+
+
+class ResultProxyTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+ Table(
+ 'addresses', metadata,
+ Column(
+ 'address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.user_id')),
+ Column('address', String(30)),
+ test_needs_acid=True
+ )
+
+ Table(
+ 'users2', metadata,
+ Column('user_id', INT, primary_key=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ def test_row_iteration(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+ r = users.select().execute()
+ l = []
+ for row in r:
+ l.append(row)
+ eq_(len(l), 3)
+
+ @testing.requires.subqueries
+ def test_anonymous_rows(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
+ as_scalar()
+ for row in select([sel + 1, sel + 3], bind=users.bind).execute():
+ eq_(row['anon_1'], 8)
+ eq_(row['anon_2'], 10)
+
+ def test_row_comparison(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ rp = users.select().execute().first()
+
+ eq_(rp, rp)
+ is_(not(rp != rp), True)
+
+ equal = (7, 'jack')
+
+ eq_(rp, equal)
+ eq_(equal, rp)
+ is_((not (rp != equal)), True)
+ is_(not (equal != equal), True)
+
+ def endless():
+ while True:
+ yield 1
+ ne_(rp, endless())
+ ne_(endless(), rp)
+
+ # test that everything compares the same
+ # as it would against a tuple
+ for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
+ for op in [
+ operator.eq, operator.ne, operator.gt,
+ operator.lt, operator.ge, operator.le
+ ]:
+
+ try:
+ control = op(equal, compare)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, rp, compare)
+ else:
+ eq_(control, op(rp, compare))
+
+ try:
+ control = op(compare, equal)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, compare, rp)
+ else:
+ eq_(control, op(compare, rp))
+
+ @testing.provide_metadata
+ def test_column_label_overlap_fallback(self):
+ content = Table(
+ 'content', self.metadata,
+ Column('type', String(30)),
+ )
+ bar = Table(
+ 'bar', self.metadata,
+ Column('content_type', String(30))
+ )
+ self.metadata.create_all(testing.db)
+ testing.db.execute(content.insert().values(type="t1"))
+
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
+ in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ def test_pickled_rows(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ for pickle in False, True:
+ for use_labels in False, True:
+ result = users.select(use_labels=use_labels).order_by(
+ users.c.user_id).execute().fetchall()
+
+ if pickle:
+ result = util.pickle.loads(util.pickle.dumps(result))
+
+ eq_(
+ result,
+ [(7, "jack"), (8, "ed"), (9, "fred")]
+ )
+ if use_labels:
+ eq_(result[0]['users_user_id'], 7)
+ eq_(
+ list(result[0].keys()),
+ ["users_user_id", "users_user_name"])
+ else:
+ eq_(result[0]['user_id'], 7)
+ eq_(list(result[0].keys()), ["user_id", "user_name"])
+
+ eq_(result[0][0], 7)
+ eq_(result[0][users.c.user_id], 7)
+ eq_(result[0][users.c.user_name], 'jack')
+
+ if not pickle or use_labels:
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.user_id])
+ else:
+ # test with a different table. name resolution is
+ # causing 'user_id' to match when use_labels wasn't used.
+ eq_(result[0][addresses.c.user_id], 7)
+
+ assert_raises(
+ exc.NoSuchColumnError, lambda: result[0]['fake key'])
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.address_id])
+
+ def test_column_error_printing(self):
+ result = testing.db.execute(select([1]))
+ row = result.first()
+
+ class unprintable(object):
+
+ def __str__(self):
+ raise ValueError("nope")
+
+ msg = r"Could not locate column in row for column '%s'"
+
+ for accessor, repl in [
+ ("x", "x"),
+ (Column("q", Integer), "q"),
+ (Column("q", Integer) + 12, r"q \+ :q_1"),
+ (unprintable(), "unprintable element.*"),
+ ]:
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ msg % repl,
+ result._getter, accessor
+ )
+
+ is_(result._getter(accessor, False), None)
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ msg % repl,
+ lambda: row[accessor]
+ )
+
+ def test_fetchmany(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ users.insert().execute(user_id=8, user_name='ed')
+ users.insert().execute(user_id=9, user_name='fred')
+ r = users.select().execute()
+ l = []
+ for row in r.fetchmany(size=2):
+ l.append(row)
+ eq_(len(l), 2)
+
+ def test_column_slices(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ users.insert().execute(user_id=2, user_name='jack')
+ addresses.insert().execute(
+ address_id=1, user_id=2, address='foo@bar.com')
+
+ r = text(
+ "select * from addresses", bind=testing.db).execute().first()
+ eq_(r[0:1], (1,))
+ eq_(r[1:], (2, 'foo@bar.com'))
+ eq_(r[:-1], (1, 2))
+
+ def test_column_accessor_basic_compiled(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+
+ r = users.select(users.c.user_id == 2).execute().first()
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_basic_text(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ r = testing.db.execute(
+ text("select * from users where user_id=2")).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_textual_select(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ # this will create column() objects inside
+ # the select(), these need to match on name anyway
+ r = testing.db.execute(
+ select([
+ column('user_id'), column('user_name')
+ ]).select_from(table('users')).
+ where(text('user_id=2'))
+ ).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_dotted_union(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ # test a little sqlite < 3.10.0 weirdness - with the UNION,
+ # cols come back as "users.user_id" in cursor.description
+ r = testing.db.execute(
+ text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users"
+ )
+ ).first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ def test_column_accessor_sqlite_raw(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execution_options(sqlite_raw_colnames=True). \
+ execute().first()
+
+ if testing.against("sqlite < 3.10.0"):
+ not_in_('user_id', r)
+ not_in_('user_name', r)
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+ else:
+ not_in_('users.user_id', r)
+ not_in_('users.user_name', r)
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ def test_column_accessor_sqlite_translated(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execute().first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+
+ if testing.against("sqlite < 3.10.0"):
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ else:
+ not_in_('users.user_id', r)
+ not_in_('users.user_name', r)
+
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ def test_column_accessor_labels_w_dots(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+ # test using literal tablename.colname
+ r = text(
+ 'select users.user_id AS "users.user_id", '
+ 'users.user_name AS "users.user_name" '
+ 'from users', bind=testing.db).\
+ execution_options(sqlite_raw_colnames=True).execute().first()
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ not_in_("user_name", r)
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+
+ def test_column_accessor_unary(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+        # unary expressions
+ r = select([users.c.user_name.distinct()]).order_by(
+ users.c.user_name).execute().first()
+ eq_(r[users.c.user_name], 'john')
+ eq_(r.user_name, 'john')
+
+ def test_column_accessor_err(self):
+ r = testing.db.execute(select([1])).first()
+ assert_raises_message(
+ AttributeError,
+ "Could not locate column in row for column 'foo'",
+ getattr, r, "foo"
+ )
+ assert_raises_message(
+ KeyError,
+ "Could not locate column in row for column 'foo'",
+ lambda: r['foo']
+ )
+
+ def test_graceful_fetch_on_non_rows(self):
+ """test that calling fetchone() etc. on a result that doesn't
+ return rows fails gracefully.
+
+ """
+
+ # these proxies don't work with no cursor.description present.
+ # so they don't apply to this test at the moment.
+ # result.FullyBufferedResultProxy,
+ # result.BufferedRowResultProxy,
+ # result.BufferedColumnResultProxy
+
+ users = self.tables.users
+
+ conn = testing.db.connect()
+ for meth in [
+ lambda r: r.fetchone(),
+ lambda r: r.fetchall(),
+ lambda r: r.first(),
+ lambda r: r.scalar(),
+ lambda r: r.fetchmany(),
+ lambda r: r._getter('user'),
+ lambda r: r._has_key('user'),
+ ]:
+ trans = conn.begin()
+ result = conn.execute(users.insert(), user_id=1)
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object does not return rows. "
+ "It has been closed automatically.",
+ meth, result,
+ )
+ trans.rollback()
+
+ def test_fetchone_til_end(self):
+ result = testing.db.execute("select * from users")
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ result.close()
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object is closed.",
+ result.fetchone
+ )
+
+ def test_row_case_sensitive(self):
+ row = testing.db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+
+ assert_raises(
+ KeyError,
+ lambda: row["Case_insensitive"]
+ )
+ assert_raises(
+ KeyError,
+ lambda: row["casesensitive"]
+ )
+
+ def test_row_case_sensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": True})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+
+ assert_raises(KeyError, lambda: row["Case_insensitive"])
+ assert_raises(KeyError, lambda: row["casesensitive"])
+ assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
+
+ def test_row_case_insensitive(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+
+ def test_row_case_insensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+ eq_(row["screw_UP_the_cols"], 3)
+
+ def test_row_as_args(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ r = users.select(users.c.user_id == 1).execute().first()
+ users.delete().execute()
+ users.insert().execute(r)
+ eq_(users.select().execute().fetchall(), [(1, 'john')])
+
+ def test_result_as_args(self):
+ users = self.tables.users
+ users2 = self.tables.users2
+
+ users.insert().execute([
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='ed')])
+ r = users.select().execute()
+ users2.insert().execute(list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ users2.delete().execute()
+ r = users.select().execute()
+ users2.insert().execute(*list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = users.outerjoin(addresses).select().execute()
+ r = result.first()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ # pure positional targeting; users.c.user_id
+ # and addresses.c.user_id are known!
+ # works as of 1.1 issue #3501
+ eq_(r[users.c.user_id], 1)
+ eq_(r[addresses.c.user_id], None)
+
+ # try to trick it - fake_table isn't in the result!
+ # we get the correct error
+ fake_table = Table('fake', MetaData(), Column('user_id', Integer))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Could not locate column in row for column 'fake.user_id'",
+ lambda: r[fake_table.c.user_id]
+ )
+
+ r = util.pickle.loads(util.pickle.dumps(r))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ result = users.outerjoin(addresses).select().execute()
+ result = _result.BufferedColumnResultProxy(result.context)
+ r = result.first()
+ assert isinstance(r, _result.BufferedColumnRow)
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_by_col(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ ua = users.alias()
+ u2 = users.alias()
+ result = select([users.c.user_id, ua.c.user_id]).execute()
+ row = result.first()
+
+ # as of 1.1 issue #3501, we use pure positional
+ # targeting for the column objects here
+ eq_(row[users.c.user_id], 1)
+
+ eq_(row[ua.c.user_id], 1)
+
+ # this now works as of 1.1 issue #3501;
+ # previously this was stuck on "ambiguous column name"
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Could not locate column in row",
+ lambda: row[u2.c.user_id]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_contains(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ # ticket 2702. in 0.7 we'd get True, False.
+ # in 0.8, both columns are present so it's True;
+ # but when they're fetched you'll get the ambiguous error.
+ users.insert().execute(user_id=1, user_name='john')
+ result = select([users.c.user_id, addresses.c.user_id]).\
+ select_from(users.outerjoin(addresses)).execute()
+ row = result.first()
+
+ eq_(
+ set([users.c.user_id in row, addresses.c.user_id in row]),
+ set([True])
+ )
+
+ def test_ambiguous_column_by_col_plus_label(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = select(
+ [users.c.user_id,
+ type_coerce(users.c.user_id, Integer).label('foo')]).execute()
+ row = result.first()
+ eq_(
+ row[users.c.user_id], 1
+ )
+ eq_(
+ row[1], 1
+ )
+
+ def test_fetch_partial_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ t = text("select * from users").columns(
+ user_name=String()
+ )
+ eq_(
+ testing.db.execute(t).fetchall(), [(7, 'ed')]
+ )
+
+ def test_fetch_unordered_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ class Goofy1(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "a"
+
+ class Goofy2(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "b"
+
+ class Goofy3(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "c"
+
+ t = text(
+ "select user_name as a, user_name as b, "
+ "user_name as c from users").columns(
+ a=Goofy1(), b=Goofy2(), c=Goofy3()
+ )
+ eq_(
+ testing.db.execute(t).fetchall(), [
+ ('eda', 'edb', 'edc')
+ ]
+ )
+
+ @testing.requires.subqueries
+ def test_column_label_targeting(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ for s in (
+ users.select().alias('foo'),
+ users.select().alias(users.name),
+ ):
+ row = s.select(use_labels=True).execute().first()
+ eq_(row[s.c.user_id], 7)
+ eq_(row[s.c.user_name], 'ed')
+
+ def test_keys(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = users.select().execute()
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name']
+ )
+
+ def test_keys_anon_labels(self):
+ """test [ticket:3483]"""
+
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = testing.db.execute(
+ select([
+ users.c.user_id,
+ users.c.user_name.label(None),
+ func.count(literal_column('1'))]).
+ group_by(users.c.user_id, users.c.user_name)
+ )
+
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+
+ def test_items(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(
+ [(x[0].lower(), x[1]) for x in list(r.items())],
+ [('user_id', 1), ('user_name', 'foo')])
+
+ def test_len(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(len(r), 2)
+
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(len(r), 2)
+ r = testing.db.execute('select user_name from users').first()
+ eq_(len(r), 1)
+
+ def test_sorting_in_python(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
+ def test_column_order_with_simple_query(self):
+ # should return values in column definition order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select(users.c.user_id == 1).execute().first()
+ eq_(r[0], 1)
+ eq_(r[1], 'foo')
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
+ eq_(list(r.values()), [1, 'foo'])
+
+ def test_column_order_with_text_query(self):
+ # should return values in query order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(r[0], 'foo')
+ eq_(r[1], 1)
+ eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
+ eq_(list(r.values()), ['foo', 1])
+
+ @testing.crashes('oracle', 'FIXME: unknown, verify not fails_on()')
+ @testing.crashes('firebird', 'An identifier must begin with a letter')
+ @testing.provide_metadata
+ def test_column_accessor_shadow(self):
+ shadowed = Table(
+ 'test_shadowed', self.metadata,
+ Column('shadow_id', INT, primary_key=True),
+ Column('shadow_name', VARCHAR(20)),
+ Column('parent', VARCHAR(20)),
+ Column('row', VARCHAR(40)),
+ Column('_parent', VARCHAR(20)),
+ Column('_row', VARCHAR(20)),
+ )
+ self.metadata.create_all()
+ shadowed.insert().execute(
+ shadow_id=1, shadow_name='The Shadow', parent='The Light',
+ row='Without light there is no shadow',
+ _parent='Hidden parent', _row='Hidden row')
+ r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
+
+ eq_(r.shadow_id, 1)
+ eq_(r['shadow_id'], 1)
+ eq_(r[shadowed.c.shadow_id], 1)
+
+ eq_(r.shadow_name, 'The Shadow')
+ eq_(r['shadow_name'], 'The Shadow')
+ eq_(r[shadowed.c.shadow_name], 'The Shadow')
+
+ eq_(r.parent, 'The Light')
+ eq_(r['parent'], 'The Light')
+ eq_(r[shadowed.c.parent], 'The Light')
+
+ eq_(r.row, 'Without light there is no shadow')
+ eq_(r['row'], 'Without light there is no shadow')
+ eq_(r[shadowed.c.row], 'Without light there is no shadow')
+
+ eq_(r['_parent'], 'Hidden parent')
+ eq_(r['_row'], 'Hidden row')
+
+
+class KeyTargetingTest(fixtures.TablesTest):
+ run_inserts = 'once'
+ run_deletes = None
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'keyed1', metadata, Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q")
+ )
+ Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
+ Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
+ Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
+ Table('content', metadata, Column('t', String(30), key="type"))
+ Table('bar', metadata, Column('ctype', String(30), key="content_type"))
+
+ if testing.requires.schemas.enabled:
+ Table(
+ 'wschema', metadata,
+ Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q"),
+ schema=testing.config.test_schema
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
+ cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
+ cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
+ cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
+ cls.tables.content.insert().execute(type="t1")
+
+ if testing.requires.schemas.enabled:
+ cls.tables[
+ '%s.wschema' % testing.config.test_schema].insert().execute(
+ dict(b="a1", q="c1"))
+
+ @testing.requires.schemas
+ def test_keyed_accessor_wschema(self):
+ keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single_labeled(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select().apply_labels()).first()
+
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_conflict_2(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2])).first()
+ # row.b is unambiguous
+ eq_(row.b, "b2")
+ # row.a is ambiguous
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambig",
+ getattr, row, "a"
+ )
+
+ def test_keyed_accessor_composite_names_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed4 = self.tables.keyed4
+
+ row = testing.db.execute(select([keyed1, keyed4])).first()
+ eq_(row.b, "b4")
+ eq_(row.q, "q4")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_keys_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed3 = self.tables.keyed3
+
+ row = testing.db.execute(select([keyed1, keyed3])).first()
+ eq_(row.q, "c1")
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'a'",
+ getattr, row, "b"
+ )
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'a'",
+ getattr, row, "a"
+ )
+ eq_(row.d, "d3")
+
+ def test_keyed_accessor_composite_labeled(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
+ first()
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_c, "c1")
+ eq_(row.keyed2_a, "a2")
+ eq_(row.keyed2_b, "b2")
+ assert_raises(KeyError, lambda: row['keyed2_c'])
+ assert_raises(KeyError, lambda: row['keyed2_q'])
+
+ def test_column_label_overlap_fallback(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
+
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ def test_column_label_overlap_fallback_2(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ not_in_(sql.column('content_type'), row)
+
+ def test_columnclause_schema_column_one(self):
+ keyed2 = self.tables.keyed2
+
+ # this is addressed by [ticket:2932]
+ # ColumnClause._compare_name_for_result allows the
+ # columns which the statement is against to be lightweight
+ # cols, which results in a more liberal comparison scheme
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([a, b]).select_from(table("keyed2"))
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_two(self):
+ keyed2 = self.tables.keyed2
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([keyed2.c.a, keyed2.c.b])
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_three(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.a, row)
+ in_(stmt.c.b, row)
+
+ def test_columnclause_schema_column_four(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ a, b)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
+
+ def test_columnclause_schema_column_five(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ keyed2_a=CHAR, keyed2_b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
+
+
+class PositionalTextTest(fixtures.TablesTest):
+ run_inserts = 'once'
+ run_deletes = None
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'text1',
+ metadata,
+ Column("a", CHAR(2)),
+ Column("b", CHAR(2)),
+ Column("c", CHAR(2)),
+ Column("d", CHAR(2))
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.tables.text1.insert().execute([
+ dict(a="a1", b="b1", c="c1", d="d1"),
+ ])
+
+ def test_via_column(self):
+ c1, c2, c3, c4 = column('q'), column('p'), column('r'), column('d')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c2], "b1")
+ eq_(row[c4], "d1")
+ eq_(row[1], "b1")
+ eq_(row["b"], "b1")
+ eq_(row.keys(), ["a", "b", "c", "d"])
+ eq_(row["r"], "c1")
+ eq_(row["d"], "d1")
+
+ def test_fewer_cols_than_sql_positional(self):
+ c1, c2 = column('q'), column('p')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2)
+
+ # no warning as this can be similar for non-positional
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row["c"], "c1")
+
+ def test_fewer_cols_than_sql_non_positional(self):
+ c1, c2 = column('a'), column('p')
+ stmt = text("select a, b, c, d from text1").columns(c2, c1, d=CHAR)
+
+ # no warning as this can be similar for non-positional
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ # c1 name matches, locates
+ eq_(row[c1], "a1")
+ eq_(row["c"], "c1")
+
+ # c2 name does not match, doesn't locate
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "in row for column 'p'",
+ lambda: row[c2]
+ )
+
+ def test_more_cols_than_sql(self):
+ c1, c2, c3, c4 = column('q'), column('p'), column('r'), column('d')
+ stmt = text("select a, b from text1").columns(c1, c2, c3, c4)
+
+ with assertions.expect_warnings(
+ r"Number of columns in textual SQL \(4\) is "
+ "smaller than number of columns requested \(2\)"):
+ result = testing.db.execute(stmt)
+
+ row = result.first()
+ eq_(row[c2], "b1")
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "in row for column 'r'",
+ lambda: row[c3]
+ )
+
+ def test_dupe_col_obj(self):
+ c1, c2, c3 = column('q'), column('p'), column('r')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c2)
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Duplicate column expression requested in "
+ "textual SQL: <.*.ColumnClause.*; p>",
+ testing.db.execute, stmt
+ )
+
+ def test_anon_aliased_unique(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label(None)
+ c2 = text1.alias().c.c
+ c3 = text1.alias().c.b
+ c4 = text1.alias().c.d.label(None)
+
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.a], "a1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.d], "d1")
+
+ # text1.c.b goes nowhere....because we hit key fallback
+ # but the text1.c.b doesn't derive from text1.c.c
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'text1.b'",
+ lambda: row[text1.c.b]
+ )
+
+ def test_anon_aliased_overlapping(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label(None)
+ c2 = text1.alias().c.a
+ c3 = text1.alias().c.a.label(None)
+ c4 = text1.c.a.label(None)
+
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.a], "a1")
+
+ def test_anon_aliased_name_conflict(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label("a")
+ c2 = text1.alias().c.a
+ c3 = text1.alias().c.a.label("a")
+ c4 = text1.c.a.label("a")
+
+ # all cols are named "a". if we are positional, we don't care.
+ # this is new logic in 1.1
+ stmt = text("select a, b as a, c as a, d as a from text1").columns(
+ c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # fails, because we hit key fallback and find conflicts
+ # in columns that are present
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'text1.a'",
+ lambda: row[text1.c.a]
+ )
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index cd9f632b9..8c189a0dd 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -387,6 +387,33 @@ class ReturnDefaultsTest(fixtures.TablesTest):
{"data": None, 'upddef': 1}
)
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_insert_all(self):
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert().values(upddef=1).return_defaults()
+ )
+ eq_(
+ dict(result.returned_defaults),
+ {"id": 1, "data": None, "insdef": 0}
+ )
+
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_update_all(self):
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(
+ t1.update().
+ values(insdef=2).return_defaults()
+ )
+ eq_(
+ dict(result.returned_defaults),
+ {'upddef': 1}
+ )
+
+
class ImplicitReturningFlag(fixtures.TestBase):
__backend__ = True
diff --git a/test/sql/test_rowcount.py b/test/sql/test_rowcount.py
index 46e10e192..110f3639f 100644
--- a/test/sql/test_rowcount.py
+++ b/test/sql/test_rowcount.py
@@ -1,6 +1,7 @@
from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsExecutionResults
from sqlalchemy import testing
+from sqlalchemy.testing import eq_
class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
@@ -65,6 +66,22 @@ class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
print("expecting 3, dialect reports %s" % r.rowcount)
assert r.rowcount == 3
+ def test_raw_sql_rowcount(self):
+ # test issue #3622, make sure eager rowcount is called for text
+ with testing.db.connect() as conn:
+ result = conn.execute(
+ "update employees set department='Z' where department='C'")
+ eq_(result.rowcount, 3)
+
+ def test_text_rowcount(self):
+ # test issue #3622, make sure eager rowcount is called for text
+ with testing.db.connect() as conn:
+ result = conn.execute(
+ text(
+ "update employees set department='Z' "
+ "where department='C'"))
+ eq_(result.rowcount, 3)
+
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 99d0cbe76..7203cc5a3 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -155,15 +155,19 @@ class SelectableTest(
assert c in s.c.bar.proxy_set
def test_no_error_on_unsupported_expr_key(self):
- from sqlalchemy.dialects.postgresql import ARRAY
+ from sqlalchemy.sql.expression import BinaryExpression
- t = table('t', column('x', ARRAY(Integer)))
+ def myop(x, y):
+ pass
+
+ t = table('t', column('x'), column('y'))
+
+ expr = BinaryExpression(t.c.x, t.c.y, myop)
- expr = t.c.x[5]
s = select([t, expr])
eq_(
s.c.keys(),
- ['x', expr.anon_label]
+ ['x', 'y', expr.anon_label]
)
def test_cloned_intersection(self):
@@ -458,6 +462,26 @@ class SelectableTest(
assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+ @testing.emits_warning("Column 'col1'")
+ def test_union_alias_dupe_keys_grouped(self):
+ s1 = select([table1.c.col1, table1.c.col2, table2.c.col1]).\
+ limit(1).alias()
+ s2 = select([table2.c.col1, table2.c.col2, table2.c.col3]).limit(1)
+ u1 = union(s1, s2)
+
+ assert u1.corresponding_column(
+ s1.c._all_columns[0]) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s1.c.col2) is u1.c.col2
+ assert u1.corresponding_column(s2.c.col2) is u1.c.col2
+
+ assert u1.corresponding_column(s2.c.col3) is u1.c._all_columns[2]
+
+ # this differs from the non-alias test because table2.c.col1 is
+ # more directly at s2.c.col1 than it is s1.c.col1.
+ assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+
def test_select_union(self):
# like testaliasunion, but off a Select off the union.
@@ -912,10 +936,10 @@ class AnonLabelTest(fixtures.TestBase):
c1 = func.count('*')
assert c1.label(None) is not c1
- eq_(str(select([c1])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1])), "SELECT count(:count_2) AS count_1")
c2 = select([c1]).compile()
- eq_(str(select([c1.label(None)])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1.label(None)])), "SELECT count(:count_2) AS count_1")
def test_named_labels_named_column(self):
c1 = column('x')
@@ -1969,11 +1993,11 @@ class WithLabelsTest(fixtures.TestBase):
def _assert_result_keys(self, s, keys):
compiled = s.compile()
- eq_(set(compiled.result_map), set(keys))
+ eq_(set(compiled._create_result_map()), set(keys))
def _assert_subq_result_keys(self, s, keys):
compiled = s.select().compile()
- eq_(set(compiled.result_map), set(keys))
+ eq_(set(compiled._create_result_map()), set(keys))
def _names_overlap(self):
m = MetaData()
@@ -2113,7 +2137,7 @@ class WithLabelsTest(fixtures.TestBase):
self._assert_result_keys(sel, ['t1_a', 't2_b'])
-class SelectProxyTest(fixtures.TestBase):
+class ResultMapTest(fixtures.TestBase):
def _fixture(self):
m = MetaData()
@@ -2124,7 +2148,7 @@ class SelectProxyTest(fixtures.TestBase):
compiled = stmt.compile()
return dict(
(elem, key)
- for key, elements in compiled.result_map.items()
+ for key, elements in compiled._create_result_map().items()
for elem in elements[1]
)
@@ -2183,6 +2207,35 @@ class SelectProxyTest(fixtures.TestBase):
assert l1 in mapping
assert ta.c.x not in mapping
+ def test_column_subquery_exists(self):
+ t = self._fixture()
+ s = exists().where(t.c.x == 5).select()
+ mapping = self._mapping(s)
+ assert t.c.x not in mapping
+ eq_(
+ [type(entry[-1]) for entry in s.compile()._result_columns],
+ [Boolean]
+ )
+
+ def test_column_subquery_plain(self):
+ t = self._fixture()
+ s1 = select([t.c.x]).where(t.c.x > 5).as_scalar()
+ s2 = select([s1])
+ mapping = self._mapping(s2)
+ assert t.c.x not in mapping
+ assert s1 in mapping
+ eq_(
+ [type(entry[-1]) for entry in s2.compile()._result_columns],
+ [Integer]
+ )
+
+ def test_unary_boolean(self):
+
+ s1 = select([not_(True)], use_labels=True)
+ eq_(
+ [type(entry[-1]) for entry in s1.compile()._result_columns],
+ [Boolean]
+ )
class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
diff --git a/test/sql/test_text.py b/test/sql/test_text.py
index 4302dde48..20cb2a6fb 100644
--- a/test/sql/test_text.py
+++ b/test/sql/test_text.py
@@ -281,6 +281,17 @@ class BindParamTest(fixtures.TestBase, AssertsCompiledSQL):
dialect="postgresql"
)
+ def test_escaping_double_colons(self):
+ self.assert_compile(
+ text(
+ "SELECT * FROM pg_attribute WHERE "
+ "attrelid = :tab\:\:regclass"),
+ "SELECT * FROM pg_attribute WHERE "
+ "attrelid = %(tab)s::regclass",
+ params={'tab': None},
+ dialect="postgresql"
+ )
+
def test_text_in_select_nonfrom(self):
generate_series = text("generate_series(:x, :y, :z) as s(a)").\
@@ -315,7 +326,7 @@ class AsFromTest(fixtures.TestBase, AssertsCompiledSQL):
)
compiled = t.compile()
- eq_(compiled.result_map,
+ eq_(compiled._create_result_map(),
{'id': ('id',
(t.c.id._proxies[0],
'id',
@@ -331,7 +342,7 @@ class AsFromTest(fixtures.TestBase, AssertsCompiledSQL):
t = text("select id, name from user").columns(id=Integer, name=String)
compiled = t.compile()
- eq_(compiled.result_map,
+ eq_(compiled._create_result_map(),
{'id': ('id',
(t.c.id._proxies[0],
'id',
@@ -350,7 +361,7 @@ class AsFromTest(fixtures.TestBase, AssertsCompiledSQL):
table1.join(t, table1.c.myid == t.c.id))
compiled = stmt.compile()
eq_(
- compiled.result_map,
+ compiled._create_result_map(),
{
"myid": ("myid",
(table1.c.myid, "myid", "myid"), table1.c.myid.type),
@@ -382,7 +393,7 @@ class AsFromTest(fixtures.TestBase, AssertsCompiledSQL):
compiled = stmt.compile()
return dict(
(elem, key)
- for key, elements in compiled.result_map.items()
+ for key, elements in compiled._create_result_map().items()
for elem in elements[1]
)
@@ -574,6 +585,29 @@ class OrderByLabelResolutionTest(fixtures.TestBase, AssertsCompiledSQL):
"FROM mytable AS mytable_1 ORDER BY mytable_1.name"
)
+ def test_order_by_named_label_from_anon_label(self):
+ s1 = select([table1.c.myid.label(None).label("foo"), table1.c.name])
+ stmt = s1.order_by("foo")
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid AS foo, mytable.name "
+ "FROM mytable ORDER BY foo"
+ )
+
+ def test_order_by_outermost_label(self):
+ # test [ticket:3335], assure that order_by("foo")
+ # catches the label named "foo" in the columns clause only,
+ # and not the label named "foo" in the FROM clause
+ s1 = select([table1.c.myid.label("foo"), table1.c.name]).alias()
+ stmt = select([s1.c.name, func.bar().label("foo")]).order_by("foo")
+
+ self.assert_compile(
+ stmt,
+ "SELECT anon_1.name, bar() AS foo FROM "
+ "(SELECT mytable.myid AS foo, mytable.name AS name "
+ "FROM mytable) AS anon_1 ORDER BY foo"
+ )
+
def test_unresolvable_warning_order_by(self):
stmt = select([table1.c.myid]).order_by('foobar')
self._test_warning(
@@ -738,4 +772,3 @@ class OrderByLabelResolutionTest(fixtures.TestBase, AssertsCompiledSQL):
"mytable_1.name AS t1name, foo(:foo_1) AS x "
"FROM mytable AS mytable_1 ORDER BY mytable_1.myid, t1name, x"
)
-
diff --git a/test/sql/test_type_expressions.py b/test/sql/test_type_expressions.py
index c82ad3b94..0ef3a3e16 100644
--- a/test/sql/test_type_expressions.py
+++ b/test/sql/test_type_expressions.py
@@ -53,19 +53,20 @@ class SelectTest(_ExprFixture, fixtures.TestBase, AssertsCompiledSQL):
table = self._fixture()
compiled = select([table]).apply_labels().compile()
- assert table.c.y in compiled.result_map['test_table_y'][1]
- assert table.c.x in compiled.result_map['test_table_x'][1]
+ assert table.c.y in compiled._create_result_map()['test_table_y'][1]
+ assert table.c.x in compiled._create_result_map()['test_table_x'][1]
# the lower() function goes into the result_map, we don't really
# need this but it's fine
self.assert_compile(
- compiled.result_map['test_table_y'][1][2],
+ compiled._create_result_map()['test_table_y'][1][3],
"lower(test_table.y)"
)
# then the original column gets put in there as well.
- # it's not important that it's the last value.
+ # as of 1.1 it's important that it is first as this is
+ # taken as significant by the result processor.
self.assert_compile(
- compiled.result_map['test_table_y'][1][-1],
+ compiled._create_result_map()['test_table_y'][1][0],
"test_table.y"
)
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 8a56c685a..3d527b261 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -1,5 +1,6 @@
# coding: utf-8
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, is_, assert_raises, \
+ assert_raises_message, expect_warnings
import decimal
import datetime
import os
@@ -9,13 +10,15 @@ from sqlalchemy import (
and_, func, Date, LargeBinary, literal, cast, text, Enum,
type_coerce, VARCHAR, Time, DateTime, BigInteger, SmallInteger, BOOLEAN,
BLOB, NCHAR, NVARCHAR, CLOB, TIME, DATE, DATETIME, TIMESTAMP, SMALLINT,
- INTEGER, DECIMAL, NUMERIC, FLOAT, REAL)
+ INTEGER, DECIMAL, NUMERIC, FLOAT, REAL, ARRAY, JSON)
from sqlalchemy.sql import ddl
-
+from sqlalchemy.sql import visitors
+from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
+from sqlalchemy.util import OrderedDict
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
-from sqlalchemy.sql import operators, column, table
+from sqlalchemy.sql import operators, column, table, null
from sqlalchemy.schema import CheckConstraint, AddConstraint
from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
@@ -25,6 +28,9 @@ from sqlalchemy.testing import AssertsCompiledSQL, AssertsExecutionResults, \
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
+
+
class AdaptTest(fixtures.TestBase):
@@ -137,7 +143,7 @@ class AdaptTest(fixtures.TestBase):
for is_down_adaption, typ, target_adaptions in adaptions():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, ARRAY):
t1 = typ(String)
else:
t1 = typ()
@@ -145,6 +151,8 @@ class AdaptTest(fixtures.TestBase):
if not issubclass(typ, types.Enum) and \
issubclass(cls, types.Enum):
continue
+ if cls.__module__.startswith("test"):
+ continue
# print("ADAPT %s -> %s" % (t1.__class__, cls))
t2 = t1.adapt(cls)
@@ -154,7 +162,7 @@ class AdaptTest(fixtures.TestBase):
t2, t1 = t1, t2
for k in t1.__dict__:
- if k in ('impl', '_is_oracle_number'):
+ if k in ('impl', '_is_oracle_number', '_create_events'):
continue
# assert each value was copied, or that
# the adapted type has a more specific
@@ -176,6 +184,7 @@ class AdaptTest(fixtures.TestBase):
eq_(types.String().python_type, str)
eq_(types.Unicode().python_type, util.text_type)
eq_(types.String(convert_unicode=True).python_type, util.text_type)
+ eq_(types.Enum('one', 'two', 'three').python_type, str)
assert_raises(
NotImplementedError,
@@ -187,12 +196,28 @@ class AdaptTest(fixtures.TestBase):
for typ in self._all_types():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, ARRAY):
t1 = typ(String)
else:
t1 = typ()
repr(t1)
+ def test_adapt_constructor_copy_override_kw(self):
+ """test that adapt() can accept kw args that override
+ the state of the original object.
+
+ This essentially is testing the behavior of util.constructor_copy().
+
+ """
+ t1 = String(length=50, convert_unicode=False)
+ t2 = t1.adapt(Text, convert_unicode=True)
+ eq_(
+ t2.length, 50
+ )
+ eq_(
+ t2.convert_unicode, True
+ )
+
class TypeAffinityTest(fixtures.TestBase):
@@ -256,7 +281,6 @@ class PickleTypesTest(fixtures.TestBase):
Column('Lar', LargeBinary()),
Column('Pic', PickleType()),
Column('Int', Interval()),
- Column('Enu', Enum('x', 'y', 'z', name="somename")),
]
for column_type in column_types:
meta = MetaData()
@@ -771,6 +795,68 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1', 'BIND_INd1BIND_OUT')]
)
+ def test_cast_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(cast)
+
+ def test_type_coerce_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(type_coerce)
+
+ def _test_replace_col_w_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([t.c.data, coerce_fn(t.c.data, MyType)])
+
+ def col_to_bind(col):
+ if col is t.c.data:
+ return bindparam(None, "x", type_=col.type, unique=True)
+ return None
+
+ # ensure we evaulate the expression so that we can see
+ # the clone resets this info
+ stmt.compile()
+
+ new_stmt = visitors.replacement_traverse(stmt, {}, col_to_bind)
+
+ # original statement
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ # replaced with binds; CAST can't affect the bound parameter
+ # on the way in here
+ eq_(
+ testing.db.execute(new_stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
+ def test_cast_bind(self):
+ self._test_bind(cast)
+
+ def test_type_bind(self):
+ self._test_bind(type_coerce)
+
+ def _test_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([
+ bindparam(None, "x", String(50), unique=True),
+ coerce_fn(bindparam(None, "x", String(50), unique=True), MyType)
+ ])
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
@testing.fails_on(
"oracle", "ORA-00906: missing left parenthesis - "
"seems to be CAST(:param AS type)")
@@ -804,6 +890,7 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1BIND_OUT', )])
+
class VariantTest(fixtures.TestBase, AssertsCompiledSQL):
def setup(self):
@@ -933,57 +1020,6 @@ class UnicodeTest(fixtures.TestBase):
"""
__backend__ = True
- def test_native_unicode(self):
- """assert expected values for 'native unicode' mode"""
-
- if testing.against('mssql+pyodbc'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- 'conditional'
- )
-
- elif testing.against('mssql+mxodbc'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- 'conditional'
- )
-
- elif testing.against('mssql+pymssql'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- ('charset' in testing.db.url.query)
- )
-
- elif testing.against('mysql+cymysql', 'mysql+pymssql'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- True if util.py3k else False
- )
- elif testing.against('oracle+cx_oracle'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- True if util.py3k else "conditional"
- )
- else:
- expected = (testing.db.name, testing.db.driver) in \
- (
- ('postgresql', 'psycopg2'),
- ('postgresql', 'psycopg2cffi'),
- ('postgresql', 'pypostgresql'),
- ('postgresql', 'pg8000'),
- ('postgresql', 'zxjdbc'),
- ('mysql', 'oursql'),
- ('mysql', 'zxjdbc'),
- ('mysql', 'mysqlconnector'),
- ('sqlite', 'pysqlite'),
- ('oracle', 'zxjdbc'),
- )
-
- eq_(
- testing.db.dialect.returns_unicode_strings,
- expected
- )
-
data = util.u(
"Alors vous imaginez ma surprise, au lever du jour, quand "
"une drôle de petite voix m’a réveillé. "
@@ -1014,6 +1050,15 @@ class UnicodeTest(fixtures.TestBase):
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
+ def test_unicode_warnings_totally_wrong_type(self):
+ u = Unicode()
+ dialect = default.DefaultDialect()
+ dialect.supports_unicode_binds = False
+ uni = u.dialect_impl(dialect).bind_processor(dialect)
+ with expect_warnings(
+ "Unicode type received non-unicode bind param value 5."):
+ eq_(uni(5), 5)
+
def test_unicode_warnings_dialectlevel(self):
unicodedata = self.data
@@ -1045,41 +1090,128 @@ class UnicodeTest(fixtures.TestBase):
unicodedata.encode('ascii', 'ignore').decode()
)
-enum_table = non_native_enum_table = metadata = None
+class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
+ __backend__ = True
+
+ class SomeEnum(object):
+ # Implements PEP 435 in the minimal fashion needed by SQLAlchemy
+ __members__ = OrderedDict()
+
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+ self.__members__[name] = self
+ setattr(self.__class__, name, self)
-class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
+ one = SomeEnum('one', 1)
+ two = SomeEnum('two', 2)
+ three = SomeEnum('three', 3)
@classmethod
- def setup_class(cls):
- global enum_table, non_native_enum_table, metadata
- metadata = MetaData(testing.db)
- enum_table = Table(
+ def define_tables(cls, metadata):
+ Table(
'enum_table', metadata, Column("id", Integer, primary_key=True),
Column('someenum', Enum('one', 'two', 'three', name='myenum'))
)
- non_native_enum_table = Table(
+ Table(
'non_native_enum_table', metadata,
Column("id", Integer, primary_key=True),
Column('someenum', Enum('one', 'two', 'three', native_enum=False)),
+ Column('someotherenum',
+ Enum('one', 'two', 'three',
+ create_constraint=False, native_enum=False)),
)
- metadata.create_all()
+ Table(
+ 'stdlib_enum_table', metadata,
+ Column("id", Integer, primary_key=True),
+ Column('someenum', Enum(cls.SomeEnum))
+ )
- def teardown(self):
- enum_table.delete().execute()
- non_native_enum_table.delete().execute()
+ def test_python_type(self):
+ eq_(types.Enum(self.SomeEnum).python_type, self.SomeEnum)
+
+ def test_pickle_types(self):
+ global SomeEnum
+ SomeEnum = self.SomeEnum
+ for loads, dumps in picklers():
+ column_types = [
+ Column('Enu', Enum('x', 'y', 'z', name="somename")),
+ Column('En2', Enum(self.SomeEnum)),
+ ]
+ for column_type in column_types:
+ meta = MetaData()
+ Table('foo', meta, column_type)
+ loads(dumps(column_type))
+ loads(dumps(meta))
+
+ def test_validators_pep435(self):
+ type_ = Enum(self.SomeEnum)
+
+ bind_processor = type_.bind_processor(testing.db.dialect)
+ eq_(bind_processor('one'), "one")
+ eq_(bind_processor(self.one), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ bind_processor, "foo"
+ )
+
+ result_processor = type_.result_processor(testing.db.dialect, None)
+
+ eq_(result_processor('one'), self.one)
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ result_processor, "foo"
+ )
+
+ literal_processor = type_.literal_processor(testing.db.dialect)
+ eq_(literal_processor("one"), "'one'")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ literal_processor, "foo"
+ )
+
+ def test_validators_plain(self):
+ type_ = Enum("one", "two")
+
+ bind_processor = type_.bind_processor(testing.db.dialect)
+ eq_(bind_processor('one'), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ bind_processor, "foo"
+ )
+
+ result_processor = type_.result_processor(testing.db.dialect, None)
+
+ eq_(result_processor('one'), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ result_processor, "foo"
+ )
+
+ literal_processor = type_.literal_processor(testing.db.dialect)
+ eq_(literal_processor("one"), "'one'")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ literal_processor, "foo"
+ )
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
@testing.fails_on(
'postgresql+zxjdbc',
'zxjdbc fails on ENUM: column "XXX" is of type XXX '
'but expression is of type character varying')
def test_round_trip(self):
+ enum_table = self.tables['enum_table']
+
enum_table.insert().execute([
{'id': 1, 'someenum': 'two'},
{'id': 2, 'someenum': 'two'},
@@ -1095,7 +1227,51 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
]
)
+ def test_null_round_trip(self):
+ enum_table = self.tables.enum_table
+ non_native_enum_table = self.tables.non_native_enum_table
+
+ with testing.db.connect() as conn:
+ conn.execute(enum_table.insert(), {"id": 1, "someenum": None})
+ eq_(conn.scalar(select([enum_table.c.someenum])), None)
+
+ with testing.db.connect() as conn:
+ conn.execute(
+ non_native_enum_table.insert(), {"id": 1, "someenum": None})
+ eq_(conn.scalar(select([non_native_enum_table.c.someenum])), None)
+
+
+ @testing.fails_on(
+ 'mysql',
+ "The CHECK clause is parsed but ignored by all storage engines.")
+ @testing.fails_on(
+ 'mssql', "FIXME: MS-SQL 2005 doesn't honor CHECK ?!?")
+ def test_check_constraint(self):
+ assert_raises(
+ (exc.IntegrityError, exc.ProgrammingError),
+ testing.db.execute,
+ "insert into non_native_enum_table "
+ "(id, someenum) values(1, 'four')")
+
+ def test_skip_check_constraint(self):
+ with testing.db.connect() as conn:
+ conn.execute(
+ "insert into non_native_enum_table "
+ "(id, someotherenum) values(1, 'four')"
+ )
+ eq_(
+ conn.scalar("select someotherenum from non_native_enum_table"),
+ "four")
+ assert_raises_message(
+ LookupError,
+ '"four" is not among the defined enum values',
+ conn.scalar,
+ select([self.tables.non_native_enum_table.c.someotherenum])
+ )
+
def test_non_native_round_trip(self):
+ non_native_enum_table = self.tables['non_native_enum_table']
+
non_native_enum_table.insert().execute([
{'id': 1, 'someenum': 'two'},
{'id': 2, 'someenum': 'two'},
@@ -1103,7 +1279,9 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
])
eq_(
- non_native_enum_table.select().
+ select([
+ non_native_enum_table.c.id,
+ non_native_enum_table.c.someenum]).
order_by(non_native_enum_table.c.id).execute().fetchall(),
[
(1, 'two'),
@@ -1112,6 +1290,25 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
]
)
+ def test_pep435_enum_round_trip(self):
+ stdlib_enum_table = self.tables['stdlib_enum_table']
+
+ stdlib_enum_table.insert().execute([
+ {'id': 1, 'someenum': self.SomeEnum.two},
+ {'id': 2, 'someenum': self.SomeEnum.two},
+ {'id': 3, 'someenum': self.SomeEnum.one},
+ ])
+
+ eq_(
+ stdlib_enum_table.select().
+ order_by(stdlib_enum_table.c.id).execute().fetchall(),
+ [
+ (1, self.SomeEnum.two),
+ (2, self.SomeEnum.two),
+ (3, self.SomeEnum.one),
+ ]
+ )
+
def test_adapt(self):
from sqlalchemy.dialects.postgresql import ENUM
e1 = Enum('one', 'two', 'three', native_enum=False)
@@ -1121,6 +1318,9 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
e1 = Enum('one', 'two', 'three', name='foo', schema='bar')
eq_(e1.adapt(ENUM).name, 'foo')
eq_(e1.adapt(ENUM).schema, 'bar')
+ e1 = Enum(self.SomeEnum)
+ eq_(e1.adapt(ENUM).name, 'someenum')
+ eq_(e1.adapt(ENUM).enums, ['one', 'two', 'three'])
@testing.provide_metadata
def test_create_metadata_bound_no_crash(self):
@@ -1129,29 +1329,19 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
m1.create_all(testing.db)
- @testing.crashes(
- 'mysql', 'Inconsistent behavior across various OS/drivers')
- def test_constraint(self):
- assert_raises(
- exc.DBAPIError, enum_table.insert().execute,
- {'id': 4, 'someenum': 'four'})
-
def test_non_native_constraint_custom_type(self):
class Foob(object):
def __init__(self, name):
self.name = name
- class MyEnum(types.SchemaType, TypeDecorator):
+ class MyEnum(TypeDecorator):
def __init__(self, values):
self.impl = Enum(
*[v.name for v in values], name="myenum",
native_enum=False)
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.name
@@ -1170,12 +1360,10 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
dialect="default"
)
- @testing.fails_on(
- 'mysql',
- "the CHECK constraint doesn't raise an exception for unknown reason")
- def test_non_native_constraint(self):
+ def test_lookup_failure(self):
assert_raises(
- exc.DBAPIError, non_native_enum_table.insert().execute,
+ exc.StatementError,
+ self.tables['non_native_enum_table'].insert().execute,
{'id': 4, 'someenum': 'four'}
)
@@ -1308,6 +1496,143 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
with open(f, mode='rb') as o:
return o.read()
+
+class JSONTest(fixtures.TestBase):
+
+ def setup(self):
+ metadata = MetaData()
+ self.test_table = Table('test_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('test_column', JSON),
+ )
+ self.jsoncol = self.test_table.c.test_column
+
+ self.dialect = default.DefaultDialect()
+ self.dialect._json_serializer = None
+ self.dialect._json_deserializer = None
+
+ def test_bind_serialize_default(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc({"A": [1, 2, 3, True, False]}),
+ '{"A": [1, 2, 3, true, false]}'
+ )
+
+ def test_bind_serialize_None(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(None),
+ 'null'
+ )
+
+ def test_bind_serialize_none_as_null(self):
+ proc = JSON(none_as_null=True)._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(None),
+ None
+ )
+ eq_(
+ proc(null()),
+ None
+ )
+
+ def test_bind_serialize_null(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(null()),
+ None
+ )
+
+ def test_result_deserialize_default(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc('{"A": [1, 2, 3, true, false]}'),
+ {"A": [1, 2, 3, True, False]}
+ )
+
+ def test_result_deserialize_null(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc('null'),
+ None
+ )
+
+ def test_result_deserialize_None(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc(None),
+ None
+ )
+
+
+class ArrayTest(fixtures.TestBase):
+
+ def _myarray_fixture(self):
+ class MyArray(ARRAY):
+ pass
+ return MyArray
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', ARRAY(Integer, dimensions=3))
+ is_(
+ col[5].type._type_affinity, ARRAY
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, ARRAY
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', ARRAY(Integer)),
+ Column('strarr', ARRAY(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', ARRAY(Integer)),
+ Column('strarr', ARRAY(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
+
+ def test_array_getitem_slice_type_dialect_level(self):
+ MyArray = self._myarray_fixture()
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', MyArray(Integer)),
+ Column('strarr', MyArray(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, MyArray)
+ assert isinstance(arrtable.c.strarr[1:3].type, MyArray)
+
+
test_table = meta = MyCustomType = MyTypeDec = None
@@ -1613,6 +1938,34 @@ class ExpressionTest(
assert distinct(test_table.c.data).type == test_table.c.data.type
assert test_table.c.data.distinct().type == test_table.c.data.type
+ def test_detect_coercion_of_builtins(self):
+ @inspection._self_inspects
+ class SomeSQLAThing(object):
+ def __repr__(self):
+ return "some_sqla_thing()"
+
+ class SomeOtherThing(object):
+ pass
+
+ assert_raises_message(
+ exc.ArgumentError,
+ r"Object some_sqla_thing\(\) is not legal as a SQL literal value",
+ lambda: column('a', String) == SomeSQLAThing()
+ )
+
+ is_(
+ bindparam('x', SomeOtherThing()).type,
+ types.NULLTYPE
+ )
+
+ def test_detect_coercion_not_fooled_by_mock(self):
+ m1 = mock.Mock()
+ is_(
+ bindparam('x', m1).type,
+ types.NULLTYPE
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1881,12 +2234,9 @@ class BooleanTest(
def __init__(self, value):
self.value = value
- class MyBool(types.SchemaType, TypeDecorator):
+ class MyBool(TypeDecorator):
impl = Boolean()
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.value
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index 58c86613b..872671008 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -1,9 +1,12 @@
-from sqlalchemy import *
+from sqlalchemy import Integer, String, ForeignKey, and_, or_, func, \
+ literal, update, table, bindparam, column, select, exc
from sqlalchemy import testing
from sqlalchemy.dialects import mysql
from sqlalchemy.engine import default
-from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures
+from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures, \
+ assert_raises_message
from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy import util
class _UpdateFromTestBase(object):
@@ -32,6 +35,11 @@ class _UpdateFromTestBase(object):
test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30)))
+ Table('update_w_default', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('ycol', Integer, key='y'),
+ Column('data', String(30), onupdate=lambda: "hi"))
@classmethod
def fixtures(cls):
@@ -165,6 +173,154 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
table1.c.name: table1.c.name + 'lala',
table1.c.myid: func.do_stuff(table1.c.myid, literal('hoho'))
}
+
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ values=values),
+ 'UPDATE mytable '
+ 'SET '
+ 'myid=do_stuff(mytable.myid, :param_1), '
+ 'name=(mytable.name || :name_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_unconsumed_names_kwargs(self):
+ t = table("t", column("x"), column("y"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: z",
+ t.update().values(x=5, z=5).compile,
+ )
+
+ def test_unconsumed_names_values_dict(self):
+ t = table("t", column("x"), column("y"))
+ t2 = table("t2", column("q"), column("z"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: j",
+ t.update().values(x=5, j=7).values({t2.c.z: 5}).
+ where(t.c.x == t2.c.q).compile,
+ )
+
+ def test_unconsumed_names_kwargs_w_keys(self):
+ t = table("t", column("x"), column("y"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: j",
+ t.update().values(x=5, j=7).compile,
+ column_keys=['j']
+ )
+
+ def test_update_ordered_parameters_1(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + 'lala'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho'))),
+ ]
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ preserve_parameter_order=True,
+ values=values),
+ 'UPDATE mytable '
+ 'SET '
+ 'name=(mytable.name || :name_1), '
+ 'myid=do_stuff(mytable.myid, :param_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_update_ordered_parameters_2(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + 'lala'),
+ ('description', 'some desc'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho')))
+ ]
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ preserve_parameter_order=True).values(values),
+ 'UPDATE mytable '
+ 'SET '
+ 'name=(mytable.name || :name_1), '
+ 'description=:description, '
+ 'myid=do_stuff(mytable.myid, :param_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_update_ordered_parameters_fire_onupdate(self):
+ table = self.tables.update_w_default
+
+ values = [
+ (table.c.y, table.c.x + 5),
+ ('x', 10)
+ ]
+
+ self.assert_compile(
+ table.update(preserve_parameter_order=True).values(values),
+ "UPDATE update_w_default SET ycol=(update_w_default.x + :x_1), "
+ "x=:x, data=:data"
+ )
+
+ def test_update_ordered_parameters_override_onupdate(self):
+ table = self.tables.update_w_default
+
+ values = [
+ (table.c.y, table.c.x + 5),
+ (table.c.data, table.c.x + 10),
+ ('x', 10)
+ ]
+
+ self.assert_compile(
+ table.update(preserve_parameter_order=True).values(values),
+ "UPDATE update_w_default SET ycol=(update_w_default.x + :x_1), "
+ "data=(update_w_default.x + :x_2), x=:x"
+ )
+
+ def test_update_preserve_order_reqs_listtups(self):
+ table1 = self.tables.mytable
+ testing.assert_raises_message(
+ ValueError,
+ "When preserve_parameter_order is True, values\(\) "
+ "only accepts a list of 2-tuples",
+ table1.update(preserve_parameter_order=True).values,
+ {"description": "foo", "name": "bar"}
+ )
+
+ def test_update_ordereddict(self):
+ table1 = self.tables.mytable
+
+ # Confirm that ordered dicts are treated as normal dicts,
+ # columns sorted in table order
+ values = util.OrderedDict((
+ (table1.c.name, table1.c.name + 'lala'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho')))))
+
self.assert_compile(
update(
table1,