diff options
Diffstat (limited to 'test')
98 files changed, 9506 insertions, 2896 deletions
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py index 20c6f0a65..3766abb88 100644 --- a/test/aaa_profiling/test_memusage.py +++ b/test/aaa_profiling/test_memusage.py @@ -47,6 +47,7 @@ def profile_memory(times=50): gc_collect() samples[x] = len(get_objects_skipping_sqlite_issue()) + print("sample gc sizes:", samples) assert len(_sessions) == 0 @@ -307,7 +308,7 @@ class MemUsageTest(EnsureZeroed): finally: metadata.drop_all() - @testing.crashes('mysql+cymysql', 'blocking with cymysql >= 0.6') + @testing.crashes('mysql+cymysql', 'blocking') def test_unicode_warnings(self): metadata = MetaData(testing.db) table1 = Table('mytable', metadata, Column('col1', Integer, @@ -603,6 +604,7 @@ class MemUsageTest(EnsureZeroed): # in pysqlite itself. background at: # http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290 + @testing.crashes('mysql+cymysql', 'blocking') def test_join_cache(self): metadata = MetaData(testing.db) table1 = Table('table1', metadata, Column('id', Integer, diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py index 6d71468b7..2c1e84afb 100644 --- a/test/aaa_profiling/test_orm.py +++ b/test/aaa_profiling/test_orm.py @@ -310,3 +310,63 @@ class DeferOptionsTest(fixtures.MappedTest): *[defer(letter) for letter in ['x', 'y', 'z', 'p', 'q', 'r']]).\ all() + +class AttributeOverheadTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table('parent', metadata, Column('id', Integer, + primary_key=True, + test_needs_autoincrement=True), Column('data', + String(20))) + Table('child', metadata, Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), + Column('data', String(20)), Column('parent_id', + Integer, ForeignKey('parent.id'), nullable=False)) + + @classmethod + def setup_classes(cls): + class Parent(cls.Basic): + pass + + class Child(cls.Basic): + pass + + @classmethod + def setup_mappers(cls): + Child, Parent, parent, child = (cls.classes.Child, + cls.classes.Parent, + cls.tables.parent, + cls.tables.child) + + mapper(Parent, parent, properties={'children': + relationship(Child, backref='parent')}) + mapper(Child, child) + + + def test_attribute_set(self): + Parent, Child = self.classes.Parent, self.classes.Child + p1 = Parent() + c1 = Child() + + @profiling.function_call_count() + def go(): + for i in range(30): + c1.parent = p1 + c1.parent = None + c1.parent = p1 + del c1.parent + go() + + def test_collection_append_remove(self): + Parent, Child = self.classes.Parent, self.classes.Child + p1 = Parent() + children = [Child() for i in range(100)] + + @profiling.function_call_count() + def go(): + for child in children: + p1.children.append(child) + for child in children: + p1.children.remove(child) + go() + diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py index bbd8c4dba..d2f8c2256 100644 --- a/test/aaa_profiling/test_resultset.py +++ b/test/aaa_profiling/test_resultset.py @@ -53,6 +53,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults): c1 in row go() + class ExecutionTest(fixtures.TestBase): def test_minimal_connection_execute(self): diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py index 145f3c594..d850782e0 100644 --- a/test/aaa_profiling/test_zoomark.py +++ b/test/aaa_profiling/test_zoomark.py @@ -30,7 +30,6 @@ class ZooMarkTest(fixtures.TestBase): """ __requires__ = 'cpython', __only_on__ = 'postgresql+psycopg2' - __skip_if__ = lambda : sys.version_info < (2, 5), def test_baseline_0_setup(self): global metadata diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py index ddcad681a..c9d1438aa 100644 --- a/test/aaa_profiling/test_zoomark_orm.py +++ b/test/aaa_profiling/test_zoomark_orm.py @@ -32,7 +32,6 @@ class ZooMarkTest(fixtures.TestBase): __requires__ = 'cpython', __only_on__ = 'postgresql+psycopg2' - __skip_if__ = lambda : sys.version_info < (2, 5), def test_baseline_0_setup(self): global metadata, session diff --git a/test/base/test_events.py b/test/base/test_events.py index 1e0568f27..e985f8d5b 100644 --- a/test/base/test_events.py +++ b/test/base/test_events.py @@ -6,15 +6,12 @@ from sqlalchemy import event, exc from sqlalchemy.testing import fixtures from sqlalchemy.testing.util import gc_collect from sqlalchemy.testing.mock import Mock, call - +from sqlalchemy import testing class EventsTest(fixtures.TestBase): """Test class- and instance-level event registration.""" def setUp(self): - assert 'event_one' not in event._registrars - assert 'event_two' not in event._registrars - class TargetEvents(event.Events): def event_one(self, x, y): pass @@ -30,7 +27,7 @@ class EventsTest(fixtures.TestBase): self.Target = Target def tearDown(self): - event._remove_dispatcher(self.Target.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events) def test_register_class(self): def listen(x, y): @@ -84,7 +81,7 @@ class EventsTest(fixtures.TestBase): eq_(len(self.Target().dispatch.event_one), 2) eq_(len(t1.dispatch.event_one), 3) - def test_append_vs_insert(self): + def test_append_vs_insert_cls(self): def listen_one(x, y): pass @@ -103,6 +100,26 @@ class EventsTest(fixtures.TestBase): [listen_three, listen_one, listen_two] ) + def test_append_vs_insert_instance(self): + def listen_one(x, y): + pass + + def listen_two(x, y): + pass + + def listen_three(x, y): + pass + + target = self.Target() + event.listen(target, "event_one", listen_one) + event.listen(target, "event_one", listen_two) + event.listen(target, "event_one", listen_three, insert=True) + + eq_( + list(target.dispatch.event_one), + [listen_three, listen_one, listen_two] + ) + def test_decorator(self): @event.listens_for(self.Target, "event_one") def listen_one(x, y): @@ -189,7 +206,7 @@ class NamedCallTest(fixtures.TestBase): self.TargetOne = TargetOne def tearDown(self): - event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) def test_kw_accept(self): @@ -261,7 +278,7 @@ class LegacySignatureTest(fixtures.TestBase): self.TargetOne = TargetOne def tearDown(self): - event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) def test_legacy_accept(self): canary = Mock() @@ -294,6 +311,26 @@ class LegacySignatureTest(fixtures.TestBase): canary(x, y, kw) self._test_legacy_accept_kw(inst, canary) + def test_legacy_accept_partial(self): + canary = Mock() + def evt(a, x, y, **kw): + canary(a, x, y, **kw) + from functools import partial + evt_partial = partial(evt, 5) + target = self.TargetOne() + event.listen(target, "event_four", evt_partial) + # can't do legacy accept on a partial; we can't inspect it + assert_raises( + TypeError, + target.dispatch.event_four, 4, 5, 6, 7, foo="bar" + ) + target.dispatch.event_four(4, 5, foo="bar") + eq_( + canary.mock_calls, + [call(5, 4, 5, foo="bar")] + ) + + def _test_legacy_accept_kw(self, target, canary): target.dispatch.event_four(4, 5, 6, 7, foo="bar") @@ -375,7 +412,7 @@ class ClsLevelListenTest(fixtures.TestBase): def tearDown(self): - event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) def setUp(self): class TargetEventsOne(event.Events): @@ -386,7 +423,7 @@ class ClsLevelListenTest(fixtures.TestBase): self.TargetOne = TargetOne def tearDown(self): - event._remove_dispatcher( + event.base._remove_dispatcher( self.TargetOne.__dict__['dispatch'].events) def test_lis_subcalss_lis(self): @@ -473,8 +510,8 @@ class AcceptTargetsTest(fixtures.TestBase): self.TargetTwo = TargetTwo def tearDown(self): - event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) - event._remove_dispatcher(self.TargetTwo.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.TargetTwo.__dict__['dispatch'].events) def test_target_accept(self): """Test that events of the same name are routed to the correct @@ -543,7 +580,7 @@ class CustomTargetsTest(fixtures.TestBase): self.Target = Target def tearDown(self): - event._remove_dispatcher(self.Target.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events) def test_indirect(self): def listen(x, y): @@ -593,14 +630,14 @@ class ListenOverrideTest(fixtures.TestBase): def setUp(self): class TargetEvents(event.Events): @classmethod - def _listen(cls, target, identifier, fn, add=False): + def _listen(cls, event_key, add=False): + fn = event_key.fn if add: def adapt(x, y): fn(x + y) - else: - adapt = fn + event_key = event_key.with_wrapper(adapt) - event.Events._listen(target, identifier, adapt) + event_key.base_listen() def event_one(self, x, y): pass @@ -610,7 +647,7 @@ class ListenOverrideTest(fixtures.TestBase): self.Target = Target def tearDown(self): - event._remove_dispatcher(self.Target.__dict__['dispatch'].events) + event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events) def test_listen_override(self): listen_one = Mock() @@ -700,7 +737,7 @@ class JoinTest(fixtures.TestBase): for cls in (self.TargetElement, self.TargetFactory, self.BaseTarget): if 'dispatch' in cls.__dict__: - event._remove_dispatcher(cls.__dict__['dispatch'].events) + event.base._remove_dispatcher(cls.__dict__['dispatch'].events) def test_neither(self): element = self.TargetFactory().create() @@ -842,13 +879,19 @@ class JoinTest(fixtures.TestBase): element.run_event(2) element.run_event(3) - # c1 gets no events due to _JoinedListener - # fixing the "parent" at construction time. - # this can be changed to be "live" at the cost - # of performance. + # if _JoinedListener fixes .listeners + # at construction time, then we don't get + # the new listeners. + #eq_(l1.mock_calls, []) + + # alternatively, if _JoinedListener shares the list + # using a @property, then we get them, at the arguable + # expense of the extra method call to access the .listeners + # collection eq_( - l1.mock_calls, [] + l1.mock_calls, [call(element, 2), call(element, 3)] ) + eq_( l2.mock_calls, [call(element, 1), call(element, 2), call(element, 3)] @@ -892,3 +935,160 @@ class JoinTest(fixtures.TestBase): l1.mock_calls, [call(element, 1), call(element, 2), call(element, 3)] ) + +class RemovalTest(fixtures.TestBase): + def _fixture(self): + class TargetEvents(event.Events): + def event_one(self, x, y): + pass + + def event_two(self, x): + pass + + def event_three(self, x): + pass + + class Target(object): + dispatch = event.dispatcher(TargetEvents) + return Target + + def test_clslevel(self): + Target = self._fixture() + + m1 = Mock() + + event.listen(Target, "event_two", m1) + + t1 = Target() + t1.dispatch.event_two("x") + + event.remove(Target, "event_two", m1) + + t1.dispatch.event_two("y") + + eq_(m1.mock_calls, [call("x")]) + + def test_clslevel_subclass(self): + Target = self._fixture() + class SubTarget(Target): + pass + + m1 = Mock() + + event.listen(Target, "event_two", m1) + + t1 = SubTarget() + t1.dispatch.event_two("x") + + event.remove(Target, "event_two", m1) + + t1.dispatch.event_two("y") + + eq_(m1.mock_calls, [call("x")]) + + def test_instance(self): + Target = self._fixture() + + class Foo(object): + def __init__(self): + self.mock = Mock() + + def evt(self, arg): + self.mock(arg) + + f1 = Foo() + f2 = Foo() + + event.listen(Target, "event_one", f1.evt) + event.listen(Target, "event_one", f2.evt) + + t1 = Target() + t1.dispatch.event_one("x") + + event.remove(Target, "event_one", f1.evt) + + t1.dispatch.event_one("y") + + eq_(f1.mock.mock_calls, [call("x")]) + eq_(f2.mock.mock_calls, [call("x"), call("y")]) + + def test_propagate(self): + Target = self._fixture() + + m1 = Mock() + + t1 = Target() + t2 = Target() + + event.listen(t1, "event_one", m1, propagate=True) + event.listen(t1, "event_two", m1, propagate=False) + + t2.dispatch._update(t1.dispatch) + + t1.dispatch.event_one("t1e1x") + t1.dispatch.event_two("t1e2x") + t2.dispatch.event_one("t2e1x") + t2.dispatch.event_two("t2e2x") + + event.remove(t1, "event_one", m1) + event.remove(t1, "event_two", m1) + + t1.dispatch.event_one("t1e1y") + t1.dispatch.event_two("t1e2y") + t2.dispatch.event_one("t2e1y") + t2.dispatch.event_two("t2e2y") + + eq_(m1.mock_calls, + [call('t1e1x'), call('t1e2x'), + call('t2e1x')]) + + @testing.requires.predictable_gc + def test_listener_collection_removed_cleanup(self): + from sqlalchemy.event import registry + + Target = self._fixture() + + m1 = Mock() + + t1 = Target() + + event.listen(t1, "event_one", m1) + + key = (id(t1), "event_one", id(m1)) + + assert key in registry._key_to_collection + collection_ref = list(registry._key_to_collection[key])[0] + assert collection_ref in registry._collection_to_key + + t1.dispatch.event_one("t1") + + del t1 + + gc_collect() + + assert key not in registry._key_to_collection + assert collection_ref not in registry._collection_to_key + + def test_remove_not_listened(self): + Target = self._fixture() + + m1 = Mock() + + t1 = Target() + + event.listen(t1, "event_one", m1, propagate=True) + event.listen(t1, "event_three", m1) + + event.remove(t1, "event_one", m1) + assert_raises_message( + exc.InvalidRequestError, + r"No listeners found for event <.*Target.*> / 'event_two' / <Mock.*> ", + event.remove, t1, "event_two", m1 + ) + + event.remove(t1, "event_three", m1) + + + + + diff --git a/test/base/test_utils.py b/test/base/test_utils.py index aefc6d421..86e4b190a 100644 --- a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -1,10 +1,10 @@ import copy -from sqlalchemy import util, sql, exc +from sqlalchemy import util, sql, exc, testing from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures from sqlalchemy.testing import eq_, is_, ne_, fails_if -from sqlalchemy.testing.util import picklers -from sqlalchemy.util import classproperty +from sqlalchemy.testing.util import picklers, gc_collect +from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec class KeyedTupleTest(): @@ -115,6 +115,36 @@ class KeyedTupleTest(): keyed_tuple[0] = 100 assert_raises(TypeError, should_raise) +class WeakSequenceTest(fixtures.TestBase): + @testing.requires.predictable_gc + def test_cleanout_elements(self): + class Foo(object): + pass + f1, f2, f3 = Foo(), Foo(), Foo() + w = WeakSequence([f1, f2, f3]) + eq_(len(w), 3) + eq_(len(w._storage), 3) + del f2 + gc_collect() + eq_(len(w), 2) + eq_(len(w._storage), 2) + + @testing.requires.predictable_gc + def test_cleanout_appended(self): + class Foo(object): + pass + f1, f2, f3 = Foo(), Foo(), Foo() + w = WeakSequence() + w.append(f1) + w.append(f2) + w.append(f3) + eq_(len(w), 3) + eq_(len(w._storage), 3) + del f2 + gc_collect() + eq_(len(w), 2) + eq_(len(w._storage), 2) + class OrderedDictTest(fixtures.TestBase): @@ -1154,6 +1184,33 @@ class ArgInspectionTest(fixtures.TestBase): test(f3) test(f4) + def test_callable_argspec_fn(self): + def foo(x, y, **kw): + pass + eq_( + get_callable_argspec(foo), + (['x', 'y'], None, 'kw', None) + ) + + def test_callable_argspec_method(self): + class Foo(object): + def foo(self, x, y, **kw): + pass + eq_( + get_callable_argspec(Foo.foo), + (['self', 'x', 'y'], None, 'kw', None) + ) + + def test_callable_argspec_partial(self): + from functools import partial + def foo(x, y, z, **kw): + pass + bar = partial(foo, 5) + + assert_raises( + ValueError, + get_callable_argspec, bar + ) class SymbolTest(fixtures.TestBase): @@ -1389,6 +1446,55 @@ class GenericReprTest(fixtures.TestBase): "Foo(b=5, d=7)" ) + def test_multi_kw(self): + class Foo(object): + def __init__(self, a, b, c=3, d=4): + self.a = a + self.b = b + self.c = c + self.d = d + class Bar(Foo): + def __init__(self, e, f, g=5, **kw): + self.e = e + self.f = f + self.g = g + super(Bar, self).__init__(**kw) + + eq_( + util.generic_repr( + Bar('e', 'f', g=7, a=6, b=5, d=9), + to_inspect=[Bar, Foo] + ), + "Bar('e', 'f', g=7, a=6, b=5, d=9)" + ) + + eq_( + util.generic_repr( + Bar('e', 'f', a=6, b=5), + to_inspect=[Bar, Foo] + ), + "Bar('e', 'f', a=6, b=5)" + ) + + def test_multi_kw_repeated(self): + class Foo(object): + def __init__(self, a=1, b=2): + self.a = a + self.b = b + class Bar(Foo): + def __init__(self, b=3, c=4, **kw): + self.c = c + super(Bar, self).__init__(b=b, **kw) + + eq_( + util.generic_repr( + Bar(a='a', b='b', c='c'), + to_inspect=[Bar, Foo] + ), + "Bar(b='b', c='c', a='a')" + ) + + def test_discard_vargs(self): class Foo(object): def __init__(self, a, b, *args): @@ -1586,3 +1692,5 @@ class TestClassProperty(fixtures.TestBase): return d eq_(B.something, {'foo': 1, 'bazz': 2}) + + diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index 87037c6a4..f12ab0330 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -510,6 +510,29 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))" ) + def test_table_pkc_clustering(self): + metadata = MetaData() + tbl = Table('test', metadata, + Column('x', Integer, autoincrement=False), + Column('y', Integer, autoincrement=False), + PrimaryKeyConstraint("x", "y", mssql_clustered=True)) + self.assert_compile(schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, " + "PRIMARY KEY CLUSTERED (x, y))" + ) + + def test_table_uc_clustering(self): + metadata = MetaData() + tbl = Table('test', metadata, + Column('x', Integer, autoincrement=False), + Column('y', Integer, autoincrement=False), + PrimaryKeyConstraint("x"), + UniqueConstraint("y", mssql_clustered=True)) + self.assert_compile(schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, " + "PRIMARY KEY (x), UNIQUE CLUSTERED (y))" + ) + def test_index_clustering(self): metadata = MetaData() tbl = Table('test', metadata, @@ -528,6 +551,27 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "CREATE INDEX foo ON test (x DESC, y)" ) + def test_create_index_expr(self): + m = MetaData() + t1 = Table('foo', m, + Column('x', Integer) + ) + self.assert_compile( + schema.CreateIndex(Index("bar", t1.c.x > 5)), + "CREATE INDEX bar ON foo (x > 5)" + ) + + def test_drop_index_w_schema(self): + m = MetaData() + t1 = Table('foo', m, + Column('x', Integer), + schema='bar' + ) + self.assert_compile( + schema.DropIndex(Index("idx_foo", t1.c.x)), + "DROP INDEX idx_foo ON bar.foo" + ) + def test_index_extra_include_1(self): metadata = MetaData() tbl = Table('test', metadata, diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py index 2834f35ec..c07f30040 100644 --- a/test/dialect/mssql/test_engine.py +++ b/test/dialect/mssql/test_engine.py @@ -131,10 +131,11 @@ class ParseConnectTest(fixtures.TestBase): for error in [ 'Adaptive Server connection timed out', + 'Net-Lib error during Connection reset by peer', 'message 20003', - "Error 10054", - "Not connected to any MS SQL server", - "Connection is closed" + 'Error 10054', + 'Not connected to any MS SQL server', + 'Connection is closed' ]: eq_(dialect.is_disconnect(error, None, None), True) diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py index bff737145..6a12744a7 100644 --- a/test/dialect/mssql/test_query.py +++ b/test/dialect/mssql/test_query.py @@ -232,9 +232,10 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): con.execute("""drop trigger paj""") meta.drop_all() + @testing.fails_on_everything_except('mssql+pyodbc', 'pyodbc-specific feature') @testing.provide_metadata def test_disable_scope_identity(self): - engine = engines.testing_engine(options={"use_scope_identity":False}) + engine = engines.testing_engine(options={"use_scope_identity": False}) metadata = self.metadata metadata.bind = engine t1 = Table('t1', metadata, diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py index a77a25cc4..45f8405c8 100644 --- a/test/dialect/mysql/test_compiler.py +++ b/test/dialect/mysql/test_compiler.py @@ -6,6 +6,7 @@ from sqlalchemy import sql, exc, schema, types as sqltypes from sqlalchemy.dialects.mysql import base as mysql from sqlalchemy.testing import fixtures, AssertsCompiledSQL from sqlalchemy import testing +from sqlalchemy.sql import table, column class CompileTest(fixtures.TestBase, AssertsCompiledSQL): @@ -94,19 +95,57 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "CREATE TABLE testtbl (data VARCHAR(255), " "PRIMARY KEY (data) USING btree)") - def test_skip_deferrable_kw(self): + def test_create_index_expr(self): + m = MetaData() + t1 = Table('foo', m, + Column('x', Integer) + ) + self.assert_compile( + schema.CreateIndex(Index("bar", t1.c.x > 5)), + "CREATE INDEX bar ON foo (x > 5)" + ) + + def test_deferrable_initially_kw_not_ignored(self): m = MetaData() t1 = Table('t1', m, Column('id', Integer, primary_key=True)) t2 = Table('t2', m, Column('id', Integer, - ForeignKey('t1.id', deferrable=True), + ForeignKey('t1.id', deferrable=True, initially="XYZ"), primary_key=True)) self.assert_compile( schema.CreateTable(t2), "CREATE TABLE t2 (id INTEGER NOT NULL, " - "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id))" + "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id) DEFERRABLE INITIALLY XYZ)" ) + def test_match_kw_raises(self): + m = MetaData() + t1 = Table('t1', m, Column('id', Integer, primary_key=True)) + t2 = Table('t2', m, Column('id', Integer, + ForeignKey('t1.id', match="XYZ"), + primary_key=True)) + + assert_raises_message( + exc.CompileError, + "MySQL ignores the 'MATCH' keyword while at the same time causes " + "ON UPDATE/ON DELETE clauses to be ignored.", + schema.CreateTable(t2).compile, dialect=mysql.dialect() + ) + + def test_for_update(self): + table1 = table('mytable', + column('myid'), column('name'), column('description')) + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %s FOR UPDATE") + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(read=True), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %s LOCK IN SHARE MODE") + class SQLTest(fixtures.TestBase, AssertsCompiledSQL): """Tests MySQL-dialect specific compilation.""" @@ -302,8 +341,10 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL): (VARCHAR, "CAST(t.col AS CHAR)"), (NCHAR, "CAST(t.col AS CHAR)"), (CHAR, "CAST(t.col AS CHAR)"), + (m.CHAR(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"), (CLOB, "CAST(t.col AS CHAR)"), (TEXT, "CAST(t.col AS CHAR)"), + (m.TEXT(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"), (String(32), "CAST(t.col AS CHAR(32))"), (Unicode(32), "CAST(t.col AS CHAR(32))"), (CHAR(32), "CAST(t.col AS CHAR(32))"), diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py index 62bdfc81b..2ff17f0f7 100644 --- a/test/dialect/mysql/test_dialect.py +++ b/test/dialect/mysql/test_dialect.py @@ -9,12 +9,17 @@ from sqlalchemy.testing import engines import datetime class DialectTest(fixtures.TestBase): - __only_on__ = 'mysql' + def test_ssl_arguments_mysqldb(self): + from sqlalchemy.dialects.mysql import mysqldb + dialect = mysqldb.dialect() + self._test_ssl_arguments(dialect) - @testing.only_on(['mysql+mysqldb', 'mysql+oursql'], - 'requires particular SSL arguments') - def test_ssl_arguments(self): - dialect = testing.db.dialect + def test_ssl_arguments_oursql(self): + from sqlalchemy.dialects.mysql import oursql + dialect = oursql.dialect() + self._test_ssl_arguments(dialect) + + def _test_ssl_arguments(self, dialect): kwarg = dialect.create_connect_args( make_url("mysql://scott:tiger@localhost:3306/test" "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem") @@ -33,6 +38,50 @@ class DialectTest(fixtures.TestBase): } ) + def test_mysqlconnector_buffered_arg(self): + from sqlalchemy.dialects.mysql import mysqlconnector + dialect = mysqlconnector.dialect() + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db?buffered=true") + )[1] + eq_(kw['buffered'], True) + + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db?buffered=false") + )[1] + eq_(kw['buffered'], False) + + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db") + )[1] + eq_(kw['buffered'], True) + + def test_mysqlconnector_raise_on_warnings_arg(self): + from sqlalchemy.dialects.mysql import mysqlconnector + dialect = mysqlconnector.dialect() + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true") + )[1] + eq_(kw['raise_on_warnings'], True) + + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false") + )[1] + eq_(kw['raise_on_warnings'], False) + + kw = dialect.create_connect_args( + make_url("mysql+mysqlconnector://u:p@host/db") + )[1] + eq_(kw['raise_on_warnings'], True) + + @testing.only_on('mysql') + def test_random_arg(self): + dialect = testing.db.dialect + kw = dialect.create_connect_args( + make_url("mysql://u:p@host/db?foo=true") + )[1] + eq_(kw['foo'], "true") + class SQLModeDetectionTest(fixtures.TestBase): __only_on__ = 'mysql' diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index b9e347d41..7494eaf43 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -140,33 +140,33 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults): @testing.uses_deprecated('Manually quoting ENUM value literals') def test_type_reflection(self): # (ask_for, roundtripped_as_if_different) - specs = [( String(1), mysql.MSString(1), ), - ( String(3), mysql.MSString(3), ), - ( Text(), mysql.MSText(), ), - ( Unicode(1), mysql.MSString(1), ), - ( Unicode(3), mysql.MSString(3), ), - ( UnicodeText(), mysql.MSText(), ), - ( mysql.MSChar(1), ), - ( mysql.MSChar(3), ), - ( NCHAR(2), mysql.MSChar(2), ), - ( mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only - ( mysql.MSNVarChar(22), mysql.MSString(22), ), - ( SmallInteger(), mysql.MSSmallInteger(), ), - ( SmallInteger(), mysql.MSSmallInteger(4), ), - ( mysql.MSSmallInteger(), ), - ( mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ), - ( mysql.MSMediumInteger(), mysql.MSMediumInteger(), ), - ( mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ), - ( LargeBinary(3), mysql.TINYBLOB(), ), - ( LargeBinary(), mysql.BLOB() ), - ( mysql.MSBinary(3), mysql.MSBinary(3), ), - ( mysql.MSVarBinary(3),), - ( mysql.MSTinyBlob(),), - ( mysql.MSBlob(),), - ( mysql.MSBlob(1234), mysql.MSBlob()), - ( mysql.MSMediumBlob(),), - ( mysql.MSLongBlob(),), - ( mysql.ENUM("''","'fleem'"), ), + specs = [(String(1), mysql.MSString(1), ), + (String(3), mysql.MSString(3), ), + (Text(), mysql.MSText(), ), + (Unicode(1), mysql.MSString(1), ), + (Unicode(3), mysql.MSString(3), ), + (UnicodeText(), mysql.MSText(), ), + (mysql.MSChar(1), ), + (mysql.MSChar(3), ), + (NCHAR(2), mysql.MSChar(2), ), + (mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only + (mysql.MSNVarChar(22), mysql.MSString(22), ), + (SmallInteger(), mysql.MSSmallInteger(), ), + (SmallInteger(), mysql.MSSmallInteger(4), ), + (mysql.MSSmallInteger(), ), + (mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ), + (mysql.MSMediumInteger(), mysql.MSMediumInteger(), ), + (mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ), + (LargeBinary(3), mysql.TINYBLOB(), ), + (LargeBinary(), mysql.BLOB() ), + (mysql.MSBinary(3), mysql.MSBinary(3), ), + (mysql.MSVarBinary(3),), + (mysql.MSTinyBlob(),), + (mysql.MSBlob(),), + (mysql.MSBlob(1234), mysql.MSBlob()), + (mysql.MSMediumBlob(),), + (mysql.MSLongBlob(),), + (mysql.ENUM("''","'fleem'"), ), ] columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)] @@ -298,3 +298,22 @@ class RawReflectionTest(fixtures.TestBase): assert regex.match(' PRIMARY KEY USING BTREE (`id`)') assert regex.match(' PRIMARY KEY (`id`) USING BTREE') + def test_fk_reflection(self): + regex = self.parser._re_constraint + + m = regex.match(' CONSTRAINT `addresses_user_id_fkey` ' + 'FOREIGN KEY (`user_id`) ' + 'REFERENCES `users` (`id`) ' + 'ON DELETE CASCADE ON UPDATE CASCADE') + eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`', + '`users`', '`id`', None, 'CASCADE', 'CASCADE')) + + + m = regex.match(' CONSTRAINT `addresses_user_id_fkey` ' + 'FOREIGN KEY (`user_id`) ' + 'REFERENCES `users` (`id`) ' + 'ON DELETE CASCADE ON UPDATE SET NULL') + eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`', + '`users`', '`id`', None, 'CASCADE', 'SET NULL')) + + diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py index b918abe25..acf9c1e2f 100644 --- a/test/dialect/mysql/test_types.py +++ b/test/dialect/mysql/test_types.py @@ -4,12 +4,13 @@ from sqlalchemy.testing import eq_, assert_raises from sqlalchemy import * from sqlalchemy import sql, exc, schema from sqlalchemy.util import u +from sqlalchemy import util from sqlalchemy.dialects.mysql import base as mysql from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults from sqlalchemy import testing from sqlalchemy.testing.engines import utf8_engine import datetime - +import decimal class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): "Test MySQL column types" @@ -141,10 +142,36 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): ] for type_, args, kw, res in columns: + type_inst = type_(*args, **kw) self.assert_compile( - type_(*args, **kw), + type_inst, res ) + # test that repr() copies out all arguments + self.assert_compile( + eval("mysql.%r" % type_inst), + res + ) + + @testing.only_if('mysql') + @testing.provide_metadata + def test_precision_float_roundtrip(self): + t = Table('t', self.metadata, + Column('scale_value', mysql.DOUBLE( + precision=15, scale=12, asdecimal=True)), + Column('unscale_value', mysql.DOUBLE( + decimal_return_scale=12, asdecimal=True)) + ) + t.create(testing.db) + testing.db.execute( + t.insert(), scale_value=45.768392065789, + unscale_value=45.768392065789 + ) + result = testing.db.scalar(select([t.c.scale_value])) + eq_(result, decimal.Decimal("45.768392065789")) + + result = testing.db.scalar(select([t.c.unscale_value])) + eq_(result, decimal.Decimal("45.768392065789")) @testing.exclude('mysql', '<', (4, 1, 1), 'no charset support') def test_charset(self): @@ -212,14 +239,22 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): (mysql.ENUM, ["foo", "bar"], {'unicode':True}, '''ENUM('foo','bar') UNICODE'''), - (String, [20], {"collation":"utf8"}, 'VARCHAR(20) COLLATE utf8') + (String, [20], {"collation": "utf8"}, 'VARCHAR(20) COLLATE utf8') ] for type_, args, kw, res in columns: + type_inst = type_(*args, **kw) self.assert_compile( - type_(*args, **kw), + type_inst, + res + ) + # test that repr() copies out all arguments + self.assert_compile( + eval("mysql.%r" % type_inst) + if type_ is not String + else eval("%r" % type_inst), res ) @@ -229,15 +264,23 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): def test_charset_collate_table(self): t = Table('foo', self.metadata, Column('id', Integer), + Column('data', UnicodeText), mysql_default_charset='utf8', - mysql_collate='utf8_unicode_ci' + mysql_collate='utf8_bin' ) t.create() m2 = MetaData(testing.db) t2 = Table('foo', m2, autoload=True) - eq_(t2.kwargs['mysql_collate'], 'utf8_unicode_ci') + eq_(t2.kwargs['mysql_collate'], 'utf8_bin') eq_(t2.kwargs['mysql_default charset'], 'utf8') + # test [ticket:2906] + # in order to test the condition here, need to use + # MySQLdb 1.2.3 and also need to pass either use_unicode=1 + # or charset=utf8 to the URL. + t.insert().execute(id=1, data=u('some text')) + assert isinstance(testing.db.scalar(select([t.c.data])), util.text_type) + def test_bit_50(self): """Exercise BIT types on 5.0+ (not valid for all engine types)""" @@ -250,7 +293,9 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): @testing.only_if('mysql') @testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature') - @testing.fails_on('mysql+oursql', 'some round trips fail, oursql bug ?') + @testing.fails_if( + lambda: testing.against("mysql+oursql") and util.py3k, + 'some round trips fail, oursql bug ?') @testing.provide_metadata def test_bit_50_roundtrip(self): bit_table = Table('mysql_bits', self.metadata, @@ -474,72 +519,24 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): self.assert_(colspec(table.c.y1).startswith('y1 YEAR')) eq_(colspec(table.c.y5), 'y5 YEAR(4)') - @testing.only_if('mysql') - @testing.provide_metadata - def test_set(self): - """Exercise the SET type.""" - set_table = Table('mysql_set', self.metadata, - Column('s1', - mysql.MSSet("'dq'", "'sq'")), Column('s2', - mysql.MSSet("'a'")), Column('s3', - mysql.MSSet("'5'", "'7'", "'9'"))) - eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')") - eq_(colspec(set_table.c.s2), "s2 SET('a')") - eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')") - set_table.create() - reflected = Table('mysql_set', MetaData(testing.db), - autoload=True) - for table in set_table, reflected: - - def roundtrip(store, expected=None): - expected = expected or store - table.insert(store).execute() - row = table.select().execute().first() - self.assert_(list(row) == expected) - table.delete().execute() - - roundtrip([None, None, None], [None] * 3) - roundtrip(['', '', ''], [set([''])] * 3) - roundtrip([set(['dq']), set(['a']), set(['5'])]) - roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']), - set(['5'])]) - roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5' - ])]) - roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7' - ])]) - set_table.insert().execute({'s3': set(['5'])}, - {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])}, - {'s3': set(['7', '9'])}) - - # NOTE: the string sent to MySQL here is sensitive to ordering. - # for some reason the set ordering is always "5, 7" when we test on - # MySQLdb but in Py3K this is not guaranteed. So basically our - # SET type doesn't do ordering correctly (not sure how it can, - # as we don't know how the SET was configured in the first place.) - rows = select([set_table.c.s3], - set_table.c.s3.in_([set(['5']), ['5', '7']]) - ).execute().fetchall() - found = set([frozenset(row[0]) for row in rows]) - eq_(found, set([frozenset(['5']), frozenset(['5', '7'])])) - -class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): +class EnumSetTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): __only_on__ = 'mysql' __dialect__ = mysql.dialect() - @testing.uses_deprecated('Manually quoting ENUM value literals') @testing.provide_metadata def test_enum(self): """Exercise the ENUM type.""" + with testing.expect_deprecated('Manually quoting ENUM value literals'): + e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'") + enum_table = Table('mysql_enum', self.metadata, - Column('e1', mysql.ENUM("'a'", "'b'")), - Column('e2', mysql.ENUM("'a'", "'b'"), - nullable=False), - Column('e2generic', Enum("a", "b"), - nullable=False), + Column('e1', e1), + Column('e2', e2, nullable=False), + Column('e2generic', Enum("a", "b"), nullable=False), Column('e3', mysql.ENUM("'a'", "'b'", strict=True)), Column('e4', mysql.ENUM("'a'", "'b'", strict=True), nullable=False), @@ -587,6 +584,106 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): eq_(res, expected) + @testing.provide_metadata + def test_set(self): + + with testing.expect_deprecated('Manually quoting SET value literals'): + e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'") + + set_table = Table('mysql_set', self.metadata, + Column('e1', e1), + Column('e2', e2, nullable=False), + Column('e3', mysql.SET("a", "b")), + Column('e4', mysql.SET("'a'", "b")), + Column('e5', mysql.SET("'a'", "'b'", quoting="quoted")) + ) + + eq_(colspec(set_table.c.e1), + "e1 SET('a','b')") + eq_(colspec(set_table.c.e2), + "e2 SET('a','b') NOT NULL") + eq_(colspec(set_table.c.e3), + "e3 SET('a','b')") + eq_(colspec(set_table.c.e4), + "e4 SET('''a''','b')") + eq_(colspec(set_table.c.e5), + "e5 SET('a','b')") + set_table.create() + + assert_raises(exc.DBAPIError, set_table.insert().execute, + e1=None, e2=None, e3=None, e4=None) + + if testing.against("+oursql"): + assert_raises(exc.StatementError, set_table.insert().execute, + e1='c', e2='c', e3='c', e4='c') + + set_table.insert().execute(e1='a', e2='a', e3='a', e4="'a'", e5="a,b") + set_table.insert().execute(e1='b', e2='b', e3='b', e4='b', e5="a,b") + + res = set_table.select().execute().fetchall() + + if testing.against("+oursql"): + expected = [ + # 1st row with all c's, data truncated + (set(['']), set(['']), set(['']), set(['']), None), + ] + else: + expected = [] + + expected.extend([ + (set(['a']), set(['a']), set(['a']), set(["'a'"]), set(['a', 'b'])), + (set(['b']), set(['b']), set(['b']), set(['b']), set(['a', 'b'])) + ]) + + eq_(res, expected) + + @testing.provide_metadata + def test_set_roundtrip_plus_reflection(self): + set_table = Table('mysql_set', self.metadata, + Column('s1', + mysql.SET("dq", "sq")), + Column('s2', mysql.SET("a")), + Column('s3', mysql.SET("5", "7", "9"))) + + eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')") + eq_(colspec(set_table.c.s2), "s2 SET('a')") + eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')") + set_table.create() + reflected = Table('mysql_set', MetaData(testing.db), + autoload=True) + for table in set_table, reflected: + + def roundtrip(store, expected=None): + expected = expected or store + table.insert(store).execute() + row = table.select().execute().first() + self.assert_(list(row) == expected) + table.delete().execute() + + roundtrip([None, None, None], [None] * 3) + roundtrip(['', '', ''], [set([''])] * 3) + roundtrip([set(['dq']), set(['a']), set(['5'])]) + roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']), + set(['5'])]) + roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5' + ])]) + roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7' + ])]) + set_table.insert().execute({'s3': set(['5'])}, + {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])}, + {'s3': set(['7', '9'])}) + + # NOTE: the string sent to MySQL here is sensitive to ordering. + # for some reason the set ordering is always "5, 7" when we test on + # MySQLdb but in Py3K this is not guaranteed. So basically our + # SET type doesn't do ordering correctly (not sure how it can, + # as we don't know how the SET was configured in the first place.) + rows = select([set_table.c.s3], + set_table.c.s3.in_([set(['5']), ['5', '7']]) + ).execute().fetchall() + found = set([frozenset(row[0]) for row in rows]) + eq_(found, set([frozenset(['5']), frozenset(['5', '7'])])) + def test_unicode_enum(self): unicode_engine = utf8_engine() metadata = MetaData(unicode_engine) @@ -634,38 +731,64 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): "VARCHAR(1), CHECK (somecolumn IN ('x', " "'y', 'z')))") + @testing.provide_metadata @testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''") - @testing.uses_deprecated('Manually quoting ENUM value literals') def test_enum_parse(self): - """More exercises for the ENUM type.""" - # MySQL 3.23 can't handle an ENUM of ''.... - - enum_table = Table('mysql_enum', MetaData(testing.db), - Column('e1', mysql.ENUM("'a'")), - Column('e2', mysql.ENUM("''")), - Column('e3', mysql.ENUM('a')), - Column('e4', mysql.ENUM('')), - Column('e5', mysql.ENUM("'a'", "''")), - Column('e6', mysql.ENUM("''", "'a'")), - Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''"))) + with testing.expect_deprecated('Manually quoting ENUM value literals'): + enum_table = Table('mysql_enum', self.metadata, + Column('e1', mysql.ENUM("'a'")), + Column('e2', mysql.ENUM("''")), + Column('e3', mysql.ENUM('a')), + Column('e4', mysql.ENUM('')), + Column('e5', mysql.ENUM("'a'", "''")), + Column('e6', mysql.ENUM("''", "'a'")), + Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''"))) for col in enum_table.c: self.assert_(repr(col)) - try: - enum_table.create() - reflected = Table('mysql_enum', MetaData(testing.db), - autoload=True) - for t in enum_table, reflected: - eq_(t.c.e1.type.enums, ("a",)) - eq_(t.c.e2.type.enums, ("",)) - eq_(t.c.e3.type.enums, ("a",)) - eq_(t.c.e4.type.enums, ("",)) - eq_(t.c.e5.type.enums, ("a", "")) - eq_(t.c.e6.type.enums, ("", "a")) - eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'")) - finally: - enum_table.drop() + + enum_table.create() + reflected = Table('mysql_enum', MetaData(testing.db), + autoload=True) + for t in enum_table, reflected: + eq_(t.c.e1.type.enums, ("a",)) + eq_(t.c.e2.type.enums, ("",)) + eq_(t.c.e3.type.enums, ("a",)) + eq_(t.c.e4.type.enums, ("",)) + eq_(t.c.e5.type.enums, ("a", "")) + eq_(t.c.e6.type.enums, ("", "a")) + eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'")) + + @testing.provide_metadata + @testing.exclude('mysql', '<', (5,)) + def test_set_parse(self): + with testing.expect_deprecated('Manually quoting SET value literals'): + set_table = Table('mysql_set', self.metadata, + Column('e1', mysql.SET("'a'")), + Column('e2', mysql.SET("''")), + Column('e3', mysql.SET('a')), + Column('e4', mysql.SET('')), + Column('e5', mysql.SET("'a'", "''")), + Column('e6', mysql.SET("''", "'a'")), + Column('e7', mysql.SET("''", "'''a'''", "'b''b'", "''''"))) + + for col in set_table.c: + self.assert_(repr(col)) + + set_table.create() + + # don't want any warnings on reflection + reflected = Table('mysql_set', MetaData(testing.db), + autoload=True) + for t in set_table, reflected: + eq_(t.c.e1.type.values, ("a",)) + eq_(t.c.e2.type.values, ("",)) + eq_(t.c.e3.type.values, ("a",)) + eq_(t.c.e4.type.values, ("",)) + eq_(t.c.e5.type.values, ("a", "")) + eq_(t.c.e6.type.values, ("", "a")) + eq_(t.c.e7.type.values, ("", "'a'", "b'b", "'")) def colspec(c): return testing.db.dialect.ddl_compiler( diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index 11661b11f..e64afb186 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -16,6 +16,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql from sqlalchemy.dialects.postgresql import TSRANGE from sqlalchemy.orm import mapper, aliased, Session from sqlalchemy.sql import table, column, operators +from sqlalchemy.util import u class SequenceTest(fixtures.TestBase, AssertsCompiledSQL): @@ -106,6 +107,45 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'AS length_1', dialect=dialect) + def test_create_drop_enum(self): + # test escaping and unicode within CREATE TYPE for ENUM + typ = postgresql.ENUM( + "val1", "val2", "val's 3", u('méil'), name="myname") + self.assert_compile(postgresql.CreateEnumType(typ), + u("CREATE TYPE myname AS ENUM ('val1', 'val2', 'val''s 3', 'méil')") + ) + + typ = postgresql.ENUM( + "val1", "val2", "val's 3", name="PleaseQuoteMe") + self.assert_compile(postgresql.CreateEnumType(typ), + "CREATE TYPE \"PleaseQuoteMe\" AS ENUM " + "('val1', 'val2', 'val''s 3')" + ) + + def test_generic_enum(self): + e1 = Enum('x', 'y', 'z', name='somename') + e2 = Enum('x', 'y', 'z', name='somename', schema='someschema') + self.assert_compile(postgresql.CreateEnumType(e1), + "CREATE TYPE somename AS ENUM ('x', 'y', 'z')" + ) + self.assert_compile(postgresql.CreateEnumType(e2), + "CREATE TYPE someschema.somename AS ENUM " + "('x', 'y', 'z')") + self.assert_compile(postgresql.DropEnumType(e1), + 'DROP TYPE somename') + self.assert_compile(postgresql.DropEnumType(e2), + 'DROP TYPE someschema.somename') + t1 = Table('sometable', MetaData(), Column('somecolumn', e1)) + self.assert_compile(schema.CreateTable(t1), + 'CREATE TABLE sometable (somecolumn ' + 'somename)') + t1 = Table('sometable', MetaData(), Column('somecolumn', + Enum('x', 'y', 'z', native_enum=False))) + self.assert_compile(schema.CreateTable(t1), + "CREATE TABLE sometable (somecolumn " + "VARCHAR(1), CHECK (somecolumn IN ('x', " + "'y', 'z')))") + def test_create_partial_index(self): m = MetaData() tbl = Table('testtbl', m, Column('data', Integer)) @@ -173,6 +213,27 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'USING hash (data)', dialect=postgresql.dialect()) + + def test_create_index_expr_gets_parens(self): + m = MetaData() + tbl = Table('testtbl', m, Column('x', Integer), Column('y', Integer)) + + idx1 = Index('test_idx1', 5 / (tbl.c.x + tbl.c.y)) + self.assert_compile( + schema.CreateIndex(idx1), + "CREATE INDEX test_idx1 ON testtbl ((5 / (x + y)))" + ) + + def test_create_index_literals(self): + m = MetaData() + tbl = Table('testtbl', m, Column('data', Integer)) + + idx1 = Index('test_idx1', tbl.c.data + 5) + self.assert_compile( + schema.CreateIndex(idx1), + "CREATE INDEX test_idx1 ON testtbl ((data + 5))" + ) + def test_exclude_constraint_min(self): m = MetaData() tbl = Table('testtbl', m, @@ -228,6 +289,68 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'SUBSTRING(%(substring_1)s FROM %(substring_2)s)') + def test_for_update(self): + table1 = table('mytable', + column('myid'), column('name'), column('description')) + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE") + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(nowait=True), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(read=True), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(read=True, nowait=True), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(of=table1.c.myid), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s " + "FOR UPDATE OF mytable") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(read=True, nowait=True, of=table1), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s " + "FOR SHARE OF mytable NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(read=True, nowait=True, of=table1.c.myid), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s " + "FOR SHARE OF mytable NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(read=True, nowait=True, + of=[table1.c.myid, table1.c.name]), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = %(myid_1)s " + "FOR SHARE OF mytable NOWAIT") + + ta = table1.alias() + self.assert_compile( + ta.select(ta.c.myid == 7). + with_for_update(of=[ta.c.myid, ta.c.name]), + "SELECT mytable_1.myid, mytable_1.name, mytable_1.description " + "FROM mytable AS mytable_1 " + "WHERE mytable_1.myid = %(myid_1)s FOR UPDATE OF mytable_1" + ) def test_reserved_words(self): diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index 1fc239cb7..fd6df2c98 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -17,6 +17,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql import logging import logging.handlers from sqlalchemy.testing.mock import Mock +from sqlalchemy.engine.reflection import Inspector class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): @@ -53,7 +54,11 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): 'compiled by GCC gcc (GCC) 4.4.2, 64-bit', (8, 5)), ('EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, ' 'compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), ' - '64-bit', (9, 1, 2))]: + '64-bit', (9, 1, 2)), + ('[PostgreSQL 9.2.4 ] VMware vFabric Postgres 9.2.4.0 ' + 'release build 1080137', (9, 2, 4)) + + ]: eq_(testing.db.dialect._get_server_version_info(mock_conn(string)), version) @@ -63,8 +68,10 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): assert testing.db.dialect.dbapi.__version__.\ startswith(".".join(str(x) for x in v)) + # currently not passing with pg 9.3 that does not seem to generate + # any notices here, woudl rather find a way to mock this @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature') - def test_notice_logging(self): + def _test_notice_logging(self): log = logging.getLogger('sqlalchemy.dialects.postgresql') buf = logging.handlers.BufferingHandler(100) lev = log.level @@ -199,18 +206,32 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): assert_raises(exc.InvalidRequestError, testing.db.execute, stmt) def test_serial_integer(self): - for type_, expected in [ - (Integer, 'SERIAL'), - (BigInteger, 'BIGSERIAL'), - (SmallInteger, 'SMALLINT'), - (postgresql.INTEGER, 'SERIAL'), - (postgresql.BIGINT, 'BIGSERIAL'), + + for version, type_, expected in [ + (None, Integer, 'SERIAL'), + (None, BigInteger, 'BIGSERIAL'), + ((9, 1), SmallInteger, 'SMALLINT'), + ((9, 2), SmallInteger, 'SMALLSERIAL'), + (None, postgresql.INTEGER, 'SERIAL'), + (None, postgresql.BIGINT, 'BIGSERIAL'), ]: m = MetaData() t = Table('t', m, Column('c', type_, primary_key=True)) - ddl_compiler = testing.db.dialect.ddl_compiler(testing.db.dialect, schema.CreateTable(t)) + + if version: + dialect = postgresql.dialect() + dialect._get_server_version_info = Mock(return_value=version) + dialect.initialize(testing.db.connect()) + else: + dialect = testing.db.dialect + + ddl_compiler = dialect.ddl_compiler( + dialect, + schema.CreateTable(t) + ) eq_( ddl_compiler.get_column_specification(t.c.c), "c %s NOT NULL" % expected ) + diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index fb399b546..58f34d5d0 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -5,6 +5,7 @@ from sqlalchemy.testing.assertions import eq_, assert_raises, \ AssertsCompiledSQL, ComparesTables from sqlalchemy.testing import engines, fixtures from sqlalchemy import testing +from sqlalchemy import inspect from sqlalchemy import Table, Column, select, MetaData, text, Integer, \ String, Sequence, ForeignKey, join, Numeric, \ PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \ @@ -159,6 +160,17 @@ class ReflectionTest(fixtures.TestBase): subject.join(referer).onclause)) @testing.provide_metadata + def test_reflect_default_over_128_chars(self): + Table('t', self.metadata, + Column('x', String(200), server_default="abcd" * 40) + ).create(testing.db) + + m = MetaData() + t = Table('t', m, autoload=True, autoload_with=testing.db) + eq_( + t.c.x.server_default.arg.text, "'%s'::character varying" % ("abcd" * 40) + ) + @testing.provide_metadata def test_renamed_sequence_reflection(self): metadata = self.metadata t = Table('t', metadata, Column('id', Integer, primary_key=True)) @@ -416,6 +428,70 @@ class ReflectionTest(fixtures.TestBase): eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}]) conn.close() + @testing.provide_metadata + def test_foreign_key_option_inspection(self): + metadata = self.metadata + Table('person', metadata, + Column('id', String(length=32), nullable=False, primary_key=True), + Column('company_id', ForeignKey('company.id', + name='person_company_id_fkey', + match='FULL', onupdate='RESTRICT', ondelete='RESTRICT', + deferrable=True, initially='DEFERRED' + ) + ) + ) + Table('company', metadata, + Column('id', String(length=32), nullable=False, primary_key=True), + Column('name', String(length=255)), + Column('industry_id', ForeignKey('industry.id', + name='company_industry_id_fkey', + onupdate='CASCADE', ondelete='CASCADE', + deferrable=False, # PG default + initially='IMMEDIATE' # PG default + ) + ) + ) + Table('industry', metadata, + Column('id', Integer(), nullable=False, primary_key=True), + Column('name', String(length=255)) + ) + fk_ref = { + 'person_company_id_fkey': { + 'name': 'person_company_id_fkey', + 'constrained_columns': ['company_id'], + 'referred_columns': ['id'], + 'referred_table': 'company', + 'referred_schema': None, + 'options': { + 'onupdate': 'RESTRICT', + 'deferrable': True, + 'ondelete': 'RESTRICT', + 'initially': 'DEFERRED', + 'match': 'FULL' + } + }, + 'company_industry_id_fkey': { + 'name': 'company_industry_id_fkey', + 'constrained_columns': ['industry_id'], + 'referred_columns': ['id'], + 'referred_table': 'industry', + 'referred_schema': None, + 'options': { + 'onupdate': 'CASCADE', + 'deferrable': None, + 'ondelete': 'CASCADE', + 'initially': None, + 'match': None + } + } + } + metadata.create_all() + inspector = inspect(testing.db) + fks = inspector.get_foreign_keys('person') + \ + inspector.get_foreign_keys('company') + for fk in fks: + eq_(fk, fk_ref[fk['name']]) + class CustomTypeReflectionTest(fixtures.TestBase): class CustomType(object): diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 784f8bcbf..ba4b63e1a 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -10,18 +10,22 @@ from sqlalchemy import Table, Column, select, MetaData, text, Integer, \ PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \ func, literal_column, literal, bindparam, cast, extract, \ SmallInteger, Enum, REAL, update, insert, Index, delete, \ - and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text + and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text, \ + type_coerce from sqlalchemy.orm import Session, mapper, aliased from sqlalchemy import exc, schema, types from sqlalchemy.dialects.postgresql import base as postgresql from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \ - INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE + INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \ + JSON import decimal from sqlalchemy import util from sqlalchemy.testing.util import round_decimal from sqlalchemy.sql import table, column, operators import logging import re +from sqlalchemy import inspect +from sqlalchemy import event class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults): __only_on__ = 'postgresql' @@ -96,34 +100,10 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults): ([5], [5], [6], [decimal.Decimal("6.4")]) ) -class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): +class EnumTest(fixtures.TestBase, AssertsExecutionResults): __only_on__ = 'postgresql' - __dialect__ = postgresql.dialect() - def test_compile(self): - e1 = Enum('x', 'y', 'z', name='somename') - e2 = Enum('x', 'y', 'z', name='somename', schema='someschema') - self.assert_compile(postgresql.CreateEnumType(e1), - "CREATE TYPE somename AS ENUM ('x','y','z')" - ) - self.assert_compile(postgresql.CreateEnumType(e2), - "CREATE TYPE someschema.somename AS ENUM " - "('x','y','z')") - self.assert_compile(postgresql.DropEnumType(e1), - 'DROP TYPE somename') - self.assert_compile(postgresql.DropEnumType(e2), - 'DROP TYPE someschema.somename') - t1 = Table('sometable', MetaData(), Column('somecolumn', e1)) - self.assert_compile(schema.CreateTable(t1), - 'CREATE TABLE sometable (somecolumn ' - 'somename)') - t1 = Table('sometable', MetaData(), Column('somecolumn', - Enum('x', 'y', 'z', native_enum=False))) - self.assert_compile(schema.CreateTable(t1), - "CREATE TABLE sometable (somecolumn " - "VARCHAR(1), CHECK (somecolumn IN ('x', " - "'y', 'z')))") @testing.fails_on('postgresql+zxjdbc', 'zxjdbc fails on ENUM: column "XXX" is of type ' @@ -860,7 +840,8 @@ class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL): Column('plain_interval', postgresql.INTERVAL), Column('year_interval', y2m()), Column('month_interval', d2s()), - Column('precision_interval', postgresql.INTERVAL(precision=3)) + Column('precision_interval', postgresql.INTERVAL(precision=3)), + Column('tsvector_document', postgresql.TSVECTOR) ) metadata.create_all() @@ -893,6 +874,17 @@ class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL): self.assert_compile(type_, expected) @testing.provide_metadata + def test_tsvector_round_trip(self): + t = Table('t1', self.metadata, Column('data', postgresql.TSVECTOR)) + t.create() + testing.db.execute(t.insert(), data="a fat cat sat") + eq_(testing.db.scalar(select([t.c.data])), "'a' 'cat' 'fat' 'sat'") + + testing.db.execute(t.update(), data="'a' 'cat' 'fat' 'mat' 'sat'") + + eq_(testing.db.scalar(select([t.c.data])), "'a' 'cat' 'fat' 'mat' 'sat'") + + @testing.provide_metadata def test_bit_reflection(self): metadata = self.metadata t1 = Table('t1', metadata, @@ -918,7 +910,6 @@ class UUIDTest(fixtures.TestBase): __only_on__ = 'postgresql' - @testing.requires.python25 @testing.fails_on('postgresql+zxjdbc', 'column "data" is of type uuid but expression is of type character varying') @testing.fails_on('postgresql+pg8000', 'No support for UUID type') @@ -932,7 +923,6 @@ class UUIDTest(fixtures.TestBase): str(uuid.uuid4()) ) - @testing.requires.python25 @testing.fails_on('postgresql+zxjdbc', 'column "data" is of type uuid but expression is of type character varying') @testing.fails_on('postgresql+pg8000', 'No support for UUID type') @@ -978,13 +968,8 @@ class UUIDTest(fixtures.TestBase): -class HStoreTest(fixtures.TestBase): - def _assert_sql(self, construct, expected): - dialect = postgresql.dialect() - compiled = str(construct.compile(dialect=dialect)) - compiled = re.sub(r'\s+', ' ', compiled) - expected = re.sub(r'\s+', ' ', expected) - eq_(compiled, expected) +class HStoreTest(AssertsCompiledSQL, fixtures.TestBase): + __dialect__ = 'postgresql' def setup(self): metadata = MetaData() @@ -996,7 +981,7 @@ class HStoreTest(fixtures.TestBase): def _test_where(self, whereclause, expected): stmt = select([self.test_table]).where(whereclause) - self._assert_sql( + self.assert_compile( stmt, "SELECT test_table.id, test_table.hash FROM test_table " "WHERE %s" % expected @@ -1004,7 +989,7 @@ class HStoreTest(fixtures.TestBase): def _test_cols(self, colclause, expected, from_=True): stmt = select([colclause]) - self._assert_sql( + self.assert_compile( stmt, ( "SELECT %s" + @@ -1013,9 +998,8 @@ class HStoreTest(fixtures.TestBase): ) def test_bind_serialize_default(self): - from sqlalchemy.engine import default - dialect = default.DefaultDialect() + dialect = postgresql.dialect() proc = self.test_table.c.hash.type._cached_bind_processor(dialect) eq_( proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])), @@ -1023,9 +1007,7 @@ class HStoreTest(fixtures.TestBase): ) def test_bind_serialize_with_slashes_and_quotes(self): - from sqlalchemy.engine import default - - dialect = default.DefaultDialect() + dialect = postgresql.dialect() proc = self.test_table.c.hash.type._cached_bind_processor(dialect) eq_( proc({'\\"a': '\\"1'}), @@ -1033,9 +1015,7 @@ class HStoreTest(fixtures.TestBase): ) def test_parse_error(self): - from sqlalchemy.engine import default - - dialect = default.DefaultDialect() + dialect = postgresql.dialect() proc = self.test_table.c.hash.type._cached_result_processor( dialect, None) assert_raises_message( @@ -1048,9 +1028,7 @@ class HStoreTest(fixtures.TestBase): ) def test_result_deserialize_default(self): - from sqlalchemy.engine import default - - dialect = default.DefaultDialect() + dialect = postgresql.dialect() proc = self.test_table.c.hash.type._cached_result_processor( dialect, None) eq_( @@ -1059,9 +1037,7 @@ class HStoreTest(fixtures.TestBase): ) def test_result_deserialize_with_slashes_and_quotes(self): - from sqlalchemy.engine import default - - dialect = default.DefaultDialect() + dialect = postgresql.dialect() proc = self.test_table.c.hash.type._cached_result_processor( dialect, None) eq_( @@ -1305,7 +1281,6 @@ class HStoreRoundTripTest(fixtures.TablesTest): return engine def test_reflect(self): - from sqlalchemy import inspect insp = inspect(testing.db) cols = insp.get_columns('data_table') assert isinstance(cols[2]['type'], HSTORE) @@ -1677,3 +1652,320 @@ class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest): def _data_obj(self): return self.extras.DateTimeTZRange(*self.tstzs()) + + +class JSONTest(AssertsCompiledSQL, fixtures.TestBase): + __dialect__ = 'postgresql' + + def setup(self): + metadata = MetaData() + self.test_table = Table('test_table', metadata, + Column('id', Integer, primary_key=True), + Column('test_column', JSON) + ) + self.jsoncol = self.test_table.c.test_column + + def _test_where(self, whereclause, expected): + stmt = select([self.test_table]).where(whereclause) + self.assert_compile( + stmt, + "SELECT test_table.id, test_table.test_column FROM test_table " + "WHERE %s" % expected + ) + + def _test_cols(self, colclause, expected, from_=True): + stmt = select([colclause]) + self.assert_compile( + stmt, + ( + "SELECT %s" + + (" FROM test_table" if from_ else "") + ) % expected + ) + + def test_bind_serialize_default(self): + dialect = postgresql.dialect() + proc = self.test_table.c.test_column.type._cached_bind_processor(dialect) + eq_( + proc({"A": [1, 2, 3, True, False]}), + '{"A": [1, 2, 3, true, false]}' + ) + + def test_result_deserialize_default(self): + dialect = postgresql.dialect() + proc = self.test_table.c.test_column.type._cached_result_processor( + dialect, None) + eq_( + proc('{"A": [1, 2, 3, true, false]}'), + {"A": [1, 2, 3, True, False]} + ) + + # This test is a bit misleading -- in real life you will need to cast to do anything + def test_where_getitem(self): + self._test_where( + self.jsoncol['bar'] == None, + "(test_table.test_column -> %(test_column_1)s) IS NULL" + ) + + def test_where_path(self): + self._test_where( + self.jsoncol[("foo", 1)] == None, + "(test_table.test_column #> %(test_column_1)s) IS NULL" + ) + + def test_where_getitem_as_text(self): + self._test_where( + self.jsoncol['bar'].astext == None, + "(test_table.test_column ->> %(test_column_1)s) IS NULL" + ) + + def test_where_getitem_as_cast(self): + self._test_where( + self.jsoncol['bar'].cast(Integer) == 5, + "CAST(test_table.test_column ->> %(test_column_1)s AS INTEGER) " + "= %(param_1)s" + ) + + def test_where_path_as_text(self): + self._test_where( + self.jsoncol[("foo", 1)].astext == None, + "(test_table.test_column #>> %(test_column_1)s) IS NULL" + ) + + def test_cols_get(self): + self._test_cols( + self.jsoncol['foo'], + "test_table.test_column -> %(test_column_1)s AS anon_1", + True + ) + + +class JSONRoundTripTest(fixtures.TablesTest): + __only_on__ = ('postgresql >= 9.3',) + + @classmethod + def define_tables(cls, metadata): + Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('name', String(30), nullable=False), + Column('data', JSON) + ) + + def _fixture_data(self, engine): + data_table = self.tables.data_table + engine.execute( + data_table.insert(), + {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}}, + {'name': 'r2', 'data': {"k1": "r2v1", "k2": "r2v2"}}, + {'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}}, + {'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}}, + {'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2", "k3": 5}}, + ) + + def _assert_data(self, compare): + data = testing.db.execute( + select([self.tables.data_table.c.data]). + order_by(self.tables.data_table.c.name) + ).fetchall() + eq_([d for d, in data], compare) + + def _test_insert(self, engine): + engine.execute( + self.tables.data_table.insert(), + {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}} + ) + self._assert_data([{"k1": "r1v1", "k2": "r1v2"}]) + + def _non_native_engine(self, json_serializer=None, json_deserializer=None): + if json_serializer is not None or json_deserializer is not None: + options = { + "json_serializer": json_serializer, + "json_deserializer": json_deserializer + } + else: + options = {} + + if testing.against("postgresql+psycopg2"): + from psycopg2.extras import register_default_json + engine = engines.testing_engine(options=options) + @event.listens_for(engine, "connect") + def connect(dbapi_connection, connection_record): + engine.dialect._has_native_json = False + def pass_(value): + return value + register_default_json(dbapi_connection, loads=pass_) + elif options: + engine = engines.testing_engine(options=options) + else: + engine = testing.db + engine.connect() + return engine + + def test_reflect(self): + insp = inspect(testing.db) + cols = insp.get_columns('data_table') + assert isinstance(cols[2]['type'], JSON) + + @testing.only_on("postgresql+psycopg2") + def test_insert_native(self): + engine = testing.db + self._test_insert(engine) + + def test_insert_python(self): + engine = self._non_native_engine() + self._test_insert(engine) + + + def _test_custom_serialize_deserialize(self, native): + import json + def loads(value): + value = json.loads(value) + value['x'] = value['x'] + '_loads' + return value + + def dumps(value): + value = dict(value) + value['x'] = 'dumps_y' + return json.dumps(value) + + if native: + engine = engines.testing_engine(options=dict( + json_serializer=dumps, + json_deserializer=loads + )) + else: + engine = self._non_native_engine( + json_serializer=dumps, + json_deserializer=loads + ) + + s = select([ + cast( + { + "key": "value", + "x": "q" + }, + JSON + ) + ]) + eq_( + engine.scalar(s), + { + "key": "value", + "x": "dumps_y_loads" + }, + ) + + @testing.only_on("postgresql+psycopg2") + def test_custom_native(self): + self._test_custom_serialize_deserialize(True) + + @testing.only_on("postgresql+psycopg2") + def test_custom_python(self): + self._test_custom_serialize_deserialize(False) + + + @testing.only_on("postgresql+psycopg2") + def test_criterion_native(self): + engine = testing.db + self._fixture_data(engine) + self._test_criterion(engine) + + def test_criterion_python(self): + engine = self._non_native_engine() + self._fixture_data(engine) + self._test_criterion(engine) + + def test_path_query(self): + engine = testing.db + self._fixture_data(engine) + data_table = self.tables.data_table + result = engine.execute( + select([data_table.c.data]).where( + data_table.c.data[('k1',)].astext == 'r3v1' + ) + ).first() + eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},)) + + def test_query_returned_as_text(self): + engine = testing.db + self._fixture_data(engine) + data_table = self.tables.data_table + result = engine.execute( + select([data_table.c.data['k1'].astext]) + ).first() + assert isinstance(result[0], util.text_type) + + def test_query_returned_as_int(self): + engine = testing.db + self._fixture_data(engine) + data_table = self.tables.data_table + result = engine.execute( + select([data_table.c.data['k3'].cast(Integer)]).where( + data_table.c.name == 'r5') + ).first() + assert isinstance(result[0], int) + + def _test_criterion(self, engine): + data_table = self.tables.data_table + result = engine.execute( + select([data_table.c.data]).where( + data_table.c.data['k1'].astext == 'r3v1' + ) + ).first() + eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},)) + + def _test_fixed_round_trip(self, engine): + s = select([ + cast( + { + "key": "value", + "key2": {"k1": "v1", "k2": "v2"} + }, + JSON + ) + ]) + eq_( + engine.scalar(s), + { + "key": "value", + "key2": {"k1": "v1", "k2": "v2"} + }, + ) + + def test_fixed_round_trip_python(self): + engine = self._non_native_engine() + self._test_fixed_round_trip(engine) + + @testing.only_on("postgresql+psycopg2") + def test_fixed_round_trip_native(self): + engine = testing.db + self._test_fixed_round_trip(engine) + + def _test_unicode_round_trip(self, engine): + s = select([ + cast( + { + util.u('réveillé'): util.u('réveillé'), + "data": {"k1": util.u('drôle')} + }, + JSON + ) + ]) + eq_( + engine.scalar(s), + { + util.u('réveillé'): util.u('réveillé'), + "data": {"k1": util.u('drôle')} + }, + ) + + + def test_unicode_round_trip_python(self): + engine = self._non_native_engine() + self._test_unicode_round_trip(engine) + + @testing.only_on("postgresql+psycopg2") + def test_unicode_round_trip_native(self): + engine = testing.db + self._test_unicode_round_trip(engine) diff --git a/test/dialect/test_firebird.py b/test/dialect/test_firebird.py index 4a71b7d05..222e34b93 100644 --- a/test/dialect/test_firebird.py +++ b/test/dialect/test_firebird.py @@ -352,6 +352,15 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for type_, args, kw, res in columns: self.assert_compile(type_(*args, **kw), res) + def test_quoting_initial_chars(self): + self.assert_compile( + column("_somecol"), + '"_somecol"' + ) + self.assert_compile( + column("$somecol"), + '"$somecol"' + ) class TypesTest(fixtures.TestBase): __only_on__ = 'firebird' diff --git a/test/dialect/test_informix.py b/test/dialect/test_informix.py deleted file mode 100644 index 332edd24e..000000000 --- a/test/dialect/test_informix.py +++ /dev/null @@ -1,25 +0,0 @@ -from sqlalchemy import * -from sqlalchemy.databases import informix -from sqlalchemy.testing import * - - -class CompileTest(fixtures.TestBase, AssertsCompiledSQL): - - __dialect__ = informix.InformixDialect() - - def test_statements(self): - meta = MetaData() - t1 = Table('t1', meta, Column('col1', Integer, - primary_key=True), Column('col2', String(50))) - t2 = Table('t2', meta, Column('col1', Integer, - primary_key=True), Column('col2', String(50)), - Column('col3', Integer, ForeignKey('t1.col1'))) - self.assert_compile(t1.select(), - 'SELECT t1.col1, t1.col2 FROM t1') - self.assert_compile(select([t1, t2]).select_from(t1.join(t2)), - 'SELECT t1.col1, t1.col2, t2.col1, ' - 't2.col2, t2.col3 FROM t1 JOIN t2 ON ' - 't1.col1 = t2.col3') - self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1 - + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)') - diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py index 71b2d96cb..8d0ff9776 100644 --- a/test/dialect/test_oracle.py +++ b/test/dialect/test_oracle.py @@ -18,7 +18,7 @@ from sqlalchemy.testing.schema import Table, Column import datetime import os from sqlalchemy import sql - +from sqlalchemy.testing.mock import Mock class OutParamTest(fixtures.TestBase, AssertsExecutionResults): __only_on__ = 'oracle+cx_oracle' @@ -26,31 +26,31 @@ class OutParamTest(fixtures.TestBase, AssertsExecutionResults): @classmethod def setup_class(cls): testing.db.execute(""" -create or replace procedure foo(x_in IN number, x_out OUT number, y_out OUT number, z_out OUT varchar) IS - retval number; - begin - retval := 6; - x_out := 10; - y_out := x_in * 15; - z_out := NULL; - end; + create or replace procedure foo(x_in IN number, x_out OUT number, + y_out OUT number, z_out OUT varchar) IS + retval number; + begin + retval := 6; + x_out := 10; + y_out := x_in * 15; + z_out := NULL; + end; """) def test_out_params(self): - result = \ - testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, ' + result = testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, ' ':z_out); end;', bindparams=[bindparam('x_in', Float), outparam('x_out', Integer), outparam('y_out', Float), outparam('z_out', String)]), x_in=5) - eq_(result.out_parameters, {'x_out': 10, 'y_out': 75, 'z_out' - : None}) + eq_(result.out_parameters, + {'x_out': 10, 'y_out': 75, 'z_out': None}) assert isinstance(result.out_parameters['x_out'], int) @classmethod def teardown_class(cls): - testing.db.execute("DROP PROCEDURE foo") + testing.db.execute("DROP PROCEDURE foo") class CXOracleArgsTest(fixtures.TestBase): __only_on__ = 'oracle+cx_oracle' @@ -92,7 +92,7 @@ class QuotedBindRoundTripTest(fixtures.TestBase): metadata.create_all() table.insert().execute( - {"option":1, "plain":1, "union":1} + {"option": 1, "plain": 1, "union": 1} ) eq_( testing.db.execute(table.select()).first(), @@ -106,8 +106,7 @@ class QuotedBindRoundTripTest(fixtures.TestBase): class CompileTest(fixtures.TestBase, AssertsCompiledSQL): - - __dialect__ = oracle.dialect() + __dialect__ = "oracle" #oracle.dialect() def test_true_false(self): self.assert_compile( @@ -218,6 +217,49 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): ':ROWNUM_1) WHERE ora_rn > :ora_rn_1 FOR ' 'UPDATE') + def test_for_update(self): + table1 = table('mytable', + column('myid'), column('name'), column('description')) + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE") + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(of=table1.c.myid), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF mytable.myid") + + self.assert_compile( + table1.select(table1.c.myid == 7).with_for_update(nowait=True), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(nowait=True, of=table1.c.myid), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = :myid_1 " + "FOR UPDATE OF mytable.myid NOWAIT") + + self.assert_compile( + table1.select(table1.c.myid == 7). + with_for_update(nowait=True, of=[table1.c.myid, table1.c.name]), + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF " + "mytable.myid, mytable.name NOWAIT") + + ta = table1.alias() + self.assert_compile( + ta.select(ta.c.myid == 7). + with_for_update(of=[ta.c.myid, ta.c.name]), + "SELECT mytable_1.myid, mytable_1.name, mytable_1.description " + "FROM mytable mytable_1 " + "WHERE mytable_1.myid = :myid_1 FOR UPDATE OF " + "mytable_1.myid, mytable_1.name" + ) + def test_limit_preserves_typing_information(self): class MyType(TypeDecorator): impl = Integer @@ -250,7 +292,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_use_binds_for_limits_enabled(self): t = table('sometable', column('col1'), column('col2')) - dialect = oracle.OracleDialect(use_binds_for_limits = True) + dialect = oracle.OracleDialect(use_binds_for_limits=True) self.assert_compile(select([t]).limit(10), "SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, " @@ -348,8 +390,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): ) query = select([table1, table2], or_(table1.c.name == 'fred', - table1.c.myid == 10, table2.c.othername != 'jack' - , 'EXISTS (select yay from foo where boo = lar)' + table1.c.myid == 10, table2.c.othername != 'jack', + 'EXISTS (select yay from foo where boo = lar)' ), from_obj=[outerjoin(table1, table2, table1.c.myid == table2.c.otherid)]) self.assert_compile(query, @@ -435,8 +477,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'mytable.description AS description FROM ' 'mytable LEFT OUTER JOIN myothertable ON ' 'mytable.myid = myothertable.otherid) ' - 'anon_1 ON thirdtable.userid = anon_1.myid' - , dialect=oracle.dialect(use_ansi=True)) + 'anon_1 ON thirdtable.userid = anon_1.myid', + dialect=oracle.dialect(use_ansi=True)) self.assert_compile(q, 'SELECT thirdtable.userid, ' @@ -549,7 +591,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_returning_insert_labeled(self): t1 = table('t1', column('c1'), column('c2'), column('c3')) self.assert_compile( - t1.insert().values(c1=1).returning(t1.c.c2.label('c2_l'), t1.c.c3.label('c3_l')), + t1.insert().values(c1=1).returning( + t1.c.c2.label('c2_l'), t1.c.c3.label('c3_l')), "INSERT INTO t1 (c1) VALUES (:c1) RETURNING " "t1.c2, t1.c3 INTO :ret_0, :ret_1" ) @@ -587,33 +630,52 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): schema.CreateIndex(Index("bar", t1.c.x)), "CREATE INDEX alt_schema.bar ON alt_schema.foo (x)" ) + + def test_create_index_expr(self): + m = MetaData() + t1 = Table('foo', m, + Column('x', Integer) + ) + self.assert_compile( + schema.CreateIndex(Index("bar", t1.c.x > 5)), + "CREATE INDEX bar ON foo (x > 5)" + ) + class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL): - __only_on__ = 'oracle' - def test_ora8_flags(self): - def server_version_info(self): - return (8, 2, 5) + def _dialect(self, server_version, **kw): + def server_version_info(conn): + return server_version - dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi) + dialect = oracle.dialect( + dbapi=Mock(version="0.0.0", paramstyle="named"), + **kw) dialect._get_server_version_info = server_version_info + dialect._check_unicode_returns = Mock() + dialect._check_unicode_description = Mock() + dialect._get_default_schema_name = Mock() + return dialect + + + def test_ora8_flags(self): + dialect = self._dialect((8, 2, 5)) # before connect, assume modern DB assert dialect._supports_char_length assert dialect._supports_nchar assert dialect.use_ansi - dialect.initialize(testing.db.connect()) + dialect.initialize(Mock()) assert not dialect.implicit_returning assert not dialect._supports_char_length assert not dialect._supports_nchar assert not dialect.use_ansi - self.assert_compile(String(50),"VARCHAR2(50)",dialect=dialect) - self.assert_compile(Unicode(50),"VARCHAR2(50)",dialect=dialect) - self.assert_compile(UnicodeText(),"CLOB",dialect=dialect) + self.assert_compile(String(50), "VARCHAR2(50)", dialect=dialect) + self.assert_compile(Unicode(50), "VARCHAR2(50)", dialect=dialect) + self.assert_compile(UnicodeText(), "CLOB", dialect=dialect) - dialect = oracle.dialect(implicit_returning=True, - dbapi=testing.db.dialect.dbapi) - dialect._get_server_version_info = server_version_info + + dialect = self._dialect((8, 2, 5), implicit_returning=True) dialect.initialize(testing.db.connect()) assert dialect.implicit_returning @@ -621,26 +683,25 @@ class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL): def test_default_flags(self): """test with no initialization or server version info""" - dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi) + dialect = self._dialect(None) + assert dialect._supports_char_length assert dialect._supports_nchar assert dialect.use_ansi - self.assert_compile(String(50),"VARCHAR2(50 CHAR)",dialect=dialect) - self.assert_compile(Unicode(50),"NVARCHAR2(50)",dialect=dialect) - self.assert_compile(UnicodeText(),"NCLOB",dialect=dialect) + self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect) + self.assert_compile(Unicode(50), "NVARCHAR2(50)", dialect=dialect) + self.assert_compile(UnicodeText(), "NCLOB", dialect=dialect) def test_ora10_flags(self): - def server_version_info(self): - return (10, 2, 5) - dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi) - dialect._get_server_version_info = server_version_info - dialect.initialize(testing.db.connect()) + dialect = self._dialect((10, 2, 5)) + + dialect.initialize(Mock()) assert dialect._supports_char_length assert dialect._supports_nchar assert dialect.use_ansi - self.assert_compile(String(50),"VARCHAR2(50 CHAR)",dialect=dialect) - self.assert_compile(Unicode(50),"NVARCHAR2(50)",dialect=dialect) - self.assert_compile(UnicodeText(),"NCLOB",dialect=dialect) + self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect) + self.assert_compile(Unicode(50), "NVARCHAR2(50)", dialect=dialect) + self.assert_compile(UnicodeText(), "NCLOB", dialect=dialect) class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL): @@ -664,9 +725,18 @@ create table test_schema.child( parent_id integer references test_schema.parent(id) ); +create table local_table( + id integer primary key, + data varchar2(50) +); + create synonym test_schema.ptable for test_schema.parent; create synonym test_schema.ctable for test_schema.child; +create synonym test_schema_ptable for test_schema.parent; + +create synonym test_schema.local_table for local_table; + -- can't make a ref from local schema to the -- remote schema's table without this, -- *and* cant give yourself a grant ! @@ -682,15 +752,20 @@ grant references on test_schema.child to public; for stmt in """ drop table test_schema.child; drop table test_schema.parent; +drop table local_table; drop synonym test_schema.ctable; drop synonym test_schema.ptable; +drop synonym test_schema_ptable; +drop synonym test_schema.local_table; + """.split(";"): if stmt.strip(): testing.db.execute(stmt) + @testing.provide_metadata def test_create_same_names_explicit_schema(self): schema = testing.db.dialect.default_schema_name - meta = MetaData(testing.db) + meta = self.metadata parent = Table('parent', meta, Column('pid', Integer, primary_key=True), schema=schema @@ -701,15 +776,31 @@ drop synonym test_schema.ptable; schema=schema ) meta.create_all() - try: - parent.insert().execute({'pid':1}) - child.insert().execute({'cid':1, 'pid':1}) - eq_(child.select().execute().fetchall(), [(1, 1)]) - finally: - meta.drop_all() + parent.insert().execute({'pid': 1}) + child.insert().execute({'cid': 1, 'pid': 1}) + eq_(child.select().execute().fetchall(), [(1, 1)]) - def test_create_same_names_implicit_schema(self): + def test_reflect_alt_table_owner_local_synonym(self): meta = MetaData(testing.db) + parent = Table('test_schema_ptable', meta, autoload=True, + oracle_resolve_synonyms=True) + self.assert_compile(parent.select(), + "SELECT test_schema_ptable.id, " + "test_schema_ptable.data FROM test_schema_ptable") + select([parent]).execute().fetchall() + + def test_reflect_alt_synonym_owner_local_table(self): + meta = MetaData(testing.db) + parent = Table('local_table', meta, autoload=True, + oracle_resolve_synonyms=True, schema="test_schema") + self.assert_compile(parent.select(), + "SELECT test_schema.local_table.id, " + "test_schema.local_table.data FROM test_schema.local_table") + select([parent]).execute().fetchall() + + @testing.provide_metadata + def test_create_same_names_implicit_schema(self): + meta = self.metadata parent = Table('parent', meta, Column('pid', Integer, primary_key=True), ) @@ -718,12 +809,9 @@ drop synonym test_schema.ptable; Column('pid', Integer, ForeignKey('parent.pid')), ) meta.create_all() - try: - parent.insert().execute({'pid':1}) - child.insert().execute({'cid':1, 'pid':1}) - eq_(child.select().execute().fetchall(), [(1, 1)]) - finally: - meta.drop_all() + parent.insert().execute({'pid': 1}) + child.insert().execute({'cid': 1, 'pid': 1}) + eq_(child.select().execute().fetchall(), [(1, 1)]) def test_reflect_alt_owner_explicit(self): @@ -911,10 +999,17 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL): dbapi = FakeDBAPI() b = bindparam("foo", "hello world!") - assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING' + eq_( + b.type.dialect_impl(dialect).get_dbapi_type(dbapi), + 'STRING' + ) b = bindparam("foo", "hello world!") - assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING' + eq_( + b.type.dialect_impl(dialect).get_dbapi_type(dbapi), + 'STRING' + ) + def test_long(self): self.assert_compile(oracle.LONG(), "LONG") @@ -943,14 +1038,14 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile(oracle.RAW(35), "RAW(35)") def test_char_length(self): - self.assert_compile(VARCHAR(50),"VARCHAR(50 CHAR)") + self.assert_compile(VARCHAR(50), "VARCHAR(50 CHAR)") oracle8dialect = oracle.dialect() oracle8dialect.server_version_info = (8, 0) - self.assert_compile(VARCHAR(50),"VARCHAR(50)",dialect=oracle8dialect) + self.assert_compile(VARCHAR(50), "VARCHAR(50)", dialect=oracle8dialect) - self.assert_compile(NVARCHAR(50),"NVARCHAR2(50)") - self.assert_compile(CHAR(50),"CHAR(50)") + self.assert_compile(NVARCHAR(50), "NVARCHAR2(50)") + self.assert_compile(CHAR(50), "CHAR(50)") def test_varchar_types(self): dialect = oracle.dialect() @@ -961,6 +1056,12 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL): (VARCHAR(50), "VARCHAR(50 CHAR)"), (oracle.NVARCHAR2(50), "NVARCHAR2(50)"), (oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"), + (String(), "VARCHAR2"), + (Unicode(), "NVARCHAR2"), + (NVARCHAR(), "NVARCHAR2"), + (VARCHAR(), "VARCHAR"), + (oracle.NVARCHAR2(), "NVARCHAR2"), + (oracle.VARCHAR2(), "VARCHAR2"), ]: self.assert_compile(typ, exp, dialect=dialect) @@ -998,36 +1099,36 @@ class TypesTest(fixtures.TestBase): dict(id=3, data="value 3") ) - eq_(t.select().where(t.c.data=='value 2').execute().fetchall(), + eq_( + t.select().where(t.c.data == 'value 2').execute().fetchall(), [(2, 'value 2 ')] - ) + ) m2 = MetaData(testing.db) t2 = Table('t1', m2, autoload=True) assert type(t2.c.data.type) is CHAR - eq_(t2.select().where(t2.c.data=='value 2').execute().fetchall(), + eq_( + t2.select().where(t2.c.data == 'value 2').execute().fetchall(), [(2, 'value 2 ')] - ) + ) finally: t.drop() @testing.requires.returning + @testing.provide_metadata def test_int_not_float(self): - m = MetaData(testing.db) + m = self.metadata t1 = Table('t1', m, Column('foo', Integer)) t1.create() - try: - r = t1.insert().values(foo=5).returning(t1.c.foo).execute() - x = r.scalar() - assert x == 5 - assert isinstance(x, int) - - x = t1.select().scalar() - assert x == 5 - assert isinstance(x, int) - finally: - t1.drop() + r = t1.insert().values(foo=5).returning(t1.c.foo).execute() + x = r.scalar() + assert x == 5 + assert isinstance(x, int) + + x = t1.select().scalar() + assert x == 5 + assert isinstance(x, int) @testing.provide_metadata def test_rowid(self): @@ -1044,7 +1145,7 @@ class TypesTest(fixtures.TestBase): # the ROWID type is not really needed here, # as cx_oracle just treats it as a string, # but we want to make sure the ROWID works... - rowid_col= column('rowid', oracle.ROWID) + rowid_col = column('rowid', oracle.ROWID) s3 = select([t.c.x, rowid_col]).\ where(rowid_col == cast(rowid, oracle.ROWID)) eq_(s3.select().execute().fetchall(), @@ -1070,8 +1171,9 @@ class TypesTest(fixtures.TestBase): eq_(row['day_interval'], datetime.timedelta(days=35, seconds=5743)) + @testing.provide_metadata def test_numerics(self): - m = MetaData(testing.db) + m = self.metadata t1 = Table('t1', m, Column('intcol', Integer), Column('numericcol', Numeric(precision=9, scale=2)), @@ -1084,41 +1186,38 @@ class TypesTest(fixtures.TestBase): ) t1.create() - try: - t1.insert().execute( - intcol=1, - numericcol=5.2, - floatcol1=6.5, - floatcol2 = 8.5, - doubleprec = 9.5, - numbercol1=12, - numbercol2=14.85, - numbercol3=15.76 - ) - - m2 = MetaData(testing.db) - t2 = Table('t1', m2, autoload=True) + t1.insert().execute( + intcol=1, + numericcol=5.2, + floatcol1=6.5, + floatcol2=8.5, + doubleprec=9.5, + numbercol1=12, + numbercol2=14.85, + numbercol3=15.76 + ) - for row in ( - t1.select().execute().first(), - t2.select().execute().first() - ): - for i, (val, type_) in enumerate(( - (1, int), - (decimal.Decimal("5.2"), decimal.Decimal), - (6.5, float), - (8.5, float), - (9.5, float), - (12, int), - (decimal.Decimal("14.85"), decimal.Decimal), - (15.76, float), - )): - eq_(row[i], val) - assert isinstance(row[i], type_), '%r is not %r' \ - % (row[i], type_) + m2 = MetaData(testing.db) + t2 = Table('t1', m2, autoload=True) + + for row in ( + t1.select().execute().first(), + t2.select().execute().first() + ): + for i, (val, type_) in enumerate(( + (1, int), + (decimal.Decimal("5.2"), decimal.Decimal), + (6.5, float), + (8.5, float), + (9.5, float), + (12, int), + (decimal.Decimal("14.85"), decimal.Decimal), + (15.76, float), + )): + eq_(row[i], val) + assert isinstance(row[i], type_), '%r is not %r' \ + % (row[i], type_) - finally: - t1.drop() def test_numeric_no_decimal_mode(self): @@ -1150,28 +1249,26 @@ class TypesTest(fixtures.TestBase): ) foo.create() - foo.insert().execute( - {'idata':5, 'ndata':decimal.Decimal("45.6"), - 'ndata2':decimal.Decimal("45.0"), - 'nidata':decimal.Decimal('53'), 'fdata':45.68392}, - ) + foo.insert().execute({ + 'idata': 5, + 'ndata': decimal.Decimal("45.6"), + 'ndata2': decimal.Decimal("45.0"), + 'nidata': decimal.Decimal('53'), + 'fdata': 45.68392 + }) - stmt = """ - SELECT - idata, - ndata, - ndata2, - nidata, - fdata - FROM foo - """ + stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo" row = testing.db.execute(stmt).fetchall()[0] - eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, int, float]) + eq_( + [type(x) for x in row], + [int, decimal.Decimal, decimal.Decimal, int, float] + ) eq_( row, - (5, decimal.Decimal('45.6'), decimal.Decimal('45'), 53, 45.683920000000001) + (5, decimal.Decimal('45.6'), decimal.Decimal('45'), + 53, 45.683920000000001) ) # with a nested subquery, @@ -1195,7 +1292,10 @@ class TypesTest(fixtures.TestBase): FROM dual """ row = testing.db.execute(stmt).fetchall()[0] - eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal]) + eq_( + [type(x) for x in row], + [int, decimal.Decimal, int, int, decimal.Decimal] + ) eq_( row, (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392')) @@ -1203,15 +1303,20 @@ class TypesTest(fixtures.TestBase): row = testing.db.execute(text(stmt, typemap={ - 'idata':Integer(), - 'ndata':Numeric(20, 2), - 'ndata2':Numeric(20, 2), - 'nidata':Numeric(5, 0), - 'fdata':Float() + 'idata': Integer(), + 'ndata': Numeric(20, 2), + 'ndata2': Numeric(20, 2), + 'nidata': Numeric(5, 0), + 'fdata': Float() })).fetchall()[0] - eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]) - eq_(row, - (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001) + eq_( + [type(x) for x in row], + [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float] + ) + eq_( + row, + (5, decimal.Decimal('45.6'), decimal.Decimal('45'), + decimal.Decimal('53'), 45.683920000000001) ) stmt = """ @@ -1237,39 +1342,55 @@ class TypesTest(fixtures.TestBase): ) WHERE ROWNUM >= 0) anon_1 """ - row =testing.db.execute(stmt).fetchall()[0] - eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal]) - eq_(row, (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392'))) + row = testing.db.execute(stmt).fetchall()[0] + eq_( + [type(x) for x in row], + [int, decimal.Decimal, int, int, decimal.Decimal] + ) + eq_( + row, + (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392')) + ) row = testing.db.execute(text(stmt, typemap={ - 'anon_1_idata':Integer(), - 'anon_1_ndata':Numeric(20, 2), - 'anon_1_ndata2':Numeric(20, 2), - 'anon_1_nidata':Numeric(5, 0), - 'anon_1_fdata':Float() + 'anon_1_idata': Integer(), + 'anon_1_ndata': Numeric(20, 2), + 'anon_1_ndata2': Numeric(20, 2), + 'anon_1_nidata': Numeric(5, 0), + 'anon_1_fdata': Float() })).fetchall()[0] - eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]) - eq_(row, - (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001) + eq_( + [type(x) for x in row], + [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float] + ) + eq_( + row, + (5, decimal.Decimal('45.6'), decimal.Decimal('45'), + decimal.Decimal('53'), 45.683920000000001) ) row = testing.db.execute(text(stmt, typemap={ - 'anon_1_idata':Integer(), - 'anon_1_ndata':Numeric(20, 2, asdecimal=False), - 'anon_1_ndata2':Numeric(20, 2, asdecimal=False), - 'anon_1_nidata':Numeric(5, 0, asdecimal=False), - 'anon_1_fdata':Float(asdecimal=True) + 'anon_1_idata': Integer(), + 'anon_1_ndata': Numeric(20, 2, asdecimal=False), + 'anon_1_ndata2': Numeric(20, 2, asdecimal=False), + 'anon_1_nidata': Numeric(5, 0, asdecimal=False), + 'anon_1_fdata': Float(asdecimal=True) })).fetchall()[0] - eq_([type(x) for x in row], [int, float, float, float, decimal.Decimal]) - eq_(row, + eq_( + [type(x) for x in row], + [int, float, float, float, decimal.Decimal] + ) + eq_( + row, (5, 45.6, 45, 53, decimal.Decimal('45.68392')) ) + @testing.provide_metadata def test_reflect_dates(self): - metadata = MetaData(testing.db) + metadata = self.metadata Table( "date_types", metadata, Column('d1', DATE), @@ -1278,20 +1399,16 @@ class TypesTest(fixtures.TestBase): Column('d4', oracle.INTERVAL(second_precision=5)), ) metadata.create_all() - try: - m = MetaData(testing.db) - t1 = Table( - "date_types", m, - autoload=True) - assert isinstance(t1.c.d1.type, DATE) - assert isinstance(t1.c.d2.type, TIMESTAMP) - assert not t1.c.d2.type.timezone - assert isinstance(t1.c.d3.type, TIMESTAMP) - assert t1.c.d3.type.timezone - assert isinstance(t1.c.d4.type, oracle.INTERVAL) - - finally: - metadata.drop_all() + m = MetaData(testing.db) + t1 = Table( + "date_types", m, + autoload=True) + assert isinstance(t1.c.d1.type, DATE) + assert isinstance(t1.c.d2.type, TIMESTAMP) + assert not t1.c.d2.type.timezone + assert isinstance(t1.c.d3.type, TIMESTAMP) + assert t1.c.d3.type.timezone + assert isinstance(t1.c.d4.type, oracle.INTERVAL) def test_reflect_all_types_schema(self): types_table = Table('all_types', MetaData(testing.db), @@ -1319,7 +1436,7 @@ class TypesTest(fixtures.TestBase): @testing.provide_metadata def test_reflect_nvarchar(self): metadata = self.metadata - t = Table('t', metadata, + Table('t', metadata, Column('data', sqltypes.NVARCHAR(255)) ) metadata.create_all() @@ -1341,22 +1458,20 @@ class TypesTest(fixtures.TestBase): assert isinstance(res, util.text_type) + @testing.provide_metadata def test_char_length(self): - metadata = MetaData(testing.db) + metadata = self.metadata t1 = Table('t1', metadata, Column("c1", VARCHAR(50)), Column("c2", NVARCHAR(250)), Column("c3", CHAR(200)) ) t1.create() - try: - m2 = MetaData(testing.db) - t2 = Table('t1', m2, autoload=True) - eq_(t2.c.c1.type.length, 50) - eq_(t2.c.c2.type.length, 250) - eq_(t2.c.c3.type.length, 200) - finally: - t1.drop() + m2 = MetaData(testing.db) + t2 = Table('t1', m2, autoload=True) + eq_(t2.c.c1.type.length, 50) + eq_(t2.c.c2.type.length, 250) + eq_(t2.c.c3.type.length, 200) @testing.provide_metadata def test_long_type(self): @@ -1372,8 +1487,6 @@ class TypesTest(fixtures.TestBase): "xyz" ) - - def test_longstring(self): metadata = MetaData(testing.db) testing.db.execute(""" @@ -1424,15 +1537,16 @@ class EuroNumericTest(fixtures.TestBase): del os.environ['NLS_LANG'] self.engine.dispose() - @testing.provide_metadata def test_output_type_handler(self): - metadata = self.metadata for stmt, exp, kw in [ ("SELECT 0.1 FROM DUAL", decimal.Decimal("0.1"), {}), ("SELECT 15 FROM DUAL", 15, {}), - ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL", decimal.Decimal("15"), {}), - ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL", decimal.Decimal("0.1"), {}), - ("SELECT :num FROM DUAL", decimal.Decimal("2.5"), {'num':decimal.Decimal("2.5")}) + ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL", + decimal.Decimal("15"), {}), + ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL", + decimal.Decimal("0.1"), {}), + ("SELECT :num FROM DUAL", decimal.Decimal("2.5"), + {'num': decimal.Decimal("2.5")}) ]: test_exp = self.engine.scalar(stmt, **kw) eq_( @@ -1513,97 +1627,86 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL): class UnsupportedIndexReflectTest(fixtures.TestBase): __only_on__ = 'oracle' - def setup(self): - global metadata - metadata = MetaData(testing.db) - t1 = Table('test_index_reflect', metadata, + @testing.emits_warning("No column names") + @testing.provide_metadata + def test_reflect_functional_index(self): + metadata = self.metadata + Table('test_index_reflect', metadata, Column('data', String(20), primary_key=True) ) metadata.create_all() - def teardown(self): - metadata.drop_all() - - @testing.emits_warning("No column names") - def test_reflect_functional_index(self): testing.db.execute('CREATE INDEX DATA_IDX ON ' 'TEST_INDEX_REFLECT (UPPER(DATA))') m2 = MetaData(testing.db) - t2 = Table('test_index_reflect', m2, autoload=True) + Table('test_index_reflect', m2, autoload=True) class RoundTripIndexTest(fixtures.TestBase): __only_on__ = 'oracle' + @testing.provide_metadata def test_basic(self): - engine = testing.db - metadata = MetaData(engine) + metadata = self.metadata - table=Table("sometable", metadata, + table = Table("sometable", metadata, Column("id_a", Unicode(255), primary_key=True), Column("id_b", Unicode(255), primary_key=True, unique=True), Column("group", Unicode(255), primary_key=True), Column("col", Unicode(255)), - UniqueConstraint('col','group'), + UniqueConstraint('col', 'group'), ) # "group" is a keyword, so lower case normalind = Index('tableind', table.c.id_b, table.c.group) - # create metadata.create_all() - try: - # round trip, create from reflection - mirror = MetaData(engine) - mirror.reflect() - metadata.drop_all() - mirror.create_all() - - # inspect the reflected creation - inspect = MetaData(engine) - inspect.reflect() - - def obj_definition(obj): - return obj.__class__, tuple([c.name for c in - obj.columns]), getattr(obj, 'unique', None) - - # find what the primary k constraint name should be - primaryconsname = engine.execute( - text("""SELECT constraint_name - FROM all_constraints - WHERE table_name = :table_name - AND owner = :owner - AND constraint_type = 'P' """), - table_name=table.name.upper(), - owner=engine.url.username.upper()).fetchall()[0][0] - - reflectedtable = inspect.tables[table.name] - - # make a dictionary of the reflected objects: - - reflected = dict([(obj_definition(i), i) for i in - reflectedtable.indexes - | reflectedtable.constraints]) - - # assert we got primary key constraint and its name, Error - # if not in dict - - assert reflected[(PrimaryKeyConstraint, ('id_a', 'id_b', - 'group'), None)].name.upper() \ - == primaryconsname.upper() - - # Error if not in dict - - assert reflected[(Index, ('id_b', 'group'), False)].name \ - == normalind.name - assert (Index, ('id_b', ), True) in reflected - assert (Index, ('col', 'group'), True) in reflected - assert len(reflectedtable.constraints) == 1 - assert len(reflectedtable.indexes) == 3 + mirror = MetaData(testing.db) + mirror.reflect() + metadata.drop_all() + mirror.create_all() - finally: - metadata.drop_all() + inspect = MetaData(testing.db) + inspect.reflect() + def obj_definition(obj): + return obj.__class__, tuple([c.name for c in + obj.columns]), getattr(obj, 'unique', None) + # find what the primary k constraint name should be + primaryconsname = testing.db.execute( + text("""SELECT constraint_name + FROM all_constraints + WHERE table_name = :table_name + AND owner = :owner + AND constraint_type = 'P' """), + table_name=table.name.upper(), + owner=testing.db.url.username.upper()).fetchall()[0][0] + + reflectedtable = inspect.tables[table.name] + + # make a dictionary of the reflected objects: + + reflected = dict([(obj_definition(i), i) for i in + reflectedtable.indexes + | reflectedtable.constraints]) + + # assert we got primary key constraint and its name, Error + # if not in dict + + assert reflected[(PrimaryKeyConstraint, ('id_a', 'id_b', + 'group'), None)].name.upper() \ + == primaryconsname.upper() + + # Error if not in dict + + eq_( + reflected[(Index, ('id_b', 'group'), False)].name, + normalind.name + ) + assert (Index, ('id_b', ), True) in reflected + assert (Index, ('col', 'group'), True) in reflected + eq_(len(reflectedtable.constraints), 1) + eq_(len(reflectedtable.indexes), 3) class SequenceTest(fixtures.TestBase, AssertsCompiledSQL): @@ -1650,11 +1753,11 @@ class ExecuteTest(fixtures.TestBase): metadata.create_all() t.insert().execute( - {'id':1, 'data':1}, - {'id':2, 'data':7}, - {'id':3, 'data':12}, - {'id':4, 'data':15}, - {'id':5, 'data':32}, + {'id': 1, 'data': 1}, + {'id': 2, 'data': 7}, + {'id': 3, 'data': 12}, + {'id': 4, 'data': 15}, + {'id': 5, 'data': 32}, ) # here, we can't use ORDER BY. @@ -1679,7 +1782,7 @@ class UnicodeSchemaTest(fixtures.TestBase): @testing.provide_metadata def test_quoted_column_non_unicode(self): metadata = self.metadata - table=Table("atable", metadata, + table = Table("atable", metadata, Column("_underscorecolumn", Unicode(255), primary_key=True), ) metadata.create_all() @@ -1688,14 +1791,14 @@ class UnicodeSchemaTest(fixtures.TestBase): {'_underscorecolumn': u('’é')}, ) result = testing.db.execute( - table.select().where(table.c._underscorecolumn==u('’é')) + table.select().where(table.c._underscorecolumn == u('’é')) ).scalar() eq_(result, u('’é')) @testing.provide_metadata def test_quoted_column_unicode(self): metadata = self.metadata - table=Table("atable", metadata, + table = Table("atable", metadata, Column(u("méil"), Unicode(255), primary_key=True), ) metadata.create_all() diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py index 973cf4d84..8f6c547f1 100644 --- a/test/engine/test_bind.py +++ b/test/engine/test_bind.py @@ -1,7 +1,7 @@ """tests the "bind" attribute/argument across schema and SQL, including the deprecated versions of these arguments""" -from sqlalchemy.testing import eq_, assert_raises +from sqlalchemy.testing import assert_raises, assert_raises_message from sqlalchemy import engine, exc from sqlalchemy import MetaData, ThreadLocalMetaData from sqlalchemy import Integer, text @@ -44,7 +44,7 @@ class BindTest(fixtures.TestBase): testing.db.connect() ): for args in [ - ([], {'bind':bind}), + ([], {'bind': bind}), ([bind], {}) ]: metadata.create_all(*args[0], **args[1]) @@ -56,18 +56,13 @@ class BindTest(fixtures.TestBase): def test_create_drop_err_metadata(self): metadata = MetaData() - table = Table('test_table', metadata, Column('foo', Integer)) + Table('test_table', metadata, Column('foo', Integer)) for meth in [metadata.create_all, metadata.drop_all]: - try: - meth() - assert False - except exc.UnboundExecutionError as e: - eq_(str(e), - "The MetaData is not bound to an Engine or " - "Connection. Execution can not proceed without a " - "database to execute against. Either execute with " - "an explicit connection or assign the MetaData's " - ".bind to enable implicit execution.") + assert_raises_message( + exc.UnboundExecutionError, + "MetaData object is not bound to an Engine or Connection.", + meth + ) def test_create_drop_err_table(self): metadata = MetaData() @@ -79,23 +74,16 @@ class BindTest(fixtures.TestBase): table.create, table.drop, ]: - try: - meth() - assert False - except exc.UnboundExecutionError as e: - eq_( - str(e), - "The Table 'test_table' " - "is not bound to an Engine or Connection. " - "Execution can not proceed without a database to execute " - "against. Either execute with an explicit connection or " - "assign this Table's .metadata.bind to enable implicit " - "execution.") + assert_raises_message( + exc.UnboundExecutionError, + "Table object 'test_table' is not bound to an Engine or Connection.", + meth + ) @testing.uses_deprecated() def test_create_drop_bound(self): - for meta in (MetaData,ThreadLocalMetaData): + for meta in (MetaData, ThreadLocalMetaData): for bind in ( testing.db, testing.db.connect() @@ -136,7 +124,7 @@ class BindTest(fixtures.TestBase): try: for args in ( ([bind], {}), - ([], {'bind':bind}), + ([], {'bind': bind}), ): metadata = MetaData(*args[0], **args[1]) table = Table('test_table', metadata, diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 1d2aebf97..d3bd3c2cd 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -1,4 +1,4 @@ - +# coding: utf-8 from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \ config, is_ @@ -17,9 +17,9 @@ from sqlalchemy.testing.engines import testing_engine import logging.handlers from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam from sqlalchemy.engine import result as _result, default -from sqlalchemy.engine.base import Connection, Engine +from sqlalchemy.engine.base import Engine from sqlalchemy.testing import fixtures -from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.mock import Mock, call, patch users, metadata, users_autoinc = None, None, None @@ -29,11 +29,11 @@ class ExecuteTest(fixtures.TestBase): global users, users_autoinc, metadata metadata = MetaData(testing.db) users = Table('users', metadata, - Column('user_id', INT, primary_key = True, autoincrement=False), + Column('user_id', INT, primary_key=True, autoincrement=False), Column('user_name', VARCHAR(20)), ) users_autoinc = Table('users_autoinc', metadata, - Column('user_id', INT, primary_key = True, + Column('user_id', INT, primary_key=True, test_needs_autoincrement=True), Column('user_name', VARCHAR(20)), ) @@ -59,10 +59,9 @@ class ExecuteTest(fixtures.TestBase): scalar(stmt) eq_(result, '%') - @testing.fails_on_everything_except('firebird', 'maxdb', + @testing.fails_on_everything_except('firebird', 'sqlite', '+pyodbc', - '+mxodbc', '+zxjdbc', 'mysql+oursql', - 'informix+informixdb') + '+mxodbc', '+zxjdbc', 'mysql+oursql') def test_raw_qmark(self): def go(conn): conn.execute('insert into users (user_id, user_name) ' @@ -182,7 +181,7 @@ class ExecuteTest(fixtures.TestBase): finally: conn.close() - @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb') + @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle') def test_raw_named(self): def go(conn): conn.execute('insert into users (user_id, user_name) ' @@ -204,19 +203,36 @@ class ExecuteTest(fixtures.TestBase): finally: conn.close() + @testing.engines.close_open_connections def test_exception_wrapping_dbapi(self): - def go(conn): + conn = testing.db.connect() + for _c in testing.db, conn: assert_raises_message( tsa.exc.DBAPIError, r"not_a_valid_statement", - conn.execute, 'not_a_valid_statement' + _c.execute, 'not_a_valid_statement' ) - go(testing.db) - conn = testing.db.connect() - try: - go(conn) - finally: - conn.close() + + @testing.requires.sqlite + def test_exception_wrapping_non_dbapi_error(self): + e = create_engine('sqlite://') + e.dialect.is_disconnect = is_disconnect = Mock() + + with e.connect() as c: + c.connection.cursor = Mock( + return_value=Mock( + execute=Mock( + side_effect=TypeError("I'm not a DBAPI error") + )) + ) + + assert_raises_message( + TypeError, + "I'm not a DBAPI error", + c.execute, "select " + ) + eq_(is_disconnect.call_count, 0) + def test_exception_wrapping_non_dbapi_statement(self): class MyType(TypeDecorator): @@ -227,7 +243,7 @@ class ExecuteTest(fixtures.TestBase): def _go(conn): assert_raises_message( tsa.exc.StatementError, - r"nope \(original cause: Exception: nope\) 'SELECT 1 ", + r"nope \(original cause: Exception: nope\) u?'SELECT 1 ", conn.execute, select([1]).\ where( @@ -241,6 +257,25 @@ class ExecuteTest(fixtures.TestBase): finally: conn.close() + def test_stmt_exception_non_ascii(self): + name = util.u('méil') + with testing.db.connect() as conn: + assert_raises_message( + tsa.exc.StatementError, + util.u( + "A value is required for bind parameter 'uname'" + r'.*SELECT users.user_name AS .m\\xe9il.') if util.py2k + else + util.u( + "A value is required for bind parameter 'uname'" + '.*SELECT users.user_name AS .méil.') + , + conn.execute, + select([users.c.user_name.label(name)]).where( + users.c.user_name == bindparam("uname")), + {'uname_incorrect': 'foo'} + ) + def test_stmt_exception_pickleable_no_dbapi(self): self._test_stmt_exception_pickleable(Exception("hello world")) @@ -326,17 +361,17 @@ class ExecuteTest(fixtures.TestBase): def test_engine_level_options(self): eng = engines.testing_engine(options={'execution_options': {'foo': 'bar'}}) - conn = eng.contextual_connect() - eq_(conn._execution_options['foo'], 'bar') - eq_(conn.execution_options(bat='hoho')._execution_options['foo' - ], 'bar') - eq_(conn.execution_options(bat='hoho')._execution_options['bat' - ], 'hoho') - eq_(conn.execution_options(foo='hoho')._execution_options['foo' - ], 'hoho') - eng.update_execution_options(foo='hoho') - conn = eng.contextual_connect() - eq_(conn._execution_options['foo'], 'hoho') + with eng.contextual_connect() as conn: + eq_(conn._execution_options['foo'], 'bar') + eq_(conn.execution_options(bat='hoho')._execution_options['foo' + ], 'bar') + eq_(conn.execution_options(bat='hoho')._execution_options['bat' + ], 'hoho') + eq_(conn.execution_options(foo='hoho')._execution_options['foo' + ], 'hoho') + eng.update_execution_options(foo='hoho') + conn = eng.contextual_connect() + eq_(conn._execution_options['foo'], 'hoho') @testing.requires.ad_hoc_engines def test_generative_engine_execution_options(self): @@ -383,8 +418,8 @@ class ExecuteTest(fixtures.TestBase): event.listen(eng, "before_execute", l2) event.listen(eng1, "before_execute", l3) - eng.execute(select([1])) - eng1.execute(select([1])) + eng.execute(select([1])).close() + eng1.execute(select([1])).close() eq_(canary, ["l1", "l2", "l3", "l1", "l2"]) @@ -892,45 +927,44 @@ class ResultProxyTest(fixtures.TestBase): def test_no_rowcount_on_selects_inserts(self): """assert that rowcount is only called on deletes and updates. - This because cursor.rowcount can be expensive on some dialects - such as Firebird. + This because cursor.rowcount may can be expensive on some dialects + such as Firebird, however many dialects require it be called + before the cursor is closed. """ metadata = self.metadata engine = engines.testing_engine() - metadata.bind = engine t = Table('t1', metadata, Column('data', String(10)) ) - metadata.create_all() + metadata.create_all(engine) - class BreakRowcountMixin(object): - @property - def rowcount(self): - assert False + with patch.object(engine.dialect.execution_ctx_cls, "rowcount") as mock_rowcount: + mock_rowcount.__get__ = Mock() + engine.execute(t.insert(), + {'data': 'd1'}, + {'data': 'd2'}, + {'data': 'd3'}) - execution_ctx_cls = engine.dialect.execution_ctx_cls - engine.dialect.execution_ctx_cls = type("FakeCtx", - (BreakRowcountMixin, - execution_ctx_cls), - {}) + eq_(len(mock_rowcount.__get__.mock_calls), 0) - try: - r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, - {'data': 'd3'}) - eq_(t.select().execute().fetchall(), [('d1', ), ('d2', ), - ('d3', )]) - assert_raises(AssertionError, t.update().execute, {'data' - : 'd4'}) - assert_raises(AssertionError, t.delete().execute) - finally: - engine.dialect.execution_ctx_cls = execution_ctx_cls + eq_( + engine.execute(t.select()).fetchall(), + [('d1', ), ('d2', ), ('d3', )] + ) + eq_(len(mock_rowcount.__get__.mock_calls), 0) + + engine.execute(t.update(), {'data': 'd4'}) + + eq_(len(mock_rowcount.__get__.mock_calls), 1) + + engine.execute(t.delete()) + eq_(len(mock_rowcount.__get__.mock_calls), 2) - @testing.requires.python26 def test_rowproxy_is_sequence(self): import collections from sqlalchemy.engine import RowProxy @@ -1016,7 +1050,7 @@ class ResultProxyTest(fixtures.TestBase): class ExecutionOptionsTest(fixtures.TestBase): def test_dialect_conn_options(self): - engine = testing_engine("sqlite://") + engine = testing_engine("sqlite://", options=dict(_initialize=False)) engine.dialect = Mock() conn = engine.connect() c2 = conn.execution_options(foo="bar") diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 106bd0782..391b92144 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -1,13 +1,12 @@ -from sqlalchemy.testing import assert_raises, eq_ +from sqlalchemy.testing import assert_raises, eq_, assert_raises_message from sqlalchemy.util.compat import configparser, StringIO import sqlalchemy.engine.url as url from sqlalchemy import create_engine, engine_from_config, exc, pool -from sqlalchemy.engine.util import _coerce_config from sqlalchemy.engine.default import DefaultDialect import sqlalchemy as tsa from sqlalchemy.testing import fixtures from sqlalchemy import testing -from sqlalchemy.testing.mock import Mock +from sqlalchemy.testing.mock import Mock, MagicMock, patch class ParseConnectTest(fixtures.TestBase): @@ -15,6 +14,7 @@ class ParseConnectTest(fixtures.TestBase): for text in ( 'dbtype://username:password@hostspec:110//usr/db_file.db', 'dbtype://username:password@hostspec/database', + 'dbtype+apitype://username:password@hostspec/database', 'dbtype://username:password@hostspec', 'dbtype://username:password@/database', 'dbtype://username@hostspec', @@ -22,25 +22,53 @@ class ParseConnectTest(fixtures.TestBase): 'dbtype://hostspec/database', 'dbtype://hostspec', 'dbtype://hostspec/?arg1=val1&arg2=val2', - 'dbtype:///database', + 'dbtype+apitype:///database', 'dbtype:///:memory:', 'dbtype:///foo/bar/im/a/file', 'dbtype:///E:/work/src/LEM/db/hello.db', 'dbtype:///E:/work/src/LEM/db/hello.db?foo=bar&hoho=lala', 'dbtype://', - 'dbtype://username:password@/db', - 'dbtype:////usr/local/mailman/lists/_xtest@example.com/memb' - 'ers.db', - 'dbtype://username:apples%2Foranges@hostspec/mydatabase', + 'dbtype://username:password@/database', + 'dbtype:////usr/local/_xtest@example.com/members.db', + 'dbtype://username:apples%2Foranges@hostspec/database', + 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]/database?foo=bar', + 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]:80/database?foo=bar' ): u = url.make_url(text) - assert u.drivername == 'dbtype' - assert u.username == 'username' or u.username is None - assert u.password == 'password' or u.password \ - == 'apples/oranges' or u.password is None - assert u.host == 'hostspec' or u.host == '127.0.0.1' \ - or not u.host - assert str(u) == text + + assert u.drivername in ('dbtype', 'dbtype+apitype') + assert u.username in ('username', None) + assert u.password in ('password', 'apples/oranges', None) + assert u.host in ('hostspec', '127.0.0.1', + '2001:da8:2004:1000:202:116:160:90', '', None), u.host + assert u.database in ('database', + '/usr/local/_xtest@example.com/members.db', + '/usr/db_file.db', ':memory:', '', + 'foo/bar/im/a/file', + 'E:/work/src/LEM/db/hello.db', None), u.database + eq_(str(u), text) + + def test_rfc1738_password(self): + u = url.make_url("dbtype://user:pass word + other%3Awords@host/dbname") + eq_(u.password, "pass word + other:words") + eq_(str(u), "dbtype://user:pass word + other%3Awords@host/dbname") + + u = url.make_url('dbtype://username:apples%2Foranges@hostspec/database') + eq_(u.password, "apples/oranges") + eq_(str(u), 'dbtype://username:apples%2Foranges@hostspec/database') + + u = url.make_url('dbtype://username:apples%40oranges%40%40@hostspec/database') + eq_(u.password, "apples@oranges@@") + eq_(str(u), 'dbtype://username:apples%40oranges%40%40@hostspec/database') + + u = url.make_url('dbtype://username%40:@hostspec/database') + eq_(u.password, '') + eq_(u.username, "username@") + eq_(str(u), 'dbtype://username%40:@hostspec/database') + + u = url.make_url('dbtype://username:pass%2Fword@hostspec/database') + eq_(u.password, 'pass/word') + eq_(str(u), 'dbtype://username:pass%2Fword@hostspec/database') class DialectImportTest(fixtures.TestBase): def test_import_base_dialects(self): @@ -81,50 +109,6 @@ class CreateEngineTest(fixtures.TestBase): module=dbapi, _initialize=False) c = e.connect() - def test_coerce_config(self): - raw = r""" -[prefixed] -sqlalchemy.url=postgresql://scott:tiger@somehost/test?fooz=somevalue -sqlalchemy.convert_unicode=0 -sqlalchemy.echo=false -sqlalchemy.echo_pool=1 -sqlalchemy.max_overflow=2 -sqlalchemy.pool_recycle=50 -sqlalchemy.pool_size=2 -sqlalchemy.pool_threadlocal=1 -sqlalchemy.pool_timeout=10 -[plain] -url=postgresql://scott:tiger@somehost/test?fooz=somevalue -convert_unicode=0 -echo=0 -echo_pool=1 -max_overflow=2 -pool_recycle=50 -pool_size=2 -pool_threadlocal=1 -pool_timeout=10 -""" - ini = configparser.ConfigParser() - ini.readfp(StringIO(raw)) - - expected = { - 'url': 'postgresql://scott:tiger@somehost/test?fooz=somevalue', - 'convert_unicode': 0, - 'echo': False, - 'echo_pool': True, - 'max_overflow': 2, - 'pool_recycle': 50, - 'pool_size': 2, - 'pool_threadlocal': True, - 'pool_timeout': 10, - } - - prefixed = dict(ini.items('prefixed')) - self.assert_(_coerce_config(prefixed, 'sqlalchemy.') - == expected) - - plain = dict(ini.items('plain')) - self.assert_(_coerce_config(plain, '') == expected) def test_engine_from_config(self): dbapi = mock_dbapi @@ -141,19 +125,35 @@ pool_timeout=10 'z=somevalue') assert e.echo is True - for param, values in [ - ('convert_unicode', ('true', 'false', 'force')), - ('echo', ('true', 'false', 'debug')), - ('echo_pool', ('true', 'false', 'debug')), - ('use_native_unicode', ('true', 'false')), - ]: - for value in values: - config = { - 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test', - 'sqlalchemy.%s' % param : value - } - cfg = _coerce_config(config, 'sqlalchemy.') - assert cfg[param] == {'true':True, 'false':False}.get(value, value) + + def test_engine_from_config_custom(self): + from sqlalchemy import util + from sqlalchemy.dialects import registry + tokens = __name__.split(".") + + class MyDialect(MockDialect): + engine_config_types = { + "foobar": int, + "bathoho": util.bool_or_str('force') + } + + def __init__(self, foobar=None, bathoho=None, **kw): + self.foobar = foobar + self.bathoho = bathoho + + global dialect + dialect = MyDialect + registry.register("mockdialect.barb", + ".".join(tokens[0:-1]), tokens[-1]) + + config = { + "sqlalchemy.url": "mockdialect+barb://", + "sqlalchemy.foobar": "5", + "sqlalchemy.bathoho": "false" + } + e = engine_from_config(config, _initialize=False) + eq_(e.dialect.foobar, 5) + eq_(e.dialect.bathoho, False) def test_custom(self): @@ -227,17 +227,38 @@ pool_timeout=10 @testing.requires.sqlite def test_wraps_connect_in_dbapi(self): - # sqlite uses SingletonThreadPool which doesnt have max_overflow + e = create_engine('sqlite://') + sqlite3 = e.dialect.dbapi - assert_raises(TypeError, create_engine, 'sqlite://', - max_overflow=5, module=mock_sqlite_dbapi) - e = create_engine('sqlite://', connect_args={'use_unicode' - : True}, convert_unicode=True) + dbapi = MockDBAPI() + dbapi.Error = sqlite3.Error, + dbapi.ProgrammingError = sqlite3.ProgrammingError + dbapi.connect = Mock(side_effect=sqlite3.ProgrammingError("random error")) try: - e.connect() + create_engine('sqlite://', module=dbapi).connect() + assert False except tsa.exc.DBAPIError as de: assert not de.connection_invalidated + + @testing.requires.sqlite + def test_dont_touch_non_dbapi_exception_on_connect(self): + e = create_engine('sqlite://') + sqlite3 = e.dialect.dbapi + + dbapi = MockDBAPI() + dbapi.Error = sqlite3.Error, + dbapi.ProgrammingError = sqlite3.ProgrammingError + dbapi.connect = Mock(side_effect=TypeError("I'm not a DBAPI error")) + e = create_engine('sqlite://', module=dbapi) + e.dialect.is_disconnect = is_disconnect = Mock() + assert_raises_message( + TypeError, + "I'm not a DBAPI error", + e.connect + ) + eq_(is_disconnect.call_count, 0) + def test_ensure_dialect_does_is_disconnect_no_conn(self): """test that is_disconnect() doesn't choke if no connection, cursor given.""" dialect = testing.db.dialect @@ -277,6 +298,10 @@ pool_timeout=10 assert e.url.drivername == e2.url.drivername == 'mysql' assert e.url.username == e2.url.username == 'scott' assert e2.url is u + assert str(u) == 'mysql://scott:tiger@localhost/test' + assert repr(u) == 'mysql://scott:***@localhost/test' + assert repr(e) == 'Engine(mysql://scott:***@localhost/test)' + assert repr(e2) == 'Engine(mysql://scott:***@localhost/test)' def test_poolargs(self): """test that connection pool args make it thru""" @@ -363,7 +388,7 @@ def MockDBAPI(**assert_kwargs): ) return connection - return Mock( + return MagicMock( sqlite_version_info=(99, 9, 9,), version_info=(99, 9, 9,), sqlite_version='99.9.9', diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py index 05c0487f8..2e4c2dc48 100644 --- a/test/engine/test_pool.py +++ b/test/engine/test_pool.py @@ -10,6 +10,8 @@ from sqlalchemy.testing import fixtures from sqlalchemy.testing.mock import Mock, call +join_timeout = 10 + def MockDBAPI(): def cursor(): while True: @@ -306,6 +308,13 @@ class PoolEventsTest(PoolTestBase): return p, canary + def _invalidate_event_fixture(self): + p = self._queuepool_fixture() + canary = Mock() + event.listen(p, 'invalidate', canary) + + return p, canary + def test_first_connect_event(self): p, canary = self._first_connect_event_fixture() @@ -409,6 +418,31 @@ class PoolEventsTest(PoolTestBase): c1.close() eq_(canary, ['reset']) + def test_invalidate_event_no_exception(self): + p, canary = self._invalidate_event_fixture() + + c1 = p.connect() + c1.close() + assert not canary.called + c1 = p.connect() + dbapi_con = c1.connection + c1.invalidate() + assert canary.call_args_list[0][0][0] is dbapi_con + assert canary.call_args_list[0][0][2] is None + + def test_invalidate_event_exception(self): + p, canary = self._invalidate_event_fixture() + + c1 = p.connect() + c1.close() + assert not canary.called + c1 = p.connect() + dbapi_con = c1.connection + exc = Exception("hi") + c1.invalidate(exc) + assert canary.call_args_list[0][0][0] is dbapi_con + assert canary.call_args_list[0][0][2] is exc + def test_checkin_event_gc(self): p, canary = self._checkin_event_fixture() @@ -827,7 +861,7 @@ class QueuePoolTest(PoolTestBase): th.start() threads.append(th) for th in threads: - th.join() + th.join(join_timeout) assert len(timeouts) > 0 for t in timeouts: @@ -864,22 +898,109 @@ class QueuePoolTest(PoolTestBase): th.start() threads.append(th) for th in threads: - th.join() + th.join(join_timeout) self.assert_(max(peaks) <= max_overflow) lazy_gc() assert not pool._refs + + def test_overflow_reset_on_failed_connect(self): + dbapi = Mock() + + def failing_dbapi(): + time.sleep(2) + raise Exception("connection failed") + + creator = dbapi.connect + def create(): + return creator() + + p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3) + c1 = p.connect() + c2 = p.connect() + c3 = p.connect() + eq_(p._overflow, 1) + creator = failing_dbapi + assert_raises(Exception, p.connect) + eq_(p._overflow, 1) + + @testing.requires.threading_with_mock + def test_hanging_connect_within_overflow(self): + """test that a single connect() call which is hanging + does not block other connections from proceeding.""" + + dbapi = Mock() + mutex = threading.Lock() + + def hanging_dbapi(): + time.sleep(2) + with mutex: + return dbapi.connect() + + def fast_dbapi(): + with mutex: + return dbapi.connect() + + creator = threading.local() + + def create(): + return creator.mock_connector() + + def run_test(name, pool, should_hang): + if should_hang: + creator.mock_connector = hanging_dbapi + else: + creator.mock_connector = fast_dbapi + + conn = pool.connect() + conn.operation(name) + time.sleep(1) + conn.close() + + p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3) + + threads = [ + threading.Thread( + target=run_test, args=("success_one", p, False)), + threading.Thread( + target=run_test, args=("success_two", p, False)), + threading.Thread( + target=run_test, args=("overflow_one", p, True)), + threading.Thread( + target=run_test, args=("overflow_two", p, False)), + threading.Thread( + target=run_test, args=("overflow_three", p, False)) + ] + for t in threads: + t.start() + time.sleep(.2) + + for t in threads: + t.join(timeout=join_timeout) + eq_( + dbapi.connect().operation.mock_calls, + [call("success_one"), call("success_two"), + call("overflow_two"), call("overflow_three"), + call("overflow_one")] + ) + + @testing.requires.threading_with_mock def test_waiters_handled(self): """test that threads waiting for connections are handled when the pool is replaced. """ + mutex = threading.Lock() dbapi = MockDBAPI() def creator(): - return dbapi.connect() + mutex.acquire() + try: + return dbapi.connect() + finally: + mutex.release() success = [] for timeout in (None, 30): @@ -897,21 +1018,27 @@ class QueuePoolTest(PoolTestBase): c1 = p.connect() c2 = p.connect() + threads = [] for i in range(2): t = threading.Thread(target=waiter, args=(p, timeout, max_overflow)) - t.setDaemon(True) # so the tests dont hang if this fails + t.daemon = True t.start() + threads.append(t) - c1.invalidate() - c2.invalidate() - p2 = p._replace() + # this sleep makes sure that the + # two waiter threads hit upon wait() + # inside the queue, before we invalidate the other + # two conns time.sleep(.2) + p2 = p._replace() + + for t in threads: + t.join(join_timeout) eq_(len(success), 12, "successes: %s" % success) @testing.requires.threading_with_mock - @testing.requires.python26 def test_notify_waiters(self): dbapi = MockDBAPI() canary = [] @@ -924,9 +1051,7 @@ class QueuePoolTest(PoolTestBase): p1 = pool.QueuePool(creator=creator1, pool_size=1, timeout=None, max_overflow=0) - p2 = pool.QueuePool(creator=creator2, - pool_size=1, timeout=None, - max_overflow=-1) + p2 = pool.NullPool(creator=creator2) def waiter(p): conn = p.connect() time.sleep(.5) @@ -934,14 +1059,18 @@ class QueuePoolTest(PoolTestBase): c1 = p1.connect() + threads = [] for i in range(5): t = threading.Thread(target=waiter, args=(p1, )) - t.setDaemon(True) t.start() + threads.append(t) time.sleep(.5) eq_(canary, [1]) p1._pool.abort(p2) - time.sleep(1) + + for t in threads: + t.join(join_timeout) + eq_(canary, [1, 2, 2, 2, 2, 2]) def test_dispose_closes_pooled(self): @@ -987,6 +1116,7 @@ class QueuePoolTest(PoolTestBase): self._test_overflow(40, 5) def test_mixed_close(self): + pool._refs.clear() p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True) c1 = p.connect() c2 = p.connect() @@ -1198,6 +1328,96 @@ class QueuePoolTest(PoolTestBase): c2 = p.connect() assert c2.connection is not None +class ResetOnReturnTest(PoolTestBase): + def _fixture(self, **kw): + dbapi = Mock() + return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), **kw) + + def test_plain_rollback(self): + dbapi, p = self._fixture(reset_on_return='rollback') + + c1 = p.connect() + c1.close() + assert dbapi.connect().rollback.called + assert not dbapi.connect().commit.called + + def test_plain_commit(self): + dbapi, p = self._fixture(reset_on_return='commit') + + c1 = p.connect() + c1.close() + assert not dbapi.connect().rollback.called + assert dbapi.connect().commit.called + + def test_plain_none(self): + dbapi, p = self._fixture(reset_on_return=None) + + c1 = p.connect() + c1.close() + assert not dbapi.connect().rollback.called + assert not dbapi.connect().commit.called + + def test_agent_rollback(self): + dbapi, p = self._fixture(reset_on_return='rollback') + + class Agent(object): + def __init__(self, conn): + self.conn = conn + + def rollback(self): + self.conn.special_rollback() + + def commit(self): + self.conn.special_commit() + + c1 = p.connect() + c1._reset_agent = Agent(c1) + c1.close() + + assert dbapi.connect().special_rollback.called + assert not dbapi.connect().special_commit.called + + assert not dbapi.connect().rollback.called + assert not dbapi.connect().commit.called + + c1 = p.connect() + c1.close() + eq_(dbapi.connect().special_rollback.call_count, 1) + eq_(dbapi.connect().special_commit.call_count, 0) + + assert dbapi.connect().rollback.called + assert not dbapi.connect().commit.called + + def test_agent_commit(self): + dbapi, p = self._fixture(reset_on_return='commit') + + class Agent(object): + def __init__(self, conn): + self.conn = conn + + def rollback(self): + self.conn.special_rollback() + + def commit(self): + self.conn.special_commit() + + c1 = p.connect() + c1._reset_agent = Agent(c1) + c1.close() + assert not dbapi.connect().special_rollback.called + assert dbapi.connect().special_commit.called + + assert not dbapi.connect().rollback.called + assert not dbapi.connect().commit.called + + c1 = p.connect() + c1.close() + + eq_(dbapi.connect().special_rollback.call_count, 0) + eq_(dbapi.connect().special_commit.call_count, 1) + assert not dbapi.connect().rollback.called + assert dbapi.connect().commit.called + class SingletonThreadPoolTest(PoolTestBase): @testing.requires.threading_with_mock @@ -1245,7 +1465,7 @@ class SingletonThreadPoolTest(PoolTestBase): th.start() threads.append(th) for th in threads: - th.join() + th.join(join_timeout) assert len(p._all_conns) == 3 if strong_refs: diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index 0a964cf63..ba336a1bf 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -413,8 +413,6 @@ class RealReconnectTest(fixtures.TestBase): def teardown(self): self.engine.dispose() - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_reconnect(self): conn = self.engine.connect() @@ -539,8 +537,6 @@ class RealReconnectTest(fixtures.TestBase): # pool was recreated assert engine.pool is not p1 - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_null_pool(self): engine = \ engines.reconnecting_engine(options=dict(poolclass=pool.NullPool)) @@ -554,8 +550,6 @@ class RealReconnectTest(fixtures.TestBase): eq_(conn.execute(select([1])).scalar(), 1) assert not conn.invalidated - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_close(self): conn = self.engine.connect() eq_(conn.execute(select([1])).scalar(), 1) @@ -569,8 +563,6 @@ class RealReconnectTest(fixtures.TestBase): conn = self.engine.connect() eq_(conn.execute(select([1])).scalar(), 1) - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_with_transaction(self): conn = self.engine.connect() trans = conn.begin() @@ -651,8 +643,6 @@ class InvalidateDuringResultTest(fixtures.TestBase): '+cymysql', '+pymysql', '+pg8000' ], "Buffers the result set and doesn't check for " "connection close") - @testing.fails_on('+informixdb', - "Wrong error thrown, fix in informixdb?") def test_invalidate_on_results(self): conn = self.engine.connect() result = conn.execute('select * from sometable') diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index 52cbc15e6..2f311f7e7 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -361,6 +361,27 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): self.assert_(isinstance(table.c.col4.type, sa.String)) @testing.provide_metadata + def test_override_upgrade_pk_flag(self): + meta = self.metadata + table = Table( + 'override_test', meta, + Column('col1', sa.Integer), + Column('col2', sa.String(20)), + Column('col3', sa.Numeric) + ) + table.create() + + meta2 = MetaData(testing.db) + table = Table( + 'override_test', meta2, + Column('col1', sa.Integer, primary_key=True), + autoload=True) + + eq_(list(table.primary_key), [table.c.col1]) + eq_(table.c.col1.primary_key, True) + + + @testing.provide_metadata def test_override_pkfk(self): """test that you can override columns which contain foreign keys to other reflected tables, where the foreign key column is also @@ -602,6 +623,55 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): is a2.c.user_id assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id) + @testing.only_on(['postgresql', 'mysql']) + @testing.provide_metadata + def test_fk_options(self): + """test that foreign key reflection includes options (on + backends with {dialect}.get_foreign_keys() support)""" + + if testing.against('postgresql'): + test_attrs = ('match', 'onupdate', 'ondelete', 'deferrable', 'initially') + addresses_user_id_fkey = sa.ForeignKey( + # Each option is specifically not a Postgres default, or + # it won't be returned by PG's inspection + 'users.id', + name = 'addresses_user_id_fkey', + match='FULL', + onupdate='RESTRICT', + ondelete='RESTRICT', + deferrable=True, + initially='DEFERRED' + ) + elif testing.against('mysql'): + # MATCH, DEFERRABLE, and INITIALLY cannot be defined for MySQL + # ON UPDATE and ON DELETE have defaults of RESTRICT, which are + # elided by MySQL's inspection + addresses_user_id_fkey = sa.ForeignKey( + 'users.id', + name = 'addresses_user_id_fkey', + onupdate='CASCADE', + ondelete='CASCADE' + ) + test_attrs = ('onupdate', 'ondelete') + + meta = self.metadata + Table('users', meta, + Column('id', sa.Integer, primary_key=True), + Column('name', sa.String(30)), + test_needs_fk=True) + Table('addresses', meta, + Column('id', sa.Integer, primary_key=True), + Column('user_id', sa.Integer, addresses_user_id_fkey), + test_needs_fk=True) + meta.create_all() + + meta2 = MetaData() + meta2.reflect(testing.db) + for fk in meta2.tables['addresses'].foreign_keys: + ref = addresses_user_id_fkey + for attr in test_attrs: + eq_(getattr(fk, attr), getattr(ref, attr)) + def test_pks_not_uniques(self): """test that primary key reflection not tripped up by unique indexes""" @@ -705,10 +775,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on') - @testing.fails_on('+informixdb', - "FIXME: should be supported via the " - "DELIMITED env var but that breaks " - "everything else for now") @testing.provide_metadata def test_reserved(self): @@ -725,7 +791,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): # There's currently no way to calculate identifier case # normalization in isolation, so... - if testing.against('firebird', 'oracle', 'maxdb'): + if testing.against('firebird', 'oracle'): check_col = 'TRUE' else: check_col = 'true' @@ -778,6 +844,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): def test_reflect_uses_bind_engine_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData().reflect(e)) + @testing.provide_metadata def test_reflect_all(self): existing = testing.db.table_names() @@ -833,6 +900,18 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): m8.reflect ) + m8_e1 = MetaData(testing.db) + rt_c = Table('rt_c', m8_e1) + m8_e1.reflect(extend_existing=True) + eq_(set(m8_e1.tables.keys()), set(names)) + eq_(rt_c.c.keys(), ['id']) + + m8_e2 = MetaData(testing.db) + rt_c = Table('rt_c', m8_e2) + m8_e2.reflect(extend_existing=True, only=['rt_a', 'rt_c']) + eq_(set(m8_e2.tables.keys()), set(['rt_a', 'rt_c'])) + eq_(rt_c.c.keys(), ['id']) + if existing: print("Other tables present in database, skipping some checks.") else: @@ -1423,6 +1502,7 @@ class CaseSensitiveTest(fixtures.TablesTest): class ColumnEventsTest(fixtures.TestBase): + @classmethod def setup_class(cls): cls.metadata = MetaData() @@ -1430,7 +1510,16 @@ class ColumnEventsTest(fixtures.TestBase): 'to_reflect', cls.metadata, Column('x', sa.Integer, primary_key=True), + Column('y', sa.Integer), + test_needs_fk=True ) + cls.related = Table( + 'related', + cls.metadata, + Column('q', sa.Integer, sa.ForeignKey('to_reflect.x')), + test_needs_fk=True + ) + sa.Index("some_index", cls.to_reflect.c.y) cls.metadata.create_all(testing.db) @classmethod @@ -1440,7 +1529,7 @@ class ColumnEventsTest(fixtures.TestBase): def teardown(self): events.SchemaEventTarget.dispatch._clear() - def _do_test(self, col, update, assert_): + def _do_test(self, col, update, assert_, tablename="to_reflect"): # load the actual Table class, not the test # wrapper from sqlalchemy.schema import Table @@ -1450,22 +1539,54 @@ class ColumnEventsTest(fixtures.TestBase): if column_info['name'] == col: column_info.update(update) - t = Table('to_reflect', m, autoload=True, listeners=[ + t = Table(tablename, m, autoload=True, listeners=[ ('column_reflect', column_reflect), ]) assert_(t) m = MetaData(testing.db) event.listen(Table, 'column_reflect', column_reflect) - t2 = Table('to_reflect', m, autoload=True) + t2 = Table(tablename, m, autoload=True) assert_(t2) def test_override_key(self): + def assertions(table): + eq_(table.c.YXZ.name, "x") + eq_(set(table.primary_key), set([table.c.YXZ])) + self._do_test( "x", {"key": "YXZ"}, - lambda table: eq_(table.c.YXZ.name, "x") + assertions ) + def test_override_index(self): + def assertions(table): + idx = list(table.indexes)[0] + eq_(idx.columns, [table.c.YXZ]) + + self._do_test( + "y", {"key": "YXZ"}, + assertions + ) + + def test_override_key_fk(self): + m = MetaData(testing.db) + def column_reflect(insp, table, column_info): + + if column_info['name'] == 'q': + column_info['key'] = 'qyz' + elif column_info['name'] == 'x': + column_info['key'] = 'xyz' + + to_reflect = Table("to_reflect", m, autoload=True, listeners=[ + ('column_reflect', column_reflect), + ]) + related = Table("related", m, autoload=True, listeners=[ + ('column_reflect', column_reflect), + ]) + + assert related.c.qyz.references(to_reflect.c.xyz) + def test_override_type(self): def assert_(table): assert isinstance(table.c.x.type, sa.String) diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index ffc12b5b9..c373133d1 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -3,6 +3,7 @@ from sqlalchemy.testing import eq_, assert_raises, \ import sys import time import threading +from sqlalchemy import event from sqlalchemy.testing.engines import testing_engine from sqlalchemy import create_engine, MetaData, INT, VARCHAR, Sequence, \ select, Integer, String, func, text, exc @@ -29,7 +30,6 @@ class TransactionTest(fixtures.TestBase): testing.db.execute(users.delete()).close() @classmethod - @testing.crashes('mysql+cymysql', 'deadlock') def teardown_class(cls): users.drop(testing.db) @@ -342,7 +342,8 @@ class TransactionTest(fixtures.TestBase): transaction = connection.begin_twophase() connection.execute(users.insert(), user_id=1, user_name='user1') transaction.prepare() - connection.close() + connection.invalidate() + connection2 = testing.db.connect() eq_(connection2.execute(select([users.c.user_id]). order_by(users.c.user_id)).fetchall(), @@ -379,6 +380,138 @@ class TransactionTest(fixtures.TestBase): eq_(result.fetchall(), [('user1', ), ('user4', )]) conn.close() + @testing.requires.two_phase_transactions + def test_reset_rollback_two_phase_no_rollback(self): + # test [ticket:2907], essentially that the + # TwoPhaseTransaction is given the job of "reset on return" + # so that picky backends like MySQL correctly clear out + # their state when a connection is closed without handling + # the transaction explicitly. + + eng = testing_engine() + + # MySQL raises if you call straight rollback() on + # a connection with an XID present + @event.listens_for(eng, "invalidate") + def conn_invalidated(dbapi_con, con_record, exception): + dbapi_con.close() + raise exception + + with eng.connect() as conn: + rec = conn.connection._connection_record + raw_dbapi_con = rec.connection + xa = conn.begin_twophase() + conn.execute(users.insert(), user_id=1, user_name='user1') + + assert rec.connection is raw_dbapi_con + + with eng.connect() as conn: + result = \ + conn.execute(select([users.c.user_name]). + order_by(users.c.user_id)) + eq_(result.fetchall(), []) + +class ResetAgentTest(fixtures.TestBase): + def test_begin_close(self): + with testing.db.connect() as connection: + trans = connection.begin() + assert connection.connection._reset_agent is trans + assert not trans.is_active + + def test_begin_rollback(self): + with testing.db.connect() as connection: + trans = connection.begin() + assert connection.connection._reset_agent is trans + trans.rollback() + assert connection.connection._reset_agent is None + + def test_begin_commit(self): + with testing.db.connect() as connection: + trans = connection.begin() + assert connection.connection._reset_agent is trans + trans.commit() + assert connection.connection._reset_agent is None + + @testing.requires.savepoints + def test_begin_nested_close(self): + with testing.db.connect() as connection: + trans = connection.begin_nested() + assert connection.connection._reset_agent is trans + assert not trans.is_active + + @testing.requires.savepoints + def test_begin_begin_nested_close(self): + with testing.db.connect() as connection: + trans = connection.begin() + trans2 = connection.begin_nested() + assert connection.connection._reset_agent is trans + assert trans2.is_active # was never closed + assert not trans.is_active + + @testing.requires.savepoints + def test_begin_begin_nested_rollback_commit(self): + with testing.db.connect() as connection: + trans = connection.begin() + trans2 = connection.begin_nested() + assert connection.connection._reset_agent is trans + trans2.rollback() + assert connection.connection._reset_agent is trans + trans.commit() + assert connection.connection._reset_agent is None + + @testing.requires.savepoints + def test_begin_begin_nested_rollback_rollback(self): + with testing.db.connect() as connection: + trans = connection.begin() + trans2 = connection.begin_nested() + assert connection.connection._reset_agent is trans + trans2.rollback() + assert connection.connection._reset_agent is trans + trans.rollback() + assert connection.connection._reset_agent is None + + def test_begin_begin_rollback_rollback(self): + with testing.db.connect() as connection: + trans = connection.begin() + trans2 = connection.begin() + assert connection.connection._reset_agent is trans + trans2.rollback() + assert connection.connection._reset_agent is None + trans.rollback() + assert connection.connection._reset_agent is None + + def test_begin_begin_commit_commit(self): + with testing.db.connect() as connection: + trans = connection.begin() + trans2 = connection.begin() + assert connection.connection._reset_agent is trans + trans2.commit() + assert connection.connection._reset_agent is trans + trans.commit() + assert connection.connection._reset_agent is None + + @testing.requires.two_phase_transactions + def test_reset_via_agent_begin_twophase(self): + with testing.db.connect() as connection: + trans = connection.begin_twophase() + assert connection.connection._reset_agent is trans + + @testing.requires.two_phase_transactions + def test_reset_via_agent_begin_twophase_commit(self): + with testing.db.connect() as connection: + trans = connection.begin_twophase() + assert connection.connection._reset_agent is trans + trans.commit() + assert connection.connection._reset_agent is None + + @testing.requires.two_phase_transactions + def test_reset_via_agent_begin_twophase_rollback(self): + with testing.db.connect() as connection: + trans = connection.begin_twophase() + assert connection.connection._reset_agent is trans + trans.rollback() + assert connection.connection._reset_agent is None + class AutoRollbackTest(fixtures.TestBase): @classmethod @@ -504,7 +637,7 @@ class ExplicitAutoCommitTest(fixtures.TestBase): conn2.close() @testing.uses_deprecated(r'autocommit on select\(\) is deprecated', - r'autocommit\(\) is deprecated') + r'``autocommit\(\)`` is deprecated') def test_explicit_compiled_deprecated(self): conn1 = testing.db.connect() conn2 = testing.db.connect() @@ -1036,7 +1169,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.crashes('mssql', 'FIXME: unknown') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_queued_update(self): """Test SELECT FOR UPDATE with concurrent modifications. @@ -1101,7 +1233,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.crashes('mssql', 'FIXME: unknown') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_queued_select(self): """Simple SELECT FOR UPDATE conflict test""" @@ -1113,7 +1244,6 @@ class ForUpdateTest(fixtures.TestBase): @testing.fails_on('mysql', 'No support for NOWAIT') @testing.crashes('firebird', 'FIXME: unknown') @testing.crashes('sybase', 'FIXME: unknown') - @testing.crashes('access', 'FIXME: unknown') @testing.requires.independent_connections def test_nowait_select(self): """Simple SELECT FOR UPDATE NOWAIT conflict test""" diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py index 540f1623f..1f14d8164 100644 --- a/test/ext/declarative/test_basic.py +++ b/test/ext/declarative/test_basic.py @@ -4,14 +4,14 @@ from sqlalchemy.testing import eq_, assert_raises, \ from sqlalchemy.ext import declarative as decl from sqlalchemy import exc import sqlalchemy as sa -from sqlalchemy import testing +from sqlalchemy import testing, util from sqlalchemy import MetaData, Integer, String, ForeignKey, \ ForeignKeyConstraint, Index from sqlalchemy.testing.schema import Table, Column from sqlalchemy.orm import relationship, create_session, class_mapper, \ joinedload, configure_mappers, backref, clear_mappers, \ deferred, column_property, composite,\ - Session + Session, properties from sqlalchemy.testing import eq_ from sqlalchemy.util import classproperty, with_metaclass from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \ @@ -77,6 +77,26 @@ class DeclarativeTest(DeclarativeTestBase): eq_(a1, Address(email='two')) eq_(a1.user, User(name='u1')) + def test_unicode_string_resolve(self): + class User(Base, fixtures.ComparableEntity): + __tablename__ = 'users' + + id = Column('id', Integer, primary_key=True, + test_needs_autoincrement=True) + name = Column('name', String(50)) + addresses = relationship(util.u("Address"), backref="user") + + class Address(Base, fixtures.ComparableEntity): + __tablename__ = 'addresses' + + id = Column(Integer, primary_key=True, + test_needs_autoincrement=True) + email = Column(String(50), key='_email') + user_id = Column('user_id', Integer, ForeignKey('users.id'), + key='_user_id') + + assert User.addresses.property.mapper.class_ is Address + def test_no_table(self): def go(): class User(Base): @@ -123,6 +143,71 @@ class DeclarativeTest(DeclarativeTestBase): assert class_mapper(Bar).get_property('some_data').columns[0] \ is t.c.data + def test_column_named_twice(self): + def go(): + class Foo(Base): + __tablename__ = 'foo' + + id = Column(Integer, primary_key=True) + x = Column('x', Integer) + y = Column('x', Integer) + assert_raises_message( + sa.exc.SAWarning, + "On class 'Foo', Column object 'x' named directly multiple times, " + "only one will be used: x, y", + go + ) + + + def test_column_repeated_under_prop(self): + def go(): + class Foo(Base): + __tablename__ = 'foo' + + id = Column(Integer, primary_key=True) + x = Column('x', Integer) + y = column_property(x) + z = Column('x', Integer) + + assert_raises_message( + sa.exc.SAWarning, + "On class 'Foo', Column object 'x' named directly multiple times, " + "only one will be used: x, y, z", + go + ) + + def test_relationship_level_msg_for_invalid_callable(self): + class A(Base): + __tablename__ = 'a' + id = Column(Integer, primary_key=True) + class B(Base): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + a_id = Column(Integer, ForeignKey('a.id')) + a = relationship('a') + assert_raises_message( + sa.exc.ArgumentError, + "relationship 'a' expects a class or a mapper " + "argument .received: .*Table", + configure_mappers + ) + + def test_relationship_level_msg_for_invalid_object(self): + class A(Base): + __tablename__ = 'a' + id = Column(Integer, primary_key=True) + class B(Base): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + a_id = Column(Integer, ForeignKey('a.id')) + a = relationship(A.__table__) + assert_raises_message( + sa.exc.ArgumentError, + "relationship 'a' expects a class or a mapper " + "argument .received: .*Table", + configure_mappers + ) + def test_difficult_class(self): """test no getattr() errors with a customized class""" @@ -202,10 +287,10 @@ class DeclarativeTest(DeclarativeTestBase): user = relationship("User", primaryjoin=user_id == User.id, backref="addresses") - assert mapperlib._new_mappers is True + assert mapperlib.Mapper._new_mappers is True u = User() assert User.addresses - assert mapperlib._new_mappers is False + assert mapperlib.Mapper._new_mappers is False def test_string_dependency_resolution(self): from sqlalchemy.sql import desc @@ -707,6 +792,64 @@ class DeclarativeTest(DeclarativeTestBase): eq_(a1, Address(email='two')) eq_(a1.user, User(name='u1')) + def test_alt_name_attr_subclass_column_inline(self): + # [ticket:2900] + class A(Base): + __tablename__ = 'a' + id = Column('id', Integer, primary_key=True) + data = Column('data') + + class ASub(A): + brap = A.data + assert ASub.brap.property is A.data.property + assert isinstance(ASub.brap.original_property, properties.SynonymProperty) + + def test_alt_name_attr_subclass_relationship_inline(self): + # [ticket:2900] + class A(Base): + __tablename__ = 'a' + id = Column('id', Integer, primary_key=True) + b_id = Column(Integer, ForeignKey('b.id')) + b = relationship("B", backref="as_") + + class B(Base): + __tablename__ = 'b' + id = Column('id', Integer, primary_key=True) + + configure_mappers() + class ASub(A): + brap = A.b + assert ASub.brap.property is A.b.property + assert isinstance(ASub.brap.original_property, properties.SynonymProperty) + ASub(brap=B()) + + def test_alt_name_attr_subclass_column_attrset(self): + # [ticket:2900] + class A(Base): + __tablename__ = 'a' + id = Column('id', Integer, primary_key=True) + data = Column('data') + A.brap = A.data + assert A.brap.property is A.data.property + assert isinstance(A.brap.original_property, properties.SynonymProperty) + + def test_alt_name_attr_subclass_relationship_attrset(self): + # [ticket:2900] + class A(Base): + __tablename__ = 'a' + id = Column('id', Integer, primary_key=True) + b_id = Column(Integer, ForeignKey('b.id')) + b = relationship("B", backref="as_") + A.brap = A.b + class B(Base): + __tablename__ = 'b' + id = Column('id', Integer, primary_key=True) + + assert A.brap.property is A.b.property + assert isinstance(A.brap.original_property, properties.SynonymProperty) + A(brap=B()) + + def test_eager_order_by(self): class Address(Base, fixtures.ComparableEntity): @@ -1276,8 +1419,10 @@ class DeclarativeTest(DeclarativeTestBase): # case sa.orm.configure_mappers() - eq_(str(list(Address.user_id.property.columns[0].foreign_keys)[0]), - "ForeignKey('users.id')") + eq_( + list(Address.user_id.property.columns[0].foreign_keys)[0].column, + User.__table__.c.id + ) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), Address(email='two')]) diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py index 013439f93..f4bda6995 100644 --- a/test/ext/declarative/test_reflection.py +++ b/test/ext/declarative/test_reflection.py @@ -7,6 +7,8 @@ from sqlalchemy.orm import relationship, create_session, \ clear_mappers, \ Session from sqlalchemy.testing import fixtures +from sqlalchemy.testing.util import gc_collect +from sqlalchemy.ext.declarative.base import _DeferredMapperConfig class DeclarativeReflectionBase(fixtures.TablesTest): __requires__ = 'reflectable_autoincrement', @@ -47,9 +49,8 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): test_needs_fk=True, ) - def test_basic(self): - meta = MetaData(testing.db) + def test_basic(self): class User(Base, fixtures.ComparableEntity): __tablename__ = 'users' @@ -80,8 +81,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): eq_(a1.user, User(name='u1')) def test_rekey(self): - meta = MetaData(testing.db) - class User(Base, fixtures.ComparableEntity): __tablename__ = 'users' @@ -114,8 +113,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): assert_raises(TypeError, User, name='u3') def test_supplied_fk(self): - meta = MetaData(testing.db) - class IMHandle(Base, fixtures.ComparableEntity): __tablename__ = 'imhandles' @@ -151,9 +148,8 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): class DeferredReflectBase(DeclarativeReflectionBase): def teardown(self): - super(DeferredReflectBase,self).teardown() - from sqlalchemy.ext.declarative.base import _MapperConfig - _MapperConfig.configs.clear() + super(DeferredReflectBase, self).teardown() + _DeferredMapperConfig._configs.clear() Base = None @@ -275,7 +271,7 @@ class DeferredReflectionTest(DeferredReflectBase): @decl.declared_attr def __mapper_args__(cls): return { - "order_by":cls.__table__.c.name + "order_by": cls.__table__.c.name } decl.DeferredReflection.prepare(testing.db) @@ -297,6 +293,80 @@ class DeferredReflectionTest(DeferredReflectBase): ] ) + @testing.requires.predictable_gc + def test_cls_not_strong_ref(self): + class User(decl.DeferredReflection, fixtures.ComparableEntity, + Base): + __tablename__ = 'users' + class Address(decl.DeferredReflection, fixtures.ComparableEntity, + Base): + __tablename__ = 'addresses' + eq_(len(_DeferredMapperConfig._configs), 2) + del Address + gc_collect() + eq_(len(_DeferredMapperConfig._configs), 1) + decl.DeferredReflection.prepare(testing.db) + assert not _DeferredMapperConfig._configs + +class DeferredSecondaryReflectionTest(DeferredReflectBase): + @classmethod + def define_tables(cls, metadata): + Table('users', metadata, + Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), + Column('name', String(50)), test_needs_fk=True) + + Table('user_items', metadata, + Column('user_id', ForeignKey('users.id'), primary_key=True), + Column('item_id', ForeignKey('items.id'), primary_key=True), + test_needs_fk=True + ) + + Table('items', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(50)), + test_needs_fk=True + ) + + def _roundtrip(self): + + User = Base._decl_class_registry['User'] + Item = Base._decl_class_registry['Item'] + + u1 = User(name='u1', items=[Item(name='i1'), Item(name='i2')]) + + sess = Session() + sess.add(u1) + sess.commit() + + eq_(sess.query(User).all(), [User(name='u1', + items=[Item(name='i1'), Item(name='i2')])]) + + def test_string_resolution(self): + class User(decl.DeferredReflection, fixtures.ComparableEntity, Base): + __tablename__ = 'users' + + items = relationship("Item", secondary="user_items") + + class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base): + __tablename__ = 'items' + + decl.DeferredReflection.prepare(testing.db) + self._roundtrip() + + def test_table_resolution(self): + class User(decl.DeferredReflection, fixtures.ComparableEntity, Base): + __tablename__ = 'users' + + items = relationship("Item", secondary=Table("user_items", Base.metadata)) + + class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base): + __tablename__ = 'items' + + decl.DeferredReflection.prepare(testing.db) + self._roundtrip() + class DeferredInhReflectBase(DeferredReflectBase): def _roundtrip(self): Foo = Base._decl_class_registry['Foo'] @@ -338,11 +408,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} class Bar(Foo): - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} decl.DeferredReflection.prepare(testing.db) self._roundtrip() @@ -351,11 +421,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} class Bar(Foo): - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} bar_data = Column(String(30)) decl.DeferredReflection.prepare(testing.db) @@ -365,12 +435,12 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} id = Column(Integer, primary_key=True) class Bar(Foo): - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} decl.DeferredReflection.prepare(testing.db) self._roundtrip() @@ -395,12 +465,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} decl.DeferredReflection.prepare(testing.db) self._roundtrip() @@ -409,12 +479,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} bar_data = Column(String(30)) decl.DeferredReflection.prepare(testing.db) @@ -424,13 +494,13 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} id = Column(Integer, primary_key=True) class Bar(Foo): __tablename__ = 'bar' - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} decl.DeferredReflection.prepare(testing.db) self._roundtrip() @@ -439,12 +509,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'foo' - __mapper_args__ = {"polymorphic_on":"type", - "polymorphic_identity":"foo"} + __mapper_args__ = {"polymorphic_on": "type", + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' - __mapper_args__ = {"polymorphic_identity":"bar"} + __mapper_args__ = {"polymorphic_identity": "bar"} id = Column(Integer, ForeignKey('foo.id'), primary_key=True) decl.DeferredReflection.prepare(testing.db) diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py index 4cfb58481..3450eeb2f 100644 --- a/test/ext/test_associationproxy.py +++ b/test/ext/test_associationproxy.py @@ -9,7 +9,6 @@ from sqlalchemy.ext.associationproxy import * from sqlalchemy.ext.associationproxy import _AssociationList from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing.util import gc_collect -from sqlalchemy.sql import not_ from sqlalchemy.testing import fixtures, AssertsCompiledSQL from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column @@ -139,7 +138,7 @@ class _CollectionOperations(fixtures.TestBase): self.assert_(len(p1._children) == 0) self.assert_(len(p1.children) == 0) - p1.children = ['a','b','c'] + p1.children = ['a', 'b', 'c'] self.assert_(len(p1._children) == 3) self.assert_(len(p1.children) == 3) @@ -324,7 +323,7 @@ class CustomDictTest(DictTest): self.assert_(len(p1._children) == 3) self.assert_(len(p1.children) == 3) - self.assert_(set(p1.children) == set(['d','e','f'])) + self.assert_(set(p1.children) == set(['d', 'e', 'f'])) del ch p1 = self.roundtrip(p1) @@ -407,7 +406,7 @@ class SetTest(_CollectionOperations): self.assert_(len(p1._children) == 0) self.assert_(len(p1.children) == 0) - p1.children = ['a','b','c'] + p1.children = ['a', 'b', 'c'] self.assert_(len(p1._children) == 3) self.assert_(len(p1.children) == 3) @@ -421,13 +420,12 @@ class SetTest(_CollectionOperations): self.assert_('b' in p1.children) self.assert_('d' not in p1.children) - self.assert_(p1.children == set(['a','b','c'])) + self.assert_(p1.children == set(['a', 'b', 'c'])) - try: - p1.children.remove('d') - self.fail() - except KeyError: - pass + assert_raises( + KeyError, + p1.children.remove, "d" + ) self.assert_(len(p1.children) == 3) p1.children.discard('d') @@ -442,9 +440,9 @@ class SetTest(_CollectionOperations): self.assert_(len(p1.children) == 2) self.assert_(popped not in p1.children) - p1.children = ['a','b','c'] + p1.children = ['a', 'b', 'c'] p1 = self.roundtrip(p1) - self.assert_(p1.children == set(['a','b','c'])) + self.assert_(p1.children == set(['a', 'b', 'c'])) p1.children.discard('b') p1 = self.roundtrip(p1) @@ -476,12 +474,12 @@ class SetTest(_CollectionOperations): Parent, Child = self.Parent, self.Child p1 = Parent('P1') - p1.children = ['a','b','c'] - control = set(['a','b','c']) + p1.children = ['a', 'b', 'c'] + control = set(['a', 'b', 'c']) - for other in (set(['a','b','c']), set(['a','b','c','d']), - set(['a']), set(['a','b']), - set(['c','d']), set(['e', 'f', 'g']), + for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']), + set(['a']), set(['a', 'b']), + set(['c', 'd']), set(['e', 'f', 'g']), set()): eq_(p1.children.union(other), @@ -499,12 +497,12 @@ class SetTest(_CollectionOperations): eq_(p1.children.issuperset(other), control.issuperset(other)) - self.assert_((p1.children == other) == (control == other)) - self.assert_((p1.children != other) == (control != other)) - self.assert_((p1.children < other) == (control < other)) - self.assert_((p1.children <= other) == (control <= other)) - self.assert_((p1.children > other) == (control > other)) - self.assert_((p1.children >= other) == (control >= other)) + self.assert_((p1.children == other) == (control == other)) + self.assert_((p1.children != other) == (control != other)) + self.assert_((p1.children < other) == (control < other)) + self.assert_((p1.children <= other) == (control <= other)) + self.assert_((p1.children > other) == (control > other)) + self.assert_((p1.children >= other) == (control >= other)) def test_set_mutation(self): Parent, Child = self.Parent, self.Child @@ -513,9 +511,9 @@ class SetTest(_CollectionOperations): for op in ('update', 'intersection_update', 'difference_update', 'symmetric_difference_update'): for base in (['a', 'b', 'c'], []): - for other in (set(['a','b','c']), set(['a','b','c','d']), - set(['a']), set(['a','b']), - set(['c','d']), set(['e', 'f', 'g']), + for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']), + set(['a']), set(['a', 'b']), + set(['c', 'd']), set(['e', 'f', 'g']), set()): p = Parent('p') p.children = base[:] @@ -544,9 +542,9 @@ class SetTest(_CollectionOperations): # in-place mutations for op in ('|=', '-=', '&=', '^='): for base in (['a', 'b', 'c'], []): - for other in (set(['a','b','c']), set(['a','b','c','d']), - set(['a']), set(['a','b']), - set(['c','d']), set(['e', 'f', 'g']), + for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']), + set(['a']), set(['a', 'b']), + set(['c', 'd']), set(['e', 'f', 'g']), frozenset(['e', 'f', 'g']), set()): p = Parent('p') @@ -599,12 +597,11 @@ class CustomObjectTest(_CollectionOperations): # We didn't provide an alternate _AssociationList implementation # for our ObjectCollection, so indexing will fail. + assert_raises( + TypeError, + p.children.__getitem__, 1 + ) - try: - v = p.children[1] - self.fail() - except TypeError: - pass class ProxyFactoryTest(ListTest): def setup(self): @@ -669,8 +666,9 @@ class ProxyFactoryTest(ListTest): class ScalarTest(fixtures.TestBase): + @testing.provide_metadata def test_scalar_proxy(self): - metadata = MetaData(testing.db) + metadata = self.metadata parents_table = Table('Parent', metadata, Column('id', Integer, primary_key=True, @@ -718,12 +716,8 @@ class ScalarTest(fixtures.TestBase): p = Parent('p') - # No child - try: - v = p.foo - self.fail() - except: - pass + eq_(p.child, None) + eq_(p.foo, None) p.child = Child(foo='a', bar='b', baz='c') @@ -744,19 +738,13 @@ class ScalarTest(fixtures.TestBase): p.child = None - # No child again - try: - v = p.foo - self.fail() - except: - pass + eq_(p.foo, None) # Bogus creator for this scalar type - try: - p.foo = 'zzz' - self.fail() - except TypeError: - pass + assert_raises( + TypeError, + setattr, p, "foo", "zzz" + ) p.bar = 'yyy' @@ -786,6 +774,48 @@ class ScalarTest(fixtures.TestBase): p2 = Parent('p2') p2.bar = 'quux' + @testing.provide_metadata + def test_empty_scalars(self): + metadata = self.metadata + + a = Table('a', metadata, + Column('id', Integer, primary_key=True), + Column('name', String(50)) + ) + a2b = Table('a2b', metadata, + Column('id', Integer, primary_key=True), + Column('id_a', Integer, ForeignKey('a.id')), + Column('id_b', Integer, ForeignKey('b.id')), + Column('name', String(50)) + ) + b = Table('b', metadata, + Column('id', Integer, primary_key=True), + Column('name', String(50)) + ) + class A(object): + a2b_name = association_proxy("a2b_single", "name") + b_single = association_proxy("a2b_single", "b") + + class A2B(object): + pass + + class B(object): + pass + + mapper(A, a, properties=dict( + a2b_single=relationship(A2B, uselist=False) + )) + + mapper(A2B, a2b, properties=dict( + b=relationship(B) + )) + mapper(B, b) + + a1 = A() + assert a1.a2b_name is None + assert a1.b_single is None + + class LazyLoadTest(fixtures.TestBase): def setup(self): @@ -840,7 +870,7 @@ class LazyLoadTest(fixtures.TestBase): collection_class=list)}) p = Parent('p') - p.children = ['a','b','c'] + p.children = ['a', 'b', 'c'] p = self.roundtrip(p) @@ -858,7 +888,7 @@ class LazyLoadTest(fixtures.TestBase): collection_class=list)}) p = Parent('p') - p.children = ['a','b','c'] + p.children = ['a', 'b', 'c'] p = self.roundtrip(p) @@ -1024,7 +1054,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): Table('userkeywords', metadata, - Column('keyword_id', Integer,ForeignKey('keywords.id'), primary_key=True), + Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True), Column('user_id', Integer, ForeignKey('users.id')) ) Table('users', metadata, @@ -1094,15 +1124,15 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): cls.classes.Singular) mapper(User, users, properties={ - 'singular':relationship(Singular) + 'singular': relationship(Singular) }) mapper(Keyword, keywords, properties={ - 'user_keyword':relationship(UserKeyword, uselist=False) + 'user_keyword': relationship(UserKeyword, uselist=False) }) mapper(UserKeyword, userkeywords, properties={ - 'user' : relationship(User, backref='user_keywords'), - 'keyword' : relationship(Keyword) + 'user': relationship(User, backref='user_keywords'), + 'keyword': relationship(Keyword) }) mapper(Singular, singular, properties={ 'keywords': relationship(Keyword) @@ -1300,7 +1330,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): self.session.query(User).filter(User.singular_value == None), self.session.query(User).filter( or_( - User.singular.has(Singular.value==None), + User.singular.has(Singular.value == None), User.singular == None ) ) @@ -1324,7 +1354,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): self._equivalent( self.session.query(User).filter(User.singular_value == "singular4"), self.session.query(User).filter( - User.singular.has(Singular.value=="singular4"), + User.singular.has(Singular.value == "singular4"), ) ) @@ -1343,7 +1373,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): # a special case where we provide an empty has() on a # non-object-targeted association proxy. User = self.classes.User - Singular = self.classes.Singular + self.classes.Singular self._equivalent( self.session.query(User).filter(User.singular_value.has()), @@ -1356,7 +1386,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): # a special case where we provide an empty has() on a # non-object-targeted association proxy. User = self.classes.User - Singular = self.classes.Singular + self.classes.Singular self._equivalent( self.session.query(User).filter(~User.singular_value.has()), @@ -1368,7 +1398,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def test_has_criterion_nul(self): # but we don't allow that with any criterion... User = self.classes.User - Singular = self.classes.Singular + self.classes.Singular assert_raises_message( exc.ArgumentError, @@ -1380,7 +1410,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def test_has_kwargs_nul(self): # ... or kwargs User = self.classes.User - Singular = self.classes.Singular + self.classes.Singular assert_raises_message( exc.ArgumentError, @@ -1391,32 +1421,32 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def test_filter_scalar_contains_fails_nul_nul(self): Keyword = self.classes.Keyword - assert_raises(exc.InvalidRequestError, lambda : \ - Keyword.user.contains(self.u)) + assert_raises(exc.InvalidRequestError, + lambda: Keyword.user.contains(self.u)) def test_filter_scalar_any_fails_nul_nul(self): Keyword = self.classes.Keyword - assert_raises(exc.InvalidRequestError, lambda : \ - Keyword.user.any(name='user2')) + assert_raises(exc.InvalidRequestError, + lambda: Keyword.user.any(name='user2')) def test_filter_collection_has_fails_ul_nul(self): User = self.classes.User - assert_raises(exc.InvalidRequestError, lambda : \ - User.keywords.has(keyword='quick')) + assert_raises(exc.InvalidRequestError, + lambda: User.keywords.has(keyword='quick')) def test_filter_collection_eq_fails_ul_nul(self): User = self.classes.User - assert_raises(exc.InvalidRequestError, lambda : \ - User.keywords == self.kw) + assert_raises(exc.InvalidRequestError, + lambda: User.keywords == self.kw) def test_filter_collection_ne_fails_ul_nul(self): User = self.classes.User - assert_raises(exc.InvalidRequestError, lambda : \ - User.keywords != self.kw) + assert_raises(exc.InvalidRequestError, + lambda: User.keywords != self.kw) def test_join_separate_attr(self): User = self.classes.User @@ -1458,7 +1488,7 @@ class DictOfTupleUpdateTest(fixtures.TestBase): b = Table('b', m, Column('id', Integer, primary_key=True), Column('aid', Integer, ForeignKey('a.id'))) mapper(A, a, properties={ - 'orig':relationship(B, collection_class=attribute_mapped_collection('key')) + 'orig': relationship(B, collection_class=attribute_mapped_collection('key')) }) mapper(B, b) self.A = A @@ -1467,22 +1497,22 @@ class DictOfTupleUpdateTest(fixtures.TestBase): def test_update_one_elem_dict(self): a1 = self.A() a1.elements.update({("B", 3): 'elem2'}) - eq_(a1.elements, {("B",3):'elem2'}) + eq_(a1.elements, {("B", 3): 'elem2'}) def test_update_multi_elem_dict(self): a1 = self.A() a1.elements.update({("B", 3): 'elem2', ("C", 4): "elem3"}) - eq_(a1.elements, {("B",3):'elem2', ("C", 4): "elem3"}) + eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"}) def test_update_one_elem_list(self): a1 = self.A() a1.elements.update([(("B", 3), 'elem2')]) - eq_(a1.elements, {("B",3):'elem2'}) + eq_(a1.elements, {("B", 3): 'elem2'}) def test_update_multi_elem_list(self): a1 = self.A() a1.elements.update([(("B", 3), 'elem2'), (("C", 4), "elem3")]) - eq_(a1.elements, {("B",3):'elem2', ("C", 4): "elem3"}) + eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"}) def test_update_one_elem_varg(self): a1 = self.A() diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py new file mode 100644 index 000000000..9db85879d --- /dev/null +++ b/test/ext/test_automap.py @@ -0,0 +1,146 @@ +from sqlalchemy.testing import fixtures, eq_ +from ..orm._fixtures import FixtureTest +from sqlalchemy.ext.automap import automap_base +from sqlalchemy.orm import relationship, interfaces, backref +from sqlalchemy.ext.automap import generate_relationship +from sqlalchemy.testing.mock import Mock, call + +class AutomapTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + FixtureTest.define_tables(metadata) + + def test_relationship_o2m_default(self): + Base = automap_base(metadata=self.metadata) + Base.prepare() + + User = Base.classes.users + Address = Base.classes.addresses + + a1 = Address(email_address='e1') + u1 = User(name='u1', addresses_collection=[a1]) + assert a1.users is u1 + + def test_relationship_explicit_override_o2m(self): + Base = automap_base(metadata=self.metadata) + prop = relationship("addresses", collection_class=set) + class User(Base): + __tablename__ = 'users' + + addresses_collection = prop + + Base.prepare() + assert User.addresses_collection.property is prop + Address = Base.classes.addresses + + a1 = Address(email_address='e1') + u1 = User(name='u1', addresses_collection=set([a1])) + assert a1.user is u1 + + def test_relationship_explicit_override_m2o(self): + Base = automap_base(metadata=self.metadata) + + prop = relationship("users") + class Address(Base): + __tablename__ = 'addresses' + + users = prop + + Base.prepare() + User = Base.classes.users + + assert Address.users.property is prop + a1 = Address(email_address='e1') + u1 = User(name='u1', address_collection=[a1]) + assert a1.users is u1 + + + def test_relationship_self_referential(self): + Base = automap_base(metadata=self.metadata) + Base.prepare() + + Node = Base.classes.nodes + + n1 = Node() + n2 = Node() + n1.nodes_collection.append(n2) + assert n2.nodes is n1 + + def test_naming_schemes(self): + Base = automap_base(metadata=self.metadata) + + def classname_for_table(base, tablename, table): + return str("cls_" + tablename) + + def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + return "scalar_" + referred_cls.__name__ + + def name_for_collection_relationship(base, local_cls, referred_cls, constraint): + return "coll_" + referred_cls.__name__ + + Base.prepare( + classname_for_table=classname_for_table, + name_for_scalar_relationship=name_for_scalar_relationship, + name_for_collection_relationship=name_for_collection_relationship + ) + + User = Base.classes.cls_users + Address = Base.classes.cls_addresses + + u1 = User() + a1 = Address() + u1.coll_cls_addresses.append(a1) + assert a1.scalar_cls_users is u1 + + def test_relationship_m2m(self): + Base = automap_base(metadata=self.metadata) + + Base.prepare() + + Order, Item = Base.classes.orders, Base.classes['items'] + + o1 = Order() + i1 = Item() + o1.items_collection.append(i1) + assert o1 in i1.orders_collection + + def test_relationship_explicit_override_forwards_m2m(self): + Base = automap_base(metadata=self.metadata) + + class Order(Base): + __tablename__ = 'orders' + + items_collection = relationship("items", + secondary="order_items", + collection_class=set) + Base.prepare() + + Item = Base.classes['items'] + + o1 = Order() + i1 = Item() + o1.items_collection.add(i1) + + # it's 'order_collection' because the class name is + # "Order" ! + assert isinstance(i1.order_collection, list) + assert o1 in i1.order_collection + + def test_relationship_pass_params(self): + Base = automap_base(metadata=self.metadata) + + mock = Mock() + def _gen_relationship(base, direction, return_fn, attrname, + local_cls, referred_cls, **kw): + mock(base, direction, attrname) + return generate_relationship(base, direction, return_fn, + attrname, local_cls, referred_cls, **kw) + + Base.prepare(generate_relationship=_gen_relationship) + assert set(tuple(c[1]) for c in mock.mock_calls).issuperset([ + (Base, interfaces.MANYTOONE, "nodes"), + (Base, interfaces.MANYTOMANY, "keywords_collection"), + (Base, interfaces.MANYTOMANY, "items_collection"), + (Base, interfaces.MANYTOONE, "users"), + (Base, interfaces.ONETOMANY, "addresses_collection"), + ]) diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py index c1f8b6258..5ed50442f 100644 --- a/test/ext/test_compiler.py +++ b/test/ext/test_compiler.py @@ -4,7 +4,7 @@ from sqlalchemy.sql.expression import ClauseElement, ColumnClause,\ FunctionElement, Select, \ BindParameter -from sqlalchemy.schema import DDLElement +from sqlalchemy.schema import DDLElement, CreateColumn, CreateTable from sqlalchemy.ext.compiler import compiles, deregister from sqlalchemy import exc from sqlalchemy.sql import table, column, visitors @@ -34,6 +34,22 @@ class UserDefinedTest(fixtures.TestBase, AssertsCompiledSQL): "SELECT >>x<<, >>y<< WHERE >>MYTHINGY!<< = :MYTHINGY!_1" ) + def test_create_column_skip(self): + @compiles(CreateColumn) + def skip_xmin(element, compiler, **kw): + if element.element.name == 'xmin': + return None + else: + return compiler.visit_create_column(element, **kw) + + t = Table('t', MetaData(), Column('a', Integer), + Column('xmin', Integer), + Column('c', Integer)) + + self.assert_compile( + CreateTable(t), + "CREATE TABLE t (a INTEGER, c INTEGER)" + ) def test_types(self): class MyType(TypeEngine): pass diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py index a550ae4d0..7a733696a 100644 --- a/test/ext/test_extendedattr.py +++ b/test/ext/test_extendedattr.py @@ -61,6 +61,8 @@ class MyTypesManager(instrumentation.InstrumentationManager): class MyListLike(list): # add @appender, @remover decorators as needed _sa_iterator = list.__iter__ + _sa_linker = None + _sa_converter = None def _sa_appender(self, item, _sa_initiator=None): if _sa_initiator is not False: self._sa_adapter.fire_append_event(item, _sa_initiator) diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py index 25c182f1d..ee1b8075e 100644 --- a/test/ext/test_mutable.py +++ b/test/ext/test_mutable.py @@ -153,9 +153,6 @@ class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest): self._test_non_mutable() class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): - # json introduced in 2.6 - __skip_if__ = lambda: sys.version_info < (2, 6), - @classmethod def define_tables(cls, metadata): import json @@ -245,9 +242,6 @@ class MutableAssociationScalarPickleTest(_MutableDictTestBase, fixtures.MappedTe ) class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): - # json introduced in 2.6 - __skip_if__ = lambda: sys.version_info < (2, 6), - @classmethod def define_tables(cls, metadata): import json diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py index 84fff1304..ffeac55c1 100644 --- a/test/ext/test_serializer.py +++ b/test/ext/test_serializer.py @@ -1,13 +1,15 @@ +# coding: utf-8 from sqlalchemy.ext import serializer from sqlalchemy import testing from sqlalchemy import Integer, String, ForeignKey, select, \ - desc, func, util + desc, func, util, MetaData from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column from sqlalchemy.orm import relationship, sessionmaker, scoped_session, \ class_mapper, mapper, joinedload, configure_mappers, aliased -from sqlalchemy.testing import eq_ +from sqlalchemy.testing import eq_, AssertsCompiledSQL +from sqlalchemy.util import u, ue from sqlalchemy.testing import fixtures @@ -19,7 +21,7 @@ class Address(fixtures.ComparableEntity): users = addresses = Session = None -class SerializeTest(fixtures.MappedTest): +class SerializeTest(AssertsCompiledSQL, fixtures.MappedTest): run_setup_mappers = 'once' run_inserts = 'once' @@ -77,7 +79,6 @@ class SerializeTest(fixtures.MappedTest): assert serializer.loads(serializer.dumps(User.name, -1), None, None) is User.name - @testing.requires.python26 # crashes in 2.5 def test_expression(self): expr = \ select([users]).select_from(users.join(addresses)).limit(5) @@ -124,19 +125,20 @@ class SerializeTest(fixtures.MappedTest): eq_(q2.all(), [User(name='fred')]) eq_(list(q2.values(User.id, User.name)), [(9, 'fred')]) - @testing.requires.non_broken_pickle - def test_query_three(self): - ua = aliased(User) - q = \ - Session.query(ua).join(ua.addresses).\ - filter(Address.email.like('%fred%')) - q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, - Session) - eq_(q2.all(), [User(name='fred')]) - + # fails too often/randomly + #@testing.requires.non_broken_pickle + #def test_query_three(self): + # ua = aliased(User) + # q = \ + # Session.query(ua).join(ua.addresses).\ + # filter(Address.email.like('%fred%')) + # q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, + # Session) + # eq_(q2.all(), [User(name='fred')]) + # # try to pull out the aliased entity here... - ua_2 = q2._entities[0].entity_zero.entity - eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')]) + # ua_2 = q2._entities[0].entity_zero.entity + # eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')]) @testing.requires.non_broken_pickle def test_orm_join(self): @@ -149,7 +151,6 @@ class SerializeTest(fixtures.MappedTest): assert j2.right is j.right assert j2._target_adapter._next - @testing.requires.python26 # namedtuple workaround not serializable in 2.5 @testing.exclude('sqlite', '<=', (3, 5, 9), 'id comparison failing on the buildbot') def test_aliases(self): @@ -172,6 +173,22 @@ class SerializeTest(fixtures.MappedTest): x = serializer.loads(ser, users.metadata) eq_(str(r), str(x)) + def test_unicode(self): + m = MetaData() + t = Table(ue('\u6e2c\u8a66'), m, + Column(ue('\u6e2c\u8a66_id'), Integer)) + + expr = select([t]).where(t.c[ue('\u6e2c\u8a66_id')] == 5) + + expr2 = serializer.loads(serializer.dumps(expr, -1), m) + + self.assert_compile( + expr2, + ue('SELECT "\u6e2c\u8a66"."\u6e2c\u8a66_id" FROM "\u6e2c\u8a66" ' + 'WHERE "\u6e2c\u8a66"."\u6e2c\u8a66_id" = :\u6e2c\u8a66_id_1'), + dialect="default" + ) + if __name__ == '__main__': testing.main() diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py index c21833619..0f6e522d4 100644 --- a/test/orm/_fixtures.py +++ b/test/orm/_fixtures.py @@ -64,10 +64,13 @@ class FixtureTest(fixtures.MappedTest): cls.classes.CompositePk, cls.tables.nodes, \ cls.classes.Order, cls.tables.orders, cls.tables.addresses - mapper(User, users, properties={ - 'addresses':relationship(Address, backref='user', order_by=addresses.c.id), - 'orders':relationship(Order, backref='user', order_by=orders.c.id), # o2m, m2o - }) + # use OrderedDict on this one to support some tests that + # assert the order of attributes (e.g. orm/test_inspect) + mapper(User, users, properties=util.OrderedDict( + [('addresses', relationship(Address, backref='user', order_by=addresses.c.id)), + ('orders', relationship(Order, backref='user', order_by=orders.c.id)), # o2m, m2o + ] + )) mapper(Address, addresses, properties={ 'dingaling':relationship(Dingaling, uselist=False, backref="address") #o2o }) diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index d05a22f39..da0e3b1a3 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -16,6 +16,7 @@ from test.orm import _fixtures from sqlalchemy.testing import eq_ from sqlalchemy.testing.schema import Table, Column + class AttrSettable(object): def __init__(self, **kwargs): [setattr(self, k, v) for k, v in kwargs.items()] diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index 41a167e72..1737d1ccb 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -964,7 +964,6 @@ class EagerLazyTest(fixtures.MappedTest): Column('foo_id', Integer, ForeignKey('foo.id')) ) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): class Foo(object): pass class Bar(Foo): pass diff --git a/test/orm/inheritance/test_manytomany.py b/test/orm/inheritance/test_manytomany.py index 31c4ba40a..51b797940 100644 --- a/test/orm/inheritance/test_manytomany.py +++ b/test/orm/inheritance/test_manytomany.py @@ -201,7 +201,6 @@ class InheritTest3(fixtures.MappedTest): found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos]) eq_(found, compare) - @testing.fails_on('maxdb', 'FIXME: unknown') def testadvanced(self): class Foo(object): def __init__(self, data=None): diff --git a/test/orm/inheritance/test_poly_linked_list.py b/test/orm/inheritance/test_poly_linked_list.py index 1915007de..ec263b3b0 100644 --- a/test/orm/inheritance/test_poly_linked_list.py +++ b/test/orm/inheritance/test_poly_linked_list.py @@ -115,19 +115,15 @@ class PolymorphicCircularTest(fixtures.MappedTest): configure_mappers() assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key - @testing.fails_on('maxdb', 'FIXME: unknown') def testone(self): self._testlist([Table1, Table2, Table1, Table2]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testtwo(self): self._testlist([Table3]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testthree(self): self._testlist([Table2, Table1, Table1B, Table3, Table3, Table1B, Table1B, Table2, Table1]) - @testing.fails_on('maxdb', 'FIXME: unknown') def testfour(self): self._testlist([ Table2('t2', [Data('data1'), Data('data2')]), diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py index ecb4bf407..db2cd1ec6 100644 --- a/test/orm/inheritance/test_relationship.py +++ b/test/orm/inheritance/test_relationship.py @@ -154,6 +154,7 @@ class SelfReferentialJ2JTest(fixtures.MappedTest): managers.c.person_id == engineers.c.reports_to_id, backref='engineers')}) + def test_has(self): m1 = Manager(name='dogbert') e1 = Engineer(name='dilbert', primary_language='java', reports_to=m1) @@ -415,7 +416,6 @@ class M2MFilterTest(fixtures.MappedTest): sess = create_session() e1 = sess.query(Person).filter(Engineer.name == 'e1').one() - # this works eq_(sess.query(Organization) .filter(~Organization.engineers .of_type(Engineer) @@ -1354,9 +1354,144 @@ class SubClassToSubClassMultiTest(AssertsCompiledSQL, fixtures.MappedTest): "JOIN ep2 ON anon_1.base2_id = ep2.base2_id" ) +class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest, + testing.AssertsCompiledSQL): + """test long join paths with a joined-inh in the middle, where we go multiple + times across the same joined-inh to the same target but with other classes + in the middle. E.g. test [ticket:2908] + """ + + + run_setup_mappers = 'once' + __dialect__ = 'default' + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class Root(Base): + __tablename__ = 'root' + + id = Column(Integer, primary_key=True) + sub1_id = Column(Integer, ForeignKey('sub1.id')) + + intermediate = relationship("Intermediate") + sub1 = relationship("Sub1") + + class Intermediate(Base): + __tablename__ = 'intermediate' + + id = Column(Integer, primary_key=True) + sub1_id = Column(Integer, ForeignKey('sub1.id')) + root_id = Column(Integer, ForeignKey('root.id')) + sub1 = relationship("Sub1") + + class Parent(Base): + __tablename__ = 'parent' + + id = Column(Integer, primary_key=True) + + class Sub1(Parent): + __tablename__ = 'sub1' + id = Column(Integer, ForeignKey('parent.id'), + primary_key=True) + + target = relationship("Target") + + class Target(Base): + __tablename__ = 'target' + id = Column(Integer, primary_key=True) + sub1_id = Column(Integer, ForeignKey('sub1.id')) + + def test_join(self): + Root, Intermediate, Sub1, Target = \ + self.classes.Root, self.classes.Intermediate, \ + self.classes.Sub1, self.classes.Target + s1_alias = aliased(Sub1) + s2_alias = aliased(Sub1) + t1_alias = aliased(Target) + t2_alias = aliased(Target) + + sess = Session() + q = sess.query(Root).\ + join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\ + join(Root.intermediate).join(s2_alias, Intermediate.sub1).\ + join(t2_alias, s2_alias.target) + self.assert_compile(q, + "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id " + "FROM root " + "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id " + "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_1 " + "ON anon_1.sub1_id = root.sub1_id " + "JOIN target AS target_1 ON anon_1.sub1_id = target_1.sub1_id " + "JOIN intermediate ON root.id = intermediate.root_id " + "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id " + "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_2 " + "ON anon_2.sub1_id = intermediate.sub1_id " + "JOIN target AS target_2 ON anon_2.sub1_id = target_2.sub1_id") + + def test_join_flat(self): + Root, Intermediate, Sub1, Target = \ + self.classes.Root, self.classes.Intermediate, \ + self.classes.Sub1, self.classes.Target + s1_alias = aliased(Sub1, flat=True) + s2_alias = aliased(Sub1, flat=True) + t1_alias = aliased(Target) + t2_alias = aliased(Target) + + sess = Session() + q = sess.query(Root).\ + join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\ + join(Root.intermediate).join(s2_alias, Intermediate.sub1).\ + join(t2_alias, s2_alias.target) + self.assert_compile(q, + "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id " + "FROM root " + "JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 ON parent_1.id = sub1_1.id) " + "ON sub1_1.id = root.sub1_id " + "JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id " + "JOIN intermediate ON root.id = intermediate.root_id " + "JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 ON parent_2.id = sub1_2.id) " + "ON sub1_2.id = intermediate.sub1_id " + "JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id" + ) + + def test_joinedload(self): + Root, Intermediate, Sub1, Target = \ + self.classes.Root, self.classes.Intermediate, \ + self.classes.Sub1, self.classes.Target + + sess = Session() + q = sess.query(Root).\ + options( + joinedload(Root.sub1).joinedload(Sub1.target), + joinedload(Root.intermediate).joinedload(Intermediate.sub1).\ + joinedload(Sub1.target), + ) + self.assert_compile(q, + "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id, " + "target_1.id AS target_1_id, target_1.sub1_id AS target_1_sub1_id, " + "sub1_1.id AS sub1_1_id, parent_1.id AS parent_1_id, " + "intermediate_1.id AS intermediate_1_id, " + "intermediate_1.sub1_id AS intermediate_1_sub1_id, " + "intermediate_1.root_id AS intermediate_1_root_id, " + "target_2.id AS target_2_id, target_2.sub1_id AS target_2_sub1_id, " + "sub1_2.id AS sub1_2_id, parent_2.id AS parent_2_id " + "FROM root " + "LEFT OUTER JOIN intermediate AS intermediate_1 " + "ON root.id = intermediate_1.root_id " + "LEFT OUTER JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 " + "ON parent_1.id = sub1_1.id) ON sub1_1.id = intermediate_1.sub1_id " + "LEFT OUTER JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id " + "LEFT OUTER JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 " + "ON parent_2.id = sub1_2.id) ON sub1_2.id = root.sub1_id " + "LEFT OUTER JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id") + + class MultipleAdaptUsesEntityOverTableTest(AssertsCompiledSQL, fixtures.MappedTest): __dialect__ = 'default' run_create_tables = None + run_deletes = None @classmethod def define_tables(cls, metadata): diff --git a/test/orm/inheritance/test_selects.py b/test/orm/inheritance/test_selects.py index dd9c8c8b8..94f5faf8f 100644 --- a/test/orm/inheritance/test_selects.py +++ b/test/orm/inheritance/test_selects.py @@ -1,50 +1,101 @@ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import String, Integer, ForeignKey, select +from sqlalchemy.orm import mapper, Session from sqlalchemy import testing -from sqlalchemy.testing import fixtures +from sqlalchemy.testing import fixtures, eq_ +from sqlalchemy.testing.schema import Table, Column class InheritingSelectablesTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - global foo, bar, baz foo = Table('foo', metadata, Column('a', String(30), primary_key=1), Column('b', String(30), nullable=0)) - bar = foo.select(foo.c.b == 'bar').alias('bar') - baz = foo.select(foo.c.b == 'baz').alias('baz') + cls.tables.bar = foo.select(foo.c.b == 'bar').alias('bar') + cls.tables.baz = foo.select(foo.c.b == 'baz').alias('baz') def test_load(self): + foo, bar, baz = self.tables.foo, self.tables.bar, self.tables.baz # TODO: add persistence test also testing.db.execute(foo.insert(), a='not bar', b='baz') testing.db.execute(foo.insert(), a='also not bar', b='baz') testing.db.execute(foo.insert(), a='i am bar', b='bar') testing.db.execute(foo.insert(), a='also bar', b='bar') - class Foo(fixtures.ComparableEntity): pass - class Bar(Foo): pass - class Baz(Foo): pass + class Foo(fixtures.ComparableEntity): + pass + class Bar(Foo): + pass + class Baz(Foo): + pass mapper(Foo, foo, polymorphic_on=foo.c.b) mapper(Baz, baz, - with_polymorphic=('*', foo.join(baz, foo.c.b=='baz').alias('baz')), + with_polymorphic=('*', foo.join(baz, foo.c.b == 'baz').alias('baz')), inherits=Foo, - inherit_condition=(foo.c.a==baz.c.a), + inherit_condition=(foo.c.a == baz.c.a), inherit_foreign_keys=[baz.c.a], polymorphic_identity='baz') mapper(Bar, bar, - with_polymorphic=('*', foo.join(bar, foo.c.b=='bar').alias('bar')), + with_polymorphic=('*', foo.join(bar, foo.c.b == 'bar').alias('bar')), inherits=Foo, - inherit_condition=(foo.c.a==bar.c.a), + inherit_condition=(foo.c.a == bar.c.a), inherit_foreign_keys=[bar.c.a], polymorphic_identity='bar') - s = sessionmaker(bind=testing.db)() + s = Session() assert [Baz(), Baz(), Bar(), Bar()] == s.query(Foo).order_by(Foo.b.desc()).all() assert [Bar(), Bar()] == s.query(Bar).all() + +class JoinFromSelectPersistenceTest(fixtures.MappedTest): + """test for [ticket:2885]""" + + @classmethod + def define_tables(cls, metadata): + Table('base', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('type', String(50)) + ) + Table('child', metadata, + # 1. name of column must be different, so that we rely on + # mapper._table_to_equated to link the two cols + Column('child_id', Integer, ForeignKey('base.id'), primary_key=True), + Column('name', String(50)) + ) + + @classmethod + def setup_classes(cls): + class Base(cls.Comparable): + pass + class Child(Base): + pass + + def test_map_to_select(self): + Base, Child = self.classes.Base, self.classes.Child + base, child = self.tables.base, self.tables.child + + base_select = select([base]).alias() + mapper(Base, base_select, polymorphic_on=base_select.c.type, + polymorphic_identity='base') + mapper(Child, child, inherits=Base, + polymorphic_identity='child') + + sess = Session() + + # 2. use an id other than "1" here so can't rely on + # the two inserts having the same id + c1 = Child(id=12, name='c1') + sess.add(c1) + + sess.commit() + sess.close() + + c1 = sess.query(Child).one() + eq_(c1.name, 'c1') diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py index 83fccbf7a..09c8ea732 100644 --- a/test/orm/test_assorted_eager.py +++ b/test/orm/test_assorted_eager.py @@ -120,8 +120,8 @@ class EagerTest(fixtures.MappedTest): self.tables.categories) # I want to display a list of tests owned by owner 1 - # if someoption is false or he hasn't specified it yet (null) - # but not if he set it to true (example someoption is for hiding) + # if someoption is false or they haven't specified it yet (null) + # but not if they set it to true (example someoption is for hiding) # desired output for owner 1 # test_id, cat_name @@ -286,7 +286,6 @@ class EagerTest2(fixtures.MappedTest): lazy='joined', backref=backref('middle', lazy='joined')))), - @testing.fails_on('maxdb', 'FIXME: unknown') def test_eager_terminate(self): """Eager query generation does not include the same mapper's table twice. @@ -339,7 +338,6 @@ class EagerTest3(fixtures.MappedTest): class Stat(cls.Basic): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_nesting_with_functions(self): Stat, Foo, stats, foo, Data, datas = (self.classes.Stat, self.classes.Foo, @@ -423,7 +421,6 @@ class EagerTest4(fixtures.MappedTest): class Employee(cls.Basic): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): Department, Employee, employees, departments = (self.classes.Department, self.classes.Employee, @@ -774,7 +771,6 @@ class EagerTest8(fixtures.MappedTest): class Joined(cls.Comparable): pass - @testing.fails_on('maxdb', 'FIXME: unknown') def test_nested_joins(self): task, Task_Type, Joined, prj, task_type, msg = (self.tables.task, self.classes.Task_Type, @@ -867,7 +863,6 @@ class EagerTest9(fixtures.MappedTest): backref=backref('entries', lazy='joined', order_by=entries.c.entry_id)))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_joinedload_on_path(self): Entry, Account, Transaction = (self.classes.Entry, self.classes.Account, diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index 4bcecb71b..c282bc44c 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -294,6 +294,7 @@ class AttributesTest(fixtures.ORMTest): assert state.obj() is None assert state.dict == {} + @testing.requires.predictable_gc def test_object_dereferenced_error(self): class Foo(object): pass @@ -317,7 +318,8 @@ class AttributesTest(fixtures.ORMTest): ) def test_deferred(self): - class Foo(object):pass + class Foo(object): + pass data = {'a':'this is a', 'b':12} def loader(state, keys): @@ -1162,12 +1164,8 @@ class BackrefTest(fixtures.ORMTest): p2.children.append(c1) assert c1.parent is p2 - # note its still in p1.children - - # the event model currently allows only - # one level deep. without the parent_token, - # it keeps going until a ValueError is raised - # and this condition changes. - assert c1 in p1.children + # event propagates to remove as of [ticket:2789] + assert c1 not in p1.children class CyclicBackrefAssertionTest(fixtures.TestBase): """test that infinite recursion due to incorrect backref assignments @@ -1341,7 +1339,7 @@ class PendingBackrefTest(fixtures.ORMTest): ] ) - def test_lazy_history(self): + def test_lazy_history_collection(self): Post, Blog, lazy_posts = self._fixture() p1, p2, p3 = Post("post 1"), Post("post 2"), Post("post 3") @@ -1513,6 +1511,12 @@ class HistoryTest(fixtures.TestBase): return Foo, Bar def _someattr_history(self, f, **kw): + passive = kw.pop('passive', None) + if passive is True: + kw['passive'] = attributes.PASSIVE_NO_INITIALIZE + elif passive is False: + kw['passive'] = attributes.PASSIVE_OFF + return attributes.get_state_history( attributes.instance_state(f), 'someattr', **kw) @@ -1687,19 +1691,19 @@ class HistoryTest(fixtures.TestBase): Foo = self._fixture(uselist=True, useobject=True, active_history=True) f = Foo() - eq_(self._someattr_history(f, passive=True), ((), (), ())) + eq_(self._someattr_history(f, passive=True), (None, None, None)) def test_scalar_obj_never_set(self): Foo = self._fixture(uselist=False, useobject=True, active_history=True) f = Foo() - eq_(self._someattr_history(f, passive=True), ((), (), ())) + eq_(self._someattr_history(f, passive=True), (None, None, None)) def test_scalar_never_set(self): Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() - eq_(self._someattr_history(f, passive=True), ((), (), ())) + eq_(self._someattr_history(f, passive=True), (None, None, None)) def test_scalar_active_set(self): Foo = self._fixture(uselist=False, useobject=False, @@ -1795,6 +1799,24 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['two'], (), ())) + def test_scalar_passive_flag(self): + Foo = self._fixture(uselist=False, useobject=False, + active_history=True) + f = Foo() + f.someattr = 'one' + eq_(self._someattr_history(f), (['one'], (), ())) + + self._commit_someattr(f) + + state = attributes.instance_state(f) + state._expire_attribute_pre_commit(state.dict, 'someattr') + + def scalar_loader(state, toload): + state.dict['someattr'] = 'one' + state.manager.deferred_scalar_loader = scalar_loader + + eq_(self._someattr_history(f), ((), ['one'], ())) + def test_scalar_inplace_mutation_set(self): Foo = self._fixture(uselist=False, useobject=False, @@ -1850,6 +1872,7 @@ class HistoryTest(fixtures.TestBase): f.someattr = ['a'] eq_(self._someattr_history(f), ([['a']], (), ())) + def test_use_object_init(self): Foo, Bar = self._two_obj_fixture(uselist=False) f = Foo() diff --git a/test/orm/test_backref_mutations.py b/test/orm/test_backref_mutations.py index 925eedfa9..e9448d41c 100644 --- a/test/orm/test_backref_mutations.py +++ b/test/orm/test_backref_mutations.py @@ -75,10 +75,8 @@ class O2MCollectionTest(_fixtures.FixtureTest): # backref fires assert a1.user is u2 - # doesn't extend to the previous collection tho, - # which was already loaded. - # flushing at this point means its anyone's guess. - assert a1 in u1.addresses + # a1 removed from u1.addresses as of [ticket:2789] + assert a1 not in u1.addresses assert a1 in u2.addresses def test_collection_move_notloaded(self): @@ -699,9 +697,8 @@ class O2MStaleBackrefTest(_fixtures.FixtureTest): u1.addresses.append(a1) u2.addresses.append(a1) - # events haven't updated - # u1.addresses here. - u1.addresses.remove(a1) + # a1 removed from u1.addresses as of [ticket:2789] + assert a1 not in u1.addresses assert a1.user is u2 assert a1 in u2.addresses diff --git a/test/orm/test_bundle.py b/test/orm/test_bundle.py new file mode 100644 index 000000000..29b8e9382 --- /dev/null +++ b/test/orm/test_bundle.py @@ -0,0 +1,289 @@ +from sqlalchemy.testing import fixtures, eq_ +from sqlalchemy.testing.schema import Table, Column +from sqlalchemy.orm import Bundle, Session +from sqlalchemy.testing import AssertsCompiledSQL +from sqlalchemy import Integer, select, ForeignKey, String, func +from sqlalchemy.orm import mapper, relationship, aliased + +class BundleTest(fixtures.MappedTest, AssertsCompiledSQL): + __dialect__ = 'default' + + run_inserts = 'once' + run_setup_mappers = 'once' + run_deletes = None + + @classmethod + def define_tables(cls, metadata): + Table('data', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('d1', String(10)), + Column('d2', String(10)), + Column('d3', String(10)) + ) + + Table('other', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column('data_id', ForeignKey('data.id')), + Column('o1', String(10)) + ) + + @classmethod + def setup_classes(cls): + class Data(cls.Basic): + pass + class Other(cls.Basic): + pass + + @classmethod + def setup_mappers(cls): + mapper(cls.classes.Data, cls.tables.data, properties={ + 'others': relationship(cls.classes.Other) + }) + mapper(cls.classes.Other, cls.tables.other) + + @classmethod + def insert_data(cls): + sess = Session() + sess.add_all([ + cls.classes.Data(d1='d%dd1' % i, d2='d%dd2' % i, d3='d%dd3' % i, + others=[cls.classes.Other(o1="d%do%d" % (i, j)) for j in range(5)]) + for i in range(10) + ]) + sess.commit() + + def test_c_attr(self): + Data = self.classes.Data + + b1 = Bundle('b1', Data.d1, Data.d2) + + self.assert_compile( + select([b1.c.d1, b1.c.d2]), + "SELECT data.d1, data.d2 FROM data" + ) + + def test_result(self): + Data = self.classes.Data + sess = Session() + + b1 = Bundle('b1', Data.d1, Data.d2) + + eq_( + sess.query(b1).filter(b1.c.d1.between('d3d1', 'd5d1')).all(), + [(('d3d1', 'd3d2'),), (('d4d1', 'd4d2'),), (('d5d1', 'd5d2'),)] + ) + + def test_subclass(self): + Data = self.classes.Data + sess = Session() + + class MyBundle(Bundle): + def create_row_processor(self, query, procs, labels): + def proc(row, result): + return dict( + zip(labels, (proc(row, result) for proc in procs)) + ) + return proc + + b1 = MyBundle('b1', Data.d1, Data.d2) + + eq_( + sess.query(b1).filter(b1.c.d1.between('d3d1', 'd5d1')).all(), + [({'d2': 'd3d2', 'd1': 'd3d1'},), + ({'d2': 'd4d2', 'd1': 'd4d1'},), + ({'d2': 'd5d2', 'd1': 'd5d1'},)] + ) + + def test_multi_bundle(self): + Data = self.classes.Data + Other = self.classes.Other + + d1 = aliased(Data) + + b1 = Bundle('b1', d1.d1, d1.d2) + b2 = Bundle('b2', Data.d1, Other.o1) + + sess = Session() + + q = sess.query(b1, b2).join(Data.others).join(d1, d1.id == Data.id).\ + filter(b1.c.d1 == 'd3d1') + eq_( + q.all(), + [ + (('d3d1', 'd3d2'), ('d3d1', 'd3o0')), + (('d3d1', 'd3d2'), ('d3d1', 'd3o1')), + (('d3d1', 'd3d2'), ('d3d1', 'd3o2')), + (('d3d1', 'd3d2'), ('d3d1', 'd3o3')), + (('d3d1', 'd3d2'), ('d3d1', 'd3o4'))] + ) + + def test_single_entity(self): + Data = self.classes.Data + sess = Session() + + b1 = Bundle('b1', Data.d1, Data.d2, single_entity=True) + + eq_( + sess.query(b1). + filter(b1.c.d1.between('d3d1', 'd5d1')). + all(), + [('d3d1', 'd3d2'), ('d4d1', 'd4d2'), ('d5d1', 'd5d2')] + ) + + def test_single_entity_flag_but_multi_entities(self): + Data = self.classes.Data + sess = Session() + + b1 = Bundle('b1', Data.d1, Data.d2, single_entity=True) + b2 = Bundle('b1', Data.d3, single_entity=True) + + eq_( + sess.query(b1, b2). + filter(b1.c.d1.between('d3d1', 'd5d1')). + all(), + [ + (('d3d1', 'd3d2'), ('d3d3',)), + (('d4d1', 'd4d2'), ('d4d3',)), + (('d5d1', 'd5d2'), ('d5d3',)) + ] + ) + + def test_bundle_nesting(self): + Data = self.classes.Data + sess = Session() + + b1 = Bundle('b1', Data.d1, Bundle('b2', Data.d2, Data.d3)) + + eq_( + sess.query(b1). + filter(b1.c.d1.between('d3d1', 'd7d1')). + filter(b1.c.b2.c.d2.between('d4d2', 'd6d2')). + all(), + [(('d4d1', ('d4d2', 'd4d3')),), (('d5d1', ('d5d2', 'd5d3')),), + (('d6d1', ('d6d2', 'd6d3')),)] + ) + + def test_bundle_nesting_unions(self): + Data = self.classes.Data + sess = Session() + + b1 = Bundle('b1', Data.d1, Bundle('b2', Data.d2, Data.d3)) + + q1 = sess.query(b1).\ + filter(b1.c.d1.between('d3d1', 'd7d1')).\ + filter(b1.c.b2.c.d2.between('d4d2', 'd5d2')) + + q2 = sess.query(b1).\ + filter(b1.c.d1.between('d3d1', 'd7d1')).\ + filter(b1.c.b2.c.d2.between('d5d2', 'd6d2')) + + eq_( + q1.union(q2).all(), + [(('d4d1', ('d4d2', 'd4d3')),), (('d5d1', ('d5d2', 'd5d3')),), + (('d6d1', ('d6d2', 'd6d3')),)] + ) + + # naming structure is preserved + row = q1.union(q2).first() + eq_(row.b1.d1, 'd4d1') + eq_(row.b1.b2.d2, 'd4d2') + + + def test_query_count(self): + Data = self.classes.Data + b1 = Bundle('b1', Data.d1, Data.d2) + eq_(Session().query(b1).count(), 10) + + def test_join_relationship(self): + Data = self.classes.Data + + sess = Session() + b1 = Bundle('b1', Data.d1, Data.d2) + q = sess.query(b1).join(Data.others) + self.assert_compile(q, + "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data " + "JOIN other ON data.id = other.data_id" + ) + + def test_join_selectable(self): + Data = self.classes.Data + Other = self.classes.Other + + sess = Session() + b1 = Bundle('b1', Data.d1, Data.d2) + q = sess.query(b1).join(Other) + self.assert_compile(q, + "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data " + "JOIN other ON data.id = other.data_id" + ) + + + def test_joins_from_adapted_entities(self): + Data = self.classes.Data + + # test for #1853 in terms of bundles + # specifically this exercises adapt_to_selectable() + + b1 = Bundle('b1', Data.id, Data.d1, Data.d2) + + session = Session() + first = session.query(b1) + second = session.query(b1) + unioned = first.union(second) + subquery = session.query(Data.id).subquery() + joined = unioned.outerjoin(subquery, subquery.c.id == Data.id) + joined = joined.order_by(Data.id, Data.d1, Data.d2) + + self.assert_compile( + joined, + "SELECT anon_1.data_id AS anon_1_data_id, anon_1.data_d1 AS anon_1_data_d1, " + "anon_1.data_d2 AS anon_1_data_d2 FROM " + "(SELECT data.id AS data_id, data.d1 AS data_d1, data.d2 AS data_d2 FROM " + "data UNION SELECT data.id AS data_id, data.d1 AS data_d1, " + "data.d2 AS data_d2 FROM data) AS anon_1 " + "LEFT OUTER JOIN (SELECT data.id AS id FROM data) AS anon_2 " + "ON anon_2.id = anon_1.data_id " + "ORDER BY anon_1.data_id, anon_1.data_d1, anon_1.data_d2") + + # tuple nesting still occurs + eq_( + joined.all(), + [((1, 'd0d1', 'd0d2'),), ((2, 'd1d1', 'd1d2'),), + ((3, 'd2d1', 'd2d2'),), ((4, 'd3d1', 'd3d2'),), + ((5, 'd4d1', 'd4d2'),), ((6, 'd5d1', 'd5d2'),), + ((7, 'd6d1', 'd6d2'),), ((8, 'd7d1', 'd7d2'),), + ((9, 'd8d1', 'd8d2'),), ((10, 'd9d1', 'd9d2'),)] + ) + + def test_filter_by(self): + Data = self.classes.Data + + b1 = Bundle('b1', Data.id, Data.d1, Data.d2) + + sess = Session() + + self.assert_compile( + sess.query(b1).filter_by(d1='d1'), + "SELECT data.id AS data_id, data.d1 AS data_d1, " + "data.d2 AS data_d2 FROM data WHERE data.d1 = :d1_1" + ) + + def test_clause_expansion(self): + Data = self.classes.Data + + b1 = Bundle('b1', Data.id, Data.d1, Data.d2) + + sess = Session() + self.assert_compile( + sess.query(Data).order_by(b1), + "SELECT data.id AS data_id, data.d1 AS data_d1, " + "data.d2 AS data_d2, data.d3 AS data_d3 FROM data " + "ORDER BY data.id, data.d1, data.d2" + ) + + self.assert_compile( + sess.query(func.row_number().over(order_by=b1)), + "SELECT row_number() OVER (ORDER BY data.id, data.d1, data.d2) " + "AS anon_1 FROM data" + ) + diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py index 12196b4e7..615ae815d 100644 --- a/test/orm/test_cascade.py +++ b/test/orm/test_cascade.py @@ -1217,7 +1217,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest): sess.flush() sess.close() - @testing.fails_on('maxdb', 'FIXME: unknown') def test_orphan(self): prefs, User, extra = (self.tables.prefs, self.classes.User, @@ -1282,7 +1281,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest): assert p2 in sess sess.commit() - @testing.fails_on('maxdb', 'FIXME: unknown') def test_orphan_on_update(self): prefs, User, extra = (self.tables.prefs, self.classes.User, @@ -1715,7 +1713,7 @@ class M2MCascadeTest(fixtures.MappedTest): a1.bs.remove(b1) sess.flush() - assert atob.count().scalar() ==0 + assert atob.count().scalar() == 0 assert b.count().scalar() == 0 assert a.count().scalar() == 1 diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py index c9f9f6951..f94c742b3 100644 --- a/test/orm/test_collection.py +++ b/test/orm/test_collection.py @@ -128,9 +128,9 @@ class CollectionsTest(fixtures.ORMTest): control = list() def assert_eq(): - self.assert_(set(direct) == canary.data) - self.assert_(set(adapter) == canary.data) - self.assert_(direct == control) + eq_(set(direct), canary.data) + eq_(set(adapter), canary.data) + eq_(direct, control) # assume append() is available for list tests e = creator() @@ -260,6 +260,11 @@ class CollectionsTest(fixtures.ORMTest): control[-2:-1] = values assert_eq() + values = [creator()] + direct[0:0] = values + control[0:0] = values + assert_eq() + if hasattr(direct, '__delitem__') or hasattr(direct, '__delslice__'): for i in range(1, 4): @@ -279,6 +284,16 @@ class CollectionsTest(fixtures.ORMTest): del control[:] assert_eq() + if hasattr(direct, 'clear'): + for i in range(1, 4): + e = creator() + direct.append(e) + control.append(e) + + direct.clear() + control.clear() + assert_eq() + if hasattr(direct, 'extend'): values = [creator(), creator(), creator()] @@ -499,9 +514,9 @@ class CollectionsTest(fixtures.ORMTest): control = set() def assert_eq(): - self.assert_(set(direct) == canary.data) - self.assert_(set(adapter) == canary.data) - self.assert_(direct == control) + eq_(set(direct), canary.data) + eq_(set(adapter), canary.data) + eq_(direct, control) def addall(*values): for item in values: @@ -519,10 +534,6 @@ class CollectionsTest(fixtures.ORMTest): addall(e) addall(e) - if hasattr(direct, 'pop'): - direct.pop() - control.pop() - assert_eq() if hasattr(direct, 'remove'): e = creator() @@ -593,11 +604,19 @@ class CollectionsTest(fixtures.ORMTest): except TypeError: assert True - if hasattr(direct, 'clear'): - addall(creator(), creator()) - direct.clear() - control.clear() - assert_eq() + addall(creator(), creator()) + direct.clear() + control.clear() + assert_eq() + + # note: the clear test previously needs + # to have executed in order for this to + # pass in all cases; else there's the possibility + # of non-deterministic behavior. + addall(creator()) + direct.pop() + control.pop() + assert_eq() if hasattr(direct, 'difference_update'): zap() @@ -739,6 +758,7 @@ class CollectionsTest(fixtures.ORMTest): except TypeError: assert True + def _test_set_bulk(self, typecallable, creator=None): if creator is None: creator = self.entity_maker @@ -809,6 +829,8 @@ class CollectionsTest(fixtures.ORMTest): self.data.remove(item) def discard(self, item): self.data.discard(item) + def clear(self): + self.data.clear() def pop(self): return self.data.pop() def update(self, other): @@ -841,6 +863,8 @@ class CollectionsTest(fixtures.ORMTest): self.data.update(other) def __iter__(self): return iter(self.data) + def clear(self): + self.data.clear() __hash__ = object.__hash__ def __eq__(self, other): return self.data == other @@ -967,11 +991,10 @@ class CollectionsTest(fixtures.ORMTest): control.update(d) assert_eq() - if sys.version_info >= (2, 4): - kw = dict([(ee.a, ee) for ee in [e, creator()]]) - direct.update(**kw) - control.update(**kw) - assert_eq() + kw = dict([(ee.a, ee) for ee in [e, creator()]]) + direct.update(**kw) + control.update(**kw) + assert_eq() def _test_dict_bulk(self, typecallable, creator=None): if creator is None: diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py index 5e7b91f3e..f13720ef3 100644 --- a/test/orm/test_composites.py +++ b/test/orm/test_composites.py @@ -1,18 +1,14 @@ from sqlalchemy.testing import assert_raises, assert_raises_message import sqlalchemy as sa from sqlalchemy import testing -from sqlalchemy import MetaData, Integer, String, ForeignKey, func, \ - util, select +from sqlalchemy import Integer, String, ForeignKey, \ + select from sqlalchemy.testing.schema import Table, Column -from sqlalchemy.orm import mapper, relationship, backref, \ - class_mapper, CompositeProperty, \ - validates, aliased -from sqlalchemy.orm import attributes, \ - composite, relationship, \ - Session +from sqlalchemy.orm import mapper, relationship, \ + CompositeProperty, aliased +from sqlalchemy.orm import composite, Session, configure_mappers from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures -from test.orm import _fixtures class PointTest(fixtures.MappedTest): @@ -214,17 +210,45 @@ class PointTest(fixtures.MappedTest): ((), [Point(x=None, y=None)], ()) ) - def test_query_cols(self): + def test_query_cols_legacy(self): Edge = self.classes.Edge sess = self._fixture() eq_( - sess.query(Edge.start, Edge.end).all(), + sess.query(Edge.start.clauses, Edge.end.clauses).all(), [(3, 4, 5, 6), (14, 5, 2, 7)] ) + def test_query_cols(self): + Edge = self.classes.Edge + Point = self.classes.Point + + sess = self._fixture() + + start, end = Edge.start, Edge.end + + eq_( + sess.query(start, end).filter(start == Point(3, 4)).all(), + [(Point(3, 4), Point(5, 6))] + ) + + def test_query_cols_labeled(self): + Edge = self.classes.Edge + Point = self.classes.Point + + sess = self._fixture() + + start, end = Edge.start, Edge.end + + row = sess.query(start.label('s1'), end).filter(start == Point(3, 4)).first() + eq_(row.s1.x, 3) + eq_(row.s1.y, 4) + eq_(row.end.x, 5) + eq_(row.end.y, 6) + def test_delete(self): + Point = self.classes.Point Graph, Edge = self.classes.Graph, self.classes.Edge sess = self._fixture() @@ -235,7 +259,10 @@ class PointTest(fixtures.MappedTest): sess.flush() eq_( sess.query(Edge.start, Edge.end).all(), - [(3, 4, 5, 6), (14, 5, None, None)] + [ + (Point(x=3, y=4), Point(x=5, y=6)), + (Point(x=14, y=5), Point(x=None, y=None)) + ] ) def test_save_null(self): @@ -712,6 +739,24 @@ class ConfigurationTest(fixtures.MappedTest): }) self._test_roundtrip() + def test_check_prop_type(self): + edge, Edge, Point = (self.tables.edge, + self.classes.Edge, + self.classes.Point) + mapper(Edge, edge, properties={ + 'start': sa.orm.composite(Point, (edge.c.x1,), edge.c.y1), + }) + assert_raises_message( + sa.exc.ArgumentError, + # note that we also are checking that the tuple + # renders here, so the "%" operator in the string needs to + # apply the tuple also + r"Composite expects Column objects or mapped " + "attributes/attribute names as " + "arguments, got: \(Column", + configure_mappers + ) + class ComparatorTest(fixtures.MappedTest, testing.AssertsCompiledSQL): __dialect__ = 'default' @@ -863,3 +908,15 @@ class ComparatorTest(fixtures.MappedTest, testing.AssertsCompiledSQL): "edge_1.x2, edge_1.y2" ) + def test_clause_expansion(self): + self._fixture(False) + Edge = self.classes.Edge + from sqlalchemy.orm import configure_mappers + configure_mappers() + + self.assert_compile( + select([Edge]).order_by(Edge.start), + "SELECT edge.id, edge.x1, edge.y1, edge.x2, edge.y2 FROM edge " + "ORDER BY edge.x1, edge.y1" + ) + diff --git a/test/orm/test_default_strategies.py b/test/orm/test_default_strategies.py index c1668cdd4..b1175fc51 100644 --- a/test/orm/test_default_strategies.py +++ b/test/orm/test_default_strategies.py @@ -149,11 +149,13 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): def test_star_must_be_alone(self): sess = self._downgrade_fixture() User = self.classes.User + opt = sa.orm.subqueryload('*', User.addresses) assert_raises_message( sa.exc.ArgumentError, - "Wildcard identifier '\*' must be specified alone.", - sa.orm.subqueryload, '*', User.addresses + "Wildcard token cannot be followed by another entity", + sess.query(User).options, opt ) + def test_select_with_joinedload(self): """Mapper load strategy defaults can be downgraded with lazyload('*') option, while explicit joinedload() option @@ -283,6 +285,23 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): # verify everything loaded, with no additional sql needed self._assert_fully_loaded(users) + def test_joined_path_wildcards(self): + sess = self._upgrade_fixture() + users = [] + + # test upgrade all to joined: 1 sql + def go(): + users[:] = sess.query(self.classes.User)\ + .options(sa.orm.joinedload('.*'))\ + .options(sa.orm.joinedload("addresses.*"))\ + .options(sa.orm.joinedload("orders.*"))\ + .options(sa.orm.joinedload("orders.items.*"))\ + .order_by(self.classes.User.id)\ + .all() + + self.assert_sql_count(testing.db, go, 1) + self._assert_fully_loaded(users) + def test_joined_with_lazyload(self): """Mapper load strategy defaults can be upgraded with joinedload('*') option, while explicit lazyload() option @@ -350,6 +369,24 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): # verify everything loaded, with no additional sql needed self._assert_fully_loaded(users) + def test_subquery_path_wildcards(self): + sess = self._upgrade_fixture() + users = [] + + # test upgrade all to subquery: 1 sql + 4 relationships = 5 + def go(): + users[:] = sess.query(self.classes.User)\ + .options(sa.orm.subqueryload('.*'))\ + .options(sa.orm.subqueryload('addresses.*'))\ + .options(sa.orm.subqueryload('orders.*'))\ + .options(sa.orm.subqueryload('orders.items.*'))\ + .order_by(self.classes.User.id)\ + .all() + self.assert_sql_count(testing.db, go, 5) + + # verify everything loaded, with no additional sql needed + self._assert_fully_loaded(users) + def test_subquery_with_lazyload(self): """Mapper load strategy defaults can be upgraded with subqueryload('*') option, while explicit lazyload() option diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py new file mode 100644 index 000000000..88e7e8663 --- /dev/null +++ b/test/orm/test_deferred.py @@ -0,0 +1,566 @@ +import sqlalchemy as sa +from sqlalchemy import testing, util +from sqlalchemy.orm import mapper, deferred, defer, undefer, Load, \ + load_only, undefer_group, create_session, synonym, relationship, Session,\ + joinedload, defaultload +from sqlalchemy.testing import eq_, AssertsCompiledSQL +from test.orm import _fixtures +from sqlalchemy.orm import strategies + +class DeferredTest(AssertsCompiledSQL, _fixtures.FixtureTest): + + def test_basic(self): + """A basic deferred load.""" + + Order, orders = self.classes.Order, self.tables.orders + + + mapper(Order, orders, order_by=orders.c.id, properties={ + 'description': deferred(orders.c.description)}) + + o = Order() + self.assert_(o.description is None) + + q = create_session().query(Order) + def go(): + l = q.all() + o2 = l[2] + x = o2.description + + self.sql_eq_(go, [ + ("SELECT orders.id AS orders_id, " + "orders.user_id AS orders_user_id, " + "orders.address_id AS orders_address_id, " + "orders.isopen AS orders_isopen " + "FROM orders ORDER BY orders.id", {}), + ("SELECT orders.description AS orders_description " + "FROM orders WHERE orders.id = :param_1", + {'param_1':3})]) + + def test_unsaved(self): + """Deferred loading does not kick in when just PK cols are set.""" + + Order, orders = self.classes.Order, self.tables.orders + + + mapper(Order, orders, properties={ + 'description': deferred(orders.c.description)}) + + sess = create_session() + o = Order() + sess.add(o) + o.id = 7 + def go(): + o.description = "some description" + self.sql_count_(0, go) + + def test_synonym_group_bug(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties={ + 'isopen':synonym('_isopen', map_column=True), + 'description':deferred(orders.c.description, group='foo') + }) + + sess = create_session() + o1 = sess.query(Order).get(1) + eq_(o1.description, "order 1") + + def test_unsaved_2(self): + Order, orders = self.classes.Order, self.tables.orders + + mapper(Order, orders, properties={ + 'description': deferred(orders.c.description)}) + + sess = create_session() + o = Order() + sess.add(o) + def go(): + o.description = "some description" + self.sql_count_(0, go) + + def test_unsaved_group(self): + """Deferred loading doesnt kick in when just PK cols are set""" + + orders, Order = self.tables.orders, self.classes.Order + + + mapper(Order, orders, order_by=orders.c.id, properties=dict( + description=deferred(orders.c.description, group='primary'), + opened=deferred(orders.c.isopen, group='primary'))) + + sess = create_session() + o = Order() + sess.add(o) + o.id = 7 + def go(): + o.description = "some description" + self.sql_count_(0, go) + + def test_unsaved_group_2(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, order_by=orders.c.id, properties=dict( + description=deferred(orders.c.description, group='primary'), + opened=deferred(orders.c.isopen, group='primary'))) + + sess = create_session() + o = Order() + sess.add(o) + def go(): + o.description = "some description" + self.sql_count_(0, go) + + def test_save(self): + Order, orders = self.classes.Order, self.tables.orders + + m = mapper(Order, orders, properties={ + 'description': deferred(orders.c.description)}) + + sess = create_session() + o2 = sess.query(Order).get(2) + o2.isopen = 1 + sess.flush() + + def test_group(self): + """Deferred load with a group""" + + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties=util.OrderedDict([ + ('userident', deferred(orders.c.user_id, group='primary')), + ('addrident', deferred(orders.c.address_id, group='primary')), + ('description', deferred(orders.c.description, group='primary')), + ('opened', deferred(orders.c.isopen, group='primary')) + ])) + + sess = create_session() + q = sess.query(Order).order_by(Order.id) + def go(): + l = q.all() + o2 = l[2] + eq_(o2.opened, 1) + eq_(o2.userident, 7) + eq_(o2.description, 'order 3') + + self.sql_eq_(go, [ + ("SELECT orders.id AS orders_id " + "FROM orders ORDER BY orders.id", {}), + ("SELECT orders.user_id AS orders_user_id, " + "orders.address_id AS orders_address_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen " + "FROM orders WHERE orders.id = :param_1", + {'param_1':3})]) + + o2 = q.all()[2] + eq_(o2.description, 'order 3') + assert o2 not in sess.dirty + o2.description = 'order 3' + def go(): + sess.flush() + self.sql_count_(0, go) + + def test_preserve_changes(self): + """A deferred load operation doesn't revert modifications on attributes""" + + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties = { + 'userident': deferred(orders.c.user_id, group='primary'), + 'description': deferred(orders.c.description, group='primary'), + 'opened': deferred(orders.c.isopen, group='primary') + }) + sess = create_session() + o = sess.query(Order).get(3) + assert 'userident' not in o.__dict__ + o.description = 'somenewdescription' + eq_(o.description, 'somenewdescription') + def go(): + eq_(o.opened, 1) + self.assert_sql_count(testing.db, go, 1) + eq_(o.description, 'somenewdescription') + assert o in sess.dirty + + def test_commits_state(self): + """ + When deferred elements are loaded via a group, they get the proper + CommittedState and don't result in changes being committed + + """ + + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties = { + 'userident': deferred(orders.c.user_id, group='primary'), + 'description': deferred(orders.c.description, group='primary'), + 'opened': deferred(orders.c.isopen, group='primary')}) + + sess = create_session() + o2 = sess.query(Order).get(3) + + # this will load the group of attributes + eq_(o2.description, 'order 3') + assert o2 not in sess.dirty + # this will mark it as 'dirty', but nothing actually changed + o2.description = 'order 3' + # therefore the flush() shouldnt actually issue any SQL + self.assert_sql_count(testing.db, sess.flush, 0) + + def test_map_selectable_wo_deferred(self): + """test mapping to a selectable with deferred cols, + the selectable doesn't include the deferred col. + + """ + + Order, orders = self.classes.Order, self.tables.orders + + + order_select = sa.select([ + orders.c.id, + orders.c.user_id, + orders.c.address_id, + orders.c.description, + orders.c.isopen]).alias() + mapper(Order, order_select, properties={ + 'description':deferred(order_select.c.description) + }) + + sess = Session() + o1 = sess.query(Order).order_by(Order.id).first() + assert 'description' not in o1.__dict__ + eq_(o1.description, 'order 1') + + +class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest): + __dialect__ = 'default' + + def test_options(self): + """Options on a mapper to create deferred and undeferred columns""" + + orders, Order = self.tables.orders, self.classes.Order + + + mapper(Order, orders) + + sess = create_session() + q = sess.query(Order).order_by(Order.id).options(defer('user_id')) + + def go(): + q.all()[0].user_id + + self.sql_eq_(go, [ + ("SELECT orders.id AS orders_id, " + "orders.address_id AS orders_address_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen " + "FROM orders ORDER BY orders.id", {}), + ("SELECT orders.user_id AS orders_user_id " + "FROM orders WHERE orders.id = :param_1", + {'param_1':1})]) + sess.expunge_all() + + q2 = q.options(undefer('user_id')) + self.sql_eq_(q2.all, [ + ("SELECT orders.id AS orders_id, " + "orders.user_id AS orders_user_id, " + "orders.address_id AS orders_address_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen " + "FROM orders ORDER BY orders.id", + {})]) + + def test_undefer_group(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties=util.OrderedDict([ + ('userident', deferred(orders.c.user_id, group='primary')), + ('description', deferred(orders.c.description, group='primary')), + ('opened', deferred(orders.c.isopen, group='primary')) + ] + )) + + sess = create_session() + q = sess.query(Order).order_by(Order.id) + def go(): + l = q.options(undefer_group('primary')).all() + o2 = l[2] + eq_(o2.opened, 1) + eq_(o2.userident, 7) + eq_(o2.description, 'order 3') + + self.sql_eq_(go, [ + ("SELECT orders.user_id AS orders_user_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen, " + "orders.id AS orders_id, " + "orders.address_id AS orders_address_id " + "FROM orders ORDER BY orders.id", + {})]) + + def test_undefer_star(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties=util.OrderedDict([ + ('userident', deferred(orders.c.user_id)), + ('description', deferred(orders.c.description)), + ('opened', deferred(orders.c.isopen)) + ] + )) + + sess = create_session() + q = sess.query(Order).options(Load(Order).undefer('*')) + self.assert_compile(q, + "SELECT orders.user_id AS orders_user_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen, " + "orders.id AS orders_id, " + "orders.address_id AS orders_address_id FROM orders" + ) + + def test_locates_col(self): + """Manually adding a column to the result undefers the column.""" + + orders, Order = self.tables.orders, self.classes.Order + + + mapper(Order, orders, properties={ + 'description': deferred(orders.c.description)}) + + sess = create_session() + o1 = sess.query(Order).order_by(Order.id).first() + def go(): + eq_(o1.description, 'order 1') + self.sql_count_(1, go) + + sess = create_session() + o1 = (sess.query(Order). + order_by(Order.id). + add_column(orders.c.description).first())[0] + def go(): + eq_(o1.description, 'order 1') + self.sql_count_(0, go) + + def test_deep_options(self): + users, items, order_items, Order, Item, User, orders = (self.tables.users, + self.tables.items, + self.tables.order_items, + self.classes.Order, + self.classes.Item, + self.classes.User, + self.tables.orders) + + mapper(Item, items, properties=dict( + description=deferred(items.c.description))) + mapper(Order, orders, properties=dict( + items=relationship(Item, secondary=order_items))) + mapper(User, users, properties=dict( + orders=relationship(Order, order_by=orders.c.id))) + + sess = create_session() + q = sess.query(User).order_by(User.id) + l = q.all() + item = l[0].orders[1].items[1] + def go(): + eq_(item.description, 'item 4') + self.sql_count_(1, go) + eq_(item.description, 'item 4') + + sess.expunge_all() + l = q.options(undefer('orders.items.description')).all() + item = l[0].orders[1].items[1] + def go(): + eq_(item.description, 'item 4') + self.sql_count_(0, go) + eq_(item.description, 'item 4') + + def test_path_entity(self): + """test the legacy *addl_attrs argument.""" + + User = self.classes.User + Order = self.classes.Order + Item = self.classes.Item + + users = self.tables.users + orders = self.tables.orders + items = self.tables.items + order_items = self.tables.order_items + + mapper(User, users, properties={ + "orders": relationship(Order, lazy="joined") + }) + mapper(Order, orders, properties={ + "items": relationship(Item, secondary=order_items, lazy="joined") + }) + mapper(Item, items) + + sess = create_session() + + exp = ("SELECT users.id AS users_id, users.name AS users_name, " + "items_1.id AS items_1_id, orders_1.id AS orders_1_id, " + "orders_1.user_id AS orders_1_user_id, orders_1.address_id " + "AS orders_1_address_id, orders_1.description AS " + "orders_1_description, orders_1.isopen AS orders_1_isopen " + "FROM users LEFT OUTER JOIN orders AS orders_1 " + "ON users.id = orders_1.user_id LEFT OUTER JOIN " + "(order_items AS order_items_1 JOIN items AS items_1 " + "ON items_1.id = order_items_1.item_id) " + "ON orders_1.id = order_items_1.order_id") + + q = sess.query(User).options(defer(User.orders, Order.items, Item.description)) + self.assert_compile(q, exp) + + + def test_chained_multi_col_options(self): + users, User = self.tables.users, self.classes.User + orders, Order = self.tables.orders, self.classes.Order + + mapper(User, users, properties={ + "orders": relationship(Order) + }) + mapper(Order, orders) + + sess = create_session() + q = sess.query(User).options( + joinedload(User.orders).defer("description").defer("isopen") + ) + self.assert_compile(q, + "SELECT users.id AS users_id, users.name AS users_name, " + "orders_1.id AS orders_1_id, orders_1.user_id AS orders_1_user_id, " + "orders_1.address_id AS orders_1_address_id FROM users " + "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id" + ) + + def test_load_only(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders) + + sess = create_session() + q = sess.query(Order).options(load_only("isopen", "description")) + self.assert_compile(q, + "SELECT orders.description AS orders_description, " + "orders.isopen AS orders_isopen FROM orders") + + def test_load_only_w_deferred(self): + orders, Order = self.tables.orders, self.classes.Order + + mapper(Order, orders, properties={ + "description": deferred(orders.c.description) + }) + + sess = create_session() + q = sess.query(Order).options( + load_only("isopen", "description"), + undefer("user_id") + ) + self.assert_compile(q, + "SELECT orders.description AS orders_description, " + "orders.user_id AS orders_user_id, " + "orders.isopen AS orders_isopen FROM orders") + + def test_load_only_propagate_unbound(self): + self._test_load_only_propagate(False) + + def test_load_only_propagate_bound(self): + self._test_load_only_propagate(True) + + def _test_load_only_propagate(self, use_load): + User = self.classes.User + Address = self.classes.Address + + users = self.tables.users + addresses = self.tables.addresses + + mapper(User, users, properties={ + "addresses": relationship(Address) + }) + mapper(Address, addresses) + + sess = create_session() + expected = [ + ("SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id IN (:id_1, :id_2)", {'id_2': 8, 'id_1': 7}), + ("SELECT addresses.id AS addresses_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 7}), + ("SELECT addresses.id AS addresses_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 8}), + ] + + if use_load: + opt = Load(User).defaultload(User.addresses).load_only("id", "email_address") + else: + opt = defaultload(User.addresses).load_only("id", "email_address") + q = sess.query(User).options(opt).filter(User.id.in_([7, 8])) + def go(): + for user in q: + user.addresses + + self.sql_eq_(go, expected) + + + def test_load_only_parent_specific(self): + User = self.classes.User + Address = self.classes.Address + Order = self.classes.Order + + users = self.tables.users + addresses = self.tables.addresses + orders = self.tables.orders + + mapper(User, users) + mapper(Address, addresses) + mapper(Order, orders) + + sess = create_session() + q = sess.query(User, Order, Address).options( + Load(User).load_only("name"), + Load(Order).load_only("id"), + Load(Address).load_only("id", "email_address") + ) + + self.assert_compile(q, + "SELECT users.name AS users_name, orders.id AS orders_id, " + "addresses.id AS addresses_id, addresses.email_address " + "AS addresses_email_address FROM users, orders, addresses" + ) + + def test_load_only_path_specific(self): + User = self.classes.User + Address = self.classes.Address + Order = self.classes.Order + + users = self.tables.users + addresses = self.tables.addresses + orders = self.tables.orders + + mapper(User, users, properties=util.OrderedDict([ + ("addresses", relationship(Address, lazy="joined")), + ("orders", relationship(Order, lazy="joined")) + ])) + + mapper(Address, addresses) + mapper(Order, orders) + + sess = create_session() + + q = sess.query(User).options( + load_only("name").defaultload("addresses").load_only("id", "email_address"), + defaultload("orders").load_only("id") + ) + + # hmmmm joinedload seems to be forcing users.id into here... + self.assert_compile( + q, + "SELECT users.name AS users_name, users.id AS users_id, " + "addresses_1.id AS addresses_1_id, " + "addresses_1.email_address AS addresses_1_email_address, " + "orders_1.id AS orders_1_id FROM users " + "LEFT OUTER JOIN addresses AS addresses_1 " + "ON users.id = addresses_1.user_id " + "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id" + ) + + diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index e53ff6669..f2ba3cc27 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -4,7 +4,8 @@ from sqlalchemy.testing import eq_, is_, is_not_ import sqlalchemy as sa from sqlalchemy import testing from sqlalchemy.orm import joinedload, deferred, undefer, \ - joinedload_all, backref, eagerload, Session, immediateload + joinedload_all, backref, eagerload, Session, immediateload,\ + defaultload, Load from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \ func from sqlalchemy.testing.schema import Table, Column @@ -599,7 +600,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): assert 'orders' not in noeagers[0].__dict__ assert 'addresses' not in noeagers[0].__dict__ - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit(self): """Limit operations combined with lazy-load relationships.""" @@ -654,7 +654,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_(self.static.user_address_result, l) self.assert_sql_count(testing.db, go, 1) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit_2(self): keywords, items, item_keywords, Keyword, Item = (self.tables.keywords, self.tables.items, @@ -676,7 +675,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_(self.static.item_keyword_result[1:3], l) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit_3(self): """test that the ORDER BY is propagated from the inner select to the outer select, when using the @@ -708,7 +706,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): q = sess.query(User) - if not testing.against('maxdb', 'mssql'): + if not testing.against('mssql'): l = q.join('orders').order_by(Order.user_id.desc()).limit(2).offset(1) eq_([ User(id=9, @@ -943,7 +941,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_([User(id=7, address=Address(id=1))], l) self.assert_sql_count(testing.db, go, 1) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_many_to_one(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -1412,6 +1409,52 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): "WHERE orders.description = :description_1" ) + def test_propagated_lazyload_wildcard_unbound(self): + self._test_propagated_lazyload_wildcard(False) + + def test_propagated_lazyload_wildcard_bound(self): + self._test_propagated_lazyload_wildcard(True) + + def _test_propagated_lazyload_wildcard(self, use_load): + users, items, order_items, Order, Item, User, orders = (self.tables.users, + self.tables.items, + self.tables.order_items, + self.classes.Order, + self.classes.Item, + self.classes.User, + self.tables.orders) + + mapper(User, users, properties=dict( + orders=relationship(Order, lazy="select") + )) + mapper(Order, orders, properties=dict( + items=relationship(Item, secondary=order_items, lazy="joined") + )) + mapper(Item, items) + + sess = create_session() + + if use_load: + opt = Load(User).defaultload("orders").lazyload("*") + else: + opt = defaultload("orders").lazyload("*") + + q = sess.query(User).filter(User.id == 7).options(opt) + + def go(): + for u in q: + u.orders + + self.sql_eq_(go, [ + ("SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id = :id_1", {"id_1": 7}), + ("SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, " + "orders.address_id AS orders_address_id, " + "orders.description AS orders_description, " + "orders.isopen AS orders_isopen FROM orders " + "WHERE :param_1 = orders.user_id", {"param_1": 7}), + ]) + class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL): @@ -1875,7 +1918,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest): Column('parent_id', Integer, ForeignKey('nodes.id')), Column('data', String(30))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): nodes = self.tables.nodes @@ -2061,7 +2103,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest): ) ) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_no_depth(self): nodes = self.tables.nodes @@ -2166,7 +2207,8 @@ class MixedSelfReferentialEagerTest(fixtures.MappedTest): options( joinedload('parent_b1'), joinedload('parent_b2'), - joinedload('parent_z')). + joinedload('parent_z') + ). filter(B.id.in_([2, 8, 11])).order_by(B.id).all(), [ B(id=2, parent_z=A(id=1), parent_b1=B(id=1), parent_b2=None), @@ -2804,7 +2846,7 @@ class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest, Director = self.classes.Director sess = create_session() self.assert_compile( - sess.query(PersistentObject).options(joinedload(Director.other, join_depth=1)), + sess.query(PersistentObject).options(joinedload(Director.other)), "SELECT persistent.id AS persistent_id, director.id AS director_id, " "director.other_id AS director_other_id, " "director.name AS director_name, persistent_1.id AS " diff --git a/test/orm/test_events.py b/test/orm/test_events.py index d2dae8ba3..a84ead0fa 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -18,8 +18,6 @@ from sqlalchemy.testing.mock import Mock, call class _RemoveListeners(object): def teardown(self): - # TODO: need to get remove() functionality - # going events.MapperEvents._clear() events.InstanceEvents._clear() events.SessionEvents._clear() @@ -362,14 +360,25 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): class SubUser(User): pass - canary = [] + class SubSubUser(SubUser): + pass + + canary = Mock() def evt(x, y, z): canary.append(x) - event.listen(User, "before_insert", evt, propagate=True, raw=True) + event.listen(User, "before_insert", canary, propagate=True, raw=True) m = mapper(SubUser, users) m.dispatch.before_insert(5, 6, 7) - eq_(canary, [5]) + eq_(canary.mock_calls, + [call(5, 6, 7)]) + + m2 = mapper(SubSubUser, users) + + m2.dispatch.before_insert(8, 9, 10) + eq_(canary.mock_calls, + [call(5, 6, 7), call(8, 9, 10)]) + def test_deferred_map_event_subclass_no_propagate(self): """ @@ -416,6 +425,35 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m.dispatch.before_insert(5, 6, 7) eq_(canary, [5]) + def test_deferred_map_event_subclass_post_mapping_propagate_two(self): + """ + 1. map only subclass of class + 2. mapper event listen on class, w propagate + 3. event fire should receive event + + """ + users, User = (self.tables.users, + self.classes.User) + + class SubUser(User): + pass + + class SubSubUser(SubUser): + pass + + m = mapper(SubUser, users) + + canary = Mock() + event.listen(User, "before_insert", canary, propagate=True, raw=True) + + m2 = mapper(SubSubUser, users) + + m.dispatch.before_insert(5, 6, 7) + eq_(canary.mock_calls, [call(5, 6, 7)]) + + m2.dispatch.before_insert(8, 9, 10) + eq_(canary.mock_calls, [call(5, 6, 7), call(8, 9, 10)]) + def test_deferred_instance_event_subclass_post_mapping_propagate(self): """ 1. map only subclass of class @@ -507,23 +545,25 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): class SubUser2(User): pass - canary = [] - def evt(x): - canary.append(x) - event.listen(User, "load", evt, propagate=True, raw=True) + canary = Mock() + event.listen(User, "load", canary, propagate=True, raw=False) + # reversing these fixes.... m = mapper(SubUser, users) m2 = mapper(User, users) - m.class_manager.dispatch.load(5) - eq_(canary, [5]) + instance = Mock() + m.class_manager.dispatch.load(instance) - m2.class_manager.dispatch.load(5) - eq_(canary, [5, 5]) + eq_(canary.mock_calls, [call(instance.obj())]) + + m2.class_manager.dispatch.load(instance) + eq_(canary.mock_calls, [call(instance.obj()), call(instance.obj())]) m3 = mapper(SubUser2, users) - m3.class_manager.dispatch.load(5) - eq_(canary, [5, 5, 5]) + m3.class_manager.dispatch.load(instance) + eq_(canary.mock_calls, [call(instance.obj()), + call(instance.obj()), call(instance.obj())]) def test_deferred_instance_event_subclass_no_propagate(self): """ @@ -577,21 +617,17 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): class Bar(object): pass - listeners = instrumentation._instrumentation_factory.dispatch.\ - attribute_instrument.listeners - assert not listeners + dispatch = instrumentation._instrumentation_factory.dispatch + assert not dispatch.attribute_instrument - canary = [] - def evt(x): - canary.append(x) - event.listen(Bar, "attribute_instrument", evt) + event.listen(Bar, "attribute_instrument", lambda: None) - eq_(len(listeners), 1) + eq_(len(dispatch.attribute_instrument), 1) del Bar gc_collect() - assert not listeners + assert not dispatch.attribute_instrument def test_deferred_instrument_event_subclass_propagate(self): @@ -678,6 +714,70 @@ class LoadTest(_fixtures.FixtureTest): eq_(canary, ['load']) +class RemovalTest(_fixtures.FixtureTest): + run_inserts = None + + + def test_attr_propagated(self): + User = self.classes.User + + users, addresses, User = (self.tables.users, + self.tables.addresses, + self.classes.User) + + class AdminUser(User): + pass + + mapper(User, users) + mapper(AdminUser, addresses, inherits=User) + + fn = Mock() + event.listen(User.name, "set", fn, propagate=True) + + au = AdminUser() + au.name = 'ed' + + eq_(fn.call_count, 1) + + event.remove(User.name, "set", fn) + + au.name = 'jack' + + eq_(fn.call_count, 1) + + def test_unmapped_listen(self): + users = self.tables.users + + class Foo(object): + pass + + fn = Mock() + + event.listen(Foo, "before_insert", fn, propagate=True) + + class User(Foo): + pass + + m = mapper(User, users) + + u1 = User() + m.dispatch.before_insert(m, None, attributes.instance_state(u1)) + eq_(fn.call_count, 1) + + event.remove(Foo, "before_insert", fn) + + # existing event is removed + m.dispatch.before_insert(m, None, attributes.instance_state(u1)) + eq_(fn.call_count, 1) + + # the _HoldEvents is also cleaned out + class Bar(Foo): + pass + m = mapper(Bar, users) + b1 = Bar() + m.dispatch.before_insert(m, None, attributes.instance_state(b1)) + eq_(fn.call_count, 1) + class RefreshTest(_fixtures.FixtureTest): run_inserts = None diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py index 6eb124cd2..edd243181 100644 --- a/test/orm/test_expire.py +++ b/test/orm/test_expire.py @@ -4,7 +4,7 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message from sqlalchemy.testing.util import gc_collect import sqlalchemy as sa from sqlalchemy import testing -from sqlalchemy import Integer, String, ForeignKey, exc as sa_exc +from sqlalchemy import Integer, String, ForeignKey, exc as sa_exc, FetchedValue from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column from sqlalchemy.orm import mapper, relationship, create_session, \ @@ -12,7 +12,7 @@ from sqlalchemy.orm import mapper, relationship, create_session, \ strategies, state, lazyload, backref, Session from sqlalchemy.testing import fixtures from test.orm import _fixtures - +from sqlalchemy.sql import select class ExpireTest(_fixtures.FixtureTest): @@ -375,7 +375,7 @@ class ExpireTest(_fixtures.FixtureTest): o = sess.query(Order).get(3) sess.expire(o) - orders.update(id=3).execute(description='order 3 modified') + orders.update().execute(description='order 3 modified') assert o.isopen == 1 assert attributes.instance_state(o).dict['description'] == 'order 3 modified' def go(): @@ -850,11 +850,11 @@ class ExpireTest(_fixtures.FixtureTest): assert len(u.addresses) == 3 sess.expire(u) assert 'addresses' not in u.__dict__ - print("-------------------------------------------") sess.query(User).filter_by(id=8).all() assert 'addresses' in u.__dict__ assert len(u.addresses) == 3 + @testing.requires.predictable_gc def test_expire_all(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -869,16 +869,16 @@ class ExpireTest(_fixtures.FixtureTest): sess = create_session() userlist = sess.query(User).order_by(User.id).all() - assert self.static.user_address_result == userlist - assert len(list(sess)) == 9 + eq_(self.static.user_address_result, userlist) + eq_(len(list(sess)), 9) sess.expire_all() gc_collect() - assert len(list(sess)) == 4 # since addresses were gc'ed + eq_(len(list(sess)), 4) # since addresses were gc'ed userlist = sess.query(User).order_by(User.id).all() u = userlist[1] eq_(self.static.user_address_result, userlist) - assert len(list(sess)) == 9 + eq_(len(list(sess)), 9) def test_state_change_col_to_deferred(self): """Behavioral test to verify the current activity of loader callables.""" @@ -1184,6 +1184,152 @@ class ExpiredPendingTest(_fixtures.FixtureTest): assert len(u1.addresses) == 3 +class LifecycleTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table("data", metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column('data', String(30)), + ) + Table("data_fetched", metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column('data', String(30), FetchedValue()), + ) + Table("data_defer", metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column('data', String(30)), + Column('data2', String(30)), + ) + + @classmethod + def setup_classes(cls): + class Data(cls.Comparable): + pass + class DataFetched(cls.Comparable): + pass + class DataDefer(cls.Comparable): + pass + + @classmethod + def setup_mappers(cls): + mapper(cls.classes.Data, cls.tables.data) + mapper(cls.classes.DataFetched, cls.tables.data_fetched) + mapper(cls.classes.DataDefer, cls.tables.data_defer, properties={ + "data": deferred(cls.tables.data_defer.c.data) + }) + + def test_attr_not_inserted(self): + Data = self.classes.Data + + sess = create_session() + + d1 = Data() + sess.add(d1) + sess.flush() + + # we didn't insert a value for 'data', + # so its not in dict, but also when we hit it, it isn't + # expired because there's no column default on it or anyhting like that + assert 'data' not in d1.__dict__ + def go(): + eq_(d1.data, None) + + self.assert_sql_count( + testing.db, + go, + 0 + ) + + def test_attr_not_inserted_expired(self): + Data = self.classes.Data + + sess = create_session() + + d1 = Data() + sess.add(d1) + sess.flush() + + assert 'data' not in d1.__dict__ + + # with an expire, we emit + sess.expire(d1) + + def go(): + eq_(d1.data, None) + + self.assert_sql_count( + testing.db, + go, + 1 + ) + + def test_attr_not_inserted_fetched(self): + Data = self.classes.DataFetched + + sess = create_session() + + d1 = Data() + sess.add(d1) + sess.flush() + + assert 'data' not in d1.__dict__ + def go(): + eq_(d1.data, None) + + # this one is marked as "fetch" so we emit SQL + self.assert_sql_count( + testing.db, + go, + 1 + ) + + def test_cols_missing_in_load(self): + Data = self.classes.Data + + sess = create_session() + + d1 = Data(data='d1') + sess.add(d1) + sess.flush() + sess.close() + + sess = create_session() + d1 = sess.query(Data).from_statement(select([Data.id])).first() + + # cols not present in the row are implicitly expired + def go(): + eq_(d1.data, 'd1') + + self.assert_sql_count( + testing.db, go, 1 + ) + + def test_deferred_cols_missing_in_load_state_reset(self): + Data = self.classes.DataDefer + + sess = create_session() + + d1 = Data(data='d1') + sess.add(d1) + sess.flush() + sess.close() + + sess = create_session() + d1 = sess.query(Data).from_statement( + select([Data.id])).options(undefer(Data.data)).first() + d1.data = 'd2' + + # the deferred loader has to clear out any state + # on the col, including that 'd2' here + d1 = sess.query(Data).populate_existing().first() + + def go(): + eq_(d1.data, 'd1') + + self.assert_sql_count( + testing.db, go, 1 + ) + class RefreshTest(_fixtures.FixtureTest): def test_refresh(self): @@ -1290,7 +1436,6 @@ class RefreshTest(_fixtures.FixtureTest): s.expire(u) assert len(u.addresses) == 3 - @testing.fails_on('maxdb', 'FIXME: unknown') def test_refresh2(self): """test a hang condition that was occurring on expire/refresh""" diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index 2403f4aae..fd4bef71a 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -675,19 +675,18 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL): class InstancesTest(QueryTest, AssertsCompiledSQL): - def test_from_alias(self): + def test_from_alias_one(self): User, addresses, users = (self.classes.User, self.tables.addresses, self.tables.users) - - query = users.select(users.c.id==7).\ - union(users.select(users.c.id>7)).\ + query = users.select(users.c.id == 7).\ + union(users.select(users.c.id > 7)).\ alias('ulist').\ outerjoin(addresses).\ select(use_labels=True, order_by=['ulist.id', addresses.c.id]) - sess =create_session() + sess = create_session() q = sess.query(User) def go(): @@ -697,7 +696,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): assert self.static.user_address_result == l self.assert_sql_count(testing.db, go, 1) - sess.expunge_all() + def test_from_alias_two(self): + User, addresses, users = (self.classes.User, + self.tables.addresses, + self.tables.users) + + query = users.select(users.c.id == 7).\ + union(users.select(users.c.id > 7)).\ + alias('ulist').\ + outerjoin(addresses).\ + select(use_labels=True, + order_by=['ulist.id', addresses.c.id]) + sess = create_session() + q = sess.query(User) def go(): l = q.options(contains_alias('ulist'), @@ -706,6 +717,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): assert self.static.user_address_result == l self.assert_sql_count(testing.db, go, 1) + def test_from_alias_three(self): + User, addresses, users = (self.classes.User, + self.tables.addresses, + self.tables.users) + + query = users.select(users.c.id == 7).\ + union(users.select(users.c.id > 7)).\ + alias('ulist').\ + outerjoin(addresses).\ + select(use_labels=True, + order_by=['ulist.id', addresses.c.id]) + sess = create_session() + # better way. use select_entity_from() def go(): l = sess.query(User).select_entity_from(query).\ @@ -713,12 +737,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): assert self.static.user_address_result == l self.assert_sql_count(testing.db, go, 1) + def test_from_alias_four(self): + User, addresses, users = (self.classes.User, + self.tables.addresses, + self.tables.users) + + sess = create_session() + # same thing, but alias addresses, so that the adapter # generated by select_entity_from() is wrapped within # the adapter created by contains_eager() adalias = addresses.alias() - query = users.select(users.c.id==7).\ - union(users.select(users.c.id>7)).\ + query = users.select(users.c.id == 7).\ + union(users.select(users.c.id > 7)).\ alias('ulist').\ outerjoin(adalias).\ select(use_labels=True, @@ -902,6 +933,11 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): order_by(users.c.id, oalias.c.id, ialias.c.id) # test using Alias with more than one level deep + + # new way: + #from sqlalchemy.orm.strategy_options import Load + #opt = Load(User).contains_eager('orders', alias=oalias).contains_eager('items', alias=ialias) + def go(): l = list(q.options( contains_eager('orders', alias=oalias), @@ -1781,7 +1817,6 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): users, User = self.tables.users, self.classes.User - mapper(User, users) sess = create_session() @@ -1790,21 +1825,21 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): ualias = aliased(User) self.assert_compile( - sess.query(User).join(sel, User.id>sel.c.id), + sess.query(User).join(sel, User.id > sel.c.id), "SELECT users.id AS users_id, users.name AS users_name FROM " "users JOIN (SELECT users.id AS id, users.name AS name FROM " "users WHERE users.id IN (:id_1, :id_2)) AS anon_1 ON users.id > anon_1.id", ) self.assert_compile( - sess.query(ualias).select_entity_from(sel).filter(ualias.id>sel.c.id), + sess.query(ualias).select_entity_from(sel).filter(ualias.id > sel.c.id), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM " "users AS users_1, (SELECT users.id AS id, users.name AS name FROM " "users WHERE users.id IN (:id_1, :id_2)) AS anon_1 WHERE users_1.id > anon_1.id", ) self.assert_compile( - sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>sel.c.id), + sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id > sel.c.id), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM (SELECT users.id AS id, users.name AS name " "FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 " @@ -1812,29 +1847,26 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): ) self.assert_compile( - sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>User.id), + sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id > User.id), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM (SELECT users.id AS id, users.name AS name FROM " "users WHERE users.id IN (:id_1, :id_2)) AS anon_1 " - "JOIN users AS users_1 ON anon_1.id < users_1.id" + "JOIN users AS users_1 ON users_1.id > anon_1.id" ) salias = aliased(User, sel) self.assert_compile( - sess.query(salias).join(ualias, ualias.id>salias.id), + sess.query(salias).join(ualias, ualias.id > salias.id), "SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name FROM " "(SELECT users.id AS id, users.name AS name FROM users WHERE users.id " "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id", ) - - # this one uses an explicit join(left, right, onclause) so works self.assert_compile( - sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id>sel.c.id)), + sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id > sel.c.id)), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM " "(SELECT users.id AS id, users.name AS name FROM users WHERE users.id " - "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id", - use_default_dialect=True + "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id" ) @@ -1848,25 +1880,31 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): self.assert_compile( sess.query(User).select_from(ua).join(User, ua.name > User.name), "SELECT users.id AS users_id, users.name AS users_name " - "FROM users AS users_1 JOIN users ON users.name < users_1.name" + "FROM users AS users_1 JOIN users ON users_1.name > users.name" ) self.assert_compile( sess.query(User.name).select_from(ua).join(User, ua.name > User.name), "SELECT users.name AS users_name FROM users AS users_1 " - "JOIN users ON users.name < users_1.name" + "JOIN users ON users_1.name > users.name" ) self.assert_compile( sess.query(ua.name).select_from(ua).join(User, ua.name > User.name), "SELECT users_1.name AS users_1_name FROM users AS users_1 " - "JOIN users ON users.name < users_1.name" + "JOIN users ON users_1.name > users.name" ) self.assert_compile( sess.query(ua).select_from(User).join(ua, ua.name > User.name), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " - "FROM users JOIN users AS users_1 ON users.name < users_1.name" + "FROM users JOIN users AS users_1 ON users_1.name > users.name" + ) + + self.assert_compile( + sess.query(ua).select_from(User).join(ua, User.name > ua.name), + "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " + "FROM users JOIN users AS users_1 ON users.name > users_1.name" ) # this is tested in many other places here, just adding it diff --git a/test/orm/test_generative.py b/test/orm/test_generative.py index 52858cc26..cbe559db9 100644 --- a/test/orm/test_generative.py +++ b/test/orm/test_generative.py @@ -43,7 +43,6 @@ class GenerativeQueryTest(fixtures.MappedTest): assert res.order_by(Foo.bar)[0].bar == 5 assert res.order_by(sa.desc(Foo.bar))[0].bar == 95 - @testing.fails_on('maxdb', 'FIXME: unknown') def test_slice(self): Foo = self.classes.Foo diff --git a/test/orm/test_inspect.py b/test/orm/test_inspect.py index 61c1fd93e..5f5457943 100644 --- a/test/orm/test_inspect.py +++ b/test/orm/test_inspect.py @@ -130,8 +130,8 @@ class TestORMInspection(_fixtures.FixtureTest): User = self.classes.User insp = inspect(User) eq_( - set(insp.attrs.keys()), - set(['addresses', 'orders', 'id', 'name', 'name_syn']) + list(insp.attrs.keys()), + ['addresses', 'orders', 'id', 'name', 'name_syn'] ) def test_col_filter(self): @@ -365,7 +365,7 @@ class TestORMInspection(_fixtures.FixtureTest): [] ) - def test_instance_state_attr_hist(self): + def test_instance_state_collection_attr_hist(self): User = self.classes.User u1 = User(name='ed') insp = inspect(u1) @@ -379,6 +379,48 @@ class TestORMInspection(_fixtures.FixtureTest): hist.unchanged, [] ) + def test_instance_state_scalar_attr_hist(self): + User = self.classes.User + u1 = User(name='ed') + sess = Session() + sess.add(u1) + sess.commit() + assert 'name' not in u1.__dict__ + insp = inspect(u1) + hist = insp.attrs.name.history + eq_( + hist.unchanged, None + ) + assert 'name' not in u1.__dict__ + + def test_instance_state_collection_attr_load_hist(self): + User = self.classes.User + u1 = User(name='ed') + insp = inspect(u1) + hist = insp.attrs.addresses.load_history() + eq_( + hist.unchanged, () + ) + u1.addresses + hist = insp.attrs.addresses.load_history() + eq_( + hist.unchanged, [] + ) + + def test_instance_state_scalar_attr_hist_load(self): + User = self.classes.User + u1 = User(name='ed') + sess = Session() + sess.add(u1) + sess.commit() + assert 'name' not in u1.__dict__ + insp = inspect(u1) + hist = insp.attrs.name.load_history() + eq_( + hist.unchanged, ['ed'] + ) + assert 'name' in u1.__dict__ + def test_instance_state_ident_transient(self): User = self.classes.User u1 = User(name='ed') diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py index 21b82f408..5f48b39b1 100644 --- a/test/orm/test_joins.py +++ b/test/orm/test_joins.py @@ -333,7 +333,32 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL): , use_default_dialect = True ) + def test_auto_aliasing_multi_link(self): + # test [ticket:2903] + sess = create_session() + Company, Engineer, Manager, Boss = self.classes.Company, \ + self.classes.Engineer, \ + self.classes.Manager, self.classes.Boss + q = sess.query(Company).\ + join(Company.employees.of_type(Engineer)).\ + join(Company.employees.of_type(Manager)).\ + join(Company.employees.of_type(Boss)) + + self.assert_compile(q, + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name FROM companies " + "JOIN (people JOIN engineers ON people.person_id = engineers.person_id) " + "ON companies.company_id = people.company_id " + "JOIN (people AS people_1 JOIN managers AS managers_1 " + "ON people_1.person_id = managers_1.person_id) " + "ON companies.company_id = people_1.company_id " + "JOIN (people AS people_2 JOIN managers AS managers_2 " + "ON people_2.person_id = managers_2.person_id JOIN boss AS boss_1 " + "ON managers_2.person_id = boss_1.boss_id) " + "ON companies.company_id = people_2.company_id", + use_default_dialect=True + ) class JoinTest(QueryTest, AssertsCompiledSQL): @@ -1582,12 +1607,14 @@ class MultiplePathTest(fixtures.MappedTest, AssertsCompiledSQL): self.tables.t1t2_2, self.tables.t1) - class T1(object):pass - class T2(object):pass + class T1(object): + pass + class T2(object): + pass mapper(T1, t1, properties={ - 't2s_1':relationship(T2, secondary=t1t2_1), - 't2s_2':relationship(T2, secondary=t1t2_2), + 't2s_1': relationship(T2, secondary=t1t2_1), + 't2s_2': relationship(T2, secondary=t1t2_2), }) mapper(T2, t2) diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index 66b1eb5e4..37d290b58 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -178,7 +178,7 @@ class LazyTest(_fixtures.FixtureTest): sess = create_session() q = sess.query(User) - if testing.against('maxdb', 'mssql'): + if testing.against('mssql'): l = q.limit(2).all() assert self.static.user_all_result[:2] == l else: diff --git a/test/orm/test_lockmode.py b/test/orm/test_lockmode.py index 0fe82f394..fc473a329 100644 --- a/test/orm/test_lockmode.py +++ b/test/orm/test_lockmode.py @@ -2,12 +2,13 @@ from sqlalchemy.engine import default from sqlalchemy.databases import * from sqlalchemy.orm import mapper from sqlalchemy.orm import Session -from sqlalchemy.testing import AssertsCompiledSQL +from sqlalchemy.testing import AssertsCompiledSQL, eq_ from sqlalchemy.testing import assert_raises_message +from sqlalchemy import exc from test.orm import _fixtures -class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL): +class LegacyLockModeTest(_fixtures.FixtureTest): run_inserts = None @classmethod @@ -15,100 +16,184 @@ class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL): User, users = cls.classes.User, cls.tables.users mapper(User, users) - def test_default_update(self): + def _assert_legacy(self, arg, read=False, nowait=False): + User = self.classes.User + s = Session() + q = s.query(User).with_lockmode(arg) + sel = q._compile_context().statement + + if arg is None: + assert q._for_update_arg is None + assert sel._for_update_arg is None + return + + assert q._for_update_arg.read is read + assert q._for_update_arg.nowait is nowait + + assert sel._for_update_arg.read is read + assert sel._for_update_arg.nowait is nowait + + def test_false_legacy(self): + self._assert_legacy(None) + + def test_plain_legacy(self): + self._assert_legacy("update") + + def test_nowait_legacy(self): + self._assert_legacy("update_nowait", nowait=True) + + def test_read_legacy(self): + self._assert_legacy("read", read=True) + + def test_unknown_legacy_lock_mode(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update'), - "SELECT users.id AS users_id FROM users FOR UPDATE", - dialect=default.DefaultDialect() + assert_raises_message( + exc.ArgumentError, "Unknown with_lockmode argument: 'unknown_mode'", + sess.query(User.id).with_lockmode, 'unknown_mode' ) - def test_not_supported_by_dialect_should_just_use_update(self): +class ForUpdateTest(_fixtures.FixtureTest): + @classmethod + def setup_mappers(cls): + User, users = cls.classes.User, cls.tables.users + mapper(User, users) + + def _assert(self, read=False, nowait=False, of=None, + assert_q_of=None, assert_sel_of=None): User = self.classes.User - sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('read'), - "SELECT users.id AS users_id FROM users FOR UPDATE", - dialect=default.DefaultDialect() + s = Session() + q = s.query(User).with_for_update(read=read, nowait=nowait, of=of) + sel = q._compile_context().statement + + assert q._for_update_arg.read is read + assert sel._for_update_arg.read is read + + assert q._for_update_arg.nowait is nowait + assert sel._for_update_arg.nowait is nowait + + eq_(q._for_update_arg.of, assert_q_of) + eq_(sel._for_update_arg.of, assert_sel_of) + + def test_read(self): + self._assert(read=True) + + def test_plain(self): + self._assert() + + def test_nowait(self): + self._assert(nowait=True) + + def test_of_single_col(self): + User, users = self.classes.User, self.tables.users + self._assert( + of=User.id, + assert_q_of=[users.c.id], + assert_sel_of=[users.c.id] ) - def test_none_lock_mode(self): +class CompileTest(_fixtures.FixtureTest, AssertsCompiledSQL): + """run some compile tests, even though these are redundant.""" + run_inserts = None + + @classmethod + def setup_mappers(cls): + User, users = cls.classes.User, cls.tables.users + Address, addresses = cls.classes.Address, cls.tables.addresses + mapper(User, users) + mapper(Address, addresses) + + def test_default_update(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode(None), - "SELECT users.id AS users_id FROM users", + self.assert_compile(sess.query(User.id).with_for_update(), + "SELECT users.id AS users_id FROM users FOR UPDATE", dialect=default.DefaultDialect() ) - def test_unknown_lock_mode(self): + def test_not_supported_by_dialect_should_just_use_update(self): User = self.classes.User sess = Session() - assert_raises_message( - Exception, "Unknown lockmode 'unknown_mode'", - self.assert_compile, - sess.query(User.id).with_lockmode('unknown_mode'), None, + self.assert_compile(sess.query(User.id).with_for_update(read=True), + "SELECT users.id AS users_id FROM users FOR UPDATE", dialect=default.DefaultDialect() ) def test_postgres_read(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('read'), + self.assert_compile(sess.query(User.id).with_for_update(read=True), "SELECT users.id AS users_id FROM users FOR SHARE", - dialect=postgresql.dialect() + dialect="postgresql" ) def test_postgres_read_nowait(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('read_nowait'), + self.assert_compile(sess.query(User.id). + with_for_update(read=True, nowait=True), "SELECT users.id AS users_id FROM users FOR SHARE NOWAIT", - dialect=postgresql.dialect() + dialect="postgresql" ) def test_postgres_update(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update'), + self.assert_compile(sess.query(User.id).with_for_update(), "SELECT users.id AS users_id FROM users FOR UPDATE", - dialect=postgresql.dialect() + dialect="postgresql" ) - def test_postgres_update_nowait(self): + def test_postgres_update_of(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update_nowait'), - "SELECT users.id AS users_id FROM users FOR UPDATE NOWAIT", - dialect=postgresql.dialect() + self.assert_compile(sess.query(User.id).with_for_update(of=User.id), + "SELECT users.id AS users_id FROM users FOR UPDATE OF users", + dialect="postgresql" ) - def test_oracle_update(self): + def test_postgres_update_of_entity(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update'), - "SELECT users.id AS users_id FROM users FOR UPDATE", - dialect=oracle.dialect() + self.assert_compile(sess.query(User.id).with_for_update(of=User), + "SELECT users.id AS users_id FROM users FOR UPDATE OF users", + dialect="postgresql" ) - def test_oracle_update_nowait(self): + def test_postgres_update_of_entity_list(self): User = self.classes.User + Address = self.classes.Address + sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update_nowait'), - "SELECT users.id AS users_id FROM users FOR UPDATE NOWAIT", - dialect=oracle.dialect() + self.assert_compile(sess.query(User.id, Address.id). + with_for_update(of=[User, Address]), + "SELECT users.id AS users_id, addresses.id AS addresses_id " + "FROM users, addresses FOR UPDATE OF users, addresses", + dialect="postgresql" ) - def test_mysql_read(self): + def test_postgres_update_of_list(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('read'), - "SELECT users.id AS users_id FROM users LOCK IN SHARE MODE", - dialect=mysql.dialect() + self.assert_compile(sess.query(User.id). + with_for_update(of=[User.id, User.id, User.id]), + "SELECT users.id AS users_id FROM users FOR UPDATE OF users", + dialect="postgresql" ) - def test_mysql_update(self): + + def test_oracle_update(self): User = self.classes.User sess = Session() - self.assert_compile(sess.query(User.id).with_lockmode('update'), + self.assert_compile(sess.query(User.id).with_for_update(), "SELECT users.id AS users_id FROM users FOR UPDATE", - dialect=mysql.dialect() + dialect="oracle" + ) + + def test_mysql_read(self): + User = self.classes.User + sess = Session() + self.assert_compile(sess.query(User.id).with_for_update(read=True), + "SELECT users.id AS users_id FROM users LOCK IN SHARE MODE", + dialect="mysql" ) diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index 06ec4ce27..32126e0dd 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -17,6 +17,7 @@ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy.testing.assertsql import CompiledSQL import logging +import logging.handlers class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): __dialect__ = 'default' @@ -58,7 +59,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): addresses = self.tables.addresses Address = self.classes.Address - from sqlalchemy.orm.util import _is_mapped_class, _is_aliased_class + from sqlalchemy.orm.base import _is_mapped_class, _is_aliased_class class Foo(object): x = "something" @@ -95,7 +96,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): def test_entity_descriptor(self): users = self.tables.users - from sqlalchemy.orm.util import _entity_descriptor + from sqlalchemy.orm.base import _entity_descriptor class Foo(object): x = "something" @@ -195,16 +196,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): mapper(User, users) sa.orm.configure_mappers() - assert sa.orm.mapperlib._new_mappers is False + assert sa.orm.mapperlib.Mapper._new_mappers is False m = mapper(Address, addresses, properties={ 'user': relationship(User, backref="addresses")}) assert m.configured is False - assert sa.orm.mapperlib._new_mappers is True + assert sa.orm.mapperlib.Mapper._new_mappers is True u = User() assert User.addresses - assert sa.orm.mapperlib._new_mappers is False + assert sa.orm.mapperlib.Mapper._new_mappers is False def test_configure_on_session(self): User, users = self.classes.User, self.tables.users @@ -302,6 +303,22 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): }) assert User.addresses.property is m.get_property('addresses') + def test_unicode_relationship_backref_names(self): + # test [ticket:2901] + users, Address, addresses, User = (self.tables.users, + self.classes.Address, + self.tables.addresses, + self.classes.User) + + mapper(Address, addresses) + mapper(User, users, properties={ + util.u('addresses'): relationship(Address, backref=util.u('user')) + }) + u1 = User() + a1 = Address() + u1.addresses.append(a1) + assert a1.user is u1 + def test_configure_on_prop_1(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -1566,6 +1583,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): class_mapper, 5 ) + def test_unmapped_not_type_error_iter_ok(self): + assert_raises_message( + sa.exc.ArgumentError, + r"Class object expected, got '\(5, 6\)'.", + class_mapper, (5, 6) + ) + def test_unmapped_subclass_error_postmap(self): users = self.tables.users @@ -1706,7 +1730,6 @@ class ORMLoggingTest(_fixtures.FixtureTest): class OptionsTest(_fixtures.FixtureTest): - @testing.fails_on('maxdb', 'FIXME: unknown') def test_synonym_options(self): Address, addresses, users, User = (self.classes.Address, self.tables.addresses, @@ -1749,7 +1772,6 @@ class OptionsTest(_fixtures.FixtureTest): eq_(l, self.static.user_address_result) self.sql_count_(0, go) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_eager_options_with_limit(self): Address, addresses, users, User = (self.classes.Address, self.tables.addresses, @@ -1775,7 +1797,6 @@ class OptionsTest(_fixtures.FixtureTest): eq_(u.id, 8) eq_(len(u.addresses), 3) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_lazy_options_with_limit(self): Address, addresses, users, User = (self.classes.Address, self.tables.addresses, @@ -1924,12 +1945,11 @@ class OptionsTest(_fixtures.FixtureTest): oalias = aliased(Order) opt1 = sa.orm.joinedload(User.orders, Order.items) - opt2a, opt2b = sa.orm.contains_eager(User.orders, Order.items, alias=oalias) - u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2a, opt2b).first() + opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias) + u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2).first() ustate = attributes.instance_state(u1) assert opt1 in ustate.load_options - assert opt2a not in ustate.load_options - assert opt2b not in ustate.load_options + assert opt2 not in ustate.load_options class DeepOptionsTest(_fixtures.FixtureTest): @@ -2038,139 +2058,6 @@ class DeepOptionsTest(_fixtures.FixtureTest): x = u[0].orders[1].items[0].keywords[1] self.sql_count_(2, go) -class ValidatorTest(_fixtures.FixtureTest): - def test_scalar(self): - users = self.tables.users - canary = [] - class User(fixtures.ComparableEntity): - @validates('name') - def validate_name(self, key, name): - canary.append((key, name)) - assert name != 'fred' - return name + ' modified' - - mapper(User, users) - sess = create_session() - u1 = User(name='ed') - eq_(u1.name, 'ed modified') - assert_raises(AssertionError, setattr, u1, "name", "fred") - eq_(u1.name, 'ed modified') - eq_(canary, [('name', 'ed'), ('name', 'fred')]) - sess.add(u1) - sess.flush() - sess.expunge_all() - eq_(sess.query(User).filter_by(name='ed modified').one(), User(name='ed')) - - def test_collection(self): - users, addresses, Address = (self.tables.users, - self.tables.addresses, - self.classes.Address) - - canary = [] - class User(fixtures.ComparableEntity): - @validates('addresses') - def validate_address(self, key, ad): - canary.append((key, ad)) - assert '@' in ad.email_address - return ad - - mapper(User, users, properties={'addresses':relationship(Address)}) - mapper(Address, addresses) - sess = create_session() - u1 = User(name='edward') - a0 = Address(email_address='noemail') - assert_raises(AssertionError, u1.addresses.append, a0) - a1 = Address(id=15, email_address='foo@bar.com') - u1.addresses.append(a1) - eq_(canary, [('addresses', a0), ('addresses', a1)]) - sess.add(u1) - sess.flush() - sess.expunge_all() - eq_( - sess.query(User).filter_by(name='edward').one(), - User(name='edward', addresses=[Address(email_address='foo@bar.com')]) - ) - - def test_validators_dict(self): - users, addresses, Address = (self.tables.users, - self.tables.addresses, - self.classes.Address) - - class User(fixtures.ComparableEntity): - - @validates('name') - def validate_name(self, key, name): - assert name != 'fred' - return name + ' modified' - - @validates('addresses') - def validate_address(self, key, ad): - assert '@' in ad.email_address - return ad - - def simple_function(self, key, value): - return key, value - - u_m = mapper(User, - users, - properties={'addresses':relationship(Address)}) - mapper(Address, addresses) - - eq_( - dict((k, v[0].__name__) for k, v in list(u_m.validators.items())), - {'name':'validate_name', - 'addresses':'validate_address'} - ) - - def test_validator_w_removes(self): - users, addresses, Address = (self.tables.users, - self.tables.addresses, - self.classes.Address) - canary = [] - class User(fixtures.ComparableEntity): - - @validates('name', include_removes=True) - def validate_name(self, key, item, remove): - canary.append((key, item, remove)) - return item - - @validates('addresses', include_removes=True) - def validate_address(self, key, item, remove): - canary.append((key, item, remove)) - return item - - mapper(User, - users, - properties={'addresses':relationship(Address)}) - mapper(Address, addresses) - - u1 = User() - u1.name = "ed" - u1.name = "mary" - del u1.name - a1, a2, a3 = Address(), Address(), Address() - u1.addresses.append(a1) - u1.addresses.remove(a1) - u1.addresses = [a1, a2] - u1.addresses = [a2, a3] - - eq_(canary, [ - ('name', 'ed', False), - ('name', 'mary', False), - ('name', 'mary', True), - # append a1 - ('addresses', a1, False), - # remove a1 - ('addresses', a1, True), - # set to [a1, a2] - this is two appends - ('addresses', a1, False), ('addresses', a2, False), - # set to [a2, a3] - this is a remove of a1, - # append of a3. the appends are first. - ('addresses', a3, False), - ('addresses', a1, True), - ] - ) - class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): def test_kwarg_accepted(self): users, Address = self.tables.users, self.classes.Address @@ -2241,18 +2128,18 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): self.tables.addresses, self.classes.User) - from sqlalchemy.orm.properties import PropertyLoader + from sqlalchemy.orm.properties import RelationshipProperty # NOTE: this API changed in 0.8, previously __clause_element__() # gave the parent selecatable, now it gives the # primaryjoin/secondaryjoin - class MyFactory(PropertyLoader.Comparator): + class MyFactory(RelationshipProperty.Comparator): __hash__ = None def __eq__(self, other): return func.foobar(self._source_selectable().c.user_id) == \ func.foobar(other.id) - class MyFactory2(PropertyLoader.Comparator): + class MyFactory2(RelationshipProperty.Comparator): __hash__ = None def __eq__(self, other): return func.foobar(self._source_selectable().c.id) == \ @@ -2285,349 +2172,6 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): dialect=default.DefaultDialect()) -class DeferredTest(_fixtures.FixtureTest): - - def test_basic(self): - """A basic deferred load.""" - - Order, orders = self.classes.Order, self.tables.orders - - - mapper(Order, orders, order_by=orders.c.id, properties={ - 'description': deferred(orders.c.description)}) - - o = Order() - self.assert_(o.description is None) - - q = create_session().query(Order) - def go(): - l = q.all() - o2 = l[2] - x = o2.description - - self.sql_eq_(go, [ - ("SELECT orders.id AS orders_id, " - "orders.user_id AS orders_user_id, " - "orders.address_id AS orders_address_id, " - "orders.isopen AS orders_isopen " - "FROM orders ORDER BY orders.id", {}), - ("SELECT orders.description AS orders_description " - "FROM orders WHERE orders.id = :param_1", - {'param_1':3})]) - - def test_unsaved(self): - """Deferred loading does not kick in when just PK cols are set.""" - - Order, orders = self.classes.Order, self.tables.orders - - - mapper(Order, orders, properties={ - 'description': deferred(orders.c.description)}) - - sess = create_session() - o = Order() - sess.add(o) - o.id = 7 - def go(): - o.description = "some description" - self.sql_count_(0, go) - - def test_synonym_group_bug(self): - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, properties={ - 'isopen':synonym('_isopen', map_column=True), - 'description':deferred(orders.c.description, group='foo') - }) - - sess = create_session() - o1 = sess.query(Order).get(1) - eq_(o1.description, "order 1") - - def test_unsaved_2(self): - Order, orders = self.classes.Order, self.tables.orders - - mapper(Order, orders, properties={ - 'description': deferred(orders.c.description)}) - - sess = create_session() - o = Order() - sess.add(o) - def go(): - o.description = "some description" - self.sql_count_(0, go) - - def test_unsaved_group(self): - """Deferred loading doesnt kick in when just PK cols are set""" - - orders, Order = self.tables.orders, self.classes.Order - - - mapper(Order, orders, order_by=orders.c.id, properties=dict( - description=deferred(orders.c.description, group='primary'), - opened=deferred(orders.c.isopen, group='primary'))) - - sess = create_session() - o = Order() - sess.add(o) - o.id = 7 - def go(): - o.description = "some description" - self.sql_count_(0, go) - - def test_unsaved_group_2(self): - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, order_by=orders.c.id, properties=dict( - description=deferred(orders.c.description, group='primary'), - opened=deferred(orders.c.isopen, group='primary'))) - - sess = create_session() - o = Order() - sess.add(o) - def go(): - o.description = "some description" - self.sql_count_(0, go) - - def test_save(self): - Order, orders = self.classes.Order, self.tables.orders - - m = mapper(Order, orders, properties={ - 'description': deferred(orders.c.description)}) - - sess = create_session() - o2 = sess.query(Order).get(2) - o2.isopen = 1 - sess.flush() - - def test_group(self): - """Deferred load with a group""" - - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, properties=util.OrderedDict([ - ('userident', deferred(orders.c.user_id, group='primary')), - ('addrident', deferred(orders.c.address_id, group='primary')), - ('description', deferred(orders.c.description, group='primary')), - ('opened', deferred(orders.c.isopen, group='primary')) - ])) - - sess = create_session() - q = sess.query(Order).order_by(Order.id) - def go(): - l = q.all() - o2 = l[2] - eq_(o2.opened, 1) - eq_(o2.userident, 7) - eq_(o2.description, 'order 3') - - self.sql_eq_(go, [ - ("SELECT orders.id AS orders_id " - "FROM orders ORDER BY orders.id", {}), - ("SELECT orders.user_id AS orders_user_id, " - "orders.address_id AS orders_address_id, " - "orders.description AS orders_description, " - "orders.isopen AS orders_isopen " - "FROM orders WHERE orders.id = :param_1", - {'param_1':3})]) - - o2 = q.all()[2] - eq_(o2.description, 'order 3') - assert o2 not in sess.dirty - o2.description = 'order 3' - def go(): - sess.flush() - self.sql_count_(0, go) - - def test_preserve_changes(self): - """A deferred load operation doesn't revert modifications on attributes""" - - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, properties = { - 'userident': deferred(orders.c.user_id, group='primary'), - 'description': deferred(orders.c.description, group='primary'), - 'opened': deferred(orders.c.isopen, group='primary') - }) - sess = create_session() - o = sess.query(Order).get(3) - assert 'userident' not in o.__dict__ - o.description = 'somenewdescription' - eq_(o.description, 'somenewdescription') - def go(): - eq_(o.opened, 1) - self.assert_sql_count(testing.db, go, 1) - eq_(o.description, 'somenewdescription') - assert o in sess.dirty - - def test_commits_state(self): - """ - When deferred elements are loaded via a group, they get the proper - CommittedState and don't result in changes being committed - - """ - - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, properties = { - 'userident':deferred(orders.c.user_id, group='primary'), - 'description':deferred(orders.c.description, group='primary'), - 'opened':deferred(orders.c.isopen, group='primary')}) - - sess = create_session() - o2 = sess.query(Order).get(3) - - # this will load the group of attributes - eq_(o2.description, 'order 3') - assert o2 not in sess.dirty - # this will mark it as 'dirty', but nothing actually changed - o2.description = 'order 3' - # therefore the flush() shouldnt actually issue any SQL - self.assert_sql_count(testing.db, sess.flush, 0) - - def test_options(self): - """Options on a mapper to create deferred and undeferred columns""" - - orders, Order = self.tables.orders, self.classes.Order - - - mapper(Order, orders) - - sess = create_session() - q = sess.query(Order).order_by(Order.id).options(defer('user_id')) - - def go(): - q.all()[0].user_id - - self.sql_eq_(go, [ - ("SELECT orders.id AS orders_id, " - "orders.address_id AS orders_address_id, " - "orders.description AS orders_description, " - "orders.isopen AS orders_isopen " - "FROM orders ORDER BY orders.id", {}), - ("SELECT orders.user_id AS orders_user_id " - "FROM orders WHERE orders.id = :param_1", - {'param_1':1})]) - sess.expunge_all() - - q2 = q.options(sa.orm.undefer('user_id')) - self.sql_eq_(q2.all, [ - ("SELECT orders.id AS orders_id, " - "orders.user_id AS orders_user_id, " - "orders.address_id AS orders_address_id, " - "orders.description AS orders_description, " - "orders.isopen AS orders_isopen " - "FROM orders ORDER BY orders.id", - {})]) - - def test_undefer_group(self): - orders, Order = self.tables.orders, self.classes.Order - - mapper(Order, orders, properties=util.OrderedDict([ - ('userident',deferred(orders.c.user_id, group='primary')), - ('description',deferred(orders.c.description, group='primary')), - ('opened',deferred(orders.c.isopen, group='primary')) - ] - )) - - sess = create_session() - q = sess.query(Order).order_by(Order.id) - def go(): - l = q.options(sa.orm.undefer_group('primary')).all() - o2 = l[2] - eq_(o2.opened, 1) - eq_(o2.userident, 7) - eq_(o2.description, 'order 3') - - self.sql_eq_(go, [ - ("SELECT orders.user_id AS orders_user_id, " - "orders.description AS orders_description, " - "orders.isopen AS orders_isopen, " - "orders.id AS orders_id, " - "orders.address_id AS orders_address_id " - "FROM orders ORDER BY orders.id", - {})]) - - def test_locates_col(self): - """Manually adding a column to the result undefers the column.""" - - orders, Order = self.tables.orders, self.classes.Order - - - mapper(Order, orders, properties={ - 'description':deferred(orders.c.description)}) - - sess = create_session() - o1 = sess.query(Order).order_by(Order.id).first() - def go(): - eq_(o1.description, 'order 1') - self.sql_count_(1, go) - - sess = create_session() - o1 = (sess.query(Order). - order_by(Order.id). - add_column(orders.c.description).first())[0] - def go(): - eq_(o1.description, 'order 1') - self.sql_count_(0, go) - - def test_map_selectable_wo_deferred(self): - """test mapping to a selectable with deferred cols, - the selectable doesn't include the deferred col. - - """ - - Order, orders = self.classes.Order, self.tables.orders - - - order_select = sa.select([ - orders.c.id, - orders.c.user_id, - orders.c.address_id, - orders.c.description, - orders.c.isopen]).alias() - mapper(Order, order_select, properties={ - 'description':deferred(order_select.c.description) - }) - - sess = Session() - o1 = sess.query(Order).order_by(Order.id).first() - assert 'description' not in o1.__dict__ - eq_(o1.description, 'order 1') - - def test_deep_options(self): - users, items, order_items, Order, Item, User, orders = (self.tables.users, - self.tables.items, - self.tables.order_items, - self.classes.Order, - self.classes.Item, - self.classes.User, - self.tables.orders) - - mapper(Item, items, properties=dict( - description=deferred(items.c.description))) - mapper(Order, orders, properties=dict( - items=relationship(Item, secondary=order_items))) - mapper(User, users, properties=dict( - orders=relationship(Order, order_by=orders.c.id))) - - sess = create_session() - q = sess.query(User).order_by(User.id) - l = q.all() - item = l[0].orders[1].items[1] - def go(): - eq_(item.description, 'item 4') - self.sql_count_(1, go) - eq_(item.description, 'item 4') - - sess.expunge_all() - l = q.options(sa.orm.undefer('orders.items.description')).all() - item = l[0].orders[1].items[1] - def go(): - eq_(item.description, 'item 4') - self.sql_count_(0, go) - eq_(item.description, 'item 4') - class SecondaryOptionsTest(fixtures.MappedTest): """test that the contains_eager() option doesn't bleed into a secondary load.""" diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py index 173408b82..a4663217f 100644 --- a/test/orm/test_naturalpks.py +++ b/test/orm/test_naturalpks.py @@ -391,7 +391,7 @@ class NaturalPKTest(fixtures.MappedTest): # mysqldb executemany() of the association table fails to # report the correct row count @testing.fails_if(lambda: testing.against('mysql') - and not testing.against('+zxjdbc')) + and not (testing.against('+zxjdbc') or testing.against('+cymysql'))) def test_manytomany_nonpassive(self): self._test_manytomany(False) diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py index 67baddb52..836d85cc7 100644 --- a/test/orm/test_of_type.py +++ b/test/orm/test_of_type.py @@ -506,7 +506,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM "FROM job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 " "ON job_1.id = subjob_1.id " "WHERE data_container.id = job_1.container_id " - "AND job.id > job_1.id)" + "AND job_1.id < job.id)" ) def test_any_walias(self): @@ -531,7 +531,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM "WHERE EXISTS (SELECT 1 " "FROM job AS job_1 " "WHERE data_container.id = job_1.container_id " - "AND job.id > job_1.id AND job_1.type = :type_1)" + "AND job_1.id < job.id AND job_1.type = :type_1)" ) def test_join_wpoly(self): diff --git a/test/orm/test_options.py b/test/orm/test_options.py new file mode 100644 index 000000000..6eba38d15 --- /dev/null +++ b/test/orm/test_options.py @@ -0,0 +1,760 @@ +from sqlalchemy import inspect +from sqlalchemy.orm import attributes, mapper, relationship, backref, \ + configure_mappers, create_session, synonym, Session, class_mapper, \ + aliased, column_property, joinedload_all, joinedload, Query,\ + util as orm_util, Load +import sqlalchemy as sa +from sqlalchemy import testing +from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message +from test.orm import _fixtures + +class QueryTest(_fixtures.FixtureTest): + run_setup_mappers = 'once' + run_inserts = 'once' + run_deletes = None + + @classmethod + def setup_mappers(cls): + cls._setup_stock_mapping() + +class PathTest(object): + def _make_path(self, path): + r = [] + for i, item in enumerate(path): + if i % 2 == 0: + if isinstance(item, type): + item = class_mapper(item) + else: + if isinstance(item, str): + item = inspect(r[-1]).mapper.attrs[item] + r.append(item) + return tuple(r) + + def _make_path_registry(self, path): + return orm_util.PathRegistry.coerce(self._make_path(path)) + + def _assert_path_result(self, opt, q, paths): + q._attributes = q._attributes.copy() + attr = {} + + for val in opt._to_bind: + val._bind_loader(q, attr, False) + + assert_paths = [k[1] for k in attr] + eq_( + set([p for p in assert_paths]), + set([self._make_path(p) for p in paths]) + ) + +class LoadTest(PathTest, QueryTest): + + def test_gen_path_attr_entity(self): + User = self.classes.User + Address = self.classes.Address + + l = Load(User) + eq_( + l._generate_path(inspect(User)._path_registry, User.addresses, "relationship"), + self._make_path_registry([User, "addresses", Address]) + ) + + def test_gen_path_attr_column(self): + User = self.classes.User + + l = Load(User) + eq_( + l._generate_path(inspect(User)._path_registry, User.name, "column"), + self._make_path_registry([User, "name"]) + ) + + def test_gen_path_string_entity(self): + User = self.classes.User + Address = self.classes.Address + + l = Load(User) + eq_( + l._generate_path(inspect(User)._path_registry, "addresses", "relationship"), + self._make_path_registry([User, "addresses", Address]) + ) + + def test_gen_path_string_column(self): + User = self.classes.User + + l = Load(User) + eq_( + l._generate_path(inspect(User)._path_registry, "name", "column"), + self._make_path_registry([User, "name"]) + ) + + def test_gen_path_invalid_from_col(self): + User = self.classes.User + + l = Load(User) + l.path = self._make_path_registry([User, "name"]) + assert_raises_message( + sa.exc.ArgumentError, + "Attribute 'name' of entity 'Mapper|User|users' does " + "not refer to a mapped entity", + l._generate_path, l.path, User.addresses, "relationship" + + ) + def test_gen_path_attr_entity_invalid_raiseerr(self): + User = self.classes.User + Order = self.classes.Order + + l = Load(User) + + assert_raises_message( + sa.exc.ArgumentError, + "Attribute 'Order.items' does not link from element 'Mapper|User|users'", + l._generate_path, + inspect(User)._path_registry, Order.items, "relationship", + ) + + def test_gen_path_attr_entity_invalid_noraiseerr(self): + User = self.classes.User + Order = self.classes.Order + + l = Load(User) + + eq_( + l._generate_path( + inspect(User)._path_registry, Order.items, "relationship", False + ), + None + ) + + def test_set_strat_ent(self): + User = self.classes.User + + l1 = Load(User) + l2 = l1.joinedload("addresses") + eq_( + l1.context, + { + ('loader', self._make_path([User, "addresses"])): l2 + } + ) + + def test_set_strat_col(self): + User = self.classes.User + + l1 = Load(User) + l2 = l1.defer("name") + l3 = list(l2.context.values())[0] + eq_( + l1.context, + { + ('loader', self._make_path([User, "name"])): l3 + } + ) + + +class OptionsTest(PathTest, QueryTest): + + def _option_fixture(self, *arg): + from sqlalchemy.orm import strategy_options + + return strategy_options._UnboundLoad._from_keys( + strategy_options._UnboundLoad.joinedload, arg, True, {}) + + + + def test_get_path_one_level_string(self): + User = self.classes.User + + sess = Session() + q = sess.query(User) + + opt = self._option_fixture("addresses") + self._assert_path_result(opt, q, [(User, 'addresses')]) + + def test_get_path_one_level_attribute(self): + User = self.classes.User + + sess = Session() + q = sess.query(User) + + opt = self._option_fixture(User.addresses) + self._assert_path_result(opt, q, [(User, 'addresses')]) + + def test_path_on_entity_but_doesnt_match_currentpath(self): + User, Address = self.classes.User, self.classes.Address + + # ensure "current path" is fully consumed before + # matching against current entities. + # see [ticket:2098] + sess = Session() + q = sess.query(User) + opt = self._option_fixture('email_address', 'id') + q = sess.query(Address)._with_current_path( + orm_util.PathRegistry.coerce([inspect(User), + inspect(User).attrs.addresses]) + ) + self._assert_path_result(opt, q, []) + + def test_get_path_one_level_with_unrelated(self): + Order = self.classes.Order + + sess = Session() + q = sess.query(Order) + opt = self._option_fixture("addresses") + self._assert_path_result(opt, q, []) + + def test_path_multilevel_string(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(User) + + opt = self._option_fixture("orders.items.keywords") + self._assert_path_result(opt, q, [ + (User, 'orders'), + (User, 'orders', Order, 'items'), + (User, 'orders', Order, 'items', Item, 'keywords') + ]) + + def test_path_multilevel_attribute(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(User) + + opt = self._option_fixture(User.orders, Order.items, Item.keywords) + self._assert_path_result(opt, q, [ + (User, 'orders'), + (User, 'orders', Order, 'items'), + (User, 'orders', Order, 'items', Item, 'keywords') + ]) + + def test_with_current_matching_string(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(Item)._with_current_path( + self._make_path_registry([User, 'orders', Order, 'items']) + ) + + opt = self._option_fixture("orders.items.keywords") + self._assert_path_result(opt, q, [ + (Item, 'keywords') + ]) + + def test_with_current_matching_attribute(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(Item)._with_current_path( + self._make_path_registry([User, 'orders', Order, 'items']) + ) + + opt = self._option_fixture(User.orders, Order.items, Item.keywords) + self._assert_path_result(opt, q, [ + (Item, 'keywords') + ]) + + def test_with_current_nonmatching_string(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(Item)._with_current_path( + self._make_path_registry([User, 'orders', Order, 'items']) + ) + + opt = self._option_fixture("keywords") + self._assert_path_result(opt, q, []) + + opt = self._option_fixture("items.keywords") + self._assert_path_result(opt, q, []) + + def test_with_current_nonmatching_attribute(self): + Item, User, Order = (self.classes.Item, + self.classes.User, + self.classes.Order) + + sess = Session() + q = sess.query(Item)._with_current_path( + self._make_path_registry([User, 'orders', Order, 'items']) + ) + + opt = self._option_fixture(Item.keywords) + self._assert_path_result(opt, q, []) + + opt = self._option_fixture(Order.items, Item.keywords) + self._assert_path_result(opt, q, []) + + def test_from_base_to_subclass_attr(self): + Dingaling, Address = self.classes.Dingaling, self.classes.Address + + sess = Session() + class SubAddr(Address): + pass + mapper(SubAddr, inherits=Address, properties={ + 'flub': relationship(Dingaling) + }) + + q = sess.query(Address) + opt = self._option_fixture(SubAddr.flub) + + self._assert_path_result(opt, q, [(SubAddr, 'flub')]) + + def test_from_subclass_to_subclass_attr(self): + Dingaling, Address = self.classes.Dingaling, self.classes.Address + + sess = Session() + class SubAddr(Address): + pass + mapper(SubAddr, inherits=Address, properties={ + 'flub': relationship(Dingaling) + }) + + q = sess.query(SubAddr) + opt = self._option_fixture(SubAddr.flub) + + self._assert_path_result(opt, q, [(SubAddr, 'flub')]) + + def test_from_base_to_base_attr_via_subclass(self): + Dingaling, Address = self.classes.Dingaling, self.classes.Address + + sess = Session() + class SubAddr(Address): + pass + mapper(SubAddr, inherits=Address, properties={ + 'flub': relationship(Dingaling) + }) + + q = sess.query(Address) + opt = self._option_fixture(SubAddr.user) + + self._assert_path_result(opt, q, + [(Address, inspect(Address).attrs.user)]) + + def test_of_type(self): + User, Address = self.classes.User, self.classes.Address + + sess = Session() + class SubAddr(Address): + pass + mapper(SubAddr, inherits=Address) + + q = sess.query(User) + opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.user) + + u_mapper = inspect(User) + a_mapper = inspect(Address) + self._assert_path_result(opt, q, [ + (u_mapper, u_mapper.attrs.addresses), + (u_mapper, u_mapper.attrs.addresses, a_mapper, a_mapper.attrs.user) + ]) + + def test_of_type_plus_level(self): + Dingaling, User, Address = (self.classes.Dingaling, + self.classes.User, + self.classes.Address) + + sess = Session() + class SubAddr(Address): + pass + mapper(SubAddr, inherits=Address, properties={ + 'flub': relationship(Dingaling) + }) + + q = sess.query(User) + opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.flub) + + u_mapper = inspect(User) + sa_mapper = inspect(SubAddr) + self._assert_path_result(opt, q, [ + (u_mapper, u_mapper.attrs.addresses), + (u_mapper, u_mapper.attrs.addresses, sa_mapper, sa_mapper.attrs.flub) + ]) + + def test_aliased_single(self): + User = self.classes.User + + sess = Session() + ualias = aliased(User) + q = sess.query(ualias) + opt = self._option_fixture(ualias.addresses) + self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')]) + + def test_with_current_aliased_single(self): + User, Address = self.classes.User, self.classes.Address + + sess = Session() + ualias = aliased(User) + q = sess.query(ualias)._with_current_path( + self._make_path_registry([Address, 'user']) + ) + opt = self._option_fixture(Address.user, ualias.addresses) + self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')]) + + def test_with_current_aliased_single_nonmatching_option(self): + User, Address = self.classes.User, self.classes.Address + + sess = Session() + ualias = aliased(User) + q = sess.query(User)._with_current_path( + self._make_path_registry([Address, 'user']) + ) + opt = self._option_fixture(Address.user, ualias.addresses) + self._assert_path_result(opt, q, []) + + def test_with_current_aliased_single_nonmatching_entity(self): + User, Address = self.classes.User, self.classes.Address + + sess = Session() + ualias = aliased(User) + q = sess.query(ualias)._with_current_path( + self._make_path_registry([Address, 'user']) + ) + opt = self._option_fixture(Address.user, User.addresses) + self._assert_path_result(opt, q, []) + + def test_multi_entity_opt_on_second(self): + Item = self.classes.Item + Order = self.classes.Order + opt = self._option_fixture(Order.items) + sess = Session() + q = sess.query(Item, Order) + self._assert_path_result(opt, q, [(Order, "items")]) + + def test_multi_entity_opt_on_string(self): + Item = self.classes.Item + Order = self.classes.Order + opt = self._option_fixture("items") + sess = Session() + q = sess.query(Item, Order) + self._assert_path_result(opt, q, []) + + def test_multi_entity_no_mapped_entities(self): + Item = self.classes.Item + Order = self.classes.Order + opt = self._option_fixture("items") + sess = Session() + q = sess.query(Item.id, Order.id) + self._assert_path_result(opt, q, []) + + def test_path_exhausted(self): + User = self.classes.User + Item = self.classes.Item + Order = self.classes.Order + opt = self._option_fixture(User.orders) + sess = Session() + q = sess.query(Item)._with_current_path( + self._make_path_registry([User, 'orders', Order, 'items']) + ) + self._assert_path_result(opt, q, []) + + def test_chained(self): + User = self.classes.User + Order = self.classes.Order + Item = self.classes.Item + sess = Session() + q = sess.query(User) + opt = self._option_fixture(User.orders).joinedload("items") + self._assert_path_result(opt, q, [ + (User, 'orders'), + (User, 'orders', Order, "items") + ]) + + def test_chained_plus_dotted(self): + User = self.classes.User + Order = self.classes.Order + Item = self.classes.Item + sess = Session() + q = sess.query(User) + opt = self._option_fixture("orders.items").joinedload("keywords") + self._assert_path_result(opt, q, [ + (User, 'orders'), + (User, 'orders', Order, "items"), + (User, 'orders', Order, "items", Item, "keywords") + ]) + + def test_chained_plus_multi(self): + User = self.classes.User + Order = self.classes.Order + Item = self.classes.Item + sess = Session() + q = sess.query(User) + opt = self._option_fixture(User.orders, Order.items).joinedload("keywords") + self._assert_path_result(opt, q, [ + (User, 'orders'), + (User, 'orders', Order, "items"), + (User, 'orders', Order, "items", Item, "keywords") + ]) + + +class OptionsNoPropTest(_fixtures.FixtureTest): + """test the error messages emitted when using property + options in conjunection with column-only entities, or + for not existing options + + """ + + run_create_tables = False + run_inserts = None + run_deletes = None + + def test_option_with_mapper_basestring(self): + Item = self.classes.Item + + self._assert_option([Item], 'keywords') + + def test_option_with_mapper_PropCompatator(self): + Item = self.classes.Item + + self._assert_option([Item], Item.keywords) + + def test_option_with_mapper_then_column_basestring(self): + Item = self.classes.Item + + self._assert_option([Item, Item.id], 'keywords') + + def test_option_with_mapper_then_column_PropComparator(self): + Item = self.classes.Item + + self._assert_option([Item, Item.id], Item.keywords) + + def test_option_with_column_then_mapper_basestring(self): + Item = self.classes.Item + + self._assert_option([Item.id, Item], 'keywords') + + def test_option_with_column_then_mapper_PropComparator(self): + Item = self.classes.Item + + self._assert_option([Item.id, Item], Item.keywords) + + def test_option_with_column_basestring(self): + Item = self.classes.Item + + message = \ + "Query has only expression-based entities - "\ + "can't find property named 'keywords'." + self._assert_eager_with_just_column_exception(Item.id, + 'keywords', message) + + def test_option_with_column_PropComparator(self): + Item = self.classes.Item + + self._assert_eager_with_just_column_exception(Item.id, + Item.keywords, + "Query has only expression-based entities " + "- can't find property named 'keywords'." + ) + + def test_option_against_nonexistent_PropComparator(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword], + (joinedload(Item.keywords), ), + r"Can't find property 'keywords' on any entity specified " + r"in this Query. Note the full path from root " + r"\(Mapper\|Keyword\|keywords\) to target entity must be specified." + ) + + def test_option_against_nonexistent_basestring(self): + Item = self.classes.Item + self._assert_eager_with_entity_exception( + [Item], + (joinedload("foo"), ), + r"Can't find property named 'foo' on the mapped " + r"entity Mapper\|Item\|items in this Query." + ) + + def test_option_against_nonexistent_twolevel_basestring(self): + Item = self.classes.Item + self._assert_eager_with_entity_exception( + [Item], + (joinedload("keywords.foo"), ), + r"Can't find property named 'foo' on the mapped entity " + r"Mapper\|Keyword\|keywords in this Query." + ) + + def test_option_against_nonexistent_twolevel_all(self): + Item = self.classes.Item + self._assert_eager_with_entity_exception( + [Item], + (joinedload_all("keywords.foo"), ), + r"Can't find property named 'foo' on the mapped entity " + r"Mapper\|Keyword\|keywords in this Query." + ) + + @testing.fails_if(lambda: True, + "PropertyOption doesn't yet check for relation/column on end result") + def test_option_against_non_relation_basestring(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all("keywords"), ), + r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' " + "does not refer to a mapped entity" + ) + + @testing.fails_if(lambda: True, + "PropertyOption doesn't yet check for relation/column on end result") + def test_option_against_multi_non_relation_basestring(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all("keywords"), ), + r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' " + "does not refer to a mapped entity" + ) + + def test_option_against_wrong_entity_type_basestring(self): + Item = self.classes.Item + self._assert_eager_with_entity_exception( + [Item], + (joinedload_all("id", "keywords"), ), + r"Attribute 'id' of entity 'Mapper\|Item\|items' does not " + r"refer to a mapped entity" + ) + + def test_option_against_multi_non_relation_twolevel_basestring(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all("id", "keywords"), ), + r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' " + "does not refer to a mapped entity" + ) + + def test_option_against_multi_nonexistent_basestring(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all("description"), ), + r"Can't find property named 'description' on the mapped " + r"entity Mapper\|Keyword\|keywords in this Query." + ) + + def test_option_against_multi_no_entities_basestring(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword.id, Item.id], + (joinedload_all("keywords"), ), + r"Query has only expression-based entities - can't find property " + "named 'keywords'." + ) + + def test_option_against_wrong_multi_entity_type_attr_one(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all(Keyword.id, Item.keywords), ), + r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' " + "does not refer to a mapped entity" + ) + + def test_option_against_wrong_multi_entity_type_attr_two(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword, Item], + (joinedload_all(Keyword.keywords, Item.keywords), ), + r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' " + "does not refer to a mapped entity" + ) + + def test_option_against_wrong_multi_entity_type_attr_three(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Keyword.id, Item.id], + (joinedload_all(Keyword.keywords, Item.keywords), ), + r"Query has only expression-based entities - " + "can't find property named 'keywords'." + ) + + def test_wrong_type_in_option(self): + Item = self.classes.Item + Keyword = self.classes.Keyword + self._assert_eager_with_entity_exception( + [Item], + (joinedload_all(Keyword), ), + r"mapper option expects string key or list of attributes" + ) + + def test_non_contiguous_all_option(self): + User = self.classes.User + self._assert_eager_with_entity_exception( + [User], + (joinedload_all(User.addresses, User.orders), ), + r"Attribute 'User.orders' does not link " + "from element 'Mapper|Address|addresses'" + ) + + def test_non_contiguous_all_option_of_type(self): + User = self.classes.User + Order = self.classes.Order + self._assert_eager_with_entity_exception( + [User], + (joinedload_all(User.addresses, User.orders.of_type(Order)), ), + r"Attribute 'User.orders' does not link " + "from element 'Mapper|Address|addresses'" + ) + + @classmethod + def setup_mappers(cls): + users, User, addresses, Address, orders, Order = ( + cls.tables.users, cls.classes.User, + cls.tables.addresses, cls.classes.Address, + cls.tables.orders, cls.classes.Order) + mapper(User, users, properties={ + 'addresses': relationship(Address), + 'orders': relationship(Order) + }) + mapper(Address, addresses) + mapper(Order, orders) + keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords, + cls.tables.items, + cls.tables.item_keywords, + cls.classes.Keyword, + cls.classes.Item) + mapper(Keyword, keywords, properties={ + "keywords": column_property(keywords.c.name + "some keyword") + }) + mapper(Item, items, + properties=dict(keywords=relationship(Keyword, + secondary=item_keywords))) + + def _assert_option(self, entity_list, option): + Item = self.classes.Item + + q = create_session().query(*entity_list).\ + options(joinedload(option)) + key = ('loader', (inspect(Item), inspect(Item).attrs.keywords)) + assert key in q._attributes + + def _assert_eager_with_entity_exception(self, entity_list, options, + message): + assert_raises_message(sa.exc.ArgumentError, + message, + create_session().query(*entity_list).options, + *options) + + def _assert_eager_with_just_column_exception(self, column, + eager_option, message): + assert_raises_message(sa.exc.ArgumentError, message, + create_session().query(column).options, + joinedload(eager_option)) + diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py index b54af93f2..35f1b19d1 100644 --- a/test/orm/test_pickled.py +++ b/test/orm/test_pickled.py @@ -20,6 +20,7 @@ from sqlalchemy.testing.pickleable import User, Address, Dingaling, Order, \ class PickleTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('users', metadata, @@ -170,6 +171,7 @@ class PickleTest(fixtures.MappedTest): sess.add(u2) assert u2.addresses + @testing.requires.non_broken_pickle def test_instance_deferred_cols(self): users, addresses = (self.tables.users, self.tables.addresses) @@ -240,6 +242,7 @@ class PickleTest(fixtures.MappedTest): eq_(u1, u2) + @testing.requires.non_broken_pickle def test_options_with_descriptors(self): users, addresses, dingalings = (self.tables.users, self.tables.addresses, @@ -267,7 +270,7 @@ class PickleTest(fixtures.MappedTest): sa.orm.joinedload("addresses", Address.dingaling), ]: opt2 = pickle.loads(pickle.dumps(opt)) - eq_(opt.key, opt2.key) + eq_(opt.path, opt2.path) u1 = sess.query(User).options(opt).first() u2 = pickle.loads(pickle.dumps(u1)) diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 0973dc357..fea2337ca 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -19,7 +19,7 @@ from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_mess from sqlalchemy.testing import AssertsCompiledSQL from test.orm import _fixtures from sqlalchemy.testing import fixtures, engines - +from sqlalchemy.orm import Bundle from sqlalchemy.orm.util import join, outerjoin, with_parent class QueryTest(_fixtures.FixtureTest): @@ -74,6 +74,7 @@ class RowTupleTest(QueryTest): address_alias = aliased(Address, name='aalias') fn = func.count(User.id) name_label = User.name.label('uname') + bundle = Bundle('b1', User.id, User.name) for q, asserted in [ ( sess.query(User), @@ -112,6 +113,15 @@ class RowTupleTest(QueryTest): 'expr':fn }, ] + ), + ( + sess.query(bundle), + [ + {'aliased': False, + 'expr': bundle, + 'type': Bundle, + 'name': 'b1'} + ] ) ]: eq_( @@ -119,6 +129,7 @@ class RowTupleTest(QueryTest): asserted ) + def test_unhashable_type(self): from sqlalchemy.types import TypeDecorator, Integer from sqlalchemy.sql import type_coerce @@ -216,10 +227,13 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL): where(uu.id == Address.user_id).\ correlate(uu).as_scalar() ]), - # curious, "address.user_id = uu.id" is reversed here + # for a long time, "uu.id = address.user_id" was reversed; + # this was resolved as of #2872 and had to do with + # InstrumentedAttribute.__eq__() taking precedence over + # QueryableAttribute.__eq__() "SELECT uu.name, addresses.id, " "(SELECT count(addresses.id) AS count_1 " - "FROM addresses WHERE addresses.user_id = uu.id) AS anon_1 " + "FROM addresses WHERE uu.id = addresses.user_id) AS anon_1 " "FROM users AS uu, addresses" ) @@ -688,6 +702,7 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL): meth, q, *arg, **kw ) + class OperatorTest(QueryTest, AssertsCompiledSQL): """test sql.Comparator implementation for MapperProperties""" @@ -1325,7 +1340,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL): assert [User(id=10)] == sess.query(User).outerjoin("addresses", aliased=True).filter(~User.addresses.any()).all() - @testing.crashes('maxdb', 'can dump core') def test_has(self): Dingaling, User, Address = (self.classes.Dingaling, self.classes.User, @@ -1726,16 +1740,37 @@ class AggregateTest(QueryTest): class ExistsTest(QueryTest, AssertsCompiledSQL): + __dialect__ = 'default' def test_exists(self): User = self.classes.User sess = create_session() - q1 = sess.query(User).filter(User.name == 'fred') + + q1 = sess.query(User) self.assert_compile(sess.query(q1.exists()), 'SELECT EXISTS (' + 'SELECT 1 FROM users' + ') AS anon_1' + ) + + q2 = sess.query(User).filter(User.name == 'fred') + self.assert_compile(sess.query(q2.exists()), + 'SELECT EXISTS (' 'SELECT 1 FROM users WHERE users.name = :name_1' - ') AS anon_1', - dialect=default.DefaultDialect() + ') AS anon_1' + ) + + def test_exists_col_warning(self): + User = self.classes.User + Address = self.classes.Address + sess = create_session() + + q1 = sess.query(User, Address).filter(User.id == Address.user_id) + self.assert_compile(sess.query(q1.exists()), + 'SELECT EXISTS (' + 'SELECT 1 FROM users, addresses ' + 'WHERE users.id = addresses.user_id' + ') AS anon_1' ) @@ -1955,7 +1990,7 @@ class HintsTest(QueryTest, AssertsCompiledSQL): "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users INNER JOIN users AS users_1 USE INDEX (col1_index,col2_index) " - "ON users.id < users_1.id", + "ON users_1.id > users.id", dialect=dialect ) @@ -2443,584 +2478,3 @@ class ExecutionOptionsTest(QueryTest): q1.all() -class OptionsTest(QueryTest): - """Test the _process_paths() method of PropertyOption.""" - - def _option_fixture(self, *arg): - from sqlalchemy.orm import interfaces - class Opt(interfaces.PropertyOption): - pass - return Opt(arg) - - def _make_path(self, path): - r = [] - for i, item in enumerate(path): - if i % 2 == 0: - if isinstance(item, type): - item = class_mapper(item) - else: - if isinstance(item, str): - item = inspect(r[-1]).mapper.attrs[item] - r.append(item) - return tuple(r) - - def _make_path_registry(self, path): - return orm_util.PathRegistry.coerce(self._make_path(path)) - - def _assert_path_result(self, opt, q, paths): - q._attributes = q._attributes.copy() - assert_paths = opt._process_paths(q, False) - eq_( - [p.path for p in assert_paths], - [self._make_path(p) for p in paths] - ) - - def test_get_path_one_level_string(self): - User = self.classes.User - - sess = Session() - q = sess.query(User) - - opt = self._option_fixture("addresses") - self._assert_path_result(opt, q, [(User, 'addresses')]) - - def test_get_path_one_level_attribute(self): - User = self.classes.User - - sess = Session() - q = sess.query(User) - - opt = self._option_fixture(User.addresses) - self._assert_path_result(opt, q, [(User, 'addresses')]) - - def test_path_on_entity_but_doesnt_match_currentpath(self): - User, Address = self.classes.User, self.classes.Address - - # ensure "current path" is fully consumed before - # matching against current entities. - # see [ticket:2098] - sess = Session() - q = sess.query(User) - opt = self._option_fixture('email_address', 'id') - q = sess.query(Address)._with_current_path( - orm_util.PathRegistry.coerce([inspect(User), - inspect(User).attrs.addresses]) - ) - self._assert_path_result(opt, q, []) - - def test_get_path_one_level_with_unrelated(self): - Order = self.classes.Order - - sess = Session() - q = sess.query(Order) - opt = self._option_fixture("addresses") - self._assert_path_result(opt, q, []) - - def test_path_multilevel_string(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(User) - - opt = self._option_fixture("orders.items.keywords") - self._assert_path_result(opt, q, [ - (User, 'orders'), - (User, 'orders', Order, 'items'), - (User, 'orders', Order, 'items', Item, 'keywords') - ]) - - def test_path_multilevel_attribute(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(User) - - opt = self._option_fixture(User.orders, Order.items, Item.keywords) - self._assert_path_result(opt, q, [ - (User, 'orders'), - (User, 'orders', Order, 'items'), - (User, 'orders', Order, 'items', Item, 'keywords') - ]) - - def test_with_current_matching_string(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(Item)._with_current_path( - self._make_path_registry([User, 'orders', Order, 'items']) - ) - - opt = self._option_fixture("orders.items.keywords") - self._assert_path_result(opt, q, [ - (Item, 'keywords') - ]) - - def test_with_current_matching_attribute(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(Item)._with_current_path( - self._make_path_registry([User, 'orders', Order, 'items']) - ) - - opt = self._option_fixture(User.orders, Order.items, Item.keywords) - self._assert_path_result(opt, q, [ - (Item, 'keywords') - ]) - - def test_with_current_nonmatching_string(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(Item)._with_current_path( - self._make_path_registry([User, 'orders', Order, 'items']) - ) - - opt = self._option_fixture("keywords") - self._assert_path_result(opt, q, []) - - opt = self._option_fixture("items.keywords") - self._assert_path_result(opt, q, []) - - def test_with_current_nonmatching_attribute(self): - Item, User, Order = (self.classes.Item, - self.classes.User, - self.classes.Order) - - sess = Session() - q = sess.query(Item)._with_current_path( - self._make_path_registry([User, 'orders', Order, 'items']) - ) - - opt = self._option_fixture(Item.keywords) - self._assert_path_result(opt, q, []) - - opt = self._option_fixture(Order.items, Item.keywords) - self._assert_path_result(opt, q, []) - - def test_from_base_to_subclass_attr(self): - Dingaling, Address = self.classes.Dingaling, self.classes.Address - - sess = Session() - class SubAddr(Address): - pass - mapper(SubAddr, inherits=Address, properties={ - 'flub': relationship(Dingaling) - }) - - q = sess.query(Address) - opt = self._option_fixture(SubAddr.flub) - - self._assert_path_result(opt, q, [(SubAddr, 'flub')]) - - def test_from_subclass_to_subclass_attr(self): - Dingaling, Address = self.classes.Dingaling, self.classes.Address - - sess = Session() - class SubAddr(Address): - pass - mapper(SubAddr, inherits=Address, properties={ - 'flub': relationship(Dingaling) - }) - - q = sess.query(SubAddr) - opt = self._option_fixture(SubAddr.flub) - - self._assert_path_result(opt, q, [(SubAddr, 'flub')]) - - def test_from_base_to_base_attr_via_subclass(self): - Dingaling, Address = self.classes.Dingaling, self.classes.Address - - sess = Session() - class SubAddr(Address): - pass - mapper(SubAddr, inherits=Address, properties={ - 'flub': relationship(Dingaling) - }) - - q = sess.query(Address) - opt = self._option_fixture(SubAddr.user) - - self._assert_path_result(opt, q, - [(Address, inspect(Address).attrs.user)]) - - def test_of_type(self): - User, Address = self.classes.User, self.classes.Address - - sess = Session() - class SubAddr(Address): - pass - mapper(SubAddr, inherits=Address) - - q = sess.query(User) - opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.user) - - u_mapper = inspect(User) - a_mapper = inspect(Address) - self._assert_path_result(opt, q, [ - (u_mapper, u_mapper.attrs.addresses), - (u_mapper, u_mapper.attrs.addresses, a_mapper, a_mapper.attrs.user) - ]) - - def test_of_type_plus_level(self): - Dingaling, User, Address = (self.classes.Dingaling, - self.classes.User, - self.classes.Address) - - sess = Session() - class SubAddr(Address): - pass - mapper(SubAddr, inherits=Address, properties={ - 'flub': relationship(Dingaling) - }) - - q = sess.query(User) - opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.flub) - - u_mapper = inspect(User) - sa_mapper = inspect(SubAddr) - self._assert_path_result(opt, q, [ - (u_mapper, u_mapper.attrs.addresses), - (u_mapper, u_mapper.attrs.addresses, sa_mapper, sa_mapper.attrs.flub) - ]) - - def test_aliased_single(self): - User = self.classes.User - - sess = Session() - ualias = aliased(User) - q = sess.query(ualias) - opt = self._option_fixture(ualias.addresses) - self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')]) - - def test_with_current_aliased_single(self): - User, Address = self.classes.User, self.classes.Address - - sess = Session() - ualias = aliased(User) - q = sess.query(ualias)._with_current_path( - self._make_path_registry([Address, 'user']) - ) - opt = self._option_fixture(Address.user, ualias.addresses) - self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')]) - - def test_with_current_aliased_single_nonmatching_option(self): - User, Address = self.classes.User, self.classes.Address - - sess = Session() - ualias = aliased(User) - q = sess.query(User)._with_current_path( - self._make_path_registry([Address, 'user']) - ) - opt = self._option_fixture(Address.user, ualias.addresses) - self._assert_path_result(opt, q, []) - - def test_with_current_aliased_single_nonmatching_entity(self): - User, Address = self.classes.User, self.classes.Address - - sess = Session() - ualias = aliased(User) - q = sess.query(ualias)._with_current_path( - self._make_path_registry([Address, 'user']) - ) - opt = self._option_fixture(Address.user, User.addresses) - self._assert_path_result(opt, q, []) - - def test_multi_entity_opt_on_second(self): - Item = self.classes.Item - Order = self.classes.Order - opt = self._option_fixture(Order.items) - sess = Session() - q = sess.query(Item, Order) - self._assert_path_result(opt, q, [(Order, "items")]) - - def test_multi_entity_opt_on_string(self): - Item = self.classes.Item - Order = self.classes.Order - opt = self._option_fixture("items") - sess = Session() - q = sess.query(Item, Order) - self._assert_path_result(opt, q, []) - - def test_multi_entity_no_mapped_entities(self): - Item = self.classes.Item - Order = self.classes.Order - opt = self._option_fixture("items") - sess = Session() - q = sess.query(Item.id, Order.id) - self._assert_path_result(opt, q, []) - - def test_path_exhausted(self): - User = self.classes.User - Item = self.classes.Item - Order = self.classes.Order - opt = self._option_fixture(User.orders) - sess = Session() - q = sess.query(Item)._with_current_path( - self._make_path_registry([User, 'orders', Order, 'items']) - ) - self._assert_path_result(opt, q, []) - -class OptionsNoPropTest(_fixtures.FixtureTest): - """test the error messages emitted when using property - options in conjunection with column-only entities, or - for not existing options - - """ - - run_create_tables = False - run_inserts = None - run_deletes = None - - def test_option_with_mapper_basestring(self): - Item = self.classes.Item - - self._assert_option([Item], 'keywords') - - def test_option_with_mapper_PropCompatator(self): - Item = self.classes.Item - - self._assert_option([Item], Item.keywords) - - def test_option_with_mapper_then_column_basestring(self): - Item = self.classes.Item - - self._assert_option([Item, Item.id], 'keywords') - - def test_option_with_mapper_then_column_PropComparator(self): - Item = self.classes.Item - - self._assert_option([Item, Item.id], Item.keywords) - - def test_option_with_column_then_mapper_basestring(self): - Item = self.classes.Item - - self._assert_option([Item.id, Item], 'keywords') - - def test_option_with_column_then_mapper_PropComparator(self): - Item = self.classes.Item - - self._assert_option([Item.id, Item], Item.keywords) - - def test_option_with_column_basestring(self): - Item = self.classes.Item - - message = \ - "Query has only expression-based entities - "\ - "can't find property named 'keywords'." - self._assert_eager_with_just_column_exception(Item.id, - 'keywords', message) - - def test_option_with_column_PropComparator(self): - Item = self.classes.Item - - self._assert_eager_with_just_column_exception(Item.id, - Item.keywords, - "Query has only expression-based entities " - "- can't find property named 'keywords'." - ) - - def test_option_against_nonexistent_PropComparator(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword], - (joinedload(Item.keywords), ), - r"Can't find property 'keywords' on any entity specified " - r"in this Query. Note the full path from root " - r"\(Mapper\|Keyword\|keywords\) to target entity must be specified." - ) - - def test_option_against_nonexistent_basestring(self): - Item = self.classes.Item - self._assert_eager_with_entity_exception( - [Item], - (joinedload("foo"), ), - r"Can't find property named 'foo' on the mapped " - r"entity Mapper\|Item\|items in this Query." - ) - - def test_option_against_nonexistent_twolevel_basestring(self): - Item = self.classes.Item - self._assert_eager_with_entity_exception( - [Item], - (joinedload("keywords.foo"), ), - r"Can't find property named 'foo' on the mapped entity " - r"Mapper\|Keyword\|keywords in this Query." - ) - - def test_option_against_nonexistent_twolevel_all(self): - Item = self.classes.Item - self._assert_eager_with_entity_exception( - [Item], - (joinedload_all("keywords.foo"), ), - r"Can't find property named 'foo' on the mapped entity " - r"Mapper\|Keyword\|keywords in this Query." - ) - - @testing.fails_if(lambda:True, - "PropertyOption doesn't yet check for relation/column on end result") - def test_option_against_non_relation_basestring(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all("keywords"), ), - r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' " - "does not refer to a mapped entity" - ) - - @testing.fails_if(lambda:True, - "PropertyOption doesn't yet check for relation/column on end result") - def test_option_against_multi_non_relation_basestring(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all("keywords"), ), - r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' " - "does not refer to a mapped entity" - ) - - def test_option_against_wrong_entity_type_basestring(self): - Item = self.classes.Item - self._assert_eager_with_entity_exception( - [Item], - (joinedload_all("id", "keywords"), ), - r"Attribute 'id' of entity 'Mapper\|Item\|items' does not " - r"refer to a mapped entity" - ) - - def test_option_against_multi_non_relation_twolevel_basestring(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all("id", "keywords"), ), - r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' " - "does not refer to a mapped entity" - ) - - def test_option_against_multi_nonexistent_basestring(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all("description"), ), - r"Can't find property named 'description' on the mapped " - r"entity Mapper\|Keyword\|keywords in this Query." - ) - - def test_option_against_multi_no_entities_basestring(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword.id, Item.id], - (joinedload_all("keywords"), ), - r"Query has only expression-based entities - can't find property " - "named 'keywords'." - ) - - def test_option_against_wrong_multi_entity_type_attr_one(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all(Keyword.id, Item.keywords), ), - r"Attribute 'Keyword.id' of entity 'Mapper\|Keyword\|keywords' " - "does not refer to a mapped entity" - ) - - def test_option_against_wrong_multi_entity_type_attr_two(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword, Item], - (joinedload_all(Keyword.keywords, Item.keywords), ), - r"Attribute 'Keyword.keywords' of entity 'Mapper\|Keyword\|keywords' " - "does not refer to a mapped entity" - ) - - def test_option_against_wrong_multi_entity_type_attr_three(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Keyword.id, Item.id], - (joinedload_all(Keyword.keywords, Item.keywords), ), - r"Query has only expression-based entities - " - "can't find property named 'keywords'." - ) - - def test_wrong_type_in_option(self): - Item = self.classes.Item - Keyword = self.classes.Keyword - self._assert_eager_with_entity_exception( - [Item], - (joinedload_all(Keyword), ), - r"mapper option expects string key or list of attributes" - ) - - def test_non_contiguous_all_option(self): - User = self.classes.User - self._assert_eager_with_entity_exception( - [User], - (joinedload_all(User.addresses, User.orders), ), - r"Attribute 'User.orders' does not link " - "from element 'Mapper|Address|addresses'" - ) - - @classmethod - def setup_mappers(cls): - users, User, addresses, Address, orders, Order = ( - cls.tables.users, cls.classes.User, - cls.tables.addresses, cls.classes.Address, - cls.tables.orders, cls.classes.Order) - mapper(User, users, properties={ - 'addresses': relationship(Address), - 'orders': relationship(Order) - }) - mapper(Address, addresses) - mapper(Order, orders) - keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords, - cls.tables.items, - cls.tables.item_keywords, - cls.classes.Keyword, - cls.classes.Item) - mapper(Keyword, keywords, properties={ - "keywords": column_property(keywords.c.name + "some keyword") - }) - mapper(Item, items, - properties=dict(keywords=relationship(Keyword, - secondary=item_keywords))) - - def _assert_option(self, entity_list, option): - Item = self.classes.Item - - q = create_session().query(*entity_list).\ - options(joinedload(option)) - key = ('loaderstrategy', (inspect(Item), inspect(Item).attrs.keywords)) - assert key in q._attributes - - def _assert_eager_with_entity_exception(self, entity_list, options, - message): - assert_raises_message(sa.exc.ArgumentError, - message, - create_session().query(*entity_list).options, - *options) - - def _assert_eager_with_just_column_exception(self, column, - eager_option, message): - assert_raises_message(sa.exc.ArgumentError, message, - create_session().query(column).options, - joinedload(eager_option)) - diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index 8dc9c3c52..717f136c0 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -14,6 +14,7 @@ from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy import exc +from sqlalchemy import inspect class _RelationshipErrors(object): def _assert_raises_no_relevant_fks(self, fn, expr, relname, @@ -1516,6 +1517,117 @@ class TypedAssociationTable(fixtures.MappedTest): assert t3.count().scalar() == 1 +class ViewOnlyHistoryTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table("t1", metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40))) + Table("t2", metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t1id', Integer, ForeignKey('t1.id'))) + + def _assert_fk(self, a1, b1, is_set): + s = Session(testing.db) + s.add_all([a1, b1]) + s.flush() + + if is_set: + eq_(b1.t1id, a1.id) + else: + eq_(b1.t1id, None) + + return s + + def test_o2m_viewonly_oneside(self): + class A(fixtures.ComparableEntity): + pass + class B(fixtures.ComparableEntity): + pass + + mapper(A, self.tables.t1, properties={ + "bs": relationship(B, viewonly=True, + backref=backref("a", viewonly=False)) + }) + mapper(B, self.tables.t2) + + a1 = A() + b1 = B() + a1.bs.append(b1) + assert b1.a is a1 + assert not inspect(a1).attrs.bs.history.has_changes() + assert inspect(b1).attrs.a.history.has_changes() + + sess = self._assert_fk(a1, b1, True) + + a1.bs.remove(b1) + assert a1 not in sess.dirty + assert b1 in sess.dirty + + def test_m2o_viewonly_oneside(self): + class A(fixtures.ComparableEntity): + pass + class B(fixtures.ComparableEntity): + pass + + mapper(A, self.tables.t1, properties={ + "bs": relationship(B, viewonly=False, + backref=backref("a", viewonly=True)) + }) + mapper(B, self.tables.t2) + + a1 = A() + b1 = B() + b1.a = a1 + assert b1 in a1.bs + assert inspect(a1).attrs.bs.history.has_changes() + assert not inspect(b1).attrs.a.history.has_changes() + + sess = self._assert_fk(a1, b1, True) + + a1.bs.remove(b1) + assert a1 in sess.dirty + assert b1 not in sess.dirty + + def test_o2m_viewonly_only(self): + class A(fixtures.ComparableEntity): + pass + class B(fixtures.ComparableEntity): + pass + + mapper(A, self.tables.t1, properties={ + "bs": relationship(B, viewonly=True) + }) + mapper(B, self.tables.t2) + + a1 = A() + b1 = B() + a1.bs.append(b1) + assert not inspect(a1).attrs.bs.history.has_changes() + + self._assert_fk(a1, b1, False) + + def test_m2o_viewonly_only(self): + class A(fixtures.ComparableEntity): + pass + class B(fixtures.ComparableEntity): + pass + + mapper(A, self.tables.t1) + mapper(B, self.tables.t2, properties={ + 'a': relationship(A, viewonly=True) + }) + + a1 = A() + b1 = B() + b1.a = a1 + assert not inspect(b1).attrs.a.history.has_changes() + + self._assert_fk(a1, b1, False) + class ViewOnlyM2MBackrefTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): @@ -1551,6 +1663,8 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest): a1 = A() b1 = B(as_=[a1]) + assert not inspect(b1).attrs.as_.history.has_changes() + sess.add(a1) sess.flush() eq_( @@ -2232,7 +2346,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest): assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " - "both of the same direction <symbol 'ONETOMANY>. Did you " + r"both of the same direction symbol\('ONETOMANY'\). Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) @@ -2247,7 +2361,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest): assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " - "both of the same direction <symbol 'MANYTOONE>. Did you " + r"both of the same direction symbol\('MANYTOONE'\). Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) @@ -2261,7 +2375,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest): # can't be sure of ordering here assert_raises_message(sa.exc.ArgumentError, - "both of the same direction <symbol 'ONETOMANY>. Did you " + r"both of the same direction symbol\('ONETOMANY'\). Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) @@ -2277,7 +2391,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest): # can't be sure of ordering here assert_raises_message(sa.exc.ArgumentError, - "both of the same direction <symbol 'MANYTOONE>. Did you " + r"both of the same direction symbol\('MANYTOONE'\). Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) diff --git a/test/orm/test_session.py b/test/orm/test_session.py index 45164483b..4eb498ee9 100644 --- a/test/orm/test_session.py +++ b/test/orm/test_session.py @@ -395,6 +395,27 @@ class SessionStateTest(_fixtures.FixtureTest): run_inserts = None + def test_info(self): + s = Session() + eq_(s.info, {}) + + maker = sessionmaker(info={"global": True, "s1": 5}) + + s1 = maker() + s2 = maker(info={"s1": 6, "s2": True}) + + eq_(s1.info, {"global": True, "s1": 5}) + eq_(s2.info, {"global": True, "s1": 6, "s2": True}) + s2.info["global"] = False + s2.info["s1"] = 7 + + s3 = maker() + eq_(s3.info, {"global": True, "s1": 5}) + + maker2 = sessionmaker() + s4 = maker2(info={'s4': 8}) + eq_(s4.info, {'s4': 8}) + @testing.requires.independent_connections @engines.close_open_connections def test_autoflush(self): @@ -418,7 +439,6 @@ class SessionStateTest(_fixtures.FixtureTest): eq_(bind.connect().execute("select count(1) from users").scalar(), 1) sess.close() - @testing.requires.python26 def test_with_no_autoflush(self): User, users = self.classes.User, self.tables.users diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py index a6cc37691..f36820e70 100644 --- a/test/orm/test_subquery_relations.py +++ b/test/orm/test_subquery_relations.py @@ -10,6 +10,7 @@ from sqlalchemy.testing import eq_, assert_raises, \ assert_raises_message from sqlalchemy.testing.assertsql import CompiledSQL from sqlalchemy.testing import fixtures +from sqlalchemy.testing.entities import ComparableEntity from test.orm import _fixtures import sqlalchemy as sa @@ -632,7 +633,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): ], q.all()) self.assert_sql_count(testing.db, go, 6) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit(self): """Limit operations combined with lazy-load relationships.""" @@ -706,7 +706,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_([User(id=7, address=Address(id=1))], l) self.assert_sql_count(testing.db, go, 2) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_many_to_one(self): users, Address, addresses, User = (self.tables.users, self.classes.Address, @@ -1144,7 +1143,6 @@ class SelfReferentialTest(fixtures.MappedTest): Column('parent_id', Integer, ForeignKey('nodes.id')), Column('data', String(30))) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_basic(self): nodes = self.tables.nodes @@ -1309,7 +1307,6 @@ class SelfReferentialTest(fixtures.MappedTest): ]), d) self.assert_sql_count(testing.db, go, 3) - @testing.fails_on('maxdb', 'FIXME: unknown') def test_no_depth(self): """no join depth is set, so no eager loading occurs.""" @@ -1563,3 +1560,251 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest, d = session.query(Director).options(subqueryload('*')).first() assert len(list(session)) == 3 + +class SubqueryloadDistinctTest(fixtures.DeclarativeMappedTest, + testing.AssertsCompiledSQL): + __dialect__ = 'default' + + run_inserts = 'once' + run_deletes = None + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class Director(Base): + __tablename__ = 'director' + id = Column(Integer, primary_key=True, + test_needs_autoincrement=True) + name = Column(String(50)) + + class DirectorPhoto(Base): + __tablename__ = 'director_photo' + id = Column(Integer, primary_key=True, + test_needs_autoincrement=True) + path = Column(String(255)) + director_id = Column(Integer, ForeignKey('director.id')) + director = relationship(Director, backref="photos") + + class Movie(Base): + __tablename__ = 'movie' + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + director_id = Column(Integer, ForeignKey('director.id')) + director = relationship(Director, backref="movies") + title = Column(String(50)) + credits = relationship("Credit", backref="movie") + + class Credit(Base): + __tablename__ = 'credit' + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + movie_id = Column(Integer, ForeignKey('movie.id')) + + @classmethod + def insert_data(cls): + Movie = cls.classes.Movie + Director = cls.classes.Director + DirectorPhoto = cls.classes.DirectorPhoto + Credit = cls.classes.Credit + + d = Director(name='Woody Allen') + d.photos = [DirectorPhoto(path='/1.jpg'), + DirectorPhoto(path='/2.jpg')] + d.movies = [Movie(title='Manhattan', credits=[Credit(), Credit()]), + Movie(title='Sweet and Lowdown', credits=[Credit()])] + sess = create_session() + sess.add_all([d]) + sess.flush() + + def test_distinct_strategy_opt_m2o(self): + self._run_test_m2o(True, None) + self._run_test_m2o(False, None) + + def test_distinct_unrelated_opt_m2o(self): + self._run_test_m2o(None, True) + self._run_test_m2o(None, False) + + def _run_test_m2o(self, + director_strategy_level, + photo_strategy_level): + + # test where the innermost is m2o, e.g. + # Movie->director + + Movie = self.classes.Movie + Director = self.classes.Director + + Movie.director.property.distinct_target_key = director_strategy_level + Director.photos.property.distinct_target_key = photo_strategy_level + + # the DISTINCT is controlled by + # only the Movie->director relationship, *not* the + # Director.photos + expect_distinct = director_strategy_level in (True, None) + + s = create_session() + + q = ( + s.query(Movie) + .options( + subqueryload(Movie.director) + .subqueryload(Director.photos) + ) + ) + ctx = q._compile_context() + + q2 = ctx.attributes[ + ('subquery', (inspect(Movie), inspect(Movie).attrs.director)) + ] + self.assert_compile( + q2, + 'SELECT director.id AS director_id, ' + 'director.name AS director_name, ' + 'anon_1.movie_director_id AS anon_1_movie_director_id ' + 'FROM (SELECT%s movie.director_id AS movie_director_id ' + 'FROM movie) AS anon_1 ' + 'JOIN director ON director.id = anon_1.movie_director_id ' + 'ORDER BY anon_1.movie_director_id' % ( + " DISTINCT" if expect_distinct else "") + ) + + ctx2 = q2._compile_context() + result = s.execute(q2) + rows = result.fetchall() + + if expect_distinct: + eq_(rows, [ + (1, 'Woody Allen', 1), + ]) + else: + eq_(rows, [ + (1, 'Woody Allen', 1), (1, 'Woody Allen', 1), + ]) + + q3 = ctx2.attributes[ + ('subquery', (inspect(Director), inspect(Director).attrs.photos)) + ] + + self.assert_compile( + q3, + 'SELECT director_photo.id AS director_photo_id, ' + 'director_photo.path AS director_photo_path, ' + 'director_photo.director_id AS director_photo_director_id, ' + 'director_1.id AS director_1_id ' + 'FROM (SELECT%s movie.director_id AS movie_director_id ' + 'FROM movie) AS anon_1 ' + 'JOIN director AS director_1 ON director_1.id = anon_1.movie_director_id ' + 'JOIN director_photo ON director_1.id = director_photo.director_id ' + 'ORDER BY director_1.id' % ( + " DISTINCT" if expect_distinct else "") + ) + result = s.execute(q3) + rows = result.fetchall() + if expect_distinct: + eq_(set(tuple(t) for t in rows), set([ + (1, u'/1.jpg', 1, 1), + (2, u'/2.jpg', 1, 1), + ])) + else: + # oracle might not order the way we expect here + eq_(set(tuple(t) for t in rows), set([ + (1, u'/1.jpg', 1, 1), + (2, u'/2.jpg', 1, 1), + (1, u'/1.jpg', 1, 1), + (2, u'/2.jpg', 1, 1), + ])) + + + movies = q.all() + + # check number of persistent objects in session + eq_(len(list(s)), 5) + + def test_cant_do_distinct_in_joins(self): + """the DISTINCT feature here works when the m2o is in the innermost + mapper, but when we are just joining along relationships outside + of that, we can still have dupes, and there's no solution to that. + + """ + Movie = self.classes.Movie + Credit = self.classes.Credit + + s = create_session() + + q = ( + s.query(Credit) + .options( + subqueryload(Credit.movie) + .subqueryload(Movie.director) + ) + ) + + ctx = q._compile_context() + + q2 = ctx.attributes[ + ('subquery', (inspect(Credit), Credit.movie.property)) + ] + ctx2 = q2._compile_context() + q3 = ctx2.attributes[ + ('subquery', (inspect(Movie), Movie.director.property)) + ] + + result = s.execute(q3) + eq_( + result.fetchall(), + [ + (1, 'Woody Allen', 1), (1, 'Woody Allen', 1), + ] + ) + + +class JoinedNoLoadConflictTest(fixtures.DeclarativeMappedTest): + """test for [ticket:2887]""" + + @classmethod + def setup_classes(cls): + Base = cls.DeclarativeBasic + + class Parent(ComparableEntity, Base): + __tablename__ = 'parent' + + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + name = Column(String(20)) + + children = relationship('Child', + back_populates='parent', + lazy='noload' + ) + + class Child(ComparableEntity, Base): + __tablename__ = 'child' + + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + name = Column(String(20)) + parent_id = Column(Integer, ForeignKey('parent.id')) + + parent = relationship('Parent', back_populates='children', lazy='joined') + + @classmethod + def insert_data(cls): + Parent = cls.classes.Parent + Child = cls.classes.Child + + s = Session() + s.add(Parent(name='parent', children=[Child(name='c1')])) + s.commit() + + def test_subqueryload_on_joined_noload(self): + Parent = self.classes.Parent + Child = self.classes.Child + + s = Session() + + # here we have Parent->subqueryload->Child->joinedload->parent->noload->children. + # the actual subqueryload has to emit *after* we've started populating + # Parent->subqueryload->child. + parent = s.query(Parent).options([subqueryload('children')]).first() + eq_( + parent.children, + [Child(name='c1')] + ) + diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index 4b9799d47..386280a50 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -849,21 +849,40 @@ class DefaultTest(fixtures.MappedTest): eq_(h5.foober, 'im the new foober') @testing.fails_on('firebird', 'Data type unknown on the parameter') + @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug") def test_eager_defaults(self): hohoval, default_t, Hoho = (self.other.hohoval, self.tables.default_t, self.classes.Hoho) + Secondary = self.classes.Secondary + + mapper(Hoho, default_t, eager_defaults=True, properties={ + "sec": relationship(Secondary), + "syn": sa.orm.synonym(default_t.c.counter) + }) - mapper(Hoho, default_t, eager_defaults=True) + mapper(Secondary, self.tables.secondary_table) h1 = Hoho() session = create_session() session.add(h1) - session.flush() + + if testing.db.dialect.implicit_returning: + self.sql_count_(1, session.flush) + else: + self.sql_count_(2, session.flush) self.sql_count_(0, lambda: eq_(h1.hoho, hohoval)) + # no actual eager defaults, make sure error isn't raised + h2 = Hoho(hoho=hohoval, counter=5) + session.add(h2) + session.flush() + eq_(h2.hoho, hohoval) + eq_(h2.counter, 5) + + def test_insert_nopostfetch(self): default_t, Hoho = self.tables.default_t, self.classes.Hoho diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py index 6915ac8a2..ac94fde2f 100644 --- a/test/orm/test_update_delete.py +++ b/test/orm/test_update_delete.py @@ -545,12 +545,14 @@ class UpdateDeleteFromTest(fixtures.MappedTest): def define_tables(cls, metadata): Table('users', metadata, Column('id', Integer, primary_key=True), + Column('samename', String(10)), ) Table('documents', metadata, Column('id', Integer, primary_key=True), Column('user_id', None, ForeignKey('users.id')), Column('title', String(32)), - Column('flag', Boolean) + Column('flag', Boolean), + Column('samename', String(10)), ) @classmethod @@ -659,6 +661,34 @@ class UpdateDeleteFromTest(fixtures.MappedTest): ]) ) + @testing.only_on('mysql', 'Multi table update') + def test_update_from_multitable_same_names(self): + Document = self.classes.Document + User = self.classes.User + + s = Session() + + s.query(Document).\ + filter(User.id == Document.user_id).\ + filter(User.id == 2).update({ + Document.samename: 'd_samename', + User.samename: 'u_samename' + } + ) + eq_( + s.query(User.id, Document.samename, User.samename). + filter(User.id == Document.user_id). + order_by(User.id).all(), + [ + (1, None, None), + (1, None, None), + (2, 'd_samename', 'u_samename'), + (2, 'd_samename', 'u_samename'), + (3, None, None), + (3, None, None), + ] + ) + class ExpressionUpdateTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): @@ -786,3 +816,5 @@ class InheritTest(fixtures.DeclarativeMappedTest): set(s.query(Person.name, Engineer.engineer_name)), set([('e1', 'e1', ), ('e22', 'e55')]) ) + + diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py index d15881075..ae225ad92 100644 --- a/test/orm/test_utils.py +++ b/test/orm/test_utils.py @@ -5,27 +5,31 @@ from sqlalchemy import util from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import Table -from sqlalchemy.orm import aliased, with_polymorphic -from sqlalchemy.orm import mapper, create_session +from sqlalchemy.orm import aliased, with_polymorphic, synonym +from sqlalchemy.orm import mapper, create_session, Session from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy.testing import eq_, is_ -from sqlalchemy.orm.util import PathRegistry +from sqlalchemy.orm.path_registry import PathRegistry, RootRegistry from sqlalchemy import inspect +from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method +from sqlalchemy.testing import AssertsCompiledSQL -class AliasedClassTest(fixtures.TestBase): - def point_map(self, cls): +class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = 'default' + + def _fixture(self, cls, properties={}): table = Table('point', MetaData(), Column('id', Integer(), primary_key=True), Column('x', Integer), Column('y', Integer)) - mapper(cls, table) + mapper(cls, table, properties=properties) return table def test_simple(self): class Point(object): pass - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) @@ -36,48 +40,51 @@ class AliasedClassTest(fixtures.TestBase): assert Point.id.__clause_element__().table is table assert alias.id.__clause_element__().table is not table - def test_notcallable(self): + def test_not_instantiatable(self): class Point(object): pass - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) assert_raises(TypeError, alias) - def test_instancemethods(self): + def test_instancemethod(self): class Point(object): def zero(self): self.x, self.y = 0, 0 - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) assert Point.zero + # TODO: I don't quite understand this + # still if util.py2k: - # TODO: what is this testing ?? assert not getattr(alias, 'zero') + else: + assert getattr(alias, 'zero') - def test_classmethods(self): + def test_classmethod(self): class Point(object): @classmethod def max_x(cls): return 100 - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) assert Point.max_x assert alias.max_x - assert Point.max_x() == alias.max_x() + assert Point.max_x() == alias.max_x() == 100 - def test_simpleproperties(self): + def test_simple_property(self): class Point(object): @property def max_x(self): return 100 - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) assert Point.max_x @@ -86,7 +93,6 @@ class AliasedClassTest(fixtures.TestBase): assert Point.max_x is alias.max_x def test_descriptors(self): - """Tortured...""" class descriptor(object): def __init__(self, fn): @@ -105,7 +111,7 @@ class AliasedClassTest(fixtures.TestBase): def thing(self, arg): return arg.center - table = self.point_map(Point) + table = self._fixture(Point) alias = aliased(Point) assert Point.thing != (0, 0) @@ -115,74 +121,106 @@ class AliasedClassTest(fixtures.TestBase): assert alias.thing != (0, 0) assert alias.thing.method() == 'method' - def test_hybrid_descriptors(self): + def _assert_has_table(self, expr, table): from sqlalchemy import Column # override testlib's override - import types - - class MethodDescriptor(object): - def __init__(self, func): - self.func = func - def __get__(self, instance, owner): - if instance is None: - if util.py2k: - args = (self.func, owner, owner.__class__) - else: - args = (self.func, owner) - else: - if util.py2k: - args = (self.func, instance, owner) - else: - args = (self.func, instance) - return types.MethodType(*args) - - class PropertyDescriptor(object): - def __init__(self, fget, fset, fdel): - self.fget = fget - self.fset = fset - self.fdel = fdel - def __get__(self, instance, owner): - if instance is None: - return self.fget(owner) - else: - return self.fget(instance) - def __set__(self, instance, value): - self.fset(instance, value) - def __delete__(self, instance): - self.fdel(instance) - hybrid = MethodDescriptor - def hybrid_property(fget, fset=None, fdel=None): - return PropertyDescriptor(fget, fset, fdel) - - def assert_table(expr, table): - for child in expr.get_children(): - if isinstance(child, Column): - assert child.table is table + for child in expr.get_children(): + if isinstance(child, Column): + assert child.table is table + def test_hybrid_descriptor_one(self): class Point(object): def __init__(self, x, y): self.x, self.y = x, y - @hybrid + + @hybrid_method def left_of(self, other): return self.x < other.x - double_x = hybrid_property(lambda self: self.x * 2) + self._fixture(Point) + alias = aliased(Point) + sess = Session() + + self.assert_compile( + sess.query(alias).filter(alias.left_of(Point)), + "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, " + "point_1.y AS point_1_y FROM point AS point_1, point " + "WHERE point_1.x < point.x" + ) + + def test_hybrid_descriptor_two(self): + class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y + + @hybrid_property + def double_x(self): + return self.x * 2 - table = self.point_map(Point) + self._fixture(Point) alias = aliased(Point) - alias_table = alias.x.__clause_element__().table - assert table is not alias_table - p1 = Point(-10, -10) - p2 = Point(20, 20) + eq_(str(Point.double_x), "point.x * :x_1") + eq_(str(alias.double_x), "point_1.x * :x_1") - assert p1.left_of(p2) - assert p1.double_x == -20 + sess = Session() + + self.assert_compile( + sess.query(alias).filter(alias.double_x > Point.x), + "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, " + "point_1.y AS point_1_y FROM point AS point_1, point " + "WHERE point_1.x * :x_1 > point.x" + ) + + def test_hybrid_descriptor_three(self): + class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y - assert_table(Point.double_x, table) - assert_table(alias.double_x, alias_table) + @hybrid_property + def x_alone(self): + return self.x - assert_table(Point.left_of(p2), table) - assert_table(alias.left_of(p2), alias_table) + self._fixture(Point) + alias = aliased(Point) + + eq_(str(Point.x_alone), "Point.x") + eq_(str(alias.x_alone), "AliasedClass_Point.x") + + assert Point.x_alone is Point.x + + eq_(str(alias.x_alone == alias.x), "point_1.x = point_1.x") + + a2 = aliased(Point) + eq_(str(a2.x_alone == alias.x), "point_1.x = point_2.x") + + sess = Session() + + self.assert_compile( + sess.query(alias).filter(alias.x_alone > Point.x), + "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, " + "point_1.y AS point_1_y FROM point AS point_1, point " + "WHERE point_1.x > point.x" + ) + + def test_proxy_descriptor_one(self): + class Point(object): + def __init__(self, x, y): + self.x, self.y = x, y + + self._fixture(Point, properties={ + 'x_syn': synonym("x") + }) + alias = aliased(Point) + + eq_(str(Point.x_syn), "Point.x_syn") + eq_(str(alias.x_syn), "AliasedClass_Point.x_syn") + + sess = Session() + self.assert_compile( + sess.query(alias.x_syn).filter(alias.x_syn > Point.x_syn), + "SELECT point_1.x AS point_1_x FROM point AS point_1, point " + "WHERE point_1.x > point.x" + ) class IdentityKeyTest(_fixtures.FixtureTest): run_inserts = None @@ -241,12 +279,12 @@ class PathRegistryTest(_fixtures.FixtureTest): def test_root_registry(self): umapper = inspect(self.classes.User) is_( - orm_util.RootRegistry()[umapper], + RootRegistry()[umapper], umapper._path_registry ) eq_( - orm_util.RootRegistry()[umapper], - orm_util.PathRegistry.coerce((umapper,)) + RootRegistry()[umapper], + PathRegistry.coerce((umapper,)) ) def test_expand(self): diff --git a/test/orm/test_validators.py b/test/orm/test_validators.py new file mode 100644 index 000000000..417554f46 --- /dev/null +++ b/test/orm/test_validators.py @@ -0,0 +1,281 @@ +from test.orm import _fixtures +from sqlalchemy.testing import fixtures, assert_raises, eq_, ne_ +from sqlalchemy.orm import mapper, Session, validates, relationship +from sqlalchemy.testing.mock import Mock, call + + +class ValidatorTest(_fixtures.FixtureTest): + def test_scalar(self): + users = self.tables.users + canary = Mock() + + class User(fixtures.ComparableEntity): + @validates('name') + def validate_name(self, key, name): + canary(key, name) + ne_(name, 'fred') + return name + ' modified' + + mapper(User, users) + sess = Session() + u1 = User(name='ed') + eq_(u1.name, 'ed modified') + assert_raises(AssertionError, setattr, u1, "name", "fred") + eq_(u1.name, 'ed modified') + eq_(canary.mock_calls, [call('name', 'ed'), call('name', 'fred')]) + + sess.add(u1) + sess.commit() + + eq_( + sess.query(User).filter_by(name='ed modified').one(), + User(name='ed') + ) + + def test_collection(self): + users, addresses, Address = (self.tables.users, + self.tables.addresses, + self.classes.Address) + + canary = Mock() + class User(fixtures.ComparableEntity): + @validates('addresses') + def validate_address(self, key, ad): + canary(key, ad) + assert '@' in ad.email_address + return ad + + mapper(User, users, properties={ + 'addresses': relationship(Address)} + ) + mapper(Address, addresses) + sess = Session() + u1 = User(name='edward') + a0 = Address(email_address='noemail') + assert_raises(AssertionError, u1.addresses.append, a0) + a1 = Address(id=15, email_address='foo@bar.com') + u1.addresses.append(a1) + eq_(canary.mock_calls, [call('addresses', a0), call('addresses', a1)]) + sess.add(u1) + sess.commit() + + eq_( + sess.query(User).filter_by(name='edward').one(), + User(name='edward', addresses=[Address(email_address='foo@bar.com')]) + ) + + def test_validators_dict(self): + users, addresses, Address = (self.tables.users, + self.tables.addresses, + self.classes.Address) + + class User(fixtures.ComparableEntity): + + @validates('name') + def validate_name(self, key, name): + ne_(name, 'fred') + return name + ' modified' + + @validates('addresses') + def validate_address(self, key, ad): + assert '@' in ad.email_address + return ad + + def simple_function(self, key, value): + return key, value + + u_m = mapper(User, users, properties={ + 'addresses': relationship(Address) + } + ) + mapper(Address, addresses) + + eq_( + dict((k, v[0].__name__) for k, v in list(u_m.validators.items())), + {'name': 'validate_name', + 'addresses': 'validate_address'} + ) + + def test_validator_w_removes(self): + users, addresses, Address = (self.tables.users, + self.tables.addresses, + self.classes.Address) + canary = Mock() + class User(fixtures.ComparableEntity): + + @validates('name', include_removes=True) + def validate_name(self, key, item, remove): + canary(key, item, remove) + return item + + @validates('addresses', include_removes=True) + def validate_address(self, key, item, remove): + canary(key, item, remove) + return item + + mapper(User, users, properties={ + 'addresses': relationship(Address) + }) + mapper(Address, addresses) + + u1 = User() + u1.name = "ed" + u1.name = "mary" + del u1.name + a1, a2, a3 = Address(), Address(), Address() + u1.addresses.append(a1) + u1.addresses.remove(a1) + u1.addresses = [a1, a2] + u1.addresses = [a2, a3] + + eq_(canary.mock_calls, [ + call('name', 'ed', False), + call('name', 'mary', False), + call('name', 'mary', True), + # append a1 + call('addresses', a1, False), + # remove a1 + call('addresses', a1, True), + # set to [a1, a2] - this is two appends + call('addresses', a1, False), call('addresses', a2, False), + # set to [a2, a3] - this is a remove of a1, + # append of a3. the appends are first. + call('addresses', a3, False), + call('addresses', a1, True), + ] + ) + + def test_validator_wo_backrefs_wo_removes(self): + self._test_validator_backrefs(False, False) + + def test_validator_wo_backrefs_w_removes(self): + self._test_validator_backrefs(False, True) + + def test_validator_w_backrefs_wo_removes(self): + self._test_validator_backrefs(True, False) + + def test_validator_w_backrefs_w_removes(self): + self._test_validator_backrefs(True, True) + + def _test_validator_backrefs(self, include_backrefs, include_removes): + users, addresses = (self.tables.users, + self.tables.addresses) + canary = Mock() + class User(fixtures.ComparableEntity): + + if include_removes: + @validates('addresses', include_removes=True, + include_backrefs=include_backrefs) + def validate_address(self, key, item, remove): + canary(key, item, remove) + return item + else: + @validates('addresses', include_removes=False, + include_backrefs=include_backrefs) + def validate_address(self, key, item): + canary(key, item) + return item + + class Address(fixtures.ComparableEntity): + if include_removes: + @validates('user', include_backrefs=include_backrefs, + include_removes=True) + def validate_user(self, key, item, remove): + canary(key, item, remove) + return item + else: + @validates('user', include_backrefs=include_backrefs) + def validate_user(self, key, item): + canary(key, item) + return item + + mapper(User, users, properties={ + 'addresses': relationship(Address, backref="user") + }) + mapper(Address, addresses) + + u1 = User() + u2 = User() + a1, a2 = Address(), Address() + + # 3 append/set, two removes + u1.addresses.append(a1) + u1.addresses.append(a2) + a2.user = u2 + del a1.user + u2.addresses.remove(a2) + + # copy, so that generation of the + # comparisons don't get caught + calls = list(canary.mock_calls) + + if include_backrefs: + if include_removes: + eq_(calls, + [ + # append #1 + call('addresses', Address(), False), + + # backref for append + call('user', User(addresses=[]), False), + + # append #2 + call('addresses', Address(user=None), False), + + # backref for append + call('user', User(addresses=[]), False), + + # assign a2.user = u2 + call('user', User(addresses=[]), False), + + # backref for u1.addresses.remove(a2) + call('addresses', Address(user=None), True), + + # backref for u2.addresses.append(a2) + call('addresses', Address(user=None), False), + + # del a1.user + call('user', User(addresses=[]), True), + + # backref for u1.addresses.remove(a1) + call('addresses', Address(), True), + + # u2.addresses.remove(a2) + call('addresses', Address(user=None), True), + + # backref for a2.user = None + call('user', None, False) + ] + ) + else: + eq_(calls, + [ + call('addresses', Address()), + call('user', User(addresses=[])), + call('addresses', Address(user=None)), + call('user', User(addresses=[])), + call('user', User(addresses=[])), + call('addresses', Address(user=None)), + call('user', None) + ] + ) + else: + if include_removes: + eq_(calls, + [ + call('addresses', Address(), False), + call('addresses', Address(user=None), False), + call('user', User(addresses=[]), False), + call('user', User(addresses=[]), True), + call('addresses', Address(user=None), True) + ] + + ) + else: + eq_(calls, + [ + call('addresses', Address()), + call('addresses', Address(user=None)), + call('user', User(addresses=[])) + ] + ) diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py index abb08c536..9379543ed 100644 --- a/test/orm/test_versioning.py +++ b/test/orm/test_versioning.py @@ -11,7 +11,7 @@ from sqlalchemy.orm import mapper, relationship, Session, \ from sqlalchemy.testing import eq_, ne_, assert_raises, assert_raises_message from sqlalchemy.testing import fixtures from test.orm import _fixtures -from sqlalchemy.testing import fixtures +from sqlalchemy.testing.assertsql import AllOf, CompiledSQL _uuids = [ @@ -461,12 +461,12 @@ class AlternateGeneratorTest(fixtures.MappedTest): cls.classes.P) mapper(P, p, version_id_col=p.c.version_id, - version_id_generator=lambda x:make_uuid(), + version_id_generator=lambda x: make_uuid(), properties={ - 'c':relationship(C, uselist=False, cascade='all, delete-orphan') + 'c': relationship(C, uselist=False, cascade='all, delete-orphan') }) mapper(C, c, version_id_col=c.c.version_id, - version_id_generator=lambda x:make_uuid(), + version_id_generator=lambda x: make_uuid(), ) @testing.emits_warning_on('+zxjdbc', r'.*does not support updated rowcount') @@ -643,3 +643,276 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest): mapper, Sub, sub, inherits=Base, version_id_col=sub.c.version_id) + + +class ServerVersioningTest(fixtures.MappedTest): + run_define_tables = 'each' + + @classmethod + def define_tables(cls, metadata): + from sqlalchemy.sql import ColumnElement + from sqlalchemy.ext.compiler import compiles + import itertools + + counter = itertools.count(1) + + class IncDefault(ColumnElement): + pass + + @compiles(IncDefault) + def compile(element, compiler, **kw): + # cache the counter value on the statement + # itself so the assertsql system gets the same + # value when it compiles the statement a second time + stmt = compiler.statement + if hasattr(stmt, "_counter"): + return stmt._counter + else: + stmt._counter = str(next(counter)) + return stmt._counter + + Table('version_table', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('version_id', Integer, nullable=False, + default=IncDefault(), onupdate=IncDefault()), + Column('value', String(40), nullable=False)) + + @classmethod + def setup_classes(cls): + class Foo(cls.Basic): + pass + class Bar(cls.Basic): + pass + + def _fixture(self, expire_on_commit=True): + Foo, version_table = self.classes.Foo, self.tables.version_table + + mapper(Foo, version_table, + version_id_col=version_table.c.version_id, + version_id_generator=False, + ) + + s1 = Session(expire_on_commit=expire_on_commit) + return s1 + + def test_insert_col(self): + sess = self._fixture() + + f1 = self.classes.Foo(value='f1') + sess.add(f1) + + statements = [ + # note that the assertsql tests the rule against + # "default" - on a "returning" backend, the statement + # includes "RETURNING" + CompiledSQL( + "INSERT INTO version_table (version_id, value) " + "VALUES (1, :value)", + lambda ctx: [{'value': 'f1'}] + ) + ] + if not testing.db.dialect.implicit_returning: + # DBs without implicit returning, we must immediately + # SELECT for the new version id + statements.append( + CompiledSQL( + "SELECT version_table.version_id AS version_table_version_id " + "FROM version_table WHERE version_table.id = :param_1", + lambda ctx: [{"param_1": 1}] + ) + ) + self.assert_sql_execution(testing.db, sess.flush, *statements) + + def test_update_col(self): + sess = self._fixture() + + f1 = self.classes.Foo(value='f1') + sess.add(f1) + sess.flush() + + f1.value = 'f2' + + statements = [ + # note that the assertsql tests the rule against + # "default" - on a "returning" backend, the statement + # includes "RETURNING" + CompiledSQL( + "UPDATE version_table SET version_id=2, value=:value " + "WHERE version_table.id = :version_table_id AND " + "version_table.version_id = :version_table_version_id", + lambda ctx: [{"version_table_id": 1, + "version_table_version_id": 1, "value": "f2"}] + ) + ] + if not testing.db.dialect.implicit_returning: + # DBs without implicit returning, we must immediately + # SELECT for the new version id + statements.append( + CompiledSQL( + "SELECT version_table.version_id AS version_table_version_id " + "FROM version_table WHERE version_table.id = :param_1", + lambda ctx: [{"param_1": 1}] + ) + ) + self.assert_sql_execution(testing.db, sess.flush, *statements) + + + def test_delete_col(self): + sess = self._fixture() + + f1 = self.classes.Foo(value='f1') + sess.add(f1) + sess.flush() + + sess.delete(f1) + + statements = [ + # note that the assertsql tests the rule against + # "default" - on a "returning" backend, the statement + # includes "RETURNING" + CompiledSQL( + "DELETE FROM version_table " + "WHERE version_table.id = :id AND " + "version_table.version_id = :version_id", + lambda ctx: [{"id": 1, "version_id": 1}] + ) + ] + self.assert_sql_execution(testing.db, sess.flush, *statements) + + def test_concurrent_mod_err_expire_on_commit(self): + sess = self._fixture() + + f1 = self.classes.Foo(value='f1') + sess.add(f1) + sess.commit() + + f1.value + + s2 = Session() + f2 = s2.query(self.classes.Foo).first() + f2.value = 'f2' + s2.commit() + + f1.value = 'f3' + + assert_raises_message( + orm.exc.StaleDataError, + r"UPDATE statement on table 'version_table' expected to " + r"update 1 row\(s\); 0 were matched.", + sess.commit + ) + + def test_concurrent_mod_err_noexpire_on_commit(self): + sess = self._fixture(expire_on_commit=False) + + f1 = self.classes.Foo(value='f1') + sess.add(f1) + sess.commit() + + # here, we're not expired overall, so no load occurs and we + # stay without a version id, unless we've emitted + # a SELECT for it within the flush. + f1.value + + s2 = Session(expire_on_commit=False) + f2 = s2.query(self.classes.Foo).first() + f2.value = 'f2' + s2.commit() + + f1.value = 'f3' + + assert_raises_message( + orm.exc.StaleDataError, + r"UPDATE statement on table 'version_table' expected to " + r"update 1 row\(s\); 0 were matched.", + sess.commit + ) + +class ManualVersionTest(fixtures.MappedTest): + run_define_tables = 'each' + + @classmethod + def define_tables(cls, metadata): + Table("a", metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column('data', String(30)), + Column('vid', Integer) + ) + + @classmethod + def setup_classes(cls): + class A(cls.Basic): + pass + + + @classmethod + def setup_mappers(cls): + mapper(cls.classes.A, cls.tables.a, + version_id_col=cls.tables.a.c.vid, + version_id_generator=False) + + def test_insert(self): + sess = Session() + a1 = self.classes.A() + + a1.vid = 1 + sess.add(a1) + sess.commit() + + eq_(a1.vid, 1) + + def test_update(self): + sess = Session() + a1 = self.classes.A() + + a1.vid = 1 + a1.data = 'd1' + sess.add(a1) + sess.commit() + + a1.vid = 2 + a1.data = 'd2' + + sess.commit() + + eq_(a1.vid, 2) + + def test_update_concurrent_check(self): + sess = Session() + a1 = self.classes.A() + + a1.vid = 1 + a1.data = 'd1' + sess.add(a1) + sess.commit() + + a1.vid = 2 + sess.execute(self.tables.a.update().values(vid=3)) + a1.data = 'd2' + assert_raises( + orm_exc.StaleDataError, + sess.commit + ) + + def test_update_version_conditional(self): + sess = Session() + a1 = self.classes.A() + + a1.vid = 1 + a1.data = 'd1' + sess.add(a1) + sess.commit() + + # change the data and UPDATE without + # incrementing version id + a1.data = 'd2' + sess.commit() + + eq_(a1.vid, 1) + + a1.data = 'd3' + a1.vid = 2 + sess.commit() + + eq_(a1.vid, 2)
\ No newline at end of file diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py index 937e6ddff..8036ac268 100644 --- a/test/perf/orm2010.py +++ b/test/perf/orm2010.py @@ -1,7 +1,7 @@ -# monkeypatch the "cdecimal" library in. -# this is a drop-in replacement for "decimal". -# All SQLA versions support cdecimal except -# for the MS-SQL dialect, which is fixed in 0.7 +import warnings +warnings.filterwarnings("ignore", r".*Decimal objects natively") + +# speed up cdecimal if available try: import cdecimal import sys @@ -13,11 +13,7 @@ from sqlalchemy import __version__ from sqlalchemy import Column, Integer, create_engine, ForeignKey, \ String, Numeric -if __version__ < "0.6": - from sqlalchemy.orm.session import Session - from sqlalchemy.orm import relation as relationship -else: - from sqlalchemy.orm import Session, relationship +from sqlalchemy.orm import Session, relationship from sqlalchemy.ext.declarative import declarative_base import random @@ -33,7 +29,7 @@ class Employee(Base): name = Column(String(100), nullable=False) type = Column(String(50), nullable=False) - __mapper_args__ = {'polymorphic_on':type} + __mapper_args__ = {'polymorphic_on': type} class Boss(Employee): __tablename__ = 'boss' @@ -41,7 +37,7 @@ class Boss(Employee): id = Column(Integer, ForeignKey('employee.id'), primary_key=True) golf_average = Column(Numeric) - __mapper_args__ = {'polymorphic_identity':'boss'} + __mapper_args__ = {'polymorphic_identity': 'boss'} class Grunt(Employee): __tablename__ = 'grunt' @@ -51,32 +47,10 @@ class Grunt(Employee): employer_id = Column(Integer, ForeignKey('boss.id')) - # Configure an 'employer' relationship, where Grunt references - # Boss. This is a joined-table subclass to subclass relationship, - # which is a less typical case. - - # In 0.7, "Boss.id" is the "id" column of "boss", as would be expected. - if __version__ >= "0.7": - employer = relationship("Boss", backref="employees", - primaryjoin=Boss.id==employer_id) - - # Prior to 0.7, "Boss.id" is the "id" column of "employee". - # Long story. So we hardwire the relationship against the "id" - # column of Boss' table. - elif __version__ >= "0.6": - employer = relationship("Boss", backref="employees", - primaryjoin=Boss.__table__.c.id==employer_id) - - # In 0.5, the many-to-one loader wouldn't recognize the above as a - # simple "identity map" fetch. So to give 0.5 a chance to emit - # the same amount of SQL as 0.6, we hardwire the relationship against - # "employee.id" to work around the bug. - else: - employer = relationship("Boss", backref="employees", - primaryjoin=Employee.__table__.c.id==employer_id, - foreign_keys=employer_id) + employer = relationship("Boss", backref="employees", + primaryjoin=Boss.id == employer_id) - __mapper_args__ = {'polymorphic_identity':'grunt'} + __mapper_args__ = {'polymorphic_identity': 'grunt'} if os.path.exists('orm2010.db'): os.remove('orm2010.db') @@ -88,101 +62,122 @@ Base.metadata.create_all(engine) sess = Session(engine) -def runit(): - # create 1000 Boss objects. +def runit(status, factor=1): + num_bosses = 100 * factor + num_grunts = num_bosses * 100 + bosses = [ Boss( name="Boss %d" % i, golf_average=Decimal(random.randint(40, 150)) ) - for i in range(1000) + for i in range(num_bosses) ] sess.add_all(bosses) + status("Added %d boss objects" % num_bosses) - - # create 10000 Grunt objects. grunts = [ Grunt( name="Grunt %d" % i, savings=Decimal(random.randint(5000000, 15000000) / 100) ) - for i in range(10000) + for i in range(num_grunts) ] + status("Added %d grunt objects" % num_grunts) - # Assign each Grunt a Boss. Look them up in the DB - # to simulate a little bit of two-way activity with the - # DB while we populate. Autoflush occurs on each query. - # In 0.7 executemany() is used for all the "boss" and "grunt" - # tables since priamry key fetching is not needed. while grunts: + # this doesn't associate grunts with bosses evenly, + # just associates lots of them with a relatively small + # handful of bosses + batch_size = 100 + batch_num = (num_grunts - len(grunts)) / batch_size boss = sess.query(Boss).\ - filter_by(name="Boss %d" % (101 - len(grunts) / 100)).\ + filter_by(name="Boss %d" % batch_num).\ first() - for grunt in grunts[0:100]: + for grunt in grunts[0:batch_size]: grunt.employer = boss - grunts = grunts[100:] + grunts = grunts[batch_size:] sess.commit() + status("Associated grunts w/ bosses and committed") + + # do some heavier reading + for i in range(int(round(factor / 2.0))): + status("Heavy query run #%d" % (i + 1)) - report = [] + report = [] - # load all the Grunts, print a report with their name, stats, - # and their bosses' stats. - for grunt in sess.query(Grunt): - # here, the overhead of a many-to-one fetch of - # "grunt.employer" directly from the identity map - # is less than half of that of 0.6. - report.append(( - grunt.name, - grunt.savings, - grunt.employer.name, - grunt.employer.golf_average - )) + # load all the Grunts, print a report with their name, stats, + # and their bosses' stats. + for grunt in sess.query(Grunt): + report.append(( + grunt.name, + grunt.savings, + grunt.employer.name, + grunt.employer.golf_average + )) -import cProfile, os, pstats + sess.close() # close out the session -filename = "orm2010.profile" -cProfile.runctx('runit()', globals(), locals(), filename) -stats = pstats.Stats(filename) +def run_with_profile(runsnake=False, dump=False): + import cProfile + import pstats + filename = "orm2010.profile" -counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats) + def status(msg): + print(msg) -print("SQLA Version: %s" % __version__) -print("Total calls %d" % stats.total_calls) -print("Total cpu seconds: %.2f" % stats.total_tt) -print('Total execute calls: %d' \ - % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' " - "objects>"]) -print('Total executemany calls: %d' \ - % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' " - "objects>", 0)) + cProfile.runctx('runit(status)', globals(), locals(), filename) + stats = pstats.Stats(filename) -#stats.sort_stats('time', 'calls') -#stats.print_stats() -os.system("runsnake %s" % filename) + counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats) -# SQLA Version: 0.7b1 -# Total calls 4956750 -# Total execute calls: 11201 -# Total executemany calls: 101 + print("SQLA Version: %s" % __version__) + print("Total calls %d" % stats.total_calls) + print("Total cpu seconds: %.2f" % stats.total_tt) + print('Total execute calls: %d' \ + % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' " + "objects>"]) + print('Total executemany calls: %d' \ + % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' " + "objects>", 0)) -# SQLA Version: 0.6.6 -# Total calls 7963214 -# Total execute calls: 22201 -# Total executemany calls: 0 + if dump: + stats.sort_stats('time', 'calls') + stats.print_stats() -# SQLA Version: 0.5.8 -# Total calls 10556480 -# Total execute calls: 22201 -# Total executemany calls: 0 + if runsnake: + os.system("runsnake %s" % filename) +def run_with_time(): + import time + now = time.time() + def status(msg): + print("%d - %s" % (time.time() - now, msg)) + runit(status, 10) + print("Total time: %d" % (time.time() - now)) +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser() + parser.add_argument('--profile', action='store_true', + help='run shorter test suite w/ cprofilng') + parser.add_argument('--dump', action='store_true', + help='dump full call profile (implies --profile)') + parser.add_argument('--runsnake', action='store_true', + help='invoke runsnakerun (implies --profile)') + args = parser.parse_args() + args.profile = args.profile or args.dump or args.runsnake + if args.profile: + run_with_profile(runsnake=args.runsnake, dump=args.dump) + else: + run_with_time() diff --git a/test/profiles.txt b/test/profiles.txt index c2ea3e959..4f833ef1c 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -1,21 +1,22 @@ # /Users/classic/dev/sqlalchemy/test/profiles.txt # This file is written out on a per-environment basis. -# For each test in aaa_profiling, the corresponding function and +# For each test in aaa_profiling, the corresponding function and # environment is located within this file. If it doesn't exist, # the test is skipped. -# If a callcount does exist, it is compared to what we received. +# If a callcount does exist, it is compared to what we received. # assertions are raised if the counts do not match. -# -# To add a new callcount test, apply the function_call_count -# decorator and re-run the tests using the --write-profiles +# +# To add a new callcount test, apply the function_call_count +# decorator and re-run the tests using the --write-profiles # option - this file will be rewritten including the new count. -# +# # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 72 +test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_oursql_nocextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_nocextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 72 @@ -23,8 +24,12 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cex test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 72 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_postgresql_psycopg2_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_sqlite_pysqlite_nocextensions 74 +test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_oursql_cextensions 77 +test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_oursql_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_oracle_cx_oracle_nocextensions 76 +test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 74 +test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 76 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 74 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select @@ -32,64 +37,75 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_noc test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 141 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 141 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 141 -test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_nocextensions 141 +test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_oursql_nocextensions 148 +test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 141 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 141 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 141 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 141 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_postgresql_psycopg2_nocextensions 151 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_sqlite_pysqlite_nocextensions 151 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_oracle_cx_oracle_nocextensions 153 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 151 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 151 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_oursql_cextensions 163 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_oursql_nocextensions 163 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 163 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 163 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 163 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 175 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_oursql_nocextensions 181 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_nocextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 175 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 175 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_postgresql_psycopg2_nocextensions 185 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_sqlite_pysqlite_nocextensions 185 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_oracle_cx_oracle_nocextensions 187 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 185 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 185 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_oursql_cextensions 196 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_oursql_nocextensions 196 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 196 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 196 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 196 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 75 +test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_oursql_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_nocextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 75 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_postgresql_psycopg2_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_sqlite_pysqlite_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_oracle_cx_oracle_nocextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 75 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 75 +test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_oursql_cextensions 80 +test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_oursql_nocextensions 80 +test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 80 +test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 80 +test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 80 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_nocextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 137 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_postgresql_psycopg2_nocextensions 136 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_sqlite_pysqlite_nocextensions 136 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_oracle_cx_oracle_nocextensions 138 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 136 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 136 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_oursql_nocextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_nocextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 149 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_oursql_cextensions 151 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_oursql_nocextensions 151 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 151 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 151 +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 151 + +# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set + +test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265 + +# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove + +test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6525 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline @@ -99,6 +115,9 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 51049 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 30008 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 39025 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_cextensions 32141 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 41144 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 31190 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols @@ -108,6 +127,9 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 32835 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 29812 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 32817 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_cextensions 31858 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 34861 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 30960 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity @@ -122,7 +144,9 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_postgresql_psycopg2_nocextensions 18987 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_sqlite_pysqlite_nocextensions 18987 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_oracle_cx_oracle_nocextensions 18987 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_cextensions 18987 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18987 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18987 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18987 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity @@ -138,7 +162,9 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_postgresql_psycopg2_nocextensions 121790 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_sqlite_pysqlite_nocextensions 121822 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_oracle_cx_oracle_nocextensions 130792 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_cextensions 126077 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 121822 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 164074 # TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks @@ -152,7 +178,9 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2. test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21790 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.2_postgresql_psycopg2_nocextensions 20424 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_oracle_cx_oracle_nocextensions 21244 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_cextensions 20268 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 20344 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23404 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load @@ -166,7 +194,9 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cexten test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1521 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.2_postgresql_psycopg2_nocextensions 1332 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_oracle_cx_oracle_nocextensions 1366 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_cextensions 1358 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1357 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1598 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load @@ -181,7 +211,9 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_noc test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_postgresql_psycopg2_nocextensions 127,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_sqlite_pysqlite_nocextensions 127,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_oracle_cx_oracle_nocextensions 134,19 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_cextensions 132,20 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 127,19 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 134,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 127,19 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect @@ -197,7 +229,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlit test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_postgresql_psycopg2_nocextensions 75 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_sqlite_pysqlite_nocextensions 75 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_oracle_cx_oracle_nocextensions 74 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_cextensions 74 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 74 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 74 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 74 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect @@ -213,7 +247,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqli test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_postgresql_psycopg2_nocextensions 23 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_sqlite_pysqlite_nocextensions 23 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_oracle_cx_oracle_nocextensions 22 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_cextensions 23 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 22 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 23 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 22 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect @@ -229,7 +265,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sq test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_postgresql_psycopg2_nocextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_sqlite_pysqlite_nocextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_oracle_cx_oracle_nocextensions 8 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_cextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 8 +test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 8 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute @@ -245,7 +283,9 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_postgresql_psycopg2_nocextensions 41 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_sqlite_pysqlite_nocextensions 41 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_oracle_cx_oracle_nocextensions 41 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 41 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 41 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 41 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 41 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute @@ -261,7 +301,9 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_postgresql_psycopg2_nocextensions 71 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_sqlite_pysqlite_nocextensions 71 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_oracle_cx_oracle_nocextensions 71 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 71 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 71 +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 71 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 71 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile @@ -277,93 +319,106 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_postgresql_psycopg2_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_sqlite_pysqlite_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_oracle_cx_oracle_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 15 +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 15 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15447 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 485 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 512 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15505 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_nocextensions 35582 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20471 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35491 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 427 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 455 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15447 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_postgresql_psycopg2_nocextensions 14459 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_sqlite_pysqlite_nocextensions 14430 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_oracle_cx_oracle_nocextensions 14548 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 497 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14457 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 453 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14430 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15447 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 485 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 512 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45505 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_nocextensions 35572 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20471 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35491 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 427 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 455 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15447 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_postgresql_psycopg2_nocextensions 14459 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_sqlite_pysqlite_nocextensions 14430 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_oracle_cx_oracle_nocextensions 14548 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 497 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14457 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 453 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14430 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5175 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 5340 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5470 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 4828 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 4792 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 5157 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 5179 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 256 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 256 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 251 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 270 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 270 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 259 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 259 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3425 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3625 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 3749 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 3401 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3385 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 3569 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3665 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11045 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 12747 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 11849 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 11803 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11688 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 13440 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 11548 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 12720 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1050 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1167 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1114 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1044 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1106 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1811 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1858 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1905 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1958 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 1731 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1721 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 1846 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1853 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2300 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2424 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_nocextensions 2559 test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.2_postgresql_psycopg2_nocextensions 2483 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2473 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_cextensions 2460 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2652 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 6157 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 6276 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 6252 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 6286 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 6251 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert @@ -371,20 +426,23 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_pos test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 391 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 398 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.2_postgresql_psycopg2_nocextensions 395 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 391 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 394 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties -test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6422 -test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 6654 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6765 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 7056 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 6560 -test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 6560 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 6895 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 6999 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 19145 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 20576 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 20279 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 20117 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 20279 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates @@ -392,6 +450,7 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_p test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1063 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1171 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1120 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1059 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1113 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing @@ -399,4 +458,5 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_po test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 2686 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2749 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 2749 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 2796 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 2749 diff --git a/test/requirements.py b/test/requirements.py index a56c037d1..29b7d9997 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -85,8 +85,6 @@ class DefaultRequirements(SuiteRequirements): no_support('oracle', 'not supported by database'), no_support('mssql', 'not supported by database'), no_support('sybase', 'not supported by database'), - no_support('maxdb', 'FIXME: verify not supported by database'), - no_support('informix', 'not supported by database'), ]) @property @@ -125,6 +123,18 @@ class DefaultRequirements(SuiteRequirements): ) @property + def insert_from_select(self): + return skip_if( + ["firebird"], "crashes for unknown reason" + ) + + @property + def fetch_rows_post_commit(self): + return skip_if( + ["firebird"], "not supported" + ) + + @property def binary_comparisons(self): """target database/driver can allow BLOB/BINARY fields to be compared against a bound parameter value. @@ -226,7 +236,6 @@ class DefaultRequirements(SuiteRequirements): "sqlite", "sybase", ("mysql", "<", (5, 0, 3)), - ("informix", "<", (11, 55, "xC3")) ], "savepoints not supported") @@ -283,14 +292,14 @@ class DefaultRequirements(SuiteRequirements): """Target database must support INTERSECT or equivalent.""" return fails_if([ - "firebird", "mysql", "sybase", "informix" + "firebird", "mysql", "sybase", ], 'no support for INTERSECT') @property def except_(self): """Target database must support EXCEPT or equivalent (i.e. MINUS).""" return fails_if([ - "firebird", "mysql", "sybase", "informix" + "firebird", "mysql", "sybase", ], 'no support for EXCEPT') @property @@ -313,7 +322,6 @@ class DefaultRequirements(SuiteRequirements): return skip_if([ no_support('firebird', 'no SA implementation'), - no_support('maxdb', 'two-phase xact not supported by database'), no_support('mssql', 'two-phase xact not supported by drivers'), no_support('oracle', 'two-phase xact not implemented in SQLA/oracle'), no_support('drizzle', 'two-phase xact not supported by database'), @@ -366,7 +374,6 @@ class DefaultRequirements(SuiteRequirements): """Target driver must support some degree of non-ascii symbol names.""" # TODO: expand to exclude MySQLdb versions w/ broken unicode return skip_if([ - no_support('maxdb', 'database support flakey'), no_support('oracle', 'FIXME: no support in database?'), no_support('sybase', 'FIXME: guessing, needs confirmation'), no_support('mssql+pymssql', 'no FreeTDS support'), @@ -394,7 +401,7 @@ class DefaultRequirements(SuiteRequirements): return fails_on_everything_except('mysql+mysqldb', 'mysql+oursql', 'sqlite+pysqlite', 'mysql+pymysql', 'mysql+cymysql', - 'sybase', 'mssql+pyodbc', 'mssql+mxodbc') + 'sybase', 'mssql') @property def implements_get_lastrowid(self): @@ -408,7 +415,8 @@ class DefaultRequirements(SuiteRequirements): cursor object. """ - return fails_on_everything_except('mysql+mysqldb', 'mysql+oursql', + return skip_if('mssql+pymssql', 'crashes on pymssql') + \ + fails_on_everything_except('mysql+mysqldb', 'mysql+oursql', 'sqlite+pysqlite', 'mysql+pymysql', 'mysql+cymysql') @@ -432,6 +440,15 @@ class DefaultRequirements(SuiteRequirements): 'sybase') @property + def datetime_literals(self): + """target dialect supports rendering of a date, time, or datetime as a + literal string, e.g. via the TypeEngine.literal_processor() method. + + """ + + return fails_on_everything_except("sqlite") + + @property def datetime(self): """target dialect supports representation of Python datetime.datetime() objects.""" @@ -486,23 +503,24 @@ class DefaultRequirements(SuiteRequirements): def precision_numerics_general(self): """target backend has general support for moderately high-precision numerics.""" - return fails_if('mssql+pymssql', 'FIXME: improve pymssql dec handling') + return exclusions.open() @property def precision_numerics_enotation_small(self): """target backend supports Decimal() objects using E notation to represent very small values.""" - return fails_if('mssql+pymssql', 'FIXME: improve pymssql dec handling') + # NOTE: this exclusion isn't used in current tests. + return exclusions.open() @property def precision_numerics_enotation_large(self): """target backend supports Decimal() objects using E notation to represent very large values.""" - return fails_if( - ("sybase+pyodbc", None, None, + return skip_if( + [("sybase+pyodbc", None, None, "Don't know how do get these values through FreeTDS + Sybase"), - ("firebird", None, None, "Precision must be from 1 to 18"), + ("firebird", None, None, "Precision must be from 1 to 18"),] ) @property @@ -537,8 +555,39 @@ class DefaultRequirements(SuiteRequirements): ) @property + def precision_generic_float_type(self): + """target backend will return native floating point numbers with at + least seven decimal places when using the generic Float type.""" + + return fails_if([ + ('mysql', None, None, + 'mysql FLOAT type only returns 4 decimals'), + ('firebird', None, None, + "firebird FLOAT type isn't high precision"), + ]) + + @property def floats_to_four_decimals(self): - return fails_if("mysql+oursql", "Floating point error") + return fails_if([ + ("mysql+oursql", None, None, "Floating point error"), + ("firebird", None, None, + "Firebird still has FP inaccuracy even " + "with only four decimal places"), + ('mssql+pyodbc', None, None, + 'mssql+pyodbc has FP inaccuracy even with ' + 'only four decimal places ' + ), + ('mssql+pymssql', None, None, + 'mssql+pymssql has FP inaccuracy even with ' + 'only four decimal places ' + ) + ]) + + @property + def fetch_null_from_numeric(self): + return skip_if( + ("mssql+pyodbc", None, None, "crashes due to bug #351"), + ) @property def python2(self): @@ -555,20 +604,6 @@ class DefaultRequirements(SuiteRequirements): ) @property - def python26(self): - return skip_if( - lambda: sys.version_info < (2, 6), - "Python version 2.6 or greater is required" - ) - - @property - def python25(self): - return skip_if( - lambda: sys.version_info < (2, 5), - "Python version 2.5 or greater is required" - ) - - @property def cpython(self): return only_if(lambda: util.cpython, "cPython interpreter needed" @@ -579,8 +614,9 @@ class DefaultRequirements(SuiteRequirements): def non_broken_pickle(self): from sqlalchemy.util import pickle return only_if( - lambda: pickle.__name__ == 'cPickle' or sys.version_info >= (3, 2), - "Needs cPickle or newer Python 3 pickle" + lambda: not util.pypy and pickle.__name__ == 'cPickle' + or sys.version_info >= (3, 2), + "Needs cPickle+cPython or newer Python 3 pickle" ) diff --git a/test/sql/test_case_statement.py b/test/sql/test_case_statement.py index 944a15384..998a55cd8 100644 --- a/test/sql/test_case_statement.py +++ b/test/sql/test_case_statement.py @@ -32,7 +32,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL): info_table.drop() @testing.fails_on('firebird', 'FIXME: unknown') - @testing.fails_on('maxdb', 'FIXME: unknown') @testing.requires.subqueries def test_case(self): inner = select([case([ @@ -130,7 +129,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL): @testing.fails_on('firebird', 'FIXME: unknown') - @testing.fails_on('maxdb', 'FIXME: unknown') def testcase_with_dict(self): query = select([case({ info_table.c.pk < 3: 'lessthan3', diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index bdfcccb22..53b9f68fc 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -18,7 +18,7 @@ from sqlalchemy import Integer, String, MetaData, Table, Column, select, \ literal, and_, null, type_coerce, alias, or_, literal_column,\ Float, TIMESTAMP, Numeric, Date, Text, collate, union, except_,\ intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\ - over, subquery, case + over, subquery, case, true import decimal from sqlalchemy.util import u from sqlalchemy import exc, sql, util, types, schema @@ -272,9 +272,10 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "SELECT foo() AS foo_1" ) + # this is native_boolean=False for default dialect self.assert_compile( select([not_(True)], use_labels=True), - "SELECT NOT :param_1" + "SELECT :param_1 = 0" ) self.assert_compile( @@ -852,6 +853,17 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): 'otherid_1': 9, 'myid_1': 12} ) + # test a generator + self.assert_compile( + and_( + conj for conj in [ + table1.c.myid == 12, + table1.c.name == 'asdf' + ] + ), + "mytable.myid = :myid_1 AND mytable.name = :name_1" + ) + def test_nested_conjunctions_short_circuit(self): """test that empty or_(), and_() conjunctions are collapsed by an enclosing conjunction.""" @@ -874,6 +886,26 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "SELECT t.x FROM t WHERE t.x = :x_1 OR t.x = :x_2" ) + def test_true_short_circuit(self): + t = table('t', column('x')) + + self.assert_compile( + select([t]).where(true()), + "SELECT t.x FROM t WHERE 1 = 1", + dialect=default.DefaultDialect(supports_native_boolean=False) + ) + self.assert_compile( + select([t]).where(true()), + "SELECT t.x FROM t WHERE true", + dialect=default.DefaultDialect(supports_native_boolean=True) + ) + + self.assert_compile( + select([t]), + "SELECT t.x FROM t", + dialect=default.DefaultDialect(supports_native_boolean=True) + ) + def test_distinct(self): self.assert_compile( select([table1.c.myid.distinct()]), @@ -1024,80 +1056,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): def test_for_update(self): self.assert_compile( - table1.select(table1.c.myid == 7, for_update=True), + table1.select(table1.c.myid == 7).with_for_update(), "SELECT mytable.myid, mytable.name, mytable.description " "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE") - self.assert_compile( - table1.select(table1.c.myid == 7, for_update=False), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = :myid_1") - # not supported by dialect, should just use update self.assert_compile( - table1.select(table1.c.myid == 7, for_update='nowait'), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE") - - # unknown lock mode - self.assert_compile( - table1.select(table1.c.myid == 7, for_update='unknown_mode'), + table1.select(table1.c.myid == 7).with_for_update(nowait=True), "SELECT mytable.myid, mytable.name, mytable.description " "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE") - # ----- mysql - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update=True), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %s FOR UPDATE", - dialect=mysql.dialect()) - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update="read"), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %s LOCK IN SHARE MODE", - dialect=mysql.dialect()) - - # ----- oracle - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update=True), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE", - dialect=oracle.dialect()) - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update="nowait"), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT", - dialect=oracle.dialect()) - - # ----- postgresql - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update=True), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE", - dialect=postgresql.dialect()) - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update="nowait"), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE NOWAIT", - dialect=postgresql.dialect()) - - self.assert_compile( - table1.select(table1.c.myid == 7, for_update="read"), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE", - dialect=postgresql.dialect()) + assert_raises_message( + exc.ArgumentError, + "Unknown for_update argument: 'unknown_mode'", + table1.select, table1.c.myid == 7, for_update='unknown_mode' + ) - self.assert_compile( - table1.select(table1.c.myid == 7, for_update="read_nowait"), - "SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE NOWAIT", - dialect=postgresql.dialect()) def test_alias(self): # test the alias for a table1. column names stay the same, @@ -1171,172 +1145,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect=mysql.dialect() ) - def test_text(self): - self.assert_compile( - text("select * from foo where lala = bar"), - "select * from foo where lala = bar" - ) - - # test bytestring - self.assert_compile(select( - ["foobar(a)", "pk_foo_bar(syslaal)"], - "a = 12", - from_obj=["foobar left outer join lala on foobar.foo = lala.foo"] - ), - "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar " - "left outer join lala on foobar.foo = lala.foo WHERE a = 12" - ) - - # test unicode - self.assert_compile(select( - ["foobar(a)", "pk_foo_bar(syslaal)"], - "a = 12", - from_obj=["foobar left outer join lala on foobar.foo = lala.foo"] - ), - "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar " - "left outer join lala on foobar.foo = lala.foo WHERE a = 12" - ) - - # test building a select query programmatically with text - s = select() - s.append_column("column1") - s.append_column("column2") - s.append_whereclause("column1=12") - s.append_whereclause("column2=19") - s = s.order_by("column1") - s.append_from("table1") - self.assert_compile(s, "SELECT column1, column2 FROM table1 WHERE " - "column1=12 AND column2=19 ORDER BY column1") - - self.assert_compile( - select(["column1", "column2"], - from_obj=table1).alias('somealias').select(), - "SELECT somealias.column1, somealias.column2 FROM " - "(SELECT column1, column2 FROM mytable) AS somealias" - ) - - # test that use_labels doesnt interfere with literal columns - self.assert_compile( - select(["column1", "column2", table1.c.myid], from_obj=table1, - use_labels=True), - "SELECT column1, column2, mytable.myid AS mytable_myid " - "FROM mytable" - ) - - # test that use_labels doesnt interfere - # with literal columns that have textual labels - self.assert_compile( - select(["column1 AS foobar", "column2 AS hoho", table1.c.myid], - from_obj=table1, use_labels=True), - "SELECT column1 AS foobar, column2 AS hoho, " - "mytable.myid AS mytable_myid FROM mytable" - ) - - # test that "auto-labeling of subquery columns" - # doesnt interfere with literal columns, - # exported columns dont get quoted - self.assert_compile( - select(["column1 AS foobar", "column2 AS hoho", table1.c.myid], - from_obj=[table1]).select(), - "SELECT column1 AS foobar, column2 AS hoho, myid FROM " - "(SELECT column1 AS foobar, column2 AS hoho, " - "mytable.myid AS myid FROM mytable)" - ) - - self.assert_compile( - select(['col1', 'col2'], from_obj='tablename').alias('myalias'), - "SELECT col1, col2 FROM tablename" - ) - - def test_binds_in_text(self): - self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", - bindparams=[bindparam('bar', 4), bindparam('whee', 7)]), - "select * from foo where lala=:bar and hoho=:whee", - checkparams={'bar': 4, 'whee': 7}, - ) - - self.assert_compile( - text("select * from foo where clock='05:06:07'"), - "select * from foo where clock='05:06:07'", - checkparams={}, - params={}, - ) - - dialect = postgresql.dialect() - self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", - bindparams=[bindparam('bar', 4), bindparam('whee', 7)]), - "select * from foo where lala=%(bar)s and hoho=%(whee)s", - checkparams={'bar': 4, 'whee': 7}, - dialect=dialect - ) - - # test escaping out text() params with a backslash - self.assert_compile( - text("select * from foo where clock='05:06:07' " - "and mork='\:mindy'"), - "select * from foo where clock='05:06:07' and mork=':mindy'", - checkparams={}, - params={}, - dialect=dialect - ) - - dialect = sqlite.dialect() - self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", - bindparams=[bindparam('bar', 4), bindparam('whee', 7)]), - "select * from foo where lala=? and hoho=?", - checkparams={'bar': 4, 'whee': 7}, - dialect=dialect - ) - - self.assert_compile(select( - [table1, table2.c.otherid, "sysdate()", "foo, bar, lala"], - and_( - "foo.id = foofoo(lala)", - "datetime(foo) = Today", - table1.c.myid == table2.c.otherid, - ) - ), - "SELECT mytable.myid, mytable.name, mytable.description, " - "myothertable.otherid, sysdate(), foo, bar, lala " - "FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND " - "datetime(foo) = Today AND mytable.myid = myothertable.otherid") - - self.assert_compile(select( - [alias(table1, 't'), "foo.f"], - "foo.f = t.id", - from_obj=["(select f from bar where lala=heyhey) foo"] - ), - "SELECT t.myid, t.name, t.description, foo.f FROM mytable AS t, " - "(select f from bar where lala=heyhey) foo WHERE foo.f = t.id") - - # test Text embedded within select_from(), using binds - generate_series = text( - "generate_series(:x, :y, :z) as s(a)", - bindparams=[bindparam('x', None), - bindparam('y', None), bindparam('z', None)] - ) - - s = select([ - (func.current_date() + - literal_column("s.a")).label("dates") - ]).select_from(generate_series) - self.assert_compile( - s, - "SELECT CURRENT_DATE + s.a AS dates FROM " - "generate_series(:x, :y, :z) as s(a)", - checkparams={'y': None, 'x': None, 'z': None} - ) - - self.assert_compile( - s.params(x=5, y=6, z=7), - "SELECT CURRENT_DATE + s.a AS dates FROM " - "generate_series(:x, :y, :z) as s(a)", - checkparams={'y': 6, 'x': 5, 'z': 7} - ) - @testing.emits_warning('.*empty sequence.*') def test_render_binds_as_literal(self): """test a compiler that renders binds inline into @@ -1377,8 +1185,9 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect=dialect ) - assert_raises( + assert_raises_message( exc.CompileError, + "Bind parameter 'foo' without a renderable value not allowed here.", bindparam("foo").in_([]).compile, dialect=dialect ) @@ -1422,58 +1231,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "/ values.val1 > :param_1" ) - def test_collate(self): - for expr in (select([table1.c.name.collate('latin1_german2_ci')]), - select([collate(table1.c.name, 'latin1_german2_ci')])): - self.assert_compile( - expr, "SELECT mytable.name COLLATE latin1_german2_ci " - "AS anon_1 FROM mytable") - - assert table1.c.name.collate('latin1_german2_ci').type is \ - table1.c.name.type - - expr = select([table1.c.name.collate('latin1_german2_ci').\ - label('k1')]).order_by('k1') - self.assert_compile(expr, - "SELECT mytable.name " - "COLLATE latin1_german2_ci AS k1 FROM mytable ORDER BY k1") - - expr = select([collate('foo', 'latin1_german2_ci').label('k1')]) - self.assert_compile(expr, - "SELECT :param_1 COLLATE latin1_german2_ci AS k1") - - expr = select([table1.c.name.collate('latin1_german2_ci').like('%x%')]) - self.assert_compile(expr, - "SELECT mytable.name COLLATE latin1_german2_ci " - "LIKE :param_1 AS anon_1 FROM mytable") - - expr = select([table1.c.name.like(collate('%x%', - 'latin1_german2_ci'))]) - self.assert_compile(expr, - "SELECT mytable.name " - "LIKE :param_1 COLLATE latin1_german2_ci AS anon_1 " - "FROM mytable") - - expr = select([table1.c.name.collate('col1').like( - collate('%x%', 'col2'))]) - self.assert_compile(expr, - "SELECT mytable.name COLLATE col1 " - "LIKE :param_1 COLLATE col2 AS anon_1 " - "FROM mytable") - - expr = select([func.concat('a', 'b').\ - collate('latin1_german2_ci').label('x')]) - self.assert_compile(expr, - "SELECT concat(:param_1, :param_2) " - "COLLATE latin1_german2_ci AS x") - - - expr = select([table1.c.name]).\ - order_by(table1.c.name.collate('latin1_german2_ci')) - self.assert_compile(expr, - "SELECT mytable.name FROM mytable ORDER BY " - "mytable.name COLLATE latin1_german2_ci") - def test_percent_chars(self): t = table("table%name", column("percent%"), @@ -2785,10 +2542,6 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): schema.CreateTable(t1).compile ) - # there's some unicode issue in the assertion - # regular expression that appears to be resolved - # in 2.6, not exactly sure what it is - @testing.requires.python26 def test_reraise_of_column_spec_issue_unicode(self): MyType = self._illegal_type_fixture() t1 = Table('t', MetaData(), @@ -2800,6 +2553,22 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): schema.CreateTable(t1).compile ) + def test_system_flag(self): + m = MetaData() + t = Table('t', m, Column('x', Integer), + Column('y', Integer, system=True), + Column('z', Integer)) + self.assert_compile( + schema.CreateTable(t), + "CREATE TABLE t (x INTEGER, z INTEGER)" + ) + m2 = MetaData() + t2 = t.tometadata(m2) + self.assert_compile( + schema.CreateTable(t2), + "CREATE TABLE t (x INTEGER, z INTEGER)" + ) + class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' @@ -2909,6 +2678,7 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL): "(:rem_id, :datatype_id, :value)") + class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' @@ -3238,13 +3008,34 @@ class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL): ) class CoercionTest(fixtures.TestBase, AssertsCompiledSQL): - __dialect__ = 'default' + __dialect__ = default.DefaultDialect(supports_native_boolean=True) def _fixture(self): m = MetaData() return Table('foo', m, Column('id', Integer)) + bool_table = table('t', column('x', Boolean)) + + def test_coerce_bool_where(self): + self.assert_compile( + select([self.bool_table]).where(self.bool_table.c.x), + "SELECT t.x FROM t WHERE t.x" + ) + + def test_coerce_bool_where_non_native(self): + self.assert_compile( + select([self.bool_table]).where(self.bool_table.c.x), + "SELECT t.x FROM t WHERE t.x = 1", + dialect=default.DefaultDialect(supports_native_boolean=False) + ) + + self.assert_compile( + select([self.bool_table]).where(~self.bool_table.c.x), + "SELECT t.x FROM t WHERE t.x = 0", + dialect=default.DefaultDialect(supports_native_boolean=False) + ) + def test_null_constant(self): self.assert_compile(_literal_as_text(None), "NULL") @@ -3257,12 +3048,12 @@ class CoercionTest(fixtures.TestBase, AssertsCompiledSQL): def test_val_and_false(self): t = self._fixture() self.assert_compile(and_(t.c.id == 1, False), - "foo.id = :id_1 AND false") + "false") def test_val_and_true_coerced(self): t = self._fixture() self.assert_compile(and_(t.c.id == 1, True), - "foo.id = :id_1 AND true") + "foo.id = :id_1") def test_val_is_null_coerced(self): t = self._fixture() @@ -3270,26 +3061,21 @@ class CoercionTest(fixtures.TestBase, AssertsCompiledSQL): "foo.id IS NULL") def test_val_and_None(self): - # current convention is None in and_() or - # other clauselist is ignored. May want - # to revise this at some point. t = self._fixture() self.assert_compile(and_(t.c.id == 1, None), - "foo.id = :id_1") + "foo.id = :id_1 AND NULL") def test_None_and_val(self): - # current convention is None in and_() or - # other clauselist is ignored. May want - # to revise this at some point. t = self._fixture() - self.assert_compile(and_(t.c.id == 1, None), - "foo.id = :id_1") + self.assert_compile(and_(None, t.c.id == 1), + "NULL AND foo.id = :id_1") def test_None_and_nothing(self): # current convention is None in and_() # returns None May want # to revise this at some point. - assert and_(None) is None + self.assert_compile( + and_(None), "NULL") def test_val_and_null(self): t = self._fixture() diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index b44a65190..cb4b73ec8 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -544,6 +544,28 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL): "FOREIGN KEY(foo_bar) REFERENCES foo (bar))" ) + def test_empty_pkc(self): + # test that an empty primary key is ignored + metadata = MetaData() + tbl = Table('test', metadata, + Column('x', Integer, autoincrement=False), + Column('y', Integer, autoincrement=False), + PrimaryKeyConstraint()) + self.assert_compile(schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER, y INTEGER)" + ) + + def test_empty_uc(self): + # test that an empty constraint is ignored + metadata = MetaData() + tbl = Table('test', metadata, + Column('x', Integer, autoincrement=False), + Column('y', Integer, autoincrement=False), + UniqueConstraint()) + self.assert_compile(schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER, y INTEGER)" + ) + def test_deferrable_column_check(self): t = Table('tbl', MetaData(), Column('a', Integer), @@ -726,6 +748,27 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL): "ALTER TABLE tbl ADD PRIMARY KEY (a)" ) + def test_render_check_constraint_sql_literal(self): + t, t2 = self._constraint_create_fixture() + + constraint = CheckConstraint(t.c.a > 5) + + self.assert_compile( + schema.AddConstraint(constraint), + "ALTER TABLE tbl ADD CHECK (a > 5)" + ) + + def test_render_index_sql_literal(self): + t, t2 = self._constraint_create_fixture() + + constraint = Index('name', t.c.a + 5) + + self.assert_compile( + schema.CreateIndex(constraint), + "CREATE INDEX name ON tbl (a + 5)" + ) + + class ConstraintAPITest(fixtures.TestBase): def test_double_fk_usage_raises(self): f = ForeignKey('b.id') diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 28756873f..0f6831375 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -312,6 +312,22 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): "FROM regional_sales" ) + def test_multi_subq_quote(self): + cte = select([literal(1).label("id")]).cte(name='CTE') + + s1 = select([cte.c.id]).alias() + s2 = select([cte.c.id]).alias() + + s = select([s1, s2]) + self.assert_compile( + s, + 'WITH "CTE" AS (SELECT :param_1 AS id) ' + 'SELECT anon_1.id, anon_2.id FROM ' + '(SELECT "CTE".id AS id FROM "CTE") AS anon_1, ' + '(SELECT "CTE".id AS id FROM "CTE") AS anon_2' + ) + + def test_positional_binds(self): orders = table('orders', column('order'), @@ -351,3 +367,32 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): dialect=dialect ) + + def test_all_aliases(self): + orders = table('order', column('order')) + s = select([orders.c.order]).cte("regional_sales") + + r1 = s.alias() + r2 = s.alias() + + s2 = select([r1, r2]).where(r1.c.order > r2.c.order) + + self.assert_compile( + s2, + 'WITH regional_sales AS (SELECT "order"."order" ' + 'AS "order" FROM "order") ' + 'SELECT anon_1."order", anon_2."order" ' + 'FROM regional_sales AS anon_1, ' + 'regional_sales AS anon_2 WHERE anon_1."order" > anon_2."order"' + ) + + s3 = select([orders]).select_from(orders.join(r1, r1.c.order == orders.c.order)) + + self.assert_compile( + s3, + 'WITH regional_sales AS ' + '(SELECT "order"."order" AS "order" ' + 'FROM "order")' + ' SELECT "order"."order" ' + 'FROM "order" JOIN regional_sales AS anon_1 ON anon_1."order" = "order"."order"' + )
\ No newline at end of file diff --git a/test/engine/test_ddlemit.py b/test/sql/test_ddlemit.py index e773d0ced..be75f63ec 100644 --- a/test/engine/test_ddlemit.py +++ b/test/sql/test_ddlemit.py @@ -1,5 +1,5 @@ from sqlalchemy.testing import fixtures -from sqlalchemy.engine.ddl import SchemaGenerator, SchemaDropper +from sqlalchemy.sql.ddl import SchemaGenerator, SchemaDropper from sqlalchemy.engine import default from sqlalchemy import MetaData, Table, Column, Integer, Sequence from sqlalchemy import schema diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index 1508c0532..1622c4ed8 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -45,9 +45,14 @@ class DefaultTest(fixtures.TestBase): # since its a "branched" connection conn.close() - use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb') + use_function_defaults = testing.against('postgresql', 'mssql') is_oracle = testing.against('oracle') + class MyClass(object): + @classmethod + def gen_default(cls, ctx): + return "hi" + # select "count(1)" returns different results on different DBs also # correct for "current_date" compatible as column default, value # differences @@ -68,9 +73,7 @@ class DefaultTest(fixtures.TestBase): f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar() def1 = currenttime deftype = sa.Date - if testing.against('maxdb'): - def2 = sa.text("curdate") - elif testing.against('mssql'): + if testing.against('mssql'): def2 = sa.text("getdate()") else: def2 = sa.text("current_date") @@ -125,7 +128,12 @@ class DefaultTest(fixtures.TestBase): # combo Column('col9', String(20), default='py', - server_default='ddl')) + server_default='ddl'), + + # python method w/ context + Column('col10', String(20), default=MyClass.gen_default) + ) + t.create() @classmethod @@ -285,7 +293,7 @@ class DefaultTest(fixtures.TestBase): today = datetime.date.today() eq_(l.fetchall(), [ (x, 'imthedefault', f, ts, ts, ctexec, True, False, - 12, today, 'py') + 12, today, 'py', 'hi') for x in range(51, 54)]) t.insert().execute(col9=None) @@ -295,7 +303,7 @@ class DefaultTest(fixtures.TestBase): eq_(t.select(t.c.col1 == 54).execute().fetchall(), [(54, 'imthedefault', f, ts, ts, ctexec, True, False, - 12, today, None)]) + 12, today, None, 'hi')]) @testing.fails_on('firebird', 'Data type unknown') def test_insertmany(self): @@ -311,11 +319,11 @@ class DefaultTest(fixtures.TestBase): today = datetime.date.today() eq_(l.fetchall(), [(51, 'imthedefault', f, ts, ts, ctexec, True, False, - 12, today, 'py'), + 12, today, 'py', 'hi'), (52, 'imthedefault', f, ts, ts, ctexec, True, False, - 12, today, 'py'), + 12, today, 'py', 'hi'), (53, 'imthedefault', f, ts, ts, ctexec, True, False, - 12, today, 'py')]) + 12, today, 'py', 'hi')]) def test_no_embed_in_sql(self): """Using a DefaultGenerator, Sequence, DefaultClause @@ -379,11 +387,11 @@ class DefaultTest(fixtures.TestBase): today = datetime.date.today() eq_(l.fetchall(), [(51, 'im the update', f2, ts, ts, ctexec, False, False, - 13, today, 'py'), + 13, today, 'py', 'hi'), (52, 'im the update', f2, ts, ts, ctexec, True, False, - 13, today, 'py'), + 13, today, 'py', 'hi'), (53, 'im the update', f2, ts, ts, ctexec, True, False, - 13, today, 'py')]) + 13, today, 'py', 'hi')]) @testing.fails_on('firebird', 'Data type unknown') def test_update(self): @@ -395,7 +403,7 @@ class DefaultTest(fixtures.TestBase): l = l.first() eq_(l, (pk, 'im the update', f2, None, None, ctexec, True, False, - 13, datetime.date.today(), 'py')) + 13, datetime.date.today(), 'py', 'hi')) eq_(11, f2) @testing.fails_on('firebird', 'Data type unknown') @@ -607,6 +615,33 @@ class AutoIncrementTest(fixtures.TablesTest): nonai.insert().execute(id=1, data='row 1') + + def test_col_w_sequence_non_autoinc_no_firing(self): + metadata = self.metadata + # plain autoincrement/PK table in the actual schema + Table("x", metadata, + Column("set_id", Integer, primary_key=True) + ) + metadata.create_all() + + # for the INSERT use a table with a Sequence + # and autoincrement=False. Using a ForeignKey + # would have the same effect + dataset_no_autoinc = Table("x", MetaData(), + Column("set_id", Integer, Sequence("some_seq"), + primary_key=True, autoincrement=False) + ) + + testing.db.execute( + dataset_no_autoinc.insert() + ) + eq_( + testing.db.scalar(dataset_no_autoinc.count()), 1 + ) + + + + class SequenceDDLTest(fixtures.TestBase, testing.AssertsCompiledSQL): __dialect__ = 'default' @@ -879,6 +914,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): assert not self._has_sequence('s1') assert not self._has_sequence('s2') + cartitems = sometable = metadata = None class TableBoundSequenceTest(fixtures.TestBase): __requires__ = ('sequences',) diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index ee503dbb7..ee1d61f85 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -21,13 +21,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): functions._registry.clear() def test_compile(self): - for dialect in all_dialects(exclude=('sybase', 'access', - 'informix', 'maxdb')): + for dialect in all_dialects(exclude=('sybase', )): bindtemplate = BIND_TEMPLATES[dialect.paramstyle] self.assert_compile(func.current_timestamp(), "CURRENT_TIMESTAMP", dialect=dialect) self.assert_compile(func.localtime(), "LOCALTIME", dialect=dialect) - if dialect.name in ('firebird', 'maxdb'): + if dialect.name in ('firebird',): self.assert_compile(func.nosuchfunction(), "nosuchfunction", dialect=dialect) else: diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py index 09b20d8ea..5a65cecef 100644 --- a/test/sql/test_generative.py +++ b/test/sql/test_generative.py @@ -428,13 +428,13 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): class Vis(CloningVisitor): def visit_textclause(self, text): text.text = text.text + " SOME MODIFIER=:lala" - text.bindparams['lala'] = bindparam('lala') + text._bindparams['lala'] = bindparam('lala') clause2 = Vis().traverse(clause) assert c1 == str(clause) assert str(clause2) == c1 + " SOME MODIFIER=:lala" - assert list(clause.bindparams.keys()) == ['bar'] - assert set(clause2.bindparams.keys()) == set(['bar', 'lala']) + assert list(clause._bindparams.keys()) == ['bar'] + assert set(clause2._bindparams.keys()) == set(['bar', 'lala']) def test_select(self): s2 = select([t1]) diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py index e1171532d..5c3b9b6c9 100644 --- a/test/sql/test_insert.py +++ b/test/sql/test_insert.py @@ -133,6 +133,35 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL): checkparams={"name_1": "foo"} ) + def test_insert_from_select_select_alt_ordering(self): + table1 = self.tables.mytable + sel = select([table1.c.name, table1.c.myid]).where(table1.c.name == 'foo') + ins = self.tables.myothertable.insert().\ + from_select(("othername", "otherid"), sel) + self.assert_compile( + ins, + "INSERT INTO myothertable (othername, otherid) " + "SELECT mytable.name, mytable.myid FROM mytable " + "WHERE mytable.name = :name_1", + checkparams={"name_1": "foo"} + ) + + def test_insert_from_select_select_no_defaults(self): + metadata = MetaData() + table = Table('sometable', metadata, + Column('id', Integer, primary_key=True), + Column('foo', Integer, default=func.foobar())) + table1 = self.tables.mytable + sel = select([table1.c.myid]).where(table1.c.name == 'foo') + ins = table.insert().\ + from_select(["id"], sel) + self.assert_compile( + ins, + "INSERT INTO sometable (id) SELECT mytable.myid " + "FROM mytable WHERE mytable.name = :name_1", + checkparams={"name_1": "foo"} + ) + def test_insert_mix_select_values_exception(self): table1 = self.tables.mytable sel = select([table1.c.myid, table1.c.name]).where(table1.c.name == 'foo') diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py index 5a9bdd1d3..801d5ce9a 100644 --- a/test/sql/test_join_rewriting.py +++ b/test/sql/test_join_rewriting.py @@ -1,10 +1,11 @@ from sqlalchemy import Table, Column, Integer, MetaData, ForeignKey, select -from sqlalchemy.testing import fixtures, AssertsCompiledSQL +from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_ from sqlalchemy import util from sqlalchemy.engine import default from sqlalchemy import testing + m = MetaData() a = Table('a', m, @@ -30,6 +31,15 @@ e = Table('e', m, Column('id', Integer, primary_key=True) ) +b_key = Table('b_key', m, + Column('id', Integer, primary_key=True, key='bid'), + ) + +a_to_b_key = Table('a_to_b_key', m, + Column('aid', Integer, ForeignKey('a.id')), + Column('bid', Integer, ForeignKey('b_key.bid')), + ) + class _JoinRewriteTestBase(AssertsCompiledSQL): def _test(self, s, assert_): self.assert_compile( @@ -38,10 +48,22 @@ class _JoinRewriteTestBase(AssertsCompiledSQL): ) compiled = s.compile(dialect=self.__dialect__) - for key, col in zip([c.key for c in s.c], s.inner_columns): + + # column name should be in result map, as we never render + # .key in SQL + for key, col in zip([c.name for c in s.c], s.inner_columns): key = key % compiled.anon_map assert col in compiled.result_map[key][1] + _a_bkeyselect_bkey = "" + + def test_a_bkeyselect_bkey(self): + assoc = a_to_b_key.select().alias() + j1 = assoc.join(b_key) + j2 = a.join(j1) + + s = select([a, b_key], use_labels=True).select_from(j2) + self._test(s, self._a_bkeyselect_bkey) def test_a_bc(self): j1 = b.join(c) @@ -60,6 +82,27 @@ class _JoinRewriteTestBase(AssertsCompiledSQL): self._test(s, self._a_bc) + def test_a_bkeyassoc(self): + j1 = b_key.join(a_to_b_key) + j2 = a.join(j1) + + s = select([a, b_key.c.bid], use_labels=True).\ + select_from(j2) + + self._test(s, self._a_bkeyassoc) + + def test_a_bkeyassoc_aliased(self): + bkey_alias = b_key.alias() + a_to_b_key_alias = a_to_b_key.alias() + + j1 = bkey_alias.join(a_to_b_key_alias) + j2 = a.join(j1) + + s = select([a, bkey_alias.c.bid], use_labels=True).\ + select_from(j2) + + self._test(s, self._a_bkeyassoc_aliased) + def test_a__b_dc(self): j1 = c.join(d) j2 = b.join(j1) @@ -94,6 +137,7 @@ class _JoinRewriteTestBase(AssertsCompiledSQL): self._a_bc_comma_a1_selbc ) + class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase): """test rendering of each join with right-nested rewritten as aliased SELECT statements..""" @@ -149,6 +193,36 @@ class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase): "ON a_1.id = anon_2.b_a_id ORDER BY anon_2.b_id" ) + _a_bkeyassoc = ( + "SELECT a.id AS a_id, anon_1.b_key_id AS b_key_id " + "FROM a JOIN " + "(SELECT b_key.id AS b_key_id, a_to_b_key.aid AS a_to_b_key_aid, " + "a_to_b_key.bid AS a_to_b_key_bid FROM b_key " + "JOIN a_to_b_key ON b_key.id = a_to_b_key.bid) AS anon_1 " + "ON a.id = anon_1.a_to_b_key_aid" + ) + + _a_bkeyassoc_aliased = ( + "SELECT a.id AS a_id, anon_1.b_key_1_id AS b_key_1_id " + "FROM a JOIN (SELECT b_key_1.id AS b_key_1_id, " + "a_to_b_key_1.aid AS a_to_b_key_1_aid, " + "a_to_b_key_1.bid AS a_to_b_key_1_bid FROM b_key AS b_key_1 " + "JOIN a_to_b_key AS a_to_b_key_1 ON b_key_1.id = a_to_b_key_1.bid) AS " + "anon_1 ON a.id = anon_1.a_to_b_key_1_aid" + ) + + _a_bkeyselect_bkey = ( + "SELECT a.id AS a_id, anon_2.anon_1_aid AS anon_1_aid, " + "anon_2.anon_1_bid AS anon_1_bid, anon_2.b_key_id AS b_key_id " + "FROM a JOIN (SELECT anon_1.aid AS anon_1_aid, anon_1.bid AS anon_1_bid, " + "b_key.id AS b_key_id " + "FROM (SELECT a_to_b_key.aid AS aid, a_to_b_key.bid AS bid " + "FROM a_to_b_key) AS anon_1 " + "JOIN b_key ON b_key.id = anon_1.bid) AS anon_2 ON a.id = anon_2.anon_1_aid" + ) + + + class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase): """test rendering of each join with normal nesting.""" @util.classproperty @@ -156,6 +230,12 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase): dialect = default.DefaultDialect() return dialect + _a_bkeyselect_bkey = ( + "SELECT a.id AS a_id, b_key.id AS b_key_id FROM a JOIN " + "((SELECT a_to_b_key.aid AS aid, a_to_b_key.bid AS bid " + "FROM a_to_b_key) AS anon_1 JOIN b_key ON b_key.id = anon_1.bid) " + "ON a.id = anon_1.aid" + ) _a__b_dc = ( "SELECT a.id AS a_id, b.id AS b_id, " "b.a_id AS b_a_id, c.id AS c_id, " @@ -194,6 +274,19 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase): "ON a_1.id = anon_1.b_a_id ORDER BY anon_1.b_id" ) + _a_bkeyassoc = ( + "SELECT a.id AS a_id, b_key.id AS b_key_id " + "FROM a JOIN " + "(b_key JOIN a_to_b_key ON b_key.id = a_to_b_key.bid) " + "ON a.id = a_to_b_key.aid" + ) + + _a_bkeyassoc_aliased = ( + "SELECT a.id AS a_id, b_key_1.id AS b_key_1_id FROM a " + "JOIN (b_key AS b_key_1 JOIN a_to_b_key AS a_to_b_key_1 " + "ON b_key_1.id = a_to_b_key_1.bid) ON a.id = a_to_b_key_1.aid" + ) + class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase): @util.classproperty def __dialect__(cls): @@ -208,6 +301,12 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase): assert_ ) + _a_bkeyselect_bkey = ( + "SELECT a.id, b_key.id FROM a JOIN ((SELECT a_to_b_key.aid AS aid, " + "a_to_b_key.bid AS bid FROM a_to_b_key) AS anon_1 " + "JOIN b_key ON b_key.id = anon_1.bid) ON a.id = anon_1.aid" + ) + _a__b_dc = ( "SELECT a.id, b.id, " "b.a_id, c.id, " @@ -245,10 +344,21 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase): "ON a_1.id = anon_1.b_a_id ORDER BY anon_1.b_id" ) + _a_bkeyassoc = ( + "SELECT a.id, b_key.id FROM a JOIN (b_key JOIN a_to_b_key " + "ON b_key.id = a_to_b_key.bid) ON a.id = a_to_b_key.aid" + ) + + _a_bkeyassoc_aliased = ( + "SELECT a.id, b_key_1.id FROM a JOIN (b_key AS b_key_1 " + "JOIN a_to_b_key AS a_to_b_key_1 ON b_key_1.id = a_to_b_key_1.bid) " + "ON a.id = a_to_b_key_1.aid" + ) + class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase): """invoke the SQL on the current backend to ensure compatibility""" - _a_bc = _a_bc_comma_a1_selbc = _a__b_dc = None + _a_bc = _a_bc_comma_a1_selbc = _a__b_dc = _a_bkeyassoc = _a_bkeyassoc_aliased = None @classmethod def setup_class(cls): @@ -259,7 +369,9 @@ class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase): m.drop_all(testing.db) def _test(self, selectable, assert_): - testing.db.execute(selectable) + result = testing.db.execute(selectable) + for col in selectable.inner_columns: + assert col in result._metadata._keymap class DialectFlagTest(fixtures.TestBase, AssertsCompiledSQL): diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 851e9b920..f933a2494 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -5,15 +5,16 @@ from sqlalchemy.testing import emits_warning import pickle from sqlalchemy import Integer, String, UniqueConstraint, \ CheckConstraint, ForeignKey, MetaData, Sequence, \ - ForeignKeyConstraint, ColumnDefault, Index, event,\ - events, Unicode, types as sqltypes -from sqlalchemy.testing.schema import Table, Column + ForeignKeyConstraint, PrimaryKeyConstraint, ColumnDefault, Index, event,\ + events, Unicode, types as sqltypes, bindparam, \ + Table, Column from sqlalchemy import schema, exc import sqlalchemy as tsa from sqlalchemy.testing import fixtures from sqlalchemy import testing from sqlalchemy.testing import ComparesTables, AssertsCompiledSQL -from sqlalchemy.testing import eq_, is_ +from sqlalchemy.testing import eq_, is_, mock +from contextlib import contextmanager class MetaDataTest(fixtures.TestBase, ComparesTables): def test_metadata_connect(self): @@ -236,6 +237,45 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): go ) + def test_fk_given_non_col(self): + not_a_col = bindparam('x') + assert_raises_message( + exc.ArgumentError, + "String, Column, or Column-bound argument expected, got Bind", + ForeignKey, not_a_col + ) + + def test_fk_given_non_col_clauseelem(self): + class Foo(object): + def __clause_element__(self): + return bindparam('x') + assert_raises_message( + exc.ArgumentError, + "String, Column, or Column-bound argument expected, got Bind", + ForeignKey, Foo() + ) + + def test_fk_given_col_non_table(self): + t = Table('t', MetaData(), Column('x', Integer)) + xa = t.alias().c.x + assert_raises_message( + exc.ArgumentError, + "ForeignKey received Column not bound to a Table, got: .*Alias", + ForeignKey, xa + ) + + def test_fk_given_col_non_table_clauseelem(self): + t = Table('t', MetaData(), Column('x', Integer)) + class Foo(object): + def __clause_element__(self): + return t.alias().c.x + + assert_raises_message( + exc.ArgumentError, + "ForeignKey received Column not bound to a Table, got: .*Alias", + ForeignKey, Foo() + ) + def test_fk_no_such_target_col_error_upfront(self): meta = MetaData() a = Table('a', meta, Column('a', Integer)) @@ -268,6 +308,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): @testing.exclude('mysql', '<', (4, 1, 1), 'early types are squirrely') def test_to_metadata(self): + from sqlalchemy.testing.schema import Table meta = MetaData() table = Table('mytable', meta, @@ -280,7 +321,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('description', String(30), CheckConstraint("description='hi'")), UniqueConstraint('name'), - test_needs_fk=True, + test_needs_fk=True ) table2 = Table('othertable', meta, @@ -288,7 +329,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('myid', Integer, ForeignKey('mytable.myid'), ), - test_needs_fk=True, + test_needs_fk=True ) def test_to_metadata(): @@ -447,13 +488,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('description', String(30), CheckConstraint("description='hi'")), UniqueConstraint('name'), - test_needs_fk=True, ) table2 = Table('othertable', meta, Column('id', Integer, primary_key=True), Column('myid', Integer, ForeignKey('mytable.myid')), - test_needs_fk=True, ) meta2 = MetaData() @@ -474,14 +513,12 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('description', String(30), CheckConstraint("description='hi'")), UniqueConstraint('name'), - test_needs_fk=True, schema='myschema', ) table2 = Table('othertable', meta, Column('id', Integer, primary_key=True), Column('myid', Integer, ForeignKey('myschema.mytable.myid')), - test_needs_fk=True, schema='myschema', ) @@ -494,6 +531,47 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): eq_(str(table_c.join(table2_c).onclause), 'myschema.mytable.myid = myschema.othertable.myid') + def test_tometadata_copy_info(self): + m = MetaData() + fk = ForeignKey('t2.id') + c = Column('c', Integer, fk) + ck = CheckConstraint('c > 5') + t = Table('t', m, c, ck) + + m.info['minfo'] = True + fk.info['fkinfo'] = True + c.info['cinfo'] = True + ck.info['ckinfo'] = True + t.info['tinfo'] = True + t.primary_key.info['pkinfo'] = True + fkc = [const for const in t.constraints if + isinstance(const, ForeignKeyConstraint)][0] + fkc.info['fkcinfo'] = True + + m2 = MetaData() + t2 = t.tometadata(m2) + + m.info['minfo'] = False + fk.info['fkinfo'] = False + c.info['cinfo'] = False + ck.info['ckinfo'] = False + t.primary_key.info['pkinfo'] = False + fkc.info['fkcinfo'] = False + + eq_(m2.info, {}) + eq_(t2.info, {"tinfo": True}) + eq_(t2.c.c.info, {"cinfo": True}) + eq_(list(t2.c.c.foreign_keys)[0].info, {"fkinfo": True}) + eq_(t2.primary_key.info, {"pkinfo": True}) + + fkc2 = [const for const in t2.constraints + if isinstance(const, ForeignKeyConstraint)][0] + eq_(fkc2.info, {"fkcinfo": True}) + + ck2 = [const for const in + t2.constraints if isinstance(const, CheckConstraint)][0] + eq_(ck2.info, {"ckinfo": True}) + def test_tometadata_kwargs(self): meta = MetaData() @@ -506,6 +584,8 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): meta2 = MetaData() table_c = table.tometadata(meta2) + eq_(table.kwargs, {"mysql_engine": "InnoDB"}) + eq_(table.kwargs, table_c.kwargs) def test_tometadata_indexes(self): @@ -581,11 +661,13 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): kw['quote_schema'] = quote_schema t = Table(name, metadata, **kw) eq_(t.schema, exp_schema, "test %d, table schema" % i) - eq_(t.quote_schema, exp_quote_schema, + eq_(t.schema.quote if t.schema is not None else None, + exp_quote_schema, "test %d, table quote_schema" % i) seq = Sequence(name, metadata=metadata, **kw) eq_(seq.schema, exp_schema, "test %d, seq schema" % i) - eq_(seq.quote_schema, exp_quote_schema, + eq_(seq.schema.quote if seq.schema is not None else None, + exp_quote_schema, "test %d, seq quote_schema" % i) def test_manual_dependencies(self): @@ -614,13 +696,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('name', String(40), nullable=True), Column('description', String(30), CheckConstraint("description='hi'")), UniqueConstraint('name'), - test_needs_fk=True ) table2 = Table('othertable', meta, Column('id', Integer, primary_key=True), Column('myid', Integer, ForeignKey('myschema.mytable.myid')), - test_needs_fk=True ) meta2 = MetaData(schema='someschema') @@ -641,13 +721,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('description', String(30), CheckConstraint("description='hi'")), UniqueConstraint('name'), - test_needs_fk=True, ) table2 = Table('othertable', meta, Column('id', Integer, primary_key=True), Column('myid', Integer, ForeignKey('mytable.myid')), - test_needs_fk=True, ) meta2 = MetaData() @@ -764,6 +842,77 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL): ) is_(t._autoincrement_column, t.c.id) + def test_pk_args_standalone(self): + m = MetaData() + t = Table('t', m, + Column('x', Integer, primary_key=True), + PrimaryKeyConstraint(mssql_clustered=True) + ) + eq_( + list(t.primary_key), [t.c.x] + ) + eq_( + t.primary_key.dialect_kwargs, {"mssql_clustered": True} + ) + + def test_pk_cols_sets_flags(self): + m = MetaData() + t = Table('t', m, + Column('x', Integer), + Column('y', Integer), + Column('z', Integer), + PrimaryKeyConstraint('x', 'y') + ) + eq_(t.c.x.primary_key, True) + eq_(t.c.y.primary_key, True) + eq_(t.c.z.primary_key, False) + + def test_pk_col_mismatch_one(self): + m = MetaData() + assert_raises_message( + exc.SAWarning, + "Table 't' specifies columns 'x' as primary_key=True, " + "not matching locally specified columns 'q'", + Table, 't', m, + Column('x', Integer, primary_key=True), + Column('q', Integer), + PrimaryKeyConstraint('q') + ) + + def test_pk_col_mismatch_two(self): + m = MetaData() + assert_raises_message( + exc.SAWarning, + "Table 't' specifies columns 'a', 'b', 'c' as primary_key=True, " + "not matching locally specified columns 'b', 'c'", + Table, 't', m, + Column('a', Integer, primary_key=True), + Column('b', Integer, primary_key=True), + Column('c', Integer, primary_key=True), + PrimaryKeyConstraint('b', 'c') + ) + + @testing.emits_warning("Table 't'") + def test_pk_col_mismatch_three(self): + m = MetaData() + t = Table('t', m, + Column('x', Integer, primary_key=True), + Column('q', Integer), + PrimaryKeyConstraint('q') + ) + eq_(list(t.primary_key), [t.c.q]) + + @testing.emits_warning("Table 't'") + def test_pk_col_mismatch_four(self): + m = MetaData() + t = Table('t', m, + Column('a', Integer, primary_key=True), + Column('b', Integer, primary_key=True), + Column('c', Integer, primary_key=True), + PrimaryKeyConstraint('b', 'c') + ) + eq_(list(t.primary_key), [t.c.b, t.c.c]) + class SchemaTypeTest(fixtures.TestBase): class MyType(sqltypes.SchemaType, sqltypes.TypeEngine): column = None @@ -1039,7 +1188,7 @@ class UseExistingTest(fixtures.TablesTest): meta2 = self._useexisting_fixture() users = Table('users', meta2, quote=True, autoload=True, keep_existing=True) - assert not users.quote + assert not users.name.quote def test_keep_existing_add_column(self): meta2 = self._useexisting_fixture() @@ -1055,12 +1204,15 @@ class UseExistingTest(fixtures.TablesTest): autoload=True, keep_existing=True) assert isinstance(users.c.name.type, Unicode) + @testing.skip_if( + lambda: testing.db.dialect.requires_name_normalize, + "test depends on lowercase as case insensitive") def test_keep_existing_quote_no_orig(self): meta2 = self._notexisting_fixture() users = Table('users', meta2, quote=True, autoload=True, keep_existing=True) - assert users.quote + assert users.name.quote def test_keep_existing_add_column_no_orig(self): meta2 = self._notexisting_fixture() @@ -1080,7 +1232,7 @@ class UseExistingTest(fixtures.TablesTest): meta2 = self._useexisting_fixture() users = Table('users', meta2, quote=True, keep_existing=True) - assert not users.quote + assert not users.name.quote def test_keep_existing_add_column_no_reflection(self): meta2 = self._useexisting_fixture() @@ -1097,9 +1249,12 @@ class UseExistingTest(fixtures.TablesTest): def test_extend_existing_quote(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, quote=True, autoload=True, - extend_existing=True) - assert users.quote + assert_raises_message( + tsa.exc.ArgumentError, + "Can't redefine 'quote' or 'quote_schema' arguments", + Table, 'users', meta2, quote=True, autoload=True, + extend_existing=True + ) def test_extend_existing_add_column(self): meta2 = self._useexisting_fixture() @@ -1115,12 +1270,15 @@ class UseExistingTest(fixtures.TablesTest): autoload=True, extend_existing=True) assert isinstance(users.c.name.type, Unicode) + @testing.skip_if( + lambda: testing.db.dialect.requires_name_normalize, + "test depends on lowercase as case insensitive") def test_extend_existing_quote_no_orig(self): meta2 = self._notexisting_fixture() users = Table('users', meta2, quote=True, autoload=True, extend_existing=True) - assert users.quote + assert users.name.quote def test_extend_existing_add_column_no_orig(self): meta2 = self._notexisting_fixture() @@ -1138,9 +1296,12 @@ class UseExistingTest(fixtures.TablesTest): def test_extend_existing_quote_no_reflection(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, quote=True, - extend_existing=True) - assert users.quote + assert_raises_message( + tsa.exc.ArgumentError, + "Can't redefine 'quote' or 'quote_schema' arguments", + Table, 'users', meta2, quote=True, + extend_existing=True + ) def test_extend_existing_add_column_no_reflection(self): meta2 = self._useexisting_fixture() @@ -1546,6 +1707,28 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): assert c.name == 'named' assert c.name == c.key + def test_unique_index_flags_default_to_none(self): + c = Column(Integer) + eq_(c.unique, None) + eq_(c.index, None) + + c = Column('c', Integer, index=True) + eq_(c.unique, None) + eq_(c.index, True) + + t = Table('t', MetaData(), c) + eq_(list(t.indexes)[0].unique, False) + + c = Column(Integer, unique=True) + eq_(c.unique, True) + eq_(c.index, None) + + c = Column('c', Integer, index=True, unique=True) + eq_(c.unique, True) + eq_(c.index, True) + + t = Table('t', MetaData(), c) + eq_(list(t.indexes)[0].unique, True) def test_bogus(self): assert_raises(exc.ArgumentError, Column, 'foo', name='bar') @@ -1841,7 +2024,6 @@ class ColumnOptionsTest(fixtures.TestBase): c.info['bar'] = 'zip' assert c.info['bar'] == 'zip' - class CatchAllEventsTest(fixtures.TestBase): def teardown(self): @@ -1890,6 +2072,7 @@ class CatchAllEventsTest(fixtures.TestBase): parent.__class__.__name__)) def after_attach(obj, parent): + assert hasattr(obj, 'name') # so we can change it canary.append("%s->%s" % (target.__name__, parent)) event.listen(target, "before_parent_attach", before_attach) event.listen(target, "after_parent_attach", after_attach) @@ -1897,14 +2080,15 @@ class CatchAllEventsTest(fixtures.TestBase): for target in [ schema.ForeignKeyConstraint, schema.PrimaryKeyConstraint, schema.UniqueConstraint, - schema.CheckConstraint + schema.CheckConstraint, + schema.Index ]: evt(target) m = MetaData() Table('t1', m, Column('id', Integer, Sequence('foo_id'), primary_key=True), - Column('bar', String, ForeignKey('t2.id')), + Column('bar', String, ForeignKey('t2.id'), index=True), Column('bat', Integer, unique=True), ) Table('t2', m, @@ -1912,17 +2096,291 @@ class CatchAllEventsTest(fixtures.TestBase): Column('bar', Integer), Column('bat', Integer), CheckConstraint("bar>5"), - UniqueConstraint('bar', 'bat') + UniqueConstraint('bar', 'bat'), + Index(None, 'bar', 'bat') ) eq_( canary, [ 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1', + 'Index->Table', 'Index->t1', 'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1', 'UniqueConstraint->Table', 'UniqueConstraint->t1', 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2', 'CheckConstraint->Table', 'CheckConstraint->t2', - 'UniqueConstraint->Table', 'UniqueConstraint->t2' + 'UniqueConstraint->Table', 'UniqueConstraint->t2', + 'Index->Table', 'Index->t2' ] ) +class DialectKWArgTest(fixtures.TestBase): + @contextmanager + def _fixture(self): + from sqlalchemy.engine.default import DefaultDialect + class ParticipatingDialect(DefaultDialect): + construct_arguments = [ + (schema.Index, { + "x": 5, + "y": False, + "z_one": None + }), + (schema.ForeignKeyConstraint, { + "foobar": False + }) + ] + + class ParticipatingDialect2(DefaultDialect): + construct_arguments = [ + (schema.Index, { + "x": 9, + "y": True, + "pp": "default" + }), + (schema.Table, { + "*": None + }) + ] + + class NonParticipatingDialect(DefaultDialect): + construct_arguments = None + + def load(dialect_name): + if dialect_name == "participating": + return ParticipatingDialect + elif dialect_name == "participating2": + return ParticipatingDialect2 + elif dialect_name == "nonparticipating": + return NonParticipatingDialect + else: + raise exc.NoSuchModuleError("no dialect %r" % dialect_name) + with mock.patch("sqlalchemy.dialects.registry.load", load): + yield + + def test_participating(self): + with self._fixture(): + idx = Index('a', 'b', 'c', participating_y=True) + eq_( + idx.dialect_options, + {"participating": {"x": 5, "y": True, "z_one": None}} + ) + eq_( + idx.dialect_kwargs, + { + 'participating_y': True, + } + ) + + def test_nonparticipating(self): + with self._fixture(): + idx = Index('a', 'b', 'c', nonparticipating_y=True, nonparticipating_q=5) + eq_( + idx.dialect_kwargs, + { + 'nonparticipating_y': True, + 'nonparticipating_q': 5 + } + ) + + def test_unknown_dialect_warning(self): + with self._fixture(): + assert_raises_message( + exc.SAWarning, + "Can't validate argument 'unknown_y'; can't locate " + "any SQLAlchemy dialect named 'unknown'", + Index, 'a', 'b', 'c', unknown_y=True + ) + + def test_participating_bad_kw(self): + with self._fixture(): + assert_raises_message( + exc.ArgumentError, + "Argument 'participating_q_p_x' is not accepted by dialect " + "'participating' on behalf of " + "<class 'sqlalchemy.sql.schema.Index'>", + Index, 'a', 'b', 'c', participating_q_p_x=8 + ) + + def test_participating_unknown_schema_item(self): + with self._fixture(): + # the dialect doesn't include UniqueConstraint in + # its registry at all. + assert_raises_message( + exc.ArgumentError, + "Argument 'participating_q_p_x' is not accepted by dialect " + "'participating' on behalf of " + "<class 'sqlalchemy.sql.schema.UniqueConstraint'>", + UniqueConstraint, 'a', 'b', participating_q_p_x=8 + ) + + @testing.emits_warning("Can't validate") + def test_unknown_dialect_warning_still_populates(self): + with self._fixture(): + idx = Index('a', 'b', 'c', unknown_y=True) + eq_(idx.dialect_kwargs, {"unknown_y": True}) # still populates + + @testing.emits_warning("Can't validate") + def test_unknown_dialect_warning_still_populates_multiple(self): + with self._fixture(): + idx = Index('a', 'b', 'c', unknown_y=True, unknown_z=5, + otherunknown_foo='bar', participating_y=8) + eq_( + idx.dialect_options, + { + "unknown": {'y': True, 'z': 5, '*': None}, + "otherunknown": {'foo': 'bar', '*': None}, + "participating": {'x': 5, 'y': 8, 'z_one': None} + } + ) + eq_(idx.dialect_kwargs, + {'unknown_z': 5, 'participating_y': 8, + 'unknown_y': True, + 'otherunknown_foo': 'bar'} + ) # still populates + + def test_combined(self): + with self._fixture(): + idx = Index('a', 'b', 'c', participating_x=7, + nonparticipating_y=True) + + eq_( + idx.dialect_options, + { + 'participating': {'y': False, 'x': 7, 'z_one': None}, + 'nonparticipating': {'y': True, '*': None} + } + ) + eq_( + idx.dialect_kwargs, + { + 'participating_x': 7, + 'nonparticipating_y': True, + } + ) + + def test_multiple_participating(self): + with self._fixture(): + idx = Index('a', 'b', 'c', + participating_x=7, + participating2_x=15, + participating2_y="lazy" + ) + eq_( + idx.dialect_options, + { + "participating": {'x': 7, 'y': False, 'z_one': None}, + "participating2": {'x': 15, 'y': 'lazy', 'pp': 'default'}, + } + ) + eq_( + idx.dialect_kwargs, + { + 'participating_x': 7, + 'participating2_x': 15, + 'participating2_y': 'lazy' + } + ) + + def test_foreign_key_propagate(self): + with self._fixture(): + m = MetaData() + fk = ForeignKey('t2.id', participating_foobar=True) + t = Table('t', m, Column('id', Integer, fk)) + fkc = [c for c in t.constraints if isinstance(c, ForeignKeyConstraint)][0] + eq_( + fkc.dialect_kwargs, + {'participating_foobar': True} + ) + + def test_foreign_key_propagate_exceptions_delayed(self): + with self._fixture(): + m = MetaData() + fk = ForeignKey('t2.id', participating_fake=True) + c1 = Column('id', Integer, fk) + assert_raises_message( + exc.ArgumentError, + "Argument 'participating_fake' is not accepted by " + "dialect 'participating' on behalf of " + "<class 'sqlalchemy.sql.schema.ForeignKeyConstraint'>", + Table, 't', m, c1 + ) + + def test_wildcard(self): + with self._fixture(): + m = MetaData() + t = Table('x', m, Column('x', Integer), + participating2_xyz='foo', + participating2_engine='InnoDB', + ) + eq_( + t.dialect_kwargs, + { + 'participating2_xyz': 'foo', + 'participating2_engine': 'InnoDB' + } + ) + + def test_uninit_wildcard(self): + with self._fixture(): + m = MetaData() + t = Table('x', m, Column('x', Integer)) + eq_( + t.dialect_options['participating2'], {'*': None} + ) + eq_( + t.dialect_kwargs, {} + ) + + def test_not_contains_wildcard(self): + with self._fixture(): + m = MetaData() + t = Table('x', m, Column('x', Integer)) + assert 'foobar' not in t.dialect_options['participating2'] + + def test_contains_wildcard(self): + with self._fixture(): + m = MetaData() + t = Table('x', m, Column('x', Integer), participating2_foobar=5) + assert 'foobar' in t.dialect_options['participating2'] + + + def test_update(self): + with self._fixture(): + idx = Index('a', 'b', 'c', participating_x=20) + eq_(idx.dialect_kwargs, { + "participating_x": 20, + }) + idx._validate_dialect_kwargs({ + "participating_x": 25, + "participating_z_one": "default"}) + eq_(idx.dialect_options, { + "participating": {"x": 25, "y": False, "z_one": "default"} + }) + eq_(idx.dialect_kwargs, { + "participating_x": 25, + 'participating_z_one': "default" + }) + + idx._validate_dialect_kwargs({ + "participating_x": 25, + "participating_z_one": "default"}) + + eq_(idx.dialect_options, { + "participating": {"x": 25, "y": False, "z_one": "default"} + }) + eq_(idx.dialect_kwargs, { + "participating_x": 25, + 'participating_z_one': "default" + }) + + idx._validate_dialect_kwargs({ + "participating_y": True, + 'participating2_y': "p2y"}) + eq_(idx.dialect_options, { + "participating": {"x": 25, "y": True, "z_one": "default"}, + "participating2": {"y": "p2y", "pp": "default", "x": 9} + }) + eq_(idx.dialect_kwargs, { + "participating_x": 25, + "participating_y": True, + 'participating2_y': "p2y", + "participating_z_one": "default"}) diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index b3919d0da..670d088d2 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -9,14 +9,18 @@ from sqlalchemy.sql import operators, table import operator from sqlalchemy import String, Integer from sqlalchemy import exc +from sqlalchemy.engine import default +from sqlalchemy.sql.elements import _literal_as_text from sqlalchemy.schema import Column, Table, MetaData -from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType +from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, Boolean from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \ sqlite, mssql from sqlalchemy import util import datetime import collections from sqlalchemy import text, literal_column +from sqlalchemy import and_, not_, between, or_ +from sqlalchemy.sql import true, false, null class LoopOperate(operators.ColumnOperators): def operate(self, op, *other, **kwargs): @@ -35,11 +39,11 @@ class DefaultColumnComparatorTest(fixtures.TestBase): left = column('left') assert left.comparator.operate(operator, right).compare( - BinaryExpression(left, right, operator) + BinaryExpression(_literal_as_text(left), _literal_as_text(right), operator) ) assert operator(left, right).compare( - BinaryExpression(left, right, operator) + BinaryExpression(_literal_as_text(left), _literal_as_text(right), operator) ) self._loop_test(operator, right) @@ -352,7 +356,6 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL): "x -> :x_1" ) - @testing.requires.python26 def test_op_not_an_iterator(self): # see [ticket:2726] class MyType(UserDefinedType): @@ -385,7 +388,205 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL): "x -> :x_1" ) -from sqlalchemy import and_, not_, between + +class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL): + """test standalone booleans being wrapped in an AsBoolean, as well + as true/false compilation.""" + + def _dialect(self, native_boolean): + d = default.DefaultDialect() + d.supports_native_boolean = native_boolean + return d + + def test_one(self): + c = column('x', Boolean) + self.assert_compile( + select([c]).where(c), + "SELECT x WHERE x", + dialect=self._dialect(True) + ) + + def test_two(self): + c = column('x', Boolean) + self.assert_compile( + select([c]).where(c), + "SELECT x WHERE x = 1", + dialect=self._dialect(False) + ) + + def test_three(self): + c = column('x', Boolean) + self.assert_compile( + select([c]).where(~c), + "SELECT x WHERE x = 0", + dialect=self._dialect(False) + ) + + def test_four(self): + c = column('x', Boolean) + self.assert_compile( + select([c]).where(~c), + "SELECT x WHERE NOT x", + dialect=self._dialect(True) + ) + + def test_five(self): + c = column('x', Boolean) + self.assert_compile( + select([c]).having(c), + "SELECT x HAVING x = 1", + dialect=self._dialect(False) + ) + + def test_six(self): + self.assert_compile( + or_(false(), true()), + "1 = 1", + dialect=self._dialect(False) + ) + + def test_eight(self): + self.assert_compile( + and_(false(), true()), + "false", + dialect=self._dialect(True) + ) + + def test_nine(self): + self.assert_compile( + and_(false(), true()), + "0 = 1", + dialect=self._dialect(False) + ) + + def test_ten(self): + c = column('x', Boolean) + self.assert_compile( + c == 1, + "x = :x_1", + dialect=self._dialect(False) + ) + + def test_eleven(self): + c = column('x', Boolean) + self.assert_compile( + c.is_(true()), + "x IS true", + dialect=self._dialect(True) + ) + + def test_twelve(self): + c = column('x', Boolean) + # I don't have a solution for this one yet, + # other than adding some heavy-handed conditionals + # into compiler + self.assert_compile( + c.is_(true()), + "x IS 1", + dialect=self._dialect(False) + ) + + +class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL): + """test interaction of and_()/or_() with boolean , null constants + """ + __dialect__ = default.DefaultDialect(supports_native_boolean=True) + + def test_one(self): + self.assert_compile(~and_(true()), "false") + + def test_two(self): + self.assert_compile(or_(~and_(true())), "false") + + def test_three(self): + self.assert_compile(or_(and_()), "") + + def test_four(self): + x = column('x') + self.assert_compile( + and_(or_(x == 5), or_(x == 7)), + "x = :x_1 AND x = :x_2") + + + def test_five(self): + x = column("x") + self.assert_compile( + and_(true()._ifnone(None), x == 7), + "x = :x_1" + ) + + def test_six(self): + x = column("x") + self.assert_compile(or_(true(), x == 7), "true") + self.assert_compile(or_(x == 7, true()), "true") + self.assert_compile(~or_(x == 7, true()), "false") + + def test_six_pt_five(self): + x = column("x") + self.assert_compile(select([x]).where(or_(x == 7, true())), + "SELECT x WHERE true") + + self.assert_compile(select([x]).where(or_(x == 7, true())), + "SELECT x WHERE 1 = 1", + dialect=default.DefaultDialect(supports_native_boolean=False)) + + def test_seven(self): + x = column("x") + self.assert_compile( + and_(true(), x == 7, true(), x == 9), + "x = :x_1 AND x = :x_2") + + def test_eight(self): + x = column("x") + self.assert_compile( + or_(false(), x == 7, false(), x == 9), + "x = :x_1 OR x = :x_2") + + def test_nine(self): + x = column("x") + self.assert_compile( + and_(x == 7, x == 9, false(), x == 5), + "false" + ) + self.assert_compile( + ~and_(x == 7, x == 9, false(), x == 5), + "true" + ) + + def test_ten(self): + self.assert_compile( + and_(None, None), + "NULL AND NULL" + ) + + def test_eleven(self): + x = column("x") + self.assert_compile( + select([x]).where(None).where(None), + "SELECT x WHERE NULL AND NULL" + ) + + def test_twelve(self): + x = column("x") + self.assert_compile( + select([x]).where(and_(None, None)), + "SELECT x WHERE NULL AND NULL" + ) + + def test_thirteen(self): + x = column("x") + self.assert_compile( + select([x]).where(~and_(None, None)), + "SELECT x WHERE NOT (NULL AND NULL)" + ) + + def test_fourteen(self): + x = column("x") + self.assert_compile( + select([x]).where(~null()), + "SELECT x WHERE NOT NULL" + ) + class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): __dialect__ = 'default' @@ -472,6 +673,58 @@ class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): self.table2.c.field).is_(None)), "SELECT op.field FROM op WHERE (op.field MATCH op.field) IS NULL") + def test_operator_precedence_collate_1(self): + self.assert_compile( + self.table1.c.name == literal('foo').collate('utf-8'), + "mytable.name = (:param_1 COLLATE utf-8)" + ) + + def test_operator_precedence_collate_2(self): + self.assert_compile( + (self.table1.c.name == literal('foo')).collate('utf-8'), + "mytable.name = :param_1 COLLATE utf-8" + ) + + def test_operator_precedence_collate_3(self): + self.assert_compile( + self.table1.c.name.collate('utf-8') == 'foo', + "(mytable.name COLLATE utf-8) = :param_1" + ) + + def test_operator_precedence_collate_4(self): + self.assert_compile( + and_( + (self.table1.c.name == literal('foo')).collate('utf-8'), + (self.table2.c.field == literal('bar')).collate('utf-8'), + ), + "mytable.name = :param_1 COLLATE utf-8 " + "AND op.field = :param_2 COLLATE utf-8" + ) + + def test_operator_precedence_collate_5(self): + self.assert_compile( + select([self.table1.c.name]).order_by( + self.table1.c.name.collate('utf-8').desc()), + "SELECT mytable.name FROM mytable " + "ORDER BY mytable.name COLLATE utf-8 DESC" + ) + + def test_operator_precedence_collate_6(self): + self.assert_compile( + select([self.table1.c.name]).order_by( + self.table1.c.name.collate('utf-8').desc().nullslast()), + "SELECT mytable.name FROM mytable " + "ORDER BY mytable.name COLLATE utf-8 DESC NULLS LAST" + ) + + def test_operator_precedence_collate_7(self): + self.assert_compile( + select([self.table1.c.name]).order_by( + self.table1.c.name.collate('utf-8').asc()), + "SELECT mytable.name FROM mytable " + "ORDER BY mytable.name COLLATE utf-8 ASC" + ) + def test_commutative_operators(self): self.assert_compile( literal("a") + literal("b") * literal("c"), @@ -768,6 +1021,17 @@ class InTest(fixtures.TestBase, testing.AssertsCompiledSQL): "mytable.myid IN (NULL)" ) + @testing.emits_warning('.*empty sequence.*') + def test_in_29(self): + self.assert_compile(self.table1.c.myid.notin_([]), + "mytable.myid = mytable.myid") + + @testing.emits_warning('.*empty sequence.*') + def test_in_30(self): + self.assert_compile(~self.table1.c.myid.in_([]), + "mytable.myid = mytable.myid") + + class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL): __dialect__ = 'default' diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 39c896266..40c63b179 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -68,7 +68,7 @@ class QueryTest(fixtures.TestBase): r"A value is required for bind parameter 'user_name', in " "parameter group 2 \(original cause: (sqlalchemy.exc.)?InvalidRequestError: A " "value is required for bind parameter 'user_name', in " - "parameter group 2\) 'INSERT INTO query_users", + "parameter group 2\) u?'INSERT INTO query_users", users.insert().execute, {'user_id':7, 'user_name':'jack'}, {'user_id':8, 'user_name':'ed'}, @@ -1090,6 +1090,19 @@ class QueryTest(fixtures.TestBase): eq_(len(r), 1) + def test_sorting_in_python(self): + users.insert().execute( + dict(user_id=1, user_name='foo'), + dict(user_id=2, user_name='bar'), + dict(user_id=3, user_name='def'), + ) + + rows = users.select().order_by(users.c.user_name).execute().fetchall() + + eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')]) + + eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')]) + def test_column_order_with_simple_query(self): # should return values in column definition order users.insert().execute(user_id=1, user_name='foo') @@ -1110,7 +1123,6 @@ class QueryTest(fixtures.TestBase): @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()') @testing.crashes('firebird', 'An identifier must begin with a letter') - @testing.crashes('maxdb', 'FIXME: unknown, verify not fails_on()') def test_column_accessor_shadow(self): meta = MetaData(testing.db) shadowed = Table('test_shadowed', meta, @@ -1900,7 +1912,6 @@ class CompoundTest(fixtures.TestBase): eq_(u.execute().fetchall(), wanted) @testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs") - @testing.fails_on('maxdb', 'FIXME: unknown') @testing.requires.subqueries def test_union_ordered_alias(self): (s1, s2) = ( @@ -1919,7 +1930,6 @@ class CompoundTest(fixtures.TestBase): @testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery") @testing.fails_on('mysql', 'FIXME: unknown') @testing.fails_on('sqlite', 'FIXME: unknown') - @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allows)") def test_union_all(self): e = union_all( select([t1.c.col3]), diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index c92f1ac80..3cab3dc79 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -1,9 +1,10 @@ from sqlalchemy import * from sqlalchemy import sql, schema from sqlalchemy.sql import compiler -from sqlalchemy.testing import fixtures, AssertsCompiledSQL +from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_ from sqlalchemy import testing - +from sqlalchemy.sql.elements import quoted_name, _truncated_label, _anonymous_label +from sqlalchemy.testing.util import picklers class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' @@ -61,6 +62,49 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): assert 'MixedCase' in t2.c + @testing.provide_metadata + def test_has_table_case_sensitive(self): + preparer = testing.db.dialect.identifier_preparer + if testing.db.dialect.requires_name_normalize: + testing.db.execute("CREATE TABLE TAB1 (id INTEGER)") + else: + testing.db.execute("CREATE TABLE tab1 (id INTEGER)") + testing.db.execute('CREATE TABLE %s (id INTEGER)' % + preparer.quote_identifier("tab2")) + testing.db.execute('CREATE TABLE %s (id INTEGER)' % + preparer.quote_identifier("TAB3")) + testing.db.execute('CREATE TABLE %s (id INTEGER)' % + preparer.quote_identifier("TAB4")) + + t1 = Table('tab1', self.metadata, + Column('id', Integer, primary_key=True), + ) + t2 = Table('tab2', self.metadata, + Column('id', Integer, primary_key=True), + quote=True + ) + t3 = Table('TAB3', self.metadata, + Column('id', Integer, primary_key=True), + ) + t4 = Table('TAB4', self.metadata, + Column('id', Integer, primary_key=True), + quote=True) + + insp = inspect(testing.db) + assert testing.db.has_table(t1.name) + eq_([c['name'] for c in insp.get_columns(t1.name)], ['id']) + + assert testing.db.has_table(t2.name) + eq_([c['name'] for c in insp.get_columns(t2.name)], ['id']) + + assert testing.db.has_table(t3.name) + eq_([c['name'] for c in insp.get_columns(t3.name)], ['id']) + + assert testing.db.has_table(t4.name) + eq_([c['name'] for c in insp.get_columns(t4.name)], ['id']) + + + def test_basic(self): table1.insert().execute( {'lowercase': 1, 'UPPERCASE': 2, 'MixedCase': 3, 'a123': 4}, @@ -299,7 +343,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): 'FROM create.foreign' ) - def test_subquery(self): + def test_subquery_one(self): # Lower case names, should not quote metadata = MetaData() t1 = Table('t1', metadata, @@ -318,6 +362,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): 'WHERE anon.col1 = :col1_1' ) + def test_subquery_two(self): # Lower case names, quotes on, should quote metadata = MetaData() t1 = Table('t1', metadata, @@ -336,6 +381,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): 'WHERE anon."col1" = :col1_1' ) + def test_subquery_three(self): # Not lower case names, should quote metadata = MetaData() t1 = Table('T1', metadata, @@ -355,6 +401,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): '"Anon"."Col1" = :Col1_1' ) + def test_subquery_four(self): + # Not lower case names, quotes off, should not quote metadata = MetaData() t1 = Table('T1', metadata, @@ -513,7 +561,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): ') AS "Alias1"' ) - def test_apply_labels(self): + def test_apply_labels_should_quote(self): # Not lower case names, should quote metadata = MetaData() t1 = Table('T1', metadata, @@ -527,6 +575,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): '"Foo"."T1"' ) + def test_apply_labels_shouldnt_quote(self): # Not lower case names, quotes off metadata = MetaData() t1 = Table('T1', metadata, @@ -563,7 +612,20 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): 'CREATE INDEX foo ON t ("x")' ) + def test_quote_flag_propagate_anon_label(self): + m = MetaData() + t = Table('t', m, Column('x', Integer, quote=True)) + self.assert_compile( + select([t.alias()]).apply_labels(), + 'SELECT t_1."x" AS "t_1_x" FROM t AS t_1' + ) + + t2 = Table('t2', m, Column('x', Integer), quote=True) + self.assert_compile( + select([t2.c.x]).apply_labels(), + 'SELECT "t2".x AS "t2_x" FROM "t2"' + ) class PreparerTest(fixtures.TestBase): """Test the db-agnostic quoting services of IdentifierPreparer.""" @@ -619,3 +681,95 @@ class PreparerTest(fixtures.TestBase): a_eq(unformat('`foo`.bar'), ['foo', 'bar']) a_eq(unformat('`foo`.`b``a``r`.`baz`'), ['foo', 'b`a`r', 'baz']) +class QuotedIdentTest(fixtures.TestBase): + def test_concat_quotetrue(self): + q1 = quoted_name("x", True) + self._assert_not_quoted("y" + q1) + + def test_concat_quotefalse(self): + q1 = quoted_name("x", False) + self._assert_not_quoted("y" + q1) + + def test_concat_quotenone(self): + q1 = quoted_name("x", None) + self._assert_not_quoted("y" + q1) + + def test_rconcat_quotetrue(self): + q1 = quoted_name("x", True) + self._assert_not_quoted("y" + q1) + + def test_rconcat_quotefalse(self): + q1 = quoted_name("x", False) + self._assert_not_quoted("y" + q1) + + def test_rconcat_quotenone(self): + q1 = quoted_name("x", None) + self._assert_not_quoted("y" + q1) + + def test_concat_anon(self): + q1 = _anonymous_label(quoted_name("x", True)) + assert isinstance(q1, _anonymous_label) + value = q1 + "y" + assert isinstance(value, _anonymous_label) + self._assert_quoted(value, True) + + def test_rconcat_anon(self): + q1 = _anonymous_label(quoted_name("x", True)) + assert isinstance(q1, _anonymous_label) + value = "y" + q1 + assert isinstance(value, _anonymous_label) + self._assert_quoted(value, True) + + def test_coerce_quoted_switch(self): + q1 = quoted_name("x", False) + q2 = quoted_name(q1, True) + eq_(q2.quote, True) + + def test_coerce_quoted_none(self): + q1 = quoted_name("x", False) + q2 = quoted_name(q1, None) + eq_(q2.quote, False) + + def test_coerce_quoted_retain(self): + q1 = quoted_name("x", False) + q2 = quoted_name(q1, False) + eq_(q2.quote, False) + + def test_coerce_none(self): + q1 = quoted_name(None, False) + eq_(q1, None) + + def test_apply_map_quoted(self): + q1 = _anonymous_label(quoted_name("x%s", True)) + q2 = q1.apply_map(('bar')) + eq_(q2, "xbar") + eq_(q2.quote, True) + + def test_apply_map_plain(self): + q1 = _anonymous_label(quoted_name("x%s", None)) + q2 = q1.apply_map(('bar')) + eq_(q2, "xbar") + self._assert_not_quoted(q2) + + def test_pickle_quote(self): + q1 = quoted_name("x", True) + for loads, dumps in picklers(): + q2 = loads(dumps(q1)) + eq_(str(q1), str(q2)) + eq_(q1.quote, q2.quote) + + def test_pickle_anon_label(self): + q1 = _anonymous_label(quoted_name("x", True)) + for loads, dumps in picklers(): + q2 = loads(dumps(q1)) + assert isinstance(q2, _anonymous_label) + eq_(str(q1), str(q2)) + eq_(q1.quote, q2.quote) + + def _assert_quoted(self, value, quote): + assert isinstance(value, quoted_name) + eq_(value.quote, quote) + + def _assert_not_quoted(self, value): + assert not isinstance(value, quoted_name) + diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py index 6a42b0625..e7245aa3c 100644 --- a/test/sql/test_returning.py +++ b/test/sql/test_returning.py @@ -6,6 +6,7 @@ from sqlalchemy.types import TypeDecorator from sqlalchemy.testing import fixtures, AssertsExecutionResults, engines, \ assert_raises_message from sqlalchemy import exc as sa_exc +import itertools class ReturningTest(fixtures.TestBase, AssertsExecutionResults): __requires__ = 'returning', @@ -184,6 +185,129 @@ class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults): assert row[table.c.foo_id] == row['id'] == 1 +class ReturnDefaultsTest(fixtures.TablesTest): + __requires__ = ('returning', ) + run_define_tables = 'each' + + @classmethod + def define_tables(cls, metadata): + from sqlalchemy.sql import ColumnElement + from sqlalchemy.ext.compiler import compiles + + counter = itertools.count() + + class IncDefault(ColumnElement): + pass + + @compiles(IncDefault) + def compile(element, compiler, **kw): + return str(next(counter)) + + Table("t1", metadata, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), + Column("data", String(50)), + Column("insdef", Integer, default=IncDefault()), + Column("upddef", Integer, onupdate=IncDefault()) + ) + + def test_chained_insert_pk(self): + t1 = self.tables.t1 + result = testing.db.execute( + t1.insert().values(upddef=1).return_defaults(t1.c.insdef) + ) + eq_( + [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)], + [1, 0] + ) + + def test_arg_insert_pk(self): + t1 = self.tables.t1 + result = testing.db.execute( + t1.insert(return_defaults=[t1.c.insdef]).values(upddef=1) + ) + eq_( + [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)], + [1, 0] + ) + + def test_chained_update_pk(self): + t1 = self.tables.t1 + testing.db.execute( + t1.insert().values(upddef=1) + ) + result = testing.db.execute(t1.update().values(data='d1'). + return_defaults(t1.c.upddef)) + eq_( + [result.returned_defaults[k] for k in (t1.c.upddef,)], + [1] + ) + + def test_arg_update_pk(self): + t1 = self.tables.t1 + testing.db.execute( + t1.insert().values(upddef=1) + ) + result = testing.db.execute(t1.update(return_defaults=[t1.c.upddef]). + values(data='d1')) + eq_( + [result.returned_defaults[k] for k in (t1.c.upddef,)], + [1] + ) + + def test_insert_non_default(self): + """test that a column not marked at all as a + default works with this feature.""" + + t1 = self.tables.t1 + result = testing.db.execute( + t1.insert().values(upddef=1).return_defaults(t1.c.data) + ) + eq_( + [result.returned_defaults[k] for k in (t1.c.id, t1.c.data,)], + [1, None] + ) + + def test_update_non_default(self): + """test that a column not marked at all as a + default works with this feature.""" + + t1 = self.tables.t1 + testing.db.execute( + t1.insert().values(upddef=1) + ) + result = testing.db.execute(t1.update(). + values(upddef=2).return_defaults(t1.c.data)) + eq_( + [result.returned_defaults[k] for k in (t1.c.data,)], + [None] + ) + + @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug") + def test_insert_non_default_plus_default(self): + t1 = self.tables.t1 + result = testing.db.execute( + t1.insert().values(upddef=1).return_defaults( + t1.c.data, t1.c.insdef) + ) + eq_( + dict(result.returned_defaults), + {"id": 1, "data": None, "insdef": 0} + ) + + @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug") + def test_update_non_default_plus_default(self): + t1 = self.tables.t1 + testing.db.execute( + t1.insert().values(upddef=1) + ) + result = testing.db.execute(t1.update(). + values(insdef=2).return_defaults( + t1.c.data, t1.c.upddef)) + eq_( + dict(result.returned_defaults), + {"data": None, 'upddef': 1} + ) + class ImplicitReturningFlag(fixtures.TestBase): def test_flag_turned_off(self): e = engines.testing_engine(options={'implicit_returning':False}) diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index df174fb25..8c7bf43b0 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -10,6 +10,7 @@ from sqlalchemy.sql import util as sql_util, visitors, expression from sqlalchemy import exc from sqlalchemy.sql import table, column, null from sqlalchemy import util +from sqlalchemy.schema import Column, Table, MetaData metadata = MetaData() table1 = Table('table1', metadata, @@ -513,6 +514,18 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled "SELECT c FROM (SELECT (SELECT (SELECT table1.col1 AS a FROM table1) AS b) AS c)" ) + def test_self_referential_select_raises(self): + t = table('t', column('x')) + + s = select([t]) + + s.append_whereclause(s.c.x > 5) + assert_raises_message( + exc.InvalidRequestError, + r"select\(\) construct refers to itself as a FROM", + s.compile + ) + def test_unusual_column_elements_text(self): """test that .c excludes text().""" @@ -1460,6 +1473,12 @@ class AnnotationsTest(fixtures.TestBase): c1.name = 'somename' eq_(c1_a.name, 'somename') + def test_late_table_add(self): + c1 = Column("foo", Integer) + c1_a = c1._annotate({"foo": "bar"}) + t = Table('t', MetaData(), c1) + is_(c1_a.table, t) + def test_custom_constructions(self): from sqlalchemy.schema import Column class MyColumn(Column): @@ -1884,3 +1903,64 @@ class WithLabelsTest(fixtures.TestBase): ['t1_x', 't2_x'] ) self._assert_result_keys(sel, ['t1_a', 't2_b']) + +class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = "default" + + def _assert_legacy(self, leg, read=False, nowait=False): + t = table('t', column('c')) + s1 = select([t], for_update=leg) + + if leg is False: + assert s1._for_update_arg is None + assert s1.for_update is None + else: + eq_( + s1._for_update_arg.read, read + ) + eq_( + s1._for_update_arg.nowait, nowait + ) + eq_(s1.for_update, leg) + + def test_false_legacy(self): + self._assert_legacy(False) + + def test_plain_true_legacy(self): + self._assert_legacy(True) + + def test_read_legacy(self): + self._assert_legacy("read", read=True) + + def test_nowait_legacy(self): + self._assert_legacy("nowait", nowait=True) + + def test_read_nowait_legacy(self): + self._assert_legacy("read_nowait", read=True, nowait=True) + + def test_legacy_setter(self): + t = table('t', column('c')) + s = select([t]) + s.for_update = 'nowait' + eq_(s._for_update_arg.nowait, True) + + def test_basic_clone(self): + t = table('t', column('c')) + s = select([t]).with_for_update(read=True, of=t.c.c) + s2 = visitors.ReplacingCloningVisitor().traverse(s) + assert s2._for_update_arg is not s._for_update_arg + eq_(s2._for_update_arg.read, True) + eq_(s2._for_update_arg.of, [t.c.c]) + self.assert_compile(s2, + "SELECT t.c FROM t FOR SHARE OF t", + dialect="postgresql") + + def test_adapt(self): + t = table('t', column('c')) + s = select([t]).with_for_update(read=True, of=t.c.c) + a = t.alias() + s2 = sql_util.ClauseAdapter(a).traverse(s) + eq_(s2._for_update_arg.of, [a.c.c]) + self.assert_compile(s2, + "SELECT t_1.c FROM t AS t_1 FOR SHARE OF t_1", + dialect="postgresql") diff --git a/test/sql/test_text.py b/test/sql/test_text.py new file mode 100644 index 000000000..37346437e --- /dev/null +++ b/test/sql/test_text.py @@ -0,0 +1,371 @@ +"""Test the TextClause and related constructs.""" + +from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_, assert_raises_message +from sqlalchemy import text, select, Integer, String, Float, \ + bindparam, and_, func, literal_column, exc +from sqlalchemy.types import NullType +from sqlalchemy.sql import table, column + +table1 = table('mytable', + column('myid', Integer), + column('name', String), + column('description', String), +) + +table2 = table( + 'myothertable', + column('otherid', Integer), + column('othername', String), +) + +class CompileTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = 'default' + + def test_basic(self): + self.assert_compile( + text("select * from foo where lala = bar"), + "select * from foo where lala = bar" + ) + +class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL): + """test the usage of text() implicit within the select() construct + when strings are passed.""" + + __dialect__ = 'default' + + def test_select_composition_one(self): + self.assert_compile(select( + ["foobar(a)", "pk_foo_bar(syslaal)"], + "a = 12", + from_obj=["foobar left outer join lala on foobar.foo = lala.foo"] + ), + "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar " + "left outer join lala on foobar.foo = lala.foo WHERE a = 12" + ) + + def test_select_composition_two(self): + s = select() + s.append_column("column1") + s.append_column("column2") + s.append_whereclause("column1=12") + s.append_whereclause("column2=19") + s = s.order_by("column1") + s.append_from("table1") + self.assert_compile(s, "SELECT column1, column2 FROM table1 WHERE " + "column1=12 AND column2=19 ORDER BY column1") + + def test_select_composition_three(self): + self.assert_compile( + select(["column1", "column2"], + from_obj=table1).alias('somealias').select(), + "SELECT somealias.column1, somealias.column2 FROM " + "(SELECT column1, column2 FROM mytable) AS somealias" + ) + + def test_select_composition_four(self): + # test that use_labels doesnt interfere with literal columns + self.assert_compile( + select(["column1", "column2", table1.c.myid], from_obj=table1, + use_labels=True), + "SELECT column1, column2, mytable.myid AS mytable_myid " + "FROM mytable" + ) + + def test_select_composition_five(self): + # test that use_labels doesnt interfere + # with literal columns that have textual labels + self.assert_compile( + select(["column1 AS foobar", "column2 AS hoho", table1.c.myid], + from_obj=table1, use_labels=True), + "SELECT column1 AS foobar, column2 AS hoho, " + "mytable.myid AS mytable_myid FROM mytable" + ) + + def test_select_composition_six(self): + # test that "auto-labeling of subquery columns" + # doesnt interfere with literal columns, + # exported columns dont get quoted + self.assert_compile( + select(["column1 AS foobar", "column2 AS hoho", table1.c.myid], + from_obj=[table1]).select(), + "SELECT column1 AS foobar, column2 AS hoho, myid FROM " + "(SELECT column1 AS foobar, column2 AS hoho, " + "mytable.myid AS myid FROM mytable)" + ) + + def test_select_composition_seven(self): + self.assert_compile( + select(['col1', 'col2'], from_obj='tablename').alias('myalias'), + "SELECT col1, col2 FROM tablename" + ) + + def test_select_composition_eight(self): + self.assert_compile(select( + [table1.alias('t'), "foo.f"], + "foo.f = t.id", + from_obj=["(select f from bar where lala=heyhey) foo"] + ), + "SELECT t.myid, t.name, t.description, foo.f FROM mytable AS t, " + "(select f from bar where lala=heyhey) foo WHERE foo.f = t.id") + + def test_select_bundle_columns(self): + self.assert_compile(select( + [table1, table2.c.otherid, "sysdate()", "foo, bar, lala"], + and_( + "foo.id = foofoo(lala)", + "datetime(foo) = Today", + table1.c.myid == table2.c.otherid, + ) + ), + "SELECT mytable.myid, mytable.name, mytable.description, " + "myothertable.otherid, sysdate(), foo, bar, lala " + "FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND " + "datetime(foo) = Today AND mytable.myid = myothertable.otherid") + +class BindParamTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = 'default' + + def test_legacy(self): + t = text("select * from foo where lala=:bar and hoho=:whee", + bindparams=[bindparam('bar', 4), bindparam('whee', 7)]) + + self.assert_compile( + t, + "select * from foo where lala=:bar and hoho=:whee", + checkparams={'bar': 4, 'whee': 7}, + ) + + def test_positional(self): + t = text("select * from foo where lala=:bar and hoho=:whee") + t = t.bindparams(bindparam('bar', 4), bindparam('whee', 7)) + + self.assert_compile( + t, + "select * from foo where lala=:bar and hoho=:whee", + checkparams={'bar': 4, 'whee': 7}, + ) + + def test_kw(self): + t = text("select * from foo where lala=:bar and hoho=:whee") + t = t.bindparams(bar=4, whee=7) + + self.assert_compile( + t, + "select * from foo where lala=:bar and hoho=:whee", + checkparams={'bar': 4, 'whee': 7}, + ) + + def test_positional_plus_kw(self): + t = text("select * from foo where lala=:bar and hoho=:whee") + t = t.bindparams(bindparam('bar', 4), whee=7) + + self.assert_compile( + t, + "select * from foo where lala=:bar and hoho=:whee", + checkparams={'bar': 4, 'whee': 7}, + ) + + def test_literal_binds(self): + t = text("select * from foo where lala=:bar and hoho=:whee") + t = t.bindparams(bindparam('bar', 4), whee='whee') + + self.assert_compile( + t, + "select * from foo where lala=4 and hoho='whee'", + checkparams={}, + literal_binds=True + ) + + def _assert_type_map(self, t, compare): + map_ = dict( + (b.key, b.type) for b in t._bindparams.values() + ) + for k in compare: + assert compare[k]._type_affinity is map_[k]._type_affinity + + def test_typing_construction(self): + t = text("select * from table :foo :bar :bat") + + self._assert_type_map(t, {"foo": NullType(), + "bar": NullType(), + "bat": NullType()}) + + t = t.bindparams(bindparam('foo', type_=String)) + + self._assert_type_map(t, {"foo": String(), + "bar": NullType(), + "bat": NullType()}) + + t = t.bindparams(bindparam('bar', type_=Integer)) + + self._assert_type_map(t, {"foo": String(), + "bar": Integer(), + "bat": NullType()}) + + t = t.bindparams(bat=45.564) + + self._assert_type_map(t, {"foo": String(), + "bar": Integer(), + "bat": Float()}) + + + def test_binds_compiled_named(self): + self.assert_compile( + text("select * from foo where lala=:bar and hoho=:whee"). + bindparams(bar=4, whee=7), + "select * from foo where lala=%(bar)s and hoho=%(whee)s", + checkparams={'bar': 4, 'whee': 7}, + dialect="postgresql" + ) + + def test_binds_compiled_positional(self): + self.assert_compile( + text("select * from foo where lala=:bar and hoho=:whee"). + bindparams(bar=4, whee=7), + "select * from foo where lala=? and hoho=?", + checkparams={'bar': 4, 'whee': 7}, + dialect="sqlite" + ) + + def test_missing_bind_kw(self): + assert_raises_message( + exc.ArgumentError, + "This text\(\) construct doesn't define a bound parameter named 'bar'", + text(":foo").bindparams, + foo=5, bar=7 + ) + + def test_missing_bind_posn(self): + assert_raises_message( + exc.ArgumentError, + "This text\(\) construct doesn't define a bound parameter named 'bar'", + text(":foo").bindparams, + bindparam('foo', value=5), bindparam('bar', value=7) + ) + + def test_escaping_colons(self): + # test escaping out text() params with a backslash + self.assert_compile( + text("select * from foo where clock='05:06:07' " + "and mork='\:mindy'"), + "select * from foo where clock='05:06:07' and mork=':mindy'", + checkparams={}, + params={}, + dialect="postgresql" + ) + + + def test_text_in_select_nonfrom(self): + + generate_series = text("generate_series(:x, :y, :z) as s(a)").\ + bindparams(x=None, y=None, z=None) + + s = select([ + (func.current_date() + literal_column("s.a")).label("dates") + ]).select_from(generate_series) + + self.assert_compile( + s, + "SELECT CURRENT_DATE + s.a AS dates FROM " + "generate_series(:x, :y, :z) as s(a)", + checkparams={'y': None, 'x': None, 'z': None} + ) + + self.assert_compile( + s.params(x=5, y=6, z=7), + "SELECT CURRENT_DATE + s.a AS dates FROM " + "generate_series(:x, :y, :z) as s(a)", + checkparams={'y': 6, 'x': 5, 'z': 7} + ) + +class AsFromTest(fixtures.TestBase, AssertsCompiledSQL): + __dialect__ = 'default' + + def test_basic_toplevel_resultmap_positional(self): + t = text("select id, name from user").columns( + column('id', Integer), + column('name') + ) + + compiled = t.compile() + eq_( + compiled.result_map, + { + 'id': ('id', (t.c.id,), t.c.id.type), + 'name': ('name', (t.c.name,), t.c.name.type) + } + ) + + def test_basic_toplevel_resultmap(self): + t = text("select id, name from user").columns(id=Integer, name=String) + + compiled = t.compile() + eq_( + compiled.result_map, + { + 'id': ('id', (t.c.id,), t.c.id.type), + 'name': ('name', (t.c.name,), t.c.name.type) + } + ) + + def test_basic_subquery_resultmap(self): + t = text("select id, name from user").columns(id=Integer, name=String) + + stmt = select([table1.c.myid]).select_from( + table1.join(t, table1.c.myid == t.c.id)) + compiled = stmt.compile() + eq_( + compiled.result_map, + { + "myid": ("myid", + (table1.c.myid, "myid", "myid"), table1.c.myid.type), + } + ) + + def test_cte(self): + t = text("select id, name from user").columns(id=Integer, name=String).cte('t') + + s = select([table1]).where(table1.c.myid == t.c.id) + self.assert_compile( + s, + "WITH t AS (select id, name from user) " + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable, t WHERE mytable.myid = t.id" + ) + + + def test_alias(self): + t = text("select id, name from user").columns(id=Integer, name=String).alias('t') + + s = select([table1]).where(table1.c.myid == t.c.id) + self.assert_compile( + s, + "SELECT mytable.myid, mytable.name, mytable.description " + "FROM mytable, (select id, name from user) AS t " + "WHERE mytable.myid = t.id" + ) + + def test_scalar_subquery(self): + t = text("select id from user").columns(id=Integer) + subq = t.as_scalar() + + assert subq.type._type_affinity is Integer()._type_affinity + + s = select([table1.c.myid, subq]).where(table1.c.myid == subq) + self.assert_compile( + s, + "SELECT mytable.myid, (select id from user) AS anon_1 " + "FROM mytable WHERE mytable.myid = (select id from user)" + ) + + def test_build_bindparams(self): + t = text("select id from user :foo :bar :bat") + t = t.bindparams(bindparam("foo", type_=Integer)) + t = t.columns(id=Integer) + t = t.bindparams(bar=String) + t = t.bindparams(bindparam('bat', value='bat')) + + eq_( + set(t.element._bindparams), + set(["bat", "foo", "bar"]) + )
\ No newline at end of file diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 2a22224a2..3a263aab2 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -8,6 +8,7 @@ from sqlalchemy import exc, types, util, dialects for name in dialects.__all__: __import__("sqlalchemy.dialects.%s" % name) from sqlalchemy.sql import operators, column, table +from sqlalchemy.schema import CheckConstraint, AddConstraint from sqlalchemy.engine import default from sqlalchemy.testing.schema import Table, Column from sqlalchemy import testing @@ -166,14 +167,6 @@ class AdaptTest(fixtures.TestBase): t1 = typ() repr(t1) - def test_plain_init_deprecation_warning(self): - for typ in (Integer, Date, SmallInteger): - assert_raises_message( - exc.SADeprecationWarning, - "Passing arguments to type object " - "constructor %s is deprecated" % typ, - typ, 11 - ) class TypeAffinityTest(fixtures.TestBase): def test_type_affinity(self): @@ -272,6 +265,36 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL): for col in row[3], row[4]: assert isinstance(col, util.text_type) + def test_typedecorator_literal_render(self): + class MyType(types.TypeDecorator): + impl = String + + def process_literal_param(self, value, dialect): + return "HI->%s<-THERE" % value + + self.assert_compile( + select([literal("test", MyType)]), + "SELECT 'HI->test<-THERE' AS anon_1", + dialect='default', + literal_binds=True + ) + + def test_typedecorator_literal_render_fallback_bound(self): + # fall back to process_bind_param for literal + # value rendering. + class MyType(types.TypeDecorator): + impl = String + + def process_bind_param(self, value, dialect): + return "HI->%s<-THERE" % value + + self.assert_compile( + select([literal("test", MyType)]), + "SELECT 'HI->test<-THERE' AS anon_1", + dialect='default', + literal_binds=True + ) + def test_typedecorator_impl(self): for impl_, exp, kw in [ (Float, "FLOAT", {}), @@ -381,75 +404,6 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL): eq_(a.foo, 'foo') eq_(a.dialect_specific_args['bar'], 'bar') - @testing.provide_metadata - def test_type_coerce(self): - """test ad-hoc usage of custom types with type_coerce().""" - - metadata = self.metadata - class MyType(types.TypeDecorator): - impl = String - - def process_bind_param(self, value, dialect): - return value[0:-8] - - def process_result_value(self, value, dialect): - return value + "BIND_OUT" - - t = Table('t', metadata, Column('data', String(50))) - metadata.create_all() - - t.insert().values(data=type_coerce('d1BIND_OUT', MyType)).execute() - - eq_( - select([type_coerce(t.c.data, MyType)]).execute().fetchall(), - [('d1BIND_OUT', )] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]).execute().fetchall(), - [('d1', 'd1BIND_OUT')] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]). - alias().select().execute().fetchall(), - [('d1', 'd1BIND_OUT')] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]).\ - where(type_coerce(t.c.data, MyType) == 'd1BIND_OUT').\ - execute().fetchall(), - [('d1', 'd1BIND_OUT')] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]).\ - where(t.c.data == type_coerce('d1BIND_OUT', MyType)).\ - execute().fetchall(), - [('d1', 'd1BIND_OUT')] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]).\ - where(t.c.data == type_coerce(None, MyType)).\ - execute().fetchall(), - [] - ) - - eq_( - select([t.c.data, type_coerce(t.c.data, MyType)]).\ - where(type_coerce(t.c.data, MyType) == None).\ - execute().fetchall(), - [] - ) - - eq_( - testing.db.scalar( - select([type_coerce(literal('d1BIND_OUT'), MyType)]) - ), - 'd1BIND_OUT' - ) @classmethod def define_tables(cls, metadata): @@ -550,6 +504,220 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL): Column('goofy9', MyNewIntSubClass, nullable=False), ) +class TypeCoerceCastTest(fixtures.TablesTest): + + @classmethod + def define_tables(cls, metadata): + class MyType(types.TypeDecorator): + impl = String + + def process_bind_param(self, value, dialect): + return "BIND_IN" + str(value) + + def process_result_value(self, value, dialect): + return value + "BIND_OUT" + + cls.MyType = MyType + + Table('t', metadata, + Column('data', String(50)) + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_insert_round_trip_cast(self): + self._test_insert_round_trip(cast) + + def test_insert_round_trip_type_coerce(self): + self._test_insert_round_trip(type_coerce) + + def _test_insert_round_trip(self, coerce_fn): + MyType = self.MyType + t = self.tables.t + + t.insert().values(data=coerce_fn('d1', MyType)).execute() + + eq_( + select([coerce_fn(t.c.data, MyType)]).execute().fetchall(), + [('BIND_INd1BIND_OUT', )] + ) + + @testing.fails_on("oracle", + "ORA-00906: missing left parenthesis - " + "seems to be CAST(:param AS type)") + def test_coerce_from_nulltype_cast(self): + self._test_coerce_from_nulltype(cast) + + def test_coerce_from_nulltype_type_coerce(self): + self._test_coerce_from_nulltype(type_coerce) + + def _test_coerce_from_nulltype(self, coerce_fn): + MyType = self.MyType + + # test coerce from nulltype - e.g. use an object that + # doens't match to a known type + class MyObj(object): + def __str__(self): + return "THISISMYOBJ" + + eq_( + testing.db.execute( + select([coerce_fn(MyObj(), MyType)]) + ).fetchall(), + [('BIND_INTHISISMYOBJBIND_OUT',)] + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_vs_non_coerced_cast(self): + self._test_vs_non_coerced(cast) + + def test_vs_non_coerced_type_coerce(self): + self._test_vs_non_coerced(type_coerce) + + def _test_vs_non_coerced(self, coerce_fn): + MyType = self.MyType + t = self.tables.t + + t.insert().values(data=coerce_fn('d1', MyType)).execute() + + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]).execute().fetchall(), + [('BIND_INd1', 'BIND_INd1BIND_OUT')] + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_vs_non_coerced_alias_cast(self): + self._test_vs_non_coerced_alias(cast) + + def test_vs_non_coerced_alias_type_coerce(self): + self._test_vs_non_coerced_alias(type_coerce) + + def _test_vs_non_coerced_alias(self, coerce_fn): + MyType = self.MyType + t = self.tables.t + + t.insert().values(data=coerce_fn('d1', MyType)).execute() + + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]). + alias().select().execute().fetchall(), + [('BIND_INd1', 'BIND_INd1BIND_OUT')] + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_vs_non_coerced_where_cast(self): + self._test_vs_non_coerced_where(cast) + + def test_vs_non_coerced_where_type_coerce(self): + self._test_vs_non_coerced_where(type_coerce) + + def _test_vs_non_coerced_where(self, coerce_fn): + MyType = self.MyType + + t = self.tables.t + t.insert().values(data=coerce_fn('d1', MyType)).execute() + + # coerce on left side + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]).\ + where(coerce_fn(t.c.data, MyType) == 'd1').\ + execute().fetchall(), + [('BIND_INd1', 'BIND_INd1BIND_OUT')] + ) + + # coerce on right side + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]).\ + where(t.c.data == coerce_fn('d1', MyType)).\ + execute().fetchall(), + [('BIND_INd1', 'BIND_INd1BIND_OUT')] + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_coerce_none_cast(self): + self._test_coerce_none(cast) + + def test_coerce_none_type_coerce(self): + self._test_coerce_none(type_coerce) + + def _test_coerce_none(self, coerce_fn): + MyType = self.MyType + + t = self.tables.t + t.insert().values(data=coerce_fn('d1', MyType)).execute() + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]).\ + where(t.c.data == coerce_fn(None, MyType)).\ + execute().fetchall(), + [] + ) + + eq_( + select([t.c.data, coerce_fn(t.c.data, MyType)]).\ + where(coerce_fn(t.c.data, MyType) == None).\ + execute().fetchall(), + [] + ) + + @testing.fails_on("oracle", + "oracle doesn't like CAST in the VALUES of an INSERT") + def test_resolve_clause_element_cast(self): + self._test_resolve_clause_element(cast) + + def test_resolve_clause_element_type_coerce(self): + self._test_resolve_clause_element(type_coerce) + + def _test_resolve_clause_element(self, coerce_fn): + MyType = self.MyType + + t = self.tables.t + t.insert().values(data=coerce_fn('d1', MyType)).execute() + + class MyFoob(object): + def __clause_element__(self): + return t.c.data + + eq_( + testing.db.execute( + select([t.c.data, coerce_fn(MyFoob(), MyType)]) + ).fetchall(), + [('BIND_INd1', 'BIND_INd1BIND_OUT')] + ) + + @testing.fails_on("oracle", + "ORA-00906: missing left parenthesis - " + "seems to be CAST(:param AS type)") + def test_cast_existing_typed(self): + MyType = self.MyType + coerce_fn = cast + + # when cast() is given an already typed value, + # the type does not take effect on the value itself. + eq_( + testing.db.scalar( + select([coerce_fn(literal('d1'), MyType)]) + ), + 'd1BIND_OUT' + ) + + def test_type_coerce_existing_typed(self): + MyType = self.MyType + coerce_fn = type_coerce + # type_coerce does upgrade the given expression to the + # given type. + eq_( + testing.db.scalar( + select([coerce_fn(literal('d1'), MyType)]) + ), + 'BIND_INd1BIND_OUT' + ) + + + class VariantTest(fixtures.TestBase, AssertsCompiledSQL): def setup(self): class UTypeOne(types.UserDefinedType): @@ -685,8 +853,11 @@ class UnicodeTest(fixtures.TestBase): testing.db.dialect.returns_unicode_strings, True if util.py3k else False ) - - + elif testing.against('oracle+cx_oracle'): + eq_( + testing.db.dialect.returns_unicode_strings, + True if util.py3k else "conditional" + ) else: expected = (testing.db.name, testing.db.driver) in \ ( @@ -699,7 +870,6 @@ class UnicodeTest(fixtures.TestBase): ('mysql', 'mysqlconnector'), ('sqlite', 'pysqlite'), ('oracle', 'zxjdbc'), - ('oracle', 'cx_oracle'), ) eq_( @@ -768,7 +938,7 @@ class UnicodeTest(fixtures.TestBase): ) -class EnumTest(fixtures.TestBase): +class EnumTest(AssertsCompiledSQL, fixtures.TestBase): @classmethod def setup_class(cls): global enum_table, non_native_enum_table, metadata @@ -851,6 +1021,42 @@ class EnumTest(fixtures.TestBase): {'id': 4, 'someenum': 'four'} ) + def test_non_native_constraint_custom_type(self): + class Foob(object): + def __init__(self, name): + self.name = name + + class MyEnum(types.SchemaType, TypeDecorator): + def __init__(self, values): + self.impl = Enum( + *[v.name for v in values], + name="myenum", + native_enum=False + ) + + + def _set_table(self, table, column): + self.impl._set_table(table, column) + + # future method + def process_literal_param(self, value, dialect): + return value.name + + def process_bind_param(self, value, dialect): + return value.name + + m = MetaData() + t1 = Table('t', m, Column('x', MyEnum([Foob('a'), Foob('b')]))) + const = [c for c in t1.constraints if isinstance(c, CheckConstraint)][0] + + self.assert_compile( + AddConstraint(const), + "ALTER TABLE t ADD CONSTRAINT myenum CHECK (x IN ('a', 'b'))", + dialect="default" + ) + + + @testing.fails_on('mysql', "the CHECK constraint doesn't raise an exception for unknown reason") def test_non_native_constraint(self): @@ -873,6 +1079,14 @@ class EnumTest(fixtures.TestBase): # depending on backend. assert "('x'," in e.print_sql() + def test_repr(self): + e = Enum("x", "y", name="somename", convert_unicode=True, + quote=True, inherit_schema=True) + eq_( + repr(e), + "Enum('x', 'y', name='somename', inherit_schema=True)" + ) + class BinaryTest(fixtures.TestBase, AssertsExecutionResults): __excluded_on__ = ( ('mysql', '<', (4, 1, 1)), # screwy varbinary types @@ -995,6 +1209,8 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled def process(value): return value / 10 return process + + class MyOldCustomType(MyCustomType): def adapt_operator(self, op): return {operators.add: operators.sub, operators.sub: operators.add}.get(op, op) @@ -1071,6 +1287,26 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')] ) + def test_bind_adapt_update(self): + bp = bindparam("somevalue") + stmt = test_table.update().values(avalue=bp) + compiled = stmt.compile() + eq_(bp.type._type_affinity, types.NullType) + eq_(compiled.binds['somevalue'].type._type_affinity, MyCustomType) + + def test_bind_adapt_insert(self): + bp = bindparam("somevalue") + stmt = test_table.insert().values(avalue=bp) + compiled = stmt.compile() + eq_(bp.type._type_affinity, types.NullType) + eq_(compiled.binds['somevalue'].type._type_affinity, MyCustomType) + + def test_bind_adapt_expression(self): + bp = bindparam("somevalue") + stmt = test_table.c.avalue == bp + eq_(bp.type._type_affinity, types.NullType) + eq_(stmt.right.type._type_affinity, MyCustomType) + def test_literal_adapt(self): # literals get typed based on the types dictionary, unless # compatible with the left side type @@ -1150,15 +1386,18 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled ) self.assert_compile( and_(c1 == True, c2 == True, c3 == True), - "x = :x_1 AND x = true AND x = :x_2" + "x = :x_1 AND x = true AND x = :x_2", + dialect=default.DefaultDialect(supports_native_boolean=True) ) self.assert_compile( and_(c1 == 3, c2 == 3, c3 == 3), - "x = :x_1 AND x = :x_2 AND x = :x_3" + "x = :x_1 AND x = :x_2 AND x = :x_3", + dialect=default.DefaultDialect(supports_native_boolean=True) ) self.assert_compile( and_(c1.is_(True), c2.is_(True), c3.is_(True)), - "x IS :x_1 AND x IS true AND x IS :x_2" + "x IS :x_1 AND x IS true AND x IS :x_2", + dialect=default.DefaultDialect(supports_native_boolean=True) ) @@ -1202,7 +1441,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled assert expr.right.type._type_affinity is MyFoobarType # untyped bind - it gets assigned MyFoobarType - expr = column("foo", MyFoobarType) + bindparam("foo") + bp = bindparam("foo") + expr = column("foo", MyFoobarType) + bp + assert bp.type._type_affinity is types.NullType assert expr.right.type._type_affinity is MyFoobarType expr = column("foo", MyFoobarType) + bindparam("foo", type_=Integer) @@ -1453,7 +1694,7 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults): eq_(row['non_native_interval'], None) -class BooleanTest(fixtures.TestBase, AssertsExecutionResults): +class BooleanTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): @classmethod def setup_class(cls): global bool_table @@ -1515,6 +1756,35 @@ class BooleanTest(fixtures.TestBase, AssertsExecutionResults): testing.db.execute( "insert into booltest (id, unconstrained_value) values (1, 5)") + def test_non_native_constraint_custom_type(self): + class Foob(object): + def __init__(self, value): + self.value = value + + class MyBool(types.SchemaType, TypeDecorator): + impl = Boolean() + + def _set_table(self, table, column): + self.impl._set_table(table, column) + + # future method + def process_literal_param(self, value, dialect): + return value.value + + def process_bind_param(self, value, dialect): + return value.value + + m = MetaData() + t1 = Table('t', m, Column('x', MyBool())) + const = [c for c in t1.constraints if isinstance(c, CheckConstraint)][0] + + self.assert_compile( + AddConstraint(const), + "ALTER TABLE t ADD CHECK (x IN (0, 1))", + dialect="sqlite" + ) + + class PickleTest(fixtures.TestBase): def test_eq_comparison(self): p1 = PickleType() diff --git a/test/sql/test_unicode.py b/test/sql/test_unicode.py index ffcef903f..8a8cbd06c 100644 --- a/test/sql/test_unicode.py +++ b/test/sql/test_unicode.py @@ -2,7 +2,7 @@ """verrrrry basic unicode column name testing""" from sqlalchemy import * -from sqlalchemy.testing import fixtures, engines +from sqlalchemy.testing import fixtures, engines, eq_ from sqlalchemy import testing from sqlalchemy.testing.engines import utf8_engine from sqlalchemy.sql import column @@ -114,6 +114,20 @@ class UnicodeSchemaTest(fixtures.TestBase): meta.drop_all() metadata.create_all() + def test_repr(self): + + m = MetaData() + t = Table(ue('\u6e2c\u8a66'), m, Column(ue('\u6e2c\u8a66_id'), Integer)) + + # I hardly understand what's going on with the backslashes in + # this one on py2k vs. py3k + eq_( + repr(t), + ( + "Table('\\u6e2c\\u8a66', MetaData(bind=None), " + "Column('\\u6e2c\\u8a66_id', Integer(), table=<\u6e2c\u8a66>), " + "schema=None)")) + class EscapesDefaultsTest(fixtures.TestBase): def test_default_exec(self): metadata = MetaData(testing.db) diff --git a/test/sql/test_update.py b/test/sql/test_update.py index a8510f374..10306372b 100644 --- a/test/sql/test_update.py +++ b/test/sql/test_update.py @@ -192,22 +192,6 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL): 'UPDATE A B C D mytable SET myid=%s, name=%s, description=%s', dialect=mysql.dialect()) - def test_alias(self): - table1 = self.tables.mytable - talias1 = table1.alias('t1') - - self.assert_compile(update(talias1, talias1.c.myid == 7), - 'UPDATE mytable AS t1 ' - 'SET name=:name ' - 'WHERE t1.myid = :myid_1', - params={table1.c.name: 'fred'}) - - self.assert_compile(update(talias1, table1.c.myid == 7), - 'UPDATE mytable AS t1 ' - 'SET name=:name ' - 'FROM mytable ' - 'WHERE mytable.myid = :myid_1', - params={table1.c.name: 'fred'}) def test_update_to_expression(self): """test update from an expression. @@ -268,6 +252,64 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, run_create_tables = run_inserts = run_deletes = None + def test_alias_one(self): + table1 = self.tables.mytable + talias1 = table1.alias('t1') + + # this case is nonsensical. the UPDATE is entirely + # against the alias, but we name the table-bound column + # in values. The behavior here isn't really defined + self.assert_compile( + update(talias1, talias1.c.myid == 7). + values({table1.c.name: "fred"}), + 'UPDATE mytable AS t1 ' + 'SET name=:name ' + 'WHERE t1.myid = :myid_1') + + def test_alias_two(self): + table1 = self.tables.mytable + talias1 = table1.alias('t1') + + # Here, compared to + # test_alias_one(), here we actually have UPDATE..FROM, + # which is causing the "table1.c.name" param to be handled + # as an "extra table", hence we see the full table name rendered. + self.assert_compile( + update(talias1, table1.c.myid == 7). + values({table1.c.name: 'fred'}), + 'UPDATE mytable AS t1 ' + 'SET name=:mytable_name ' + 'FROM mytable ' + 'WHERE mytable.myid = :myid_1', + checkparams={'mytable_name': 'fred', 'myid_1': 7}, + ) + + def test_alias_two_mysql(self): + table1 = self.tables.mytable + talias1 = table1.alias('t1') + + self.assert_compile( + update(talias1, table1.c.myid == 7). + values({table1.c.name: 'fred'}), + "UPDATE mytable AS t1, mytable SET mytable.name=%s " + "WHERE mytable.myid = %s", + checkparams={'mytable_name': 'fred', 'myid_1': 7}, + dialect='mysql') + + def test_update_from_multitable_same_name_mysql(self): + users, addresses = self.tables.users, self.tables.addresses + + self.assert_compile( + users.update(). + values(name='newname').\ + values({addresses.c.name: "new address"}).\ + where(users.c.id == addresses.c.user_id), + "UPDATE users, addresses SET addresses.name=%s, " + "users.name=%s WHERE users.id = addresses.user_id", + checkparams={u'addresses_name': 'new address', 'name': 'newname'}, + dialect='mysql' + ) + def test_render_table(self): users, addresses = self.tables.users, self.tables.addresses @@ -455,6 +497,36 @@ class UpdateFromRoundTripTest(_UpdateFromTestBase, fixtures.TablesTest): (10, 'chuck')] self._assert_users(users, expected) + @testing.only_on('mysql', 'Multi table update') + def test_exec_multitable_same_name(self): + users, addresses = self.tables.users, self.tables.addresses + + values = { + addresses.c.name: 'ad_ed2', + users.c.name: 'ed2' + } + + testing.db.execute( + addresses.update(). + values(values). + where(users.c.id == addresses.c.user_id). + where(users.c.name == 'ed')) + + expected = [ + (1, 7, 'x', 'jack@bean.com'), + (2, 8, 'ad_ed2', 'ed@wood.com'), + (3, 8, 'ad_ed2', 'ed@bettyboop.com'), + (4, 8, 'ad_ed2', 'ed@lala.com'), + (5, 9, 'x', 'fred@fred.com')] + self._assert_addresses(addresses, expected) + + expected = [ + (7, 'jack'), + (8, 'ed2'), + (9, 'fred'), + (10, 'chuck')] + self._assert_users(users, expected) + def _assert_addresses(self, addresses, expected): stmt = addresses.select().order_by(addresses.c.id) eq_(testing.db.execute(stmt).fetchall(), expected) @@ -478,7 +550,16 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('user_id', None, ForeignKey('users.id')), - Column('email_address', String(50), nullable=False)) + Column('email_address', String(50), nullable=False), + ) + + Table('foobar', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('user_id', None, ForeignKey('users.id')), + Column('data', String(30)), + Column('some_update', String(30), onupdate='im the other update') + ) @classmethod def fixtures(cls): @@ -494,6 +575,12 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, (3, 8, 'ed@bettyboop.com'), (4, 9, 'fred@fred.com') ), + foobar=( + ('id', 'user_id', 'data'), + (2, 8, 'd1'), + (3, 8, 'd2'), + (4, 9, 'd3') + ) ) @testing.only_on('mysql', 'Multi table update') @@ -525,6 +612,37 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, self._assert_users(users, expected) @testing.only_on('mysql', 'Multi table update') + def test_defaults_second_table_same_name(self): + users, foobar = self.tables.users, self.tables.foobar + + values = { + foobar.c.data: foobar.c.data + 'a', + users.c.name: 'ed2' + } + + ret = testing.db.execute( + users.update(). + values(values). + where(users.c.id == foobar.c.user_id). + where(users.c.name == 'ed')) + + eq_( + set(ret.prefetch_cols()), + set([users.c.some_update, foobar.c.some_update]) + ) + + expected = [ + (2, 8, 'd1a', 'im the other update'), + (3, 8, 'd2a', 'im the other update'), + (4, 9, 'd3', None)] + self._assert_foobar(foobar, expected) + + expected = [ + (8, 'ed2', 'im the update'), + (9, 'fred', 'value')] + self._assert_users(users, expected) + + @testing.only_on('mysql', 'Multi table update') def test_no_defaults_second_table(self): users, addresses = self.tables.users, self.tables.addresses @@ -548,6 +666,10 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, (9, 'fred', 'value')] self._assert_users(users, expected) + def _assert_foobar(self, foobar, expected): + stmt = foobar.select().order_by(foobar.c.id) + eq_(testing.db.execute(stmt).fetchall(), expected) + def _assert_addresses(self, addresses, expected): stmt = addresses.select().order_by(addresses.c.id) eq_(testing.db.execute(stmt).fetchall(), expected) |