Diffstat (limited to 'test/dialect/test_sqlite.py')
-rw-r--r-- | test/dialect/test_sqlite.py | 772
1 file changed, 528 insertions, 244 deletions
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 124208dbe..44e4eda42 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -7,8 +7,8 @@ import datetime from sqlalchemy.testing import eq_, assert_raises, \ assert_raises_message, is_ from sqlalchemy import Table, select, bindparam, Column,\ - MetaData, func, extract, ForeignKey, text, DefaultClause, and_, create_engine,\ - UniqueConstraint + MetaData, func, extract, ForeignKey, text, DefaultClause, and_, \ + create_engine, UniqueConstraint from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time from sqlalchemy import types as sqltypes from sqlalchemy import event, inspect @@ -21,6 +21,9 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \ AssertsExecutionResults, engines from sqlalchemy import testing from sqlalchemy.schema import CreateTable +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.testing import mock + class TestTypes(fixtures.TestBase, AssertsExecutionResults): @@ -32,9 +35,10 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): """ meta = MetaData(testing.db) - t = Table('bool_table', meta, Column('id', Integer, - primary_key=True), Column('boo', - Boolean(create_constraint=False))) + t = Table( + 'bool_table', meta, + Column('id', Integer, primary_key=True), + Column('boo', Boolean(create_constraint=False))) try: meta.create_all() testing.db.execute("INSERT INTO bool_table (id, boo) " @@ -69,28 +73,31 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): ValueError, "Couldn't parse %s string." % disp, lambda: testing.db.execute( - text("select 'ASDF' as value", typemap={"value":typ}) + text("select 'ASDF' as value", typemap={"value": typ}) ).scalar() ) def test_native_datetime(self): dbapi = testing.db.dialect.dbapi - connect_args = {'detect_types': dbapi.PARSE_DECLTYPES \ - | dbapi.PARSE_COLNAMES} - engine = engines.testing_engine(options={'connect_args' - : connect_args, 'native_datetime': True}) - t = Table('datetest', MetaData(), - Column('id', Integer, primary_key=True), - Column('d1', Date), Column('d2', sqltypes.TIMESTAMP)) + connect_args = { + 'detect_types': dbapi.PARSE_DECLTYPES | dbapi.PARSE_COLNAMES} + engine = engines.testing_engine( + options={'connect_args': connect_args, 'native_datetime': True}) + t = Table( + 'datetest', MetaData(), + Column('id', Integer, primary_key=True), + Column('d1', Date), Column('d2', sqltypes.TIMESTAMP)) t.create(engine) try: - engine.execute(t.insert(), {'d1': datetime.date(2010, 5, - 10), - 'd2': datetime.datetime( 2010, 5, 10, 12, 15, 25, - )}) + engine.execute(t.insert(), { + 'd1': datetime.date(2010, 5, 10), + 'd2': datetime.datetime(2010, 5, 10, 12, 15, 25) + }) row = engine.execute(t.select()).first() - eq_(row, (1, datetime.date(2010, 5, 10), - datetime.datetime( 2010, 5, 10, 12, 15, 25, ))) + eq_( + row, + (1, datetime.date(2010, 5, 10), + datetime.datetime(2010, 5, 10, 12, 15, 25))) r = engine.execute(func.current_date()).scalar() assert isinstance(r, util.string_types) finally: @@ -100,15 +107,16 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): @testing.provide_metadata def test_custom_datetime(self): sqlite_date = sqlite.DATETIME( - # 2004-05-21T00:00:00 - storage_format="%(year)04d-%(month)02d-%(day)02d" - "T%(hour)02d:%(minute)02d:%(second)02d", - regexp=r"(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)", - ) + # 2004-05-21T00:00:00 + storage_format="%(year)04d-%(month)02d-%(day)02d" + "T%(hour)02d:%(minute)02d:%(second)02d", + 
regexp=r"(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)", + ) t = Table('t', self.metadata, Column('d', sqlite_date)) self.metadata.create_all(testing.db) - testing.db.execute(t.insert(). - values(d=datetime.datetime(2010, 10, 15, 12, 37, 0))) + testing.db.execute( + t.insert(). + values(d=datetime.datetime(2010, 10, 15, 12, 37, 0))) testing.db.execute("insert into t (d) values ('2004-05-21T00:00:00')") eq_( testing.db.execute("select * from t order by d").fetchall(), @@ -116,21 +124,70 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): ) eq_( testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(), - [(datetime.datetime(2004, 5, 21, 0, 0),), - (datetime.datetime(2010, 10, 15, 12, 37),)] + [ + (datetime.datetime(2004, 5, 21, 0, 0),), + (datetime.datetime(2010, 10, 15, 12, 37),)] + ) + + @testing.provide_metadata + def test_custom_datetime_text_affinity(self): + sqlite_date = sqlite.DATETIME( + storage_format="%(year)04d%(month)02d%(day)02d" + "%(hour)02d%(minute)02d%(second)02d", + regexp=r"(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})", + ) + t = Table('t', self.metadata, Column('d', sqlite_date)) + self.metadata.create_all(testing.db) + testing.db.execute( + t.insert(). + values(d=datetime.datetime(2010, 10, 15, 12, 37, 0))) + testing.db.execute("insert into t (d) values ('20040521000000')") + eq_( + testing.db.execute("select * from t order by d").fetchall(), + [('20040521000000',), ('20101015123700',)] + ) + eq_( + testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(), + [ + (datetime.datetime(2004, 5, 21, 0, 0),), + (datetime.datetime(2010, 10, 15, 12, 37),)] + ) + + @testing.provide_metadata + def test_custom_date_text_affinity(self): + sqlite_date = sqlite.DATE( + storage_format="%(year)04d%(month)02d%(day)02d", + regexp=r"(\d{4})(\d{2})(\d{2})", + ) + t = Table('t', self.metadata, Column('d', sqlite_date)) + self.metadata.create_all(testing.db) + testing.db.execute( + t.insert(). + values(d=datetime.date(2010, 10, 15))) + testing.db.execute("insert into t (d) values ('20040521')") + eq_( + testing.db.execute("select * from t order by d").fetchall(), + [('20040521',), ('20101015',)] + ) + eq_( + testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(), + [ + (datetime.date(2004, 5, 21),), + (datetime.date(2010, 10, 15),)] ) @testing.provide_metadata def test_custom_date(self): sqlite_date = sqlite.DATE( - # 2004-05-21T00:00:00 - storage_format="%(year)04d|%(month)02d|%(day)02d", - regexp=r"(\d+)\|(\d+)\|(\d+)", - ) + # 2004-05-21T00:00:00 + storage_format="%(year)04d|%(month)02d|%(day)02d", + regexp=r"(\d+)\|(\d+)\|(\d+)", + ) t = Table('t', self.metadata, Column('d', sqlite_date)) self.metadata.create_all(testing.db) - testing.db.execute(t.insert(). - values(d=datetime.date(2010, 10, 15))) + testing.db.execute( + t.insert(). 
+ values(d=datetime.date(2010, 10, 15))) testing.db.execute("insert into t (d) values ('2004|05|21')") eq_( testing.db.execute("select * from t order by d").fetchall(), @@ -138,11 +195,11 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): ) eq_( testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(), - [(datetime.date(2004, 5, 21),), - (datetime.date(2010, 10, 15),)] + [ + (datetime.date(2004, 5, 21),), + (datetime.date(2010, 10, 15),)] ) - def test_no_convert_unicode(self): """test no utf-8 encoding occurs""" @@ -156,7 +213,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): sqltypes.CHAR(convert_unicode=True), sqltypes.Unicode(), sqltypes.UnicodeText(), - ): + ): bindproc = t.dialect_impl(dialect).bind_processor(dialect) assert not bindproc or \ isinstance(bindproc(util.u('some string')), util.text_type) @@ -198,6 +255,7 @@ class DateTimeTest(fixtures.TestBase, AssertsCompiledSQL): rp = sldt.result_processor(None, None) eq_(rp(bp(dt)), dt) + class DateTest(fixtures.TestBase, AssertsCompiledSQL): def test_default(self): @@ -221,6 +279,7 @@ class DateTest(fixtures.TestBase, AssertsCompiledSQL): rp = sldt.result_processor(None, None) eq_(rp(bp(dt)), dt) + class TimeTest(fixtures.TestBase, AssertsCompiledSQL): def test_default(self): @@ -333,8 +392,9 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL): @testing.provide_metadata def test_boolean_default(self): - t = Table("t", self.metadata, - Column("x", Boolean, server_default=sql.false())) + t = Table( + "t", self.metadata, + Column("x", Boolean, server_default=sql.false())) t.create(testing.db) testing.db.execute(t.insert()) testing.db.execute(t.insert().values(x=True)) @@ -351,7 +411,6 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL): eq_(info['default'], '3') - class DialectTest(fixtures.TestBase, AssertsExecutionResults): __only_on__ = 'sqlite' @@ -372,7 +431,7 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): Column('true', Integer), Column('false', Integer), Column('column', Integer), - ) + ) try: meta.create_all() t.insert().execute(safe=1) @@ -403,8 +462,8 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): table1 = Table('django_admin_log', metadata, autoload=True) table2 = Table('django_content_type', metadata, autoload=True) j = table1.join(table2) - assert j.onclause.compare(table1.c.content_type_id - == table2.c.id) + assert j.onclause.compare( + table1.c.content_type_id == table2.c.id) @testing.provide_metadata def test_quoted_identifiers_functional_two(self): @@ -426,8 +485,8 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): # unfortunately, still can't do this; sqlite quadruples # up the quotes on the table name here for pragma foreign_key_list - #testing.db.execute(r''' - #CREATE TABLE """b""" ( + # testing.db.execute(r''' + # CREATE TABLE """b""" ( # """id""" integer NOT NULL PRIMARY KEY, # """aid""" integer NULL # REFERENCES """a""" ("""id""") @@ -439,48 +498,25 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): #table2 = Table(r'"b"', metadata, autoload=True) #j = table1.join(table2) - #assert j.onclause.compare(table1.c['"id"'] + # assert j.onclause.compare(table1.c['"id"'] # == table2.c['"aid"']) - def test_legacy_quoted_identifiers_unit(self): - dialect = sqlite.dialect() - dialect._broken_fk_pragma_quotes = True - - - for row in [ - (0, 'target', 'tid', 'id'), - (0, '"target"', 'tid', 'id'), - (0, '[target]', 'tid', 'id'), - (0, "'target'", 'tid', 'id'), - (0, '`target`', 'tid', 'id'), - ]: - fks = {} 
- fkeys = [] - dialect._parse_fk(fks, fkeys, *row) - eq_(fkeys, [{ - 'referred_table': 'target', - 'referred_columns': ['id'], - 'referred_schema': None, - 'name': None, - 'constrained_columns': ['tid'] - }]) - @testing.provide_metadata def test_description_encoding(self): # amazingly, pysqlite seems to still deliver cursor.description # as encoded bytes in py2k - t = Table('x', self.metadata, - Column(u('méil'), Integer, primary_key=True), - Column(ue('\u6e2c\u8a66'), Integer), - ) + t = Table( + 'x', self.metadata, + Column(u('méil'), Integer, primary_key=True), + Column(ue('\u6e2c\u8a66'), Integer), + ) self.metadata.create_all(testing.db) result = testing.db.execute(t.select()) assert u('méil') in result.keys() assert ue('\u6e2c\u8a66') in result.keys() - def test_file_path_is_absolute(self): d = pysqlite_dialect.dialect() eq_( @@ -498,48 +534,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): e = create_engine('sqlite+pysqlite:///foo.db') assert e.pool.__class__ is pool.NullPool - def test_dont_reflect_autoindex(self): - meta = MetaData(testing.db) - t = Table('foo', meta, Column('bar', String, primary_key=True)) - meta.create_all() - from sqlalchemy.engine.reflection import Inspector - try: - inspector = Inspector(testing.db) - eq_(inspector.get_indexes('foo'), []) - eq_(inspector.get_indexes('foo', - include_auto_indexes=True), [{'unique': 1, 'name' - : 'sqlite_autoindex_foo_1', 'column_names': ['bar']}]) - finally: - meta.drop_all() - - def test_create_index_with_schema(self): - """Test creation of index with explicit schema""" - - meta = MetaData(testing.db) - t = Table('foo', meta, Column('bar', String, index=True), - schema='main') - try: - meta.create_all() - finally: - meta.drop_all() - - def test_get_unique_constraints(self): - meta = MetaData(testing.db) - t1 = Table('foo', meta, Column('f', Integer), - UniqueConstraint('f', name='foo_f')) - t2 = Table('bar', meta, Column('b', Integer), - UniqueConstraint('b', name='bar_b'), - prefixes=['TEMPORARY']) - meta.create_all() - from sqlalchemy.engine.reflection import Inspector - try: - inspector = Inspector(testing.db) - eq_(inspector.get_unique_constraints('foo'), - [{'column_names': [u'f'], 'name': u'foo_f'}]) - eq_(inspector.get_unique_constraints('bar'), - [{'column_names': [u'b'], 'name': u'bar_b'}]) - finally: - meta.drop_all() class AttachedMemoryDBTest(fixtures.TestBase): @@ -662,7 +656,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL): 'epoch': '%s', 'dow': '%w', 'week': '%W', - } + } for field, subst in mapping.items(): self.assert_compile(select([extract(field, t.c.col1)]), "SELECT CAST(STRFTIME('%s', t.col1) AS " @@ -685,53 +679,57 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL): def test_constraints_with_schemas(self): metadata = MetaData() - t1 = Table('t1', metadata, - Column('id', Integer, primary_key=True), - schema='master') - t2 = Table('t2', metadata, - Column('id', Integer, primary_key=True), - Column('t1_id', Integer, ForeignKey('master.t1.id')), - schema='master' - ) - t3 = Table('t3', metadata, - Column('id', Integer, primary_key=True), - Column('t1_id', Integer, ForeignKey('master.t1.id')), - schema='alternate' - ) - t4 = Table('t4', metadata, - Column('id', Integer, primary_key=True), - Column('t1_id', Integer, ForeignKey('master.t1.id')), - ) + Table( + 't1', metadata, + Column('id', Integer, primary_key=True), + schema='master') + t2 = Table( + 't2', metadata, + Column('id', Integer, primary_key=True), + Column('t1_id', Integer, ForeignKey('master.t1.id')), + 
schema='master' + ) + t3 = Table( + 't3', metadata, + Column('id', Integer, primary_key=True), + Column('t1_id', Integer, ForeignKey('master.t1.id')), + schema='alternate' + ) + t4 = Table( + 't4', metadata, + Column('id', Integer, primary_key=True), + Column('t1_id', Integer, ForeignKey('master.t1.id')), + ) # schema->schema, generate REFERENCES with no schema name self.assert_compile( schema.CreateTable(t2), - "CREATE TABLE master.t2 (" - "id INTEGER NOT NULL, " - "t1_id INTEGER, " - "PRIMARY KEY (id), " - "FOREIGN KEY(t1_id) REFERENCES t1 (id)" - ")" + "CREATE TABLE master.t2 (" + "id INTEGER NOT NULL, " + "t1_id INTEGER, " + "PRIMARY KEY (id), " + "FOREIGN KEY(t1_id) REFERENCES t1 (id)" + ")" ) # schema->different schema, don't generate REFERENCES self.assert_compile( schema.CreateTable(t3), - "CREATE TABLE alternate.t3 (" - "id INTEGER NOT NULL, " - "t1_id INTEGER, " - "PRIMARY KEY (id)" - ")" + "CREATE TABLE alternate.t3 (" + "id INTEGER NOT NULL, " + "t1_id INTEGER, " + "PRIMARY KEY (id)" + ")" ) # same for local schema self.assert_compile( schema.CreateTable(t4), - "CREATE TABLE t4 (" - "id INTEGER NOT NULL, " - "t1_id INTEGER, " - "PRIMARY KEY (id)" - ")" + "CREATE TABLE t4 (" + "id INTEGER NOT NULL, " + "t1_id INTEGER, " + "PRIMARY KEY (id)" + ")" ) @@ -756,30 +754,37 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_pk1(self): - self._test_empty_insert(Table('a', MetaData(testing.db), - Column('id', Integer, - primary_key=True))) + self._test_empty_insert( + Table( + 'a', MetaData(testing.db), + Column('id', Integer, primary_key=True))) @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_pk2(self): - assert_raises(exc.DBAPIError, self._test_empty_insert, Table('b' - , MetaData(testing.db), Column('x', Integer, - primary_key=True), Column('y', Integer, - primary_key=True))) + assert_raises( + exc.DBAPIError, self._test_empty_insert, + Table( + 'b', MetaData(testing.db), + Column('x', Integer, primary_key=True), + Column('y', Integer, primary_key=True))) @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_pk3(self): - assert_raises(exc.DBAPIError, self._test_empty_insert, Table('c' - , MetaData(testing.db), Column('x', Integer, - primary_key=True), Column('y', Integer, - DefaultClause('123'), primary_key=True))) + assert_raises( + exc.DBAPIError, self._test_empty_insert, + Table( + 'c', MetaData(testing.db), + Column('x', Integer, primary_key=True), + Column('y', Integer, DefaultClause('123'), primary_key=True))) @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_pk4(self): - self._test_empty_insert(Table('d', MetaData(testing.db), - Column('x', Integer, primary_key=True), - Column('y', Integer, DefaultClause('123' - )))) + self._test_empty_insert( + Table( + 'd', MetaData(testing.db), + Column('x', Integer, primary_key=True), + Column('y', Integer, DefaultClause('123')) + )) @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_nopk1(self): @@ -788,9 +793,10 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support') def test_empty_insert_nopk2(self): - self._test_empty_insert(Table('f', MetaData(testing.db), - Column('x', Integer), Column('y', - Integer))) + self._test_empty_insert( + Table( + 'f', MetaData(testing.db), + Column('x', Integer), Column('y', Integer))) 
def test_inserts_with_spaces(self): tbl = Table('tbl', MetaData('sqlite:///'), Column('with space', @@ -800,8 +806,8 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): tbl.insert().execute({'without': 123}) assert list(tbl.select().execute()) == [(None, 123)] tbl.insert().execute({'with space': 456}) - assert list(tbl.select().execute()) == [(None, 123), (456, - None)] + assert list(tbl.select().execute()) == [ + (None, 123), (456, None)] finally: tbl.drop() @@ -817,6 +823,8 @@ def full_text_search_missing(): except: return True +metadata = cattable = matchtable = None + class MatchTest(fixtures.TestBase, AssertsCompiledSQL): @@ -845,19 +853,20 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): """) matchtable = Table('matchtable', metadata, autoload=True) metadata.create_all() - cattable.insert().execute([{'id': 1, 'description': 'Python'}, - {'id': 2, 'description': 'Ruby'}]) - matchtable.insert().execute([{'id': 1, 'title' - : 'Agile Web Development with Rails' - , 'category_id': 2}, {'id': 2, - 'title': 'Dive Into Python', - 'category_id': 1}, {'id': 3, 'title' - : "Programming Matz's Ruby", - 'category_id': 2}, {'id': 4, 'title' - : 'The Definitive Guide to Django', - 'category_id': 1}, {'id': 5, 'title' - : 'Python in a Nutshell', - 'category_id': 1}]) + cattable.insert().execute( + [{'id': 1, 'description': 'Python'}, + {'id': 2, 'description': 'Ruby'}]) + matchtable.insert().execute( + [ + {'id': 1, 'title': 'Agile Web Development with Rails', + 'category_id': 2}, + {'id': 2, 'title': 'Dive Into Python', 'category_id': 1}, + {'id': 3, 'title': "Programming Matz's Ruby", + 'category_id': 2}, + {'id': 4, 'title': 'The Definitive Guide to Django', + 'category_id': 1}, + {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1} + ]) @classmethod def teardown_class(cls): @@ -869,35 +878,38 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): def test_simple_match(self): results = \ - matchtable.select().where(matchtable.c.title.match('python' - )).order_by(matchtable.c.id).execute().fetchall() + matchtable.select().where( + matchtable.c.title.match('python')).\ + order_by(matchtable.c.id).execute().fetchall() eq_([2, 5], [r.id for r in results]) def test_simple_prefix_match(self): results = \ - matchtable.select().where(matchtable.c.title.match('nut*' - )).execute().fetchall() + matchtable.select().where( + matchtable.c.title.match('nut*')).execute().fetchall() eq_([5], [r.id for r in results]) def test_or_match(self): results2 = \ matchtable.select().where( - matchtable.c.title.match('nutshell OR ruby' - )).order_by(matchtable.c.id).execute().fetchall() + matchtable.c.title.match('nutshell OR ruby')).\ + order_by(matchtable.c.id).execute().fetchall() eq_([3, 5], [r.id for r in results2]) def test_and_match(self): results2 = \ matchtable.select().where( - matchtable.c.title.match('python nutshell' - )).execute().fetchall() + matchtable.c.title.match('python nutshell') + ).execute().fetchall() eq_([5], [r.id for r in results2]) def test_match_across_joins(self): - results = matchtable.select().where(and_(cattable.c.id - == matchtable.c.category_id, - cattable.c.description.match('Ruby' - ))).order_by(matchtable.c.id).execute().fetchall() + results = matchtable.select().where( + and_( + cattable.c.id == matchtable.c.category_id, + cattable.c.description.match('Ruby') + ) + ).order_by(matchtable.c.id).execute().fetchall() eq_([1, 3], [r.id for r in results]) @@ -907,10 +919,11 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL): table = 
Table('autoinctable', MetaData(), Column('id', Integer, primary_key=True), Column('x', Integer, default=None), sqlite_autoincrement=True) - self.assert_compile(schema.CreateTable(table), - 'CREATE TABLE autoinctable (id INTEGER NOT ' - 'NULL PRIMARY KEY AUTOINCREMENT, x INTEGER)' - , dialect=sqlite.dialect()) + self.assert_compile( + schema.CreateTable(table), + 'CREATE TABLE autoinctable (id INTEGER NOT ' + 'NULL PRIMARY KEY AUTOINCREMENT, x INTEGER)', + dialect=sqlite.dialect()) def test_sqlite_autoincrement_constraint(self): table = Table( @@ -920,7 +933,7 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL): Column('x', Integer, default=None), UniqueConstraint('x'), sqlite_autoincrement=True, - ) + ) self.assert_compile(schema.CreateTable(table), 'CREATE TABLE autoinctable (id INTEGER NOT ' 'NULL PRIMARY KEY AUTOINCREMENT, x ' @@ -944,7 +957,7 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL): MetaData(), Column('id', MyInteger, primary_key=True), sqlite_autoincrement=True, - ) + ) self.assert_compile(schema.CreateTable(table), 'CREATE TABLE autoinctable (id INTEGER NOT ' 'NULL PRIMARY KEY AUTOINCREMENT)', @@ -958,7 +971,8 @@ class ReflectHeadlessFKsTest(fixtures.TestBase): testing.db.execute("CREATE TABLE a (id INTEGER PRIMARY KEY)") # this syntax actually works on other DBs perhaps we'd want to add # tests to test_reflection - testing.db.execute("CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)") + testing.db.execute( + "CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)") def teardown(self): testing.db.execute("drop table b") @@ -971,53 +985,312 @@ class ReflectHeadlessFKsTest(fixtures.TestBase): assert b.c.id.references(a.c.id) -class ReflectFKConstraintTest(fixtures.TestBase): + +class ConstraintReflectionTest(fixtures.TestBase): __only_on__ = 'sqlite' - def setup(self): - testing.db.execute("CREATE TABLE a1 (id INTEGER PRIMARY KEY)") - testing.db.execute("CREATE TABLE a2 (id INTEGER PRIMARY KEY)") - testing.db.execute("CREATE TABLE b (id INTEGER PRIMARY KEY, " - "FOREIGN KEY(id) REFERENCES a1(id)," - "FOREIGN KEY(id) REFERENCES a2(id)" - ")") - testing.db.execute("CREATE TABLE c (id INTEGER, " - "CONSTRAINT bar PRIMARY KEY(id)," - "CONSTRAINT foo1 FOREIGN KEY(id) REFERENCES a1(id)," - "CONSTRAINT foo2 FOREIGN KEY(id) REFERENCES a2(id)" - ")") + @classmethod + def setup_class(cls): + with testing.db.begin() as conn: + + conn.execute("CREATE TABLE a1 (id INTEGER PRIMARY KEY)") + conn.execute("CREATE TABLE a2 (id INTEGER PRIMARY KEY)") + conn.execute( + "CREATE TABLE b (id INTEGER PRIMARY KEY, " + "FOREIGN KEY(id) REFERENCES a1(id)," + "FOREIGN KEY(id) REFERENCES a2(id)" + ")") + conn.execute( + "CREATE TABLE c (id INTEGER, " + "CONSTRAINT bar PRIMARY KEY(id)," + "CONSTRAINT foo1 FOREIGN KEY(id) REFERENCES a1(id)," + "CONSTRAINT foo2 FOREIGN KEY(id) REFERENCES a2(id)" + ")") + conn.execute( + # the lower casing + inline is intentional here + "CREATE TABLE d (id INTEGER, x INTEGER unique)") + conn.execute( + # the lower casing + inline is intentional here + 'CREATE TABLE d1 ' + '(id INTEGER, "some ( STUPID n,ame" INTEGER unique)') + conn.execute( + # the lower casing + inline is intentional here + 'CREATE TABLE d2 ( "some STUPID n,ame" INTEGER unique)') + conn.execute( + # the lower casing + inline is intentional here + 'CREATE TABLE d3 ( "some STUPID n,ame" INTEGER NULL unique)') + + conn.execute( + # lower casing + inline is intentional + "CREATE TABLE e (id INTEGER, x INTEGER references a2(id))") + conn.execute( + 'CREATE TABLE e1 (id INTEGER, 
"some ( STUPID n,ame" INTEGER ' + 'references a2 ("some ( STUPID n,ame"))') + conn.execute( + 'CREATE TABLE e2 (id INTEGER, ' + '"some ( STUPID n,ame" INTEGER NOT NULL ' + 'references a2 ("some ( STUPID n,ame"))') + + conn.execute( + "CREATE TABLE f (x INTEGER, CONSTRAINT foo_fx UNIQUE(x))" + ) + conn.execute( + "CREATE TEMPORARY TABLE g " + "(x INTEGER, CONSTRAINT foo_gx UNIQUE(x))" + ) + conn.execute( + # intentional broken casing + "CREATE TABLE h (x INTEGER, COnstraINT foo_hx unIQUE(x))" + ) + conn.execute( + "CREATE TABLE i (x INTEGER, y INTEGER, PRIMARY KEY(x, y))" + ) + conn.execute( + "CREATE TABLE j (id INTEGER, q INTEGER, p INTEGER, " + "PRIMARY KEY(id), FOreiGN KEY(q,p) REFERENCes i(x,y))" + ) + conn.execute( + "CREATE TABLE k (id INTEGER, q INTEGER, p INTEGER, " + "PRIMARY KEY(id), " + "conSTRAINT my_fk FOreiGN KEY ( q , p ) " + "REFERENCes i ( x , y ))" + ) - def teardown(self): - testing.db.execute("drop table c") - testing.db.execute("drop table b") - testing.db.execute("drop table a1") - testing.db.execute("drop table a2") + meta = MetaData() + Table( + 'l', meta, Column('bar', String, index=True), + schema='main') + + Table( + 'm', meta, + Column('id', Integer, primary_key=True), + Column('x', String(30)), + UniqueConstraint('x') + ) + + Table( + 'n', meta, + Column('id', Integer, primary_key=True), + Column('x', String(30)), + UniqueConstraint('x'), + prefixes=['TEMPORARY'] + ) - def test_name_is_none(self): + meta.create_all(conn) + + # will contain an "autoindex" + conn.execute("create table o (foo varchar(20) primary key)") + + @classmethod + def teardown_class(cls): + with testing.db.begin() as conn: + for name in [ + "m", "main.l", "k", "j", "i", "h", "g", "f", "e", "e1", + "d", "d1", "d2", "c", "b", "a1", "a2"]: + conn.execute("drop table %s" % name) + + def test_legacy_quoted_identifiers_unit(self): + dialect = sqlite.dialect() + dialect._broken_fk_pragma_quotes = True + + for row in [ + (0, None, 'target', 'tid', 'id', None), + (0, None, '"target"', 'tid', 'id', None), + (0, None, '[target]', 'tid', 'id', None), + (0, None, "'target'", 'tid', 'id', None), + (0, None, '`target`', 'tid', 'id', None), + ]: + def _get_table_pragma(*arg, **kw): + return [row] + + def _get_table_sql(*arg, **kw): + return "CREATE TABLE foo "\ + "(tid INTEGER, "\ + "FOREIGN KEY(tid) REFERENCES %s (id))" % row[2] + with mock.patch.object( + dialect, "_get_table_pragma", _get_table_pragma): + with mock.patch.object( + dialect, '_get_table_sql', _get_table_sql): + + fkeys = dialect.get_foreign_keys(None, 'foo') + eq_( + fkeys, + [{ + 'referred_table': 'target', + 'referred_columns': ['id'], + 'referred_schema': None, + 'name': None, + 'constrained_columns': ['tid'] + }]) + + def test_foreign_key_name_is_none(self): # and not "0" - meta = MetaData() - b = Table('b', meta, autoload=True, autoload_with=testing.db) + inspector = Inspector(testing.db) + fks = inspector.get_foreign_keys('b') eq_( - [con.name for con in b.constraints], - [None, None, None] + fks, + [ + {'referred_table': 'a1', 'referred_columns': ['id'], + 'referred_schema': None, 'name': None, + 'constrained_columns': ['id']}, + {'referred_table': 'a2', 'referred_columns': ['id'], + 'referred_schema': None, 'name': None, + 'constrained_columns': ['id']}, + ] ) - def test_name_not_none(self): - # we don't have names for PK constraints, - # it appears we get back None in the pragma for - # FKs also (also it doesn't even appear to be documented on sqlite's docs - # at http://www.sqlite.org/pragma.html#pragma_foreign_key_list - # how 
did we ever know that's the "name" field ??) + def test_foreign_key_name_is_not_none(self): + inspector = Inspector(testing.db) + fks = inspector.get_foreign_keys('c') + eq_( + fks, + [ + { + 'referred_table': 'a1', 'referred_columns': ['id'], + 'referred_schema': None, 'name': 'foo1', + 'constrained_columns': ['id']}, + { + 'referred_table': 'a2', 'referred_columns': ['id'], + 'referred_schema': None, 'name': 'foo2', + 'constrained_columns': ['id']}, + ] + ) - meta = MetaData() - c = Table('c', meta, autoload=True, autoload_with=testing.db) + def test_unnamed_inline_foreign_key(self): + inspector = Inspector(testing.db) + fks = inspector.get_foreign_keys('e') eq_( - set([con.name for con in c.constraints]), - set([None, None]) + fks, + [{ + 'referred_table': 'a2', 'referred_columns': ['id'], + 'referred_schema': None, + 'name': None, 'constrained_columns': ['x'] + }] + ) + + def test_unnamed_inline_foreign_key_quoted(self): + inspector = Inspector(testing.db) + + inspector = Inspector(testing.db) + fks = inspector.get_foreign_keys('e1') + eq_( + fks, + [{ + 'referred_table': 'a2', + 'referred_columns': ['some ( STUPID n,ame'], + 'referred_schema': None, + 'name': None, 'constrained_columns': ['some ( STUPID n,ame'] + }] + ) + fks = inspector.get_foreign_keys('e2') + eq_( + fks, + [{ + 'referred_table': 'a2', + 'referred_columns': ['some ( STUPID n,ame'], + 'referred_schema': None, + 'name': None, 'constrained_columns': ['some ( STUPID n,ame'] + }] + ) + + def test_foreign_key_composite_broken_casing(self): + inspector = Inspector(testing.db) + fks = inspector.get_foreign_keys('j') + eq_( + fks, + [{ + 'referred_table': 'i', + 'referred_columns': ['x', 'y'], + 'referred_schema': None, 'name': None, + 'constrained_columns': ['q', 'p']}] + ) + fks = inspector.get_foreign_keys('k') + eq_( + fks, + [{'referred_table': 'i', 'referred_columns': ['x', 'y'], + 'referred_schema': None, 'name': 'my_fk', + 'constrained_columns': ['q', 'p']}] + ) + + def test_dont_reflect_autoindex(self): + inspector = Inspector(testing.db) + eq_(inspector.get_indexes('o'), []) + eq_( + inspector.get_indexes('o', include_auto_indexes=True), + [{ + 'unique': 1, + 'name': 'sqlite_autoindex_o_1', + 'column_names': ['foo']}]) + + def test_create_index_with_schema(self): + """Test creation of index with explicit schema""" + + inspector = Inspector(testing.db) + eq_( + inspector.get_indexes('l', schema='main'), + [{'unique': 0, 'name': u'ix_main_l_bar', + 'column_names': [u'bar']}]) + + def test_unique_constraint_named(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("f"), + [{'column_names': ['x'], 'name': 'foo_fx'}] + ) + + def test_unique_constraint_named_broken_casing(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("h"), + [{'column_names': ['x'], 'name': 'foo_hx'}] + ) + + def test_unique_constraint_named_broken_temp(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("g"), + [{'column_names': ['x'], 'name': 'foo_gx'}] + ) + + def test_unique_constraint_unnamed_inline(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("d"), + [{'column_names': ['x'], 'name': None}] + ) + + def test_unique_constraint_unnamed_inline_quoted(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("d1"), + [{'column_names': ['some ( STUPID n,ame'], 'name': None}] + ) + eq_( + inspector.get_unique_constraints("d2"), + [{'column_names': ['some STUPID n,ame'], 'name': None}] + 
) + eq_( + inspector.get_unique_constraints("d3"), + [{'column_names': ['some STUPID n,ame'], 'name': None}] + ) + + def test_unique_constraint_unnamed_normal(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("m"), + [{'column_names': ['x'], 'name': None}] + ) + + def test_unique_constraint_unnamed_normal_temporary(self): + inspector = Inspector(testing.db) + eq_( + inspector.get_unique_constraints("n"), + [{'column_names': ['x'], 'name': None}] ) class SavepointTest(fixtures.TablesTest): + """test that savepoints work when we use the correct event setup""" __only_on__ = 'sqlite' @@ -1081,7 +1354,7 @@ class SavepointTest(fixtures.TablesTest): connection = self.bind.connect() transaction = connection.begin() connection.execute(users.insert(), user_id=1, user_name='user1') - trans2 = connection.begin_nested() + connection.begin_nested() connection.execute(users.insert(), user_id=2, user_name='user2') trans3 = connection.begin() connection.execute(users.insert(), user_id=3, user_name='user3') @@ -1127,6 +1400,16 @@ class TypeReflectionTest(fixtures.TestBase): (sqltypes.Time, sqltypes.TIME()), (sqltypes.BOOLEAN, sqltypes.BOOLEAN()), (sqltypes.Boolean, sqltypes.BOOLEAN()), + (sqlite.DATE( + storage_format="%(year)04d%(month)02d%(day)02d", + ), sqltypes.DATE()), + (sqlite.TIME( + storage_format="%(hour)02d%(minute)02d%(second)02d", + ), sqltypes.TIME()), + (sqlite.DATETIME( + storage_format="%(year)04d%(month)02d%(day)02d" + "%(hour)02d%(minute)02d%(second)02d", + ), sqltypes.DATETIME()), ] def _unsupported_args_fixture(self): @@ -1169,8 +1452,8 @@ class TypeReflectionTest(fixtures.TestBase): if warnings: def go(): return dialect._resolve_type_affinity(from_) - final_type = testing.assert_warnings(go, - ["Could not instantiate"], regex=True) + final_type = testing.assert_warnings( + go, ["Could not instantiate"], regex=True) else: final_type = dialect._resolve_type_affinity(from_) expected_type = type(to_) @@ -1186,8 +1469,8 @@ class TypeReflectionTest(fixtures.TestBase): if warnings: def go(): return inspector.get_columns("foo")[0] - col_info = testing.assert_warnings(go, - ["Could not instantiate"], regex=True) + col_info = testing.assert_warnings( + go, ["Could not instantiate"], regex=True) else: col_info = inspector.get_columns("foo")[0] expected_type = type(to_) @@ -1207,7 +1490,8 @@ class TypeReflectionTest(fixtures.TestBase): self._test_lookup_direct(self._fixed_lookup_fixture()) def test_lookup_direct_unsupported_args(self): - self._test_lookup_direct(self._unsupported_args_fixture(), warnings=True) + self._test_lookup_direct( + self._unsupported_args_fixture(), warnings=True) def test_lookup_direct_type_affinity(self): self._test_lookup_direct(self._type_affinity_fixture()) @@ -1216,8 +1500,8 @@ class TypeReflectionTest(fixtures.TestBase): self._test_round_trip(self._fixed_lookup_fixture()) def test_round_trip_direct_unsupported_args(self): - self._test_round_trip(self._unsupported_args_fixture(), warnings=True) + self._test_round_trip( + self._unsupported_args_fixture(), warnings=True) def test_round_trip_direct_type_affinity(self): self._test_round_trip(self._type_affinity_fixture()) - |
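
As context for the new test_custom_datetime_text_affinity / test_custom_date_text_affinity tests in this patch, the following is a minimal standalone sketch (not part of the diff) of the storage_format/regexp hooks those tests exercise. It assumes an in-memory SQLite database, the engine.execute() style used throughout this era of the test suite, and table/column names chosen purely for illustration.

import datetime

from sqlalchemy import Column, MetaData, Table, create_engine, select
from sqlalchemy.dialects import sqlite

# Store datetimes as a bare digit string (e.g. '20101015123700') so the value
# is stored with TEXT affinity; the regexp parses the string back on fetch.
compact_dt = sqlite.DATETIME(
    storage_format="%(year)04d%(month)02d%(day)02d"
                   "%(hour)02d%(minute)02d%(second)02d",
    regexp=r"(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})",
)

engine = create_engine("sqlite://")
metadata = MetaData()
t = Table("events", metadata, Column("stamp", compact_dt))  # illustrative names
metadata.create_all(engine)

engine.execute(t.insert().values(stamp=datetime.datetime(2010, 10, 15, 12, 37, 0)))

# Raw SQL sees the compact string; the typed select returns datetime objects.
print(engine.execute("select stamp from events").fetchall())
# [('20101015123700',)]
print(engine.execute(select([t.c.stamp])).fetchall())
# [(datetime.datetime(2010, 10, 15, 12, 37),)]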
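Similarly, the new ConstraintReflectionTest cases reflect inline and named constraints through the Inspector. A rough standalone equivalent of what those tests assert, again assuming an in-memory database and throwaway table names, looks like this:

from sqlalchemy import create_engine
from sqlalchemy.engine.reflection import Inspector

engine = create_engine("sqlite://")
# An inline, unnamed UNIQUE plus a named FOREIGN KEY, mirroring the fixtures above.
engine.execute("CREATE TABLE parent (id INTEGER PRIMARY KEY, x INTEGER unique)")
engine.execute(
    "CREATE TABLE child (id INTEGER, "
    "CONSTRAINT child_pk PRIMARY KEY(id), "
    "CONSTRAINT child_fk FOREIGN KEY(id) REFERENCES parent(id))")

insp = Inspector(engine)
print(insp.get_unique_constraints("parent"))
# [{'column_names': ['x'], 'name': None}]
print(insp.get_foreign_keys("child"))
# [{'referred_table': 'parent', 'referred_columns': ['id'],
#   'referred_schema': None, 'name': 'child_fk', 'constrained_columns': ['id']}]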