| | | |
|---|---|---|
| author | Victor Uriarte <victor.m.uriarte@intel.com> | 2016-06-18 23:11:06 -0700 |
| committer | Victor Uriarte <victor.m.uriarte@intel.com> | 2016-06-20 07:40:20 -0700 |
| commit | 64b9d3537783dbf761b421644ba07078a80b5640 (patch) | |
| tree | 2919b35edcd01b85486e184102eeb4483e40dcc9 /tests/test_tokenize.py | |
| parent | 63c369364aeb4591983102889f604dca8d266f2e (diff) | |
| download | sqlparse-64b9d3537783dbf761b421644ba07078a80b5640.tar.gz | |
Replace other UnitTest with Py.Test
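
The conversion follows the usual unittest-to-pytest migration pattern: drop the `unittest.TestCase` base class (pytest collects any `Test*` class on its own) and replace `self.assertEqual(...)` / `self.assert_(...)` calls with bare `assert` statements, which pytest's assertion rewriting expands into detailed failure messages. A minimal before/after sketch of the pattern; the `add` function is a hypothetical stand-in, not code from this commit:

```python
import unittest


def add(a, b):
    # Hypothetical function under test, used only to show the pattern.
    return a + b


# Before: unittest style -- a TestCase subclass with assert* methods.
class TestAddUnittest(unittest.TestCase):
    def test_add(self):
        self.assertEqual(add(1, 2), 3)
        self.assert_(add(1, 2) > 0)  # assert_ is a deprecated alias of assertTrue


# After: pytest style -- a plain class and bare assert statements.
class TestAddPytest(object):
    def test_add(self):
        assert add(1, 2) == 3
        assert add(1, 2) > 0
```

Both classes are collected by pytest, so a file can be migrated incrementally; the pytest version is shorter and still reports the compared values on failure.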
Diffstat (limited to 'tests/test_tokenize.py')
| | | |
|---|---|---|
| -rw-r--r-- | tests/test_tokenize.py | 120 |

1 file changed, 62 insertions(+), 58 deletions(-)
```diff
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 0446dfa..4234043 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -1,150 +1,154 @@
 # -*- coding: utf-8 -*-
 
 import types
-import unittest
 
 import pytest
 
 import sqlparse
 from sqlparse import lexer
-from sqlparse import sql
-from sqlparse import tokens as T
+from sqlparse import sql, tokens as T
 from sqlparse.compat import StringIO
 
 
-class TestTokenize(unittest.TestCase):
+class TestTokenize(object):
     def test_simple(self):
         s = 'select * from foo;'
         stream = lexer.tokenize(s)
-        self.assert_(isinstance(stream, types.GeneratorType))
+        assert isinstance(stream, types.GeneratorType)
         tokens = list(stream)
-        self.assertEqual(len(tokens), 8)
-        self.assertEqual(len(tokens[0]), 2)
-        self.assertEqual(tokens[0], (T.Keyword.DML, u'select'))
-        self.assertEqual(tokens[-1], (T.Punctuation, u';'))
+        assert len(tokens) == 8
+        assert len(tokens[0]) == 2
+        assert tokens[0] == (T.Keyword.DML, 'select')
+        assert tokens[-1] == (T.Punctuation, ';')
 
     def test_backticks(self):
         s = '`foo`.`bar`'
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 3)
-        self.assertEqual(tokens[0], (T.Name, u'`foo`'))
+        assert len(tokens) == 3
+        assert tokens[0] == (T.Name, '`foo`')
 
     def test_linebreaks(self):  # issue1
         s = 'foo\nbar\n'
         tokens = lexer.tokenize(s)
-        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        assert ''.join(str(x[1]) for x in tokens) == s
 
         s = 'foo\rbar\r'
         tokens = lexer.tokenize(s)
-        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        assert ''.join(str(x[1]) for x in tokens) == s
 
         s = 'foo\r\nbar\r\n'
         tokens = lexer.tokenize(s)
-        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        assert ''.join(str(x[1]) for x in tokens) == s
 
         s = 'foo\r\nbar\n'
         tokens = lexer.tokenize(s)
-        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        assert ''.join(str(x[1]) for x in tokens) == s
 
     def test_inline_keywords(self):  # issue 7
         s = "create created_foo"
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 3)
-        self.assertEqual(tokens[0][0], T.Keyword.DDL)
-        self.assertEqual(tokens[2][0], T.Name)
-        self.assertEqual(tokens[2][1], u'created_foo')
+        assert len(tokens) == 3
+        assert tokens[0][0] == T.Keyword.DDL
+        assert tokens[2][0] == T.Name
+        assert tokens[2][1] == 'created_foo'
         s = "enddate"
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 1)
-        self.assertEqual(tokens[0][0], T.Name)
+        assert len(tokens) == 1
+        assert tokens[0][0] == T.Name
         s = "join_col"
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 1)
-        self.assertEqual(tokens[0][0], T.Name)
+        assert len(tokens) == 1
+        assert tokens[0][0] == T.Name
         s = "left join_col"
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 3)
-        self.assertEqual(tokens[2][0], T.Name)
-        self.assertEqual(tokens[2][1], 'join_col')
+        assert len(tokens) == 3
+        assert tokens[2][0] == T.Name
+        assert tokens[2][1] == 'join_col'
 
     def test_negative_numbers(self):
         s = "values(-1)"
         tokens = list(lexer.tokenize(s))
-        self.assertEqual(len(tokens), 4)
-        self.assertEqual(tokens[2][0], T.Number.Integer)
-        self.assertEqual(tokens[2][1], '-1')
+        assert len(tokens) == 4
+        assert tokens[2][0] == T.Number.Integer
+        assert tokens[2][1] == '-1'
 
 
-class TestToken(unittest.TestCase):
+class TestToken(object):
     def test_str(self):
         token = sql.Token(None, 'FoO')
-        self.assertEqual(str(token), 'FoO')
+        assert str(token) == 'FoO'
 
     def test_repr(self):
        token = sql.Token(T.Keyword, 'foo')
         tst = "<Keyword 'foo' at 0x"
-        self.assertEqual(repr(token)[:len(tst)], tst)
+        assert repr(token)[:len(tst)] == tst
         token = sql.Token(T.Keyword, '1234567890')
         tst = "<Keyword '123456...' at 0x"
-        self.assertEqual(repr(token)[:len(tst)], tst)
+        assert repr(token)[:len(tst)] == tst
 
     def test_flatten(self):
         token = sql.Token(T.Keyword, 'foo')
         gen = token.flatten()
-        self.assertEqual(type(gen), types.GeneratorType)
+        assert isinstance(gen, types.GeneratorType)
         lgen = list(gen)
-        self.assertEqual(lgen, [token])
+        assert lgen == [token]
 
 
-class TestTokenList(unittest.TestCase):
+class TestTokenList(object):
     def test_repr(self):
         p = sqlparse.parse('foo, bar, baz')[0]
         tst = "<IdentifierList 'foo, b...' at 0x"
-        self.assertEqual(repr(p.tokens[0])[:len(tst)], tst)
+        assert repr(p.tokens[0])[:len(tst)] == tst
 
     def test_token_first(self):
         p = sqlparse.parse(' select foo')[0]
         first = p.token_first()
-        self.assertEqual(first.value, 'select')
-        self.assertEqual(p.token_first(skip_ws=False).value, ' ')
-        self.assertEqual(sql.TokenList([]).token_first(), None)
+        assert first.value == 'select'
+        assert p.token_first(skip_ws=False).value == ' '
+        assert sql.TokenList([]).token_first() is None
 
     def test_token_matching(self):
         t1 = sql.Token(T.Keyword, 'foo')
         t2 = sql.Token(T.Punctuation, ',')
         x = sql.TokenList([t1, t2])
-        self.assertEqual(x.token_matching(
-            [lambda t: t.ttype is T.Keyword], 0), t1)
-        self.assertEqual(x.token_matching(
-            [lambda t: t.ttype is T.Punctuation], 0), t2)
-        self.assertEqual(x.token_matching(
-            [lambda t: t.ttype is T.Keyword], 1), None)
+        assert x.token_matching(
+            [lambda t: t.ttype is T.Keyword], 0) == t1
+        assert x.token_matching(
+            [lambda t: t.ttype is T.Punctuation], 0) == t2
+        assert x.token_matching(
+            [lambda t: t.ttype is T.Keyword], 1) is None
 
 
-class TestStream(unittest.TestCase):
+class TestStream(object):
     def test_simple(self):
         stream = StringIO("SELECT 1; SELECT 2;")
 
         tokens = lexer.tokenize(stream)
-        self.assertEqual(len(list(tokens)), 9)
+        assert len(list(tokens)) == 9
 
         stream.seek(0)
         tokens = list(lexer.tokenize(stream))
-        self.assertEqual(len(tokens), 9)
+        assert len(tokens) == 9
 
         stream.seek(0)
         tokens = list(lexer.tokenize(stream))
-        self.assertEqual(len(tokens), 9)
+        assert len(tokens) == 9
 
     def test_error(self):
         stream = StringIO("FOOBAR{")
         tokens = list(lexer.tokenize(stream))
-        self.assertEqual(len(tokens), 2)
-        self.assertEqual(tokens[1][0], T.Error)
-
-
-@pytest.mark.parametrize('expr', ['JOIN', 'LEFT JOIN', 'LEFT OUTER JOIN',
-                                  'FULL OUTER JOIN', 'NATURAL JOIN',
-                                  'CROSS JOIN', 'STRAIGHT JOIN',
-                                  'INNER JOIN', 'LEFT INNER JOIN'])
+        assert len(tokens) == 2
+        assert tokens[1][0] == T.Error
+
+
+@pytest.mark.parametrize('expr', ['JOIN',
+                                  'LEFT JOIN',
+                                  'LEFT OUTER JOIN',
+                                  'FULL OUTER JOIN',
+                                  'NATURAL JOIN',
+                                  'CROSS JOIN',
+                                  'STRAIGHT JOIN',
+                                  'INNER JOIN',
+                                  'LEFT INNER JOIN',
+                                  ])
 def test_parse_join(expr):
     p = sqlparse.parse('{0} foo'.format(expr))[0]
     assert len(p.tokens) == 3
```
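
The `@pytest.mark.parametrize` block retained at the end of the hunk is the idiom that replaces a loop inside a single test method: pytest generates one independent test case per listed value, so each join variant passes or fails on its own (verbose output names them like `test_parse_join[LEFT OUTER JOIN]`). A self-contained sketch of the same idiom, assuming a hypothetical `normalize_join` helper that is not part of sqlparse:

```python
import pytest


def normalize_join(expr):
    # Hypothetical helper: collapse internal whitespace and uppercase.
    return ' '.join(expr.split()).upper()


@pytest.mark.parametrize('expr', ['join',
                                  'LEFT  JOIN',
                                  ' full OUTER join ',
                                  ])
def test_normalize_join(expr):
    # pytest runs this function once per entry in the list above.
    assert normalize_join(expr).endswith('JOIN')
```

One value per line with a trailing comma, as in the committed version, keeps future additions to the list down to a one-line diff.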
