# -*- coding: utf-8 -*-

import unittest
import types

from sqlparse import lexer
from sqlparse.tokens import *


class TestTokenize(unittest.TestCase):

    def test_simple(self):
        sql = 'select * from foo;'
        # lexer.tokenize() yields (tokentype, value) pairs lazily.
        stream = lexer.tokenize(sql)
        self.assert_(type(stream) is types.GeneratorType)
        tokens = list(stream)
        self.assertEqual(len(tokens), 8)
        self.assertEqual(len(tokens[0]), 2)
        self.assertEqual(tokens[0], (Keyword.DML, u'select'))
        self.assertEqual(tokens[-1], (Punctuation, u';'))