summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Andi Albrecht <albrecht.andi@gmail.com>  2013-10-24 05:59:09 +0200
committer Andi Albrecht <albrecht.andi@gmail.com>  2013-10-24 05:59:09 +0200
commit   e7b8ad372f6cfe6d14dcf1c7dc696a24b493e58c (patch)
tree     ca1c351c2fb874a0630ed65c8d364bdcec2ad982
parent   6efac742ec8505734432dbb1e6fb50d188369fac (diff)
download sqlparse-e7b8ad372f6cfe6d14dcf1c7dc696a24b493e58c.tar.gz
Add test case for JOIN, update Changelog and AUTHORS.
-rw-r--r--  AUTHORS                  1
-rw-r--r--  CHANGES                  3
-rw-r--r--  tests/test_tokenize.py  11
3 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/AUTHORS b/AUTHORS
index 4f1c92a..f158c77 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -4,6 +4,7 @@ This module contains code (namely the lexer and filter mechanism) from
the pygments project that was written by Georg Brandl.
Alphabetical list of contributors:
+* Alexander Beedie <ayembee@gmail.com>
* Alexey Malyshev <nostrict@gmail.com>
* Florian Bauer <florian.bauer@zmdi.com>
* Jesús Leganés Combarro "Piranna" <piranna@gmail.com>
diff --git a/CHANGES b/CHANGES
index 5e73c98..ffb8079 100644
--- a/CHANGES
+++ b/CHANGES
@@ -5,6 +5,9 @@ Bug Fixes
* Removed buffered reading again, it obviously causes wrong parsing in some rare
cases (issue114).
+Enhancements
+* Improved support for JOINs, by Alexander Beedie.
+
Release 0.1.9 (Sep 28, 2013)
----------------------------
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 3b529d9..0b23fa8 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -163,11 +163,12 @@ class TestStream(unittest.TestCase):
self.assertEqual(tokens[1][0], Error)
-def test_parse_join():
- p = sqlparse.parse('LEFT JOIN foo')[0]
- assert len(p.tokens) == 3
- assert p.tokens[0].ttype is Keyword
- p = sqlparse.parse('LEFT OUTER JOIN foo')[0]
+@pytest.mark.parametrize('expr', ['JOIN', 'LEFT JOIN', 'LEFT OUTER JOIN',
+ 'FULL OUTER JOIN', 'NATURAL JOIN',
+ 'CROSS JOIN', 'STRAIGHT JOIN',
+ 'INNER JOIN', 'LEFT INNER JOIN'])
+def test_parse_join(expr):
+ p = sqlparse.parse('%s foo' % expr)[0]
assert len(p.tokens) == 3
assert p.tokens[0].ttype is Keyword