author     Kenn Knowles <kenn.knowles@gmail.com>  2013-02-09 00:29:55 -0500
committer  Kenn Knowles <kenn.knowles@gmail.com>  2013-02-09 00:29:55 -0500
commit     9c88079576d84d710fc4e300874752344bcf1fec (patch)
tree       d4209f5b27f232399d432a9473238cbc4cc0df6d /tests
parent     e3391c3af3e6681ea90f7baef91dfeecca83d011 (diff)
download   jsonpath-rw-9c88079576d84d710fc4e300874752344bcf1fec.tar.gz
Add lexer, with tests
Diffstat (limited to 'tests')
-rw-r--r--  tests/__init__.py    0
-rw-r--r--  tests/test_lexer.py  46
2 files changed, 46 insertions, 0 deletions
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/test_lexer.py b/tests/test_lexer.py
new file mode 100644
index 0000000..db0bcd1
--- /dev/null
+++ b/tests/test_lexer.py
@@ -0,0 +1,46 @@
+import logging
+import unittest
+
+from ply.lex import LexToken
+
+from jsonpath_rw.lexer import JsonPathLexer
+
+class TestLexer(unittest.TestCase):
+
+    def token(self, value, ty=None):
+        t = LexToken()
+        t.type = ty if ty is not None else value
+        t.value = value
+        t.lineno = -1
+        t.lexpos = -1
+        return t
+
+    def assert_lex_equiv(self, s, stream2):
+        # NOTE: lexer fails to reset after call?
+        lexer = JsonPathLexer(debug=True)
+        stream1 = list(lexer.tokenize(s))  # Save the stream for debug output when a test fails
+        stream2 = list(stream2)
+        assert len(stream1) == len(stream2)
+        for token1, token2 in zip(stream1, stream2):
+            print(token1, token2)
+            assert token1.type == token2.type
+            assert token1.value == token2.value
+
+    @classmethod
+    def setup_class(cls):
+        logging.basicConfig()
+
+    def test_simple_inputs(self):
+        self.assert_lex_equiv('$', [self.token('$', '$')])
+        self.assert_lex_equiv('"hello"', [self.token('hello', 'ID')])
+        self.assert_lex_equiv("'goodbye'", [self.token('goodbye', 'ID')])
+        self.assert_lex_equiv('fuzz', [self.token('fuzz', 'ID')])
+        self.assert_lex_equiv('1', [self.token(1, 'NUMBER')])
+        self.assert_lex_equiv('45', [self.token(45, 'NUMBER')])
+        self.assert_lex_equiv('"fuzz.bang"', [self.token('fuzz.bang', 'ID')])
+        self.assert_lex_equiv('fuzz.bang', [self.token('fuzz', 'ID'), self.token('.', '.'), self.token('bang', 'ID')])
+        self.assert_lex_equiv('fuzz.*', [self.token('fuzz', 'ID'), self.token('.', '.'), self.token('*', '*')])
+        self.assert_lex_equiv('fuzz..bang', [self.token('fuzz', 'ID'), self.token('..', 'DOUBLEDOT'), self.token('bang', 'ID')])
+        self.assert_lex_equiv('&', [self.token('&', '&')])
+        self.assert_lex_equiv('|', [self.token('|', '|')])
+        self.assert_lex_equiv('where', [self.token('where', 'WHERE')])
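
For context, a minimal sketch of driving the new lexer outside the test harness. It relies only on the API the tests above exercise (a JsonPathLexer constructor taking a debug flag, and a tokenize() generator yielding ply LexToken objects with .type and .value attributes); the input string is one of the paths covered by test_simple_inputs.

    from jsonpath_rw.lexer import JsonPathLexer

    # Tokenize a path covered by test_simple_inputs and show each token.
    lexer = JsonPathLexer(debug=False)
    for tok in lexer.tokenize('fuzz..bang'):
        print(tok.type, repr(tok.value))  # expected: ID 'fuzz', DOUBLEDOT '..', ID 'bang'

As in assert_lex_equiv above, a fresh JsonPathLexer is constructed per tokenize call, sidestepping the reset question raised in the NOTE comment.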