summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--sqlparse/engine/grouping.py5
-rw-r--r--sqlparse/sql.py28
2 files changed, 25 insertions, 8 deletions
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index fddee0f..6bdba2f 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -166,10 +166,11 @@ def group_identifier_list(tlist):
tidx, token = tlist.token_idx_next_by(m=M_COMMA)
while token:
- before, after = tlist.token_prev(tidx), tlist.token_next(tidx)
+ before_idx, before = tlist.token_idx_prev(tidx)
+ after = tlist.token_next(tidx)
if func(before) and func(after):
- tidx = tlist.token_index(before)
+ tidx = before_idx
token = tlist.group_tokens_between(sql.IdentifierList, tidx, after, extend=True)
tidx, token = tlist.token_idx_next_by(m=M_COMMA, idx=tidx + 1)
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index 928b784..9782c33 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -233,12 +233,18 @@ class TokenList(Token):
if not isinstance(funcs, (list, tuple)):
funcs = (funcs,)
- iterable = enumerate(self.tokens[start:end], start=start)
-
- for idx, token in iterable:
- for func in funcs:
- if func(token):
- return idx, token
+ if reverse:
+ assert end is None
+ for idx in range(start - 2, -1, -1):
+ token = self.tokens[idx]
+ for func in funcs:
+ if func(token):
+ return idx, token
+ else:
+ for idx, token in enumerate(self.tokens[start:end], start=start):
+ for func in funcs:
+ if func(token):
+ return idx, token
return None, None
def _token_matching(self, funcs, start=0, end=None, reverse=False):
@@ -312,6 +318,16 @@ class TokenList(Token):
def token_matching(self, idx, funcs):
return self._token_matching(funcs, idx)
+ def token_idx_prev(self, idx, skip_ws=True):
+ """Returns the previous token relative to *idx*.
+
+ If *skip_ws* is ``True`` (the default) whitespace tokens are ignored.
+ ``None`` is returned if there's no previous token.
+ """
+        idx += 1  # a lot of code currently pre-compensates for this
+ funcs = lambda tk: not (tk.is_whitespace() and skip_ws)
+ return self._token_idx_matching(funcs, idx, reverse=True)
+
def token_prev(self, idx, skip_ws=True):
"""Returns the previous token relative to *idx*.