summary | refs | log | tree | commit | diff
path: root/sqlparse
diff options
context:
space:
mode:
author: Andi Albrecht <albrecht.andi@gmail.com> 2015-01-17 09:01:11 +0100
committer: Andi Albrecht <albrecht.andi@gmail.com> 2015-01-17 09:01:11 +0100
commit: 8c5c0684ac61ab7d0c5e77881728c8106f2877f9 (patch)
tree: e06f09a2de797e873516cf644172bdaab626a959 /sqlparse
parent: a17db7a7557056728acf5506d3dea6841ad55fa9 (diff)
download: sqlparse-8c5c0684ac61ab7d0c5e77881728c8106f2877f9.tar.gz
Group comments to parent object (fixes #160).
Diffstat (limited to 'sqlparse')
-rw-r--r-- sqlparse/engine/grouping.py | 21
-rw-r--r-- sqlparse/filters.py        | 14
2 files changed, 25 insertions, 10 deletions
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index 5189f7e..d6f1360 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -370,6 +370,23 @@ def group_order(tlist):
token = tlist.token_next_by_type(idx, T.Keyword.Order)
+def align_comments(tlist):
+ [align_comments(sgroup) for sgroup in tlist.get_sublists()]
+ idx = 0
+ token = tlist.token_next_by_instance(idx, sql.Comment)
+ while token:
+ before = tlist.token_prev(tlist.token_index(token))
+ if isinstance(before, sql.TokenList):
+ grp = tlist.tokens_between(before, token)[1:]
+ before.tokens.extend(grp)
+ for t in grp:
+ tlist.tokens.remove(t)
+ idx = tlist.token_index(before) + 1
+ else:
+ idx = tlist.token_index(token) + 1
+ token = tlist.token_next_by_instance(idx, sql.Comment)
+
+
def group(tlist):
for func in [
group_comments,
@@ -384,9 +401,11 @@ def group(tlist):
group_aliased,
group_assignment,
group_comparison,
+ align_comments,
group_identifier_list,
group_if,
group_for,
group_foreach,
- group_begin]:
+ group_begin,
+ ]:
func(tlist)
diff --git a/sqlparse/filters.py b/sqlparse/filters.py
index 47972e4..7b9b5e7 100644
--- a/sqlparse/filters.py
+++ b/sqlparse/filters.py
@@ -329,19 +329,19 @@ class ReindentFilter:
idx = 0
token = _next_token(idx)
+ added = set()
while token:
prev = tlist.token_prev(tlist.token_index(token), False)
offset = 1
- if prev and prev.is_whitespace():
+ if prev and prev.is_whitespace() and prev not in added:
tlist.tokens.pop(tlist.token_index(prev))
offset += 1
- if (prev
- and isinstance(prev, sql.Comment)
- and (unicode(prev).endswith('\n')
- or unicode(prev).endswith('\r'))):
+ uprev = unicode(prev)
+ if (prev and (uprev.endswith('\n') or uprev.endswith('\r'))):
nl = tlist.token_next(token)
else:
nl = self.nl()
+ added.add(nl)
tlist.insert_before(token, nl)
token = _next_token(tlist.token_index(nl) + offset)
@@ -384,7 +384,6 @@ class ReindentFilter:
self._process_default(tlist)
self.indent -= 1
-
def _process_parenthesis(self, tlist):
first = tlist.token_next(0)
indented = False
@@ -413,9 +412,6 @@ class ReindentFilter:
self.offset += num_offset
for token in identifiers[1:]:
tlist.insert_before(token, self.nl())
- for token in tlist.tokens:
- if isinstance(token, sql.Comment):
- tlist.insert_after(token, self.nl())
self.offset -= num_offset
self._process_default(tlist)