From 9deabd31fe51f8bf7dad45c342478878e703605a Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Sat, 26 Oct 2013 14:41:42 -0400
Subject: Micro optimizations.

---
 coverage/phystokens.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 2a91882..f7c099e 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -1,7 +1,7 @@
 """Better tokenizing for coverage.py."""
 
 import codecs, keyword, re, sys, token, tokenize
-from coverage.backward import StringIO # pylint: disable=W0622
+from coverage.backward import set, StringIO # pylint: disable=W0622
 
 def phys_tokens(toks):
     """Return all physical tokens, even line continuations.
@@ -18,7 +18,7 @@ def phys_tokens(toks):
     last_ttype = None
     for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
         if last_lineno != elineno:
-            if last_line and last_line[-2:] == "\\\n":
+            if last_line and last_line.endswith("\\\n"):
                 # We are at the beginning of a new line, and the last line
                 # ended with a backslash. We probably have to inject a
                 # backslash token into the stream. Unfortunately, there's more
@@ -74,7 +74,7 @@ def source_token_lines(source):
     is indistinguishable from a final line with a newline.
 
     """
-    ws_tokens = [token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL]
+    ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
     line = []
     col = 0
     source = source.expandtabs(8).replace('\r\n', '\n')
-- 
cgit v1.2.1

From bde6d2060bebcf7c8a3a365f9b9c01a9d801dbe9 Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Sat, 26 Oct 2013 22:08:47 -0400
Subject: Cache generate_tokens to speed HTML reports.

---
 coverage/phystokens.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index f7c099e..99b1d5b 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -1,7 +1,9 @@
 """Better tokenizing for coverage.py."""
 
 import codecs, keyword, re, sys, token, tokenize
-from coverage.backward import set, StringIO # pylint: disable=W0622
+from coverage.backward import set # pylint: disable=W0622
+from coverage.parser import generate_tokens
+
 
 def phys_tokens(toks):
     """Return all physical tokens, even line continuations.
@@ -78,7 +80,7 @@ def source_token_lines(source):
     line = []
     col = 0
     source = source.expandtabs(8).replace('\r\n', '\n')
-    tokgen = tokenize.generate_tokens(StringIO(source).readline)
+    tokgen = generate_tokens(source)
    for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen):
         mark_start = True
         for part in re.split('(\n)', ttext):
-- 
cgit v1.2.1
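
The first commit swaps a slice comparison for str.endswith and a list for a set in a
membership test that runs once per token. A rough way to see why both help is a
micro-benchmark like the following (hypothetical, not part of the patch; the names
ws_list, ws_set, and line are mine):

    import timeit, token, tokenize

    ws_list = [token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL]
    ws_set = set(ws_list)

    # Membership in a small set avoids scanning the list; source_token_lines
    # checks ws_tokens for every token in the file, so this adds up.
    print(timeit.timeit(lambda: tokenize.NL in ws_list))
    print(timeit.timeit(lambda: tokenize.NL in ws_set))

    line = "total = a + \\\n"
    # endswith avoids allocating a new two-character string on every line.
    print(timeit.timeit(lambda: line[-2:] == "\\\n"))
    print(timeit.timeit(lambda: line.endswith("\\\n")))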
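
The second commit routes tokenization through generate_tokens from coverage.parser,
whose body the patch does not show; moving the StringIO call there is presumably why
phystokens.py no longer imports it. A minimal sketch of such a helper, assuming a
one-entry cache keyed on the source text, could look like this:

    import tokenize
    from io import StringIO  # standing in for coverage.backward's StringIO


    class CachedTokenizer(object):
        """A one-entry cache around tokenize.generate_tokens.

        Reports tokenize the same text several times; keeping the token
        list for the most recent source avoids re-running the tokenizer.
        """
        def __init__(self):
            self.last_text = None
            self.last_tokens = None

        def generate_tokens(self, text):
            """Tokenize `text`, reusing the cached result when possible."""
            if text != self.last_text:
                self.last_text = text
                # Materialize the generator so the result can be re-iterated.
                self.last_tokens = list(
                    tokenize.generate_tokens(StringIO(text).readline)
                )
            return self.last_tokens

    generate_tokens = CachedTokenizer().generate_tokens

Materializing the token list is what makes the cache reusable: source_token_lines only
iterates the tokens, so it cannot tell a cached list from a fresh generator, and
repeated HTML report passes over the same file skip re-tokenizing entirely.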