Diffstat (limited to 'src/flake8')
-rw-r--r--  src/flake8/checker.py    23
-rw-r--r--  src/flake8/processor.py  51
2 files changed, 54 insertions, 20 deletions
diff --git a/src/flake8/checker.py b/src/flake8/checker.py
index 5482454..572c153 100644
--- a/src/flake8/checker.py
+++ b/src/flake8/checker.py
@@ -406,20 +406,20 @@ class FileChecker(object):
             self.report("E902", 0, 0, message)
             return None
 
-    def report(self, error_code, line_number, column, text, line=None):
-        # type: (str, int, int, str, Optional[str]) -> str
+    def report(self, error_code, line_number, column, text):
+        # type: (Optional[str], int, int, str) -> str
         """Report an error by storing it in the results list."""
         if error_code is None:
             error_code, text = text.split(" ", 1)
 
-        physical_line = line
         # If we're recovering from a problem in _make_processor, we will not
         # have this attribute.
-        if not physical_line and getattr(self, "processor", None):
-            physical_line = self.processor.line_for(line_number)
+        if hasattr(self, "processor"):
+            line = self.processor.noqa_line_for(line_number)
+        else:
+            line = None
 
-        error = (error_code, line_number, column, text, physical_line)
-        self.results.append(error)
+        self.results.append((error_code, line_number, column, text, line))
         return error_code
 
     def run_check(self, plugin, **arguments):
@@ -483,7 +483,7 @@ class FileChecker(object):
             column -= column_offset
         return row, column
 
-    def run_ast_checks(self):
+    def run_ast_checks(self):  # type: () -> None
         """Run all checks expecting an abstract syntax tree."""
         try:
             ast = self.processor.build_ast()
@@ -534,7 +534,7 @@ class FileChecker(object):
         self.processor.next_logical_line()
 
-    def run_physical_checks(self, physical_line, override_error_line=None):
+    def run_physical_checks(self, physical_line):
         """Run all checks for a given physical line.
 
         A single physical check may return multiple errors.
@@ -562,7 +562,6 @@ class FileChecker(object):
                     line_number=self.processor.line_number,
                     column=column_offset,
                     text=text,
-                    line=(override_error_line or physical_line),
                 )
 
     def process_tokens(self):
@@ -640,9 +639,7 @@ class FileChecker(object):
             line_no = token[2][0]
             with self.processor.inside_multiline(line_number=line_no):
                 for line in self.processor.split_line(token):
-                    self.run_physical_checks(
-                        line + "\n", override_error_line=token[4]
-                    )
+                    self.run_physical_checks(line + "\n")
 
 
 def _pool_init():
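
On the checker.py side, report() no longer receives a physical line from its callers; each stored result instead carries whichever line the processor says should be scanned for `# noqa`. Below is a condensed, hypothetical sketch of that flow -- the class and stub names are invented for illustration and stand in for flake8's real FileChecker and FileProcessor:

from typing import Dict, List, Optional, Tuple


class StubProcessor(object):
    """Stand-in for flake8.processor.FileProcessor in this sketch."""

    def __init__(self, noqa_lines):
        # type: (Dict[int, str]) -> None
        self._noqa_lines = noqa_lines

    def noqa_line_for(self, line_number):
        # type: (int) -> Optional[str]
        return self._noqa_lines.get(line_number)


class CheckerSketch(object):
    """Minimal stand-in showing the reworked report() signature."""

    def __init__(self, processor):
        # type: (StubProcessor) -> None
        self.processor = processor
        self.results = []  # type: List[Tuple[str, int, int, str, Optional[str]]]

    def report(self, error_code, line_number, column, text):
        # type: (Optional[str], int, int, str) -> str
        if error_code is None:
            error_code, text = text.split(" ", 1)
        # the processor attribute may be missing if _make_processor failed
        if hasattr(self, "processor"):
            line = self.processor.noqa_line_for(line_number)
        else:
            line = None
        self.results.append((error_code, line_number, column, text, line))
        return error_code


# every physical line of the two-line statement maps to the same joined text
noqa_lines = {1: "x = (\n    1)  # noqa\n", 2: "x = (\n    1)  # noqa\n"}
checker = CheckerSketch(StubProcessor(noqa_lines))
checker.report(None, 1, 0, "X100 made-up error text")
print(checker.results[0][-1])  # contains "# noqa", so the report can be silenced

Because the fifth element of each result is now resolved inside report(), run_physical_checks() no longer needs the override_error_line parameter that previously threaded token[4] through for multi-line strings.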
diff --git a/src/flake8/processor.py b/src/flake8/processor.py
index aa7f1d8..0375ed9 100644
--- a/src/flake8/processor.py
+++ b/src/flake8/processor.py
@@ -108,6 +108,8 @@ class FileProcessor(object):
         #: Statistics dictionary
         self.statistics = {"logical lines": 0}
         self._file_tokens = None  # type: Optional[List[_Token]]
+        # map from line number to the line we'll search for `noqa` in
+        self._noqa_line_mapping = None  # type: Optional[Dict[int, str]]
 
     @property
     def file_tokens(self):  # type: () -> List[_Token]
@@ -275,16 +277,51 @@ class FileProcessor(object):
         except (tokenize.TokenError, SyntaxError) as exc:
             raise exceptions.InvalidSyntax(exception=exc)
 
-    def line_for(self, line_number):
-        # type: (int) -> Optional[str]
-        """Retrieve the physical line at the specified line number."""
-        adjusted_line_number = line_number - 1
+    def _noqa_line_range(self, min_line, max_line):
+        # type: (int, int) -> Dict[int, str]
+        line_range = range(min_line, max_line + 1)
+        joined = "".join(self.lines[min_line - 1 : max_line])
+        return dict.fromkeys(line_range, joined)
+
+    def noqa_line_for(self, line_number):  # type: (int) -> Optional[str]
+        """Retrieve the line which will be used to determine noqa."""
+        if self._noqa_line_mapping is None:
+            try:
+                file_tokens = self.file_tokens
+            except exceptions.InvalidSyntax:
+                # if we failed to parse the file tokens, we'll always fail in
+                # the future, so set this so the code does not try again
+                self._noqa_line_mapping = {}
+            else:
+                ret = {}
+
+                min_line = len(self.lines) + 2
+                max_line = -1
+                for tp, _, (s_line, _), (e_line, _), _ in file_tokens:
+                    if tp == tokenize.ENDMARKER:
+                        break
+
+                    min_line = min(min_line, s_line)
+                    max_line = max(max_line, e_line)
+
+                    if tp in (tokenize.NL, tokenize.NEWLINE):
+                        ret.update(self._noqa_line_range(min_line, max_line))
+
+                        min_line = len(self.lines) + 2
+                        max_line = -1
+
+                # in newer versions of python, a `NEWLINE` token is inserted
+                # at the end of the file even if it doesn't have one.
+                # on old pythons, they will not have hit a `NEWLINE`
+                if max_line != -1:
+                    ret.update(self._noqa_line_range(min_line, max_line))
+
+                self._noqa_line_mapping = ret
+
         # NOTE(sigmavirus24): Some plugins choose to report errors for empty
         # files on Line 1. In those cases, we shouldn't bother trying to
         # retrieve a physical line (since none exist).
-        if 0 <= adjusted_line_number < len(self.lines):
-            return self.lines[adjusted_line_number]
-        return None
+        return self._noqa_line_mapping.get(line_number)
 
     def next_line(self):  # type: () -> str
         """Get the next line from the list."""