| author | Anthony Sottile <asottile@umich.edu> | 2020-03-17 22:12:56 -0700 |
|---|---|---|
| committer | Anthony Sottile <asottile@umich.edu> | 2020-03-17 22:12:56 -0700 |
| commit | 28797a57d882283c1c987b96655cc1e53aef060e (patch) | |
| tree | 6e2418de570b88f619dcfd34933026ed2bce0a1d | |
| parent | 8f9b4931b9a28896fb43edccb23016a7540f5b82 (diff) | |
| download | flake8-noqa_continuation.tar.gz | |
Allow noqa to apply to lines due to continuation (noqa_continuation)
| -rw-r--r-- | src/flake8/checker.py | 23 |
| -rw-r--r-- | src/flake8/processor.py | 51 |
| -rw-r--r-- | tests/integration/test_checker.py | 9 |
| -rw-r--r-- | tests/integration/test_main.py | 19 |
| -rw-r--r-- | tests/unit/test_file_processor.py | 50 |
5 files changed, 122 insertions, 30 deletions
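
To make the intent concrete, this is the situation the patch targets (it mirrors the new integration test added to `tests/integration/test_main.py` below; the file name `t.py` is only illustrative). Previously, a `# noqa` comment was honored only when it sat on the exact physical line an error was reported against, so a comment on the second line of a backslash continuation did not silence an error reported for the first line:

```python
# t.py -- F401 (unused import) is reported against line 1, while the
# `# noqa: F401` comment sits on the continuation line.  With this change
# both physical lines share a single "noqa line", so the comment applies
# and `flake8 t.py` exits 0, as the new test asserts.
from os \
    import path  # noqa: F401
```
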
```diff
diff --git a/src/flake8/checker.py b/src/flake8/checker.py
index 5482454..572c153 100644
--- a/src/flake8/checker.py
+++ b/src/flake8/checker.py
@@ -406,20 +406,20 @@ class FileChecker(object):
             self.report("E902", 0, 0, message)
             return None
 
-    def report(self, error_code, line_number, column, text, line=None):
-        # type: (str, int, int, str, Optional[str]) -> str
+    def report(self, error_code, line_number, column, text):
+        # type: (Optional[str], int, int, str) -> str
         """Report an error by storing it in the results list."""
         if error_code is None:
             error_code, text = text.split(" ", 1)
 
-        physical_line = line
         # If we're recovering from a problem in _make_processor, we will not
         # have this attribute.
-        if not physical_line and getattr(self, "processor", None):
-            physical_line = self.processor.line_for(line_number)
+        if hasattr(self, "processor"):
+            line = self.processor.noqa_line_for(line_number)
+        else:
+            line = None
 
-        error = (error_code, line_number, column, text, physical_line)
-        self.results.append(error)
+        self.results.append((error_code, line_number, column, text, line))
         return error_code
 
     def run_check(self, plugin, **arguments):
@@ -483,7 +483,7 @@ class FileChecker(object):
             column -= column_offset
         return row, column
 
-    def run_ast_checks(self):
+    def run_ast_checks(self):  # type: () -> None
         """Run all checks expecting an abstract syntax tree."""
         try:
             ast = self.processor.build_ast()
@@ -534,7 +534,7 @@ class FileChecker(object):
 
         self.processor.next_logical_line()
 
-    def run_physical_checks(self, physical_line, override_error_line=None):
+    def run_physical_checks(self, physical_line):
         """Run all checks for a given physical line.
 
         A single physical check may return multiple errors.
@@ -562,7 +562,6 @@ class FileChecker(object):
                     line_number=self.processor.line_number,
                     column=column_offset,
                     text=text,
-                    line=(override_error_line or physical_line),
                 )
 
     def process_tokens(self):
@@ -640,9 +639,7 @@ class FileChecker(object):
             line_no = token[2][0]
             with self.processor.inside_multiline(line_number=line_no):
                 for line in self.processor.split_line(token):
-                    self.run_physical_checks(
-                        line + "\n", override_error_line=token[4]
-                    )
+                    self.run_physical_checks(line + "\n")
 
 
 def _pool_init():
diff --git a/src/flake8/processor.py b/src/flake8/processor.py
index aa7f1d8..0375ed9 100644
--- a/src/flake8/processor.py
+++ b/src/flake8/processor.py
@@ -108,6 +108,8 @@ class FileProcessor(object):
         #: Statistics dictionary
         self.statistics = {"logical lines": 0}
         self._file_tokens = None  # type: Optional[List[_Token]]
+        # map from line number to the line we'll search for `noqa` in
+        self._noqa_line_mapping = None  # type: Optional[Dict[int, str]]
 
     @property
     def file_tokens(self):  # type: () -> List[_Token]
@@ -275,16 +277,51 @@ class FileProcessor(object):
         except (tokenize.TokenError, SyntaxError) as exc:
             raise exceptions.InvalidSyntax(exception=exc)
 
-    def line_for(self, line_number):
-        # type: (int) -> Optional[str]
-        """Retrieve the physical line at the specified line number."""
-        adjusted_line_number = line_number - 1
+    def _noqa_line_range(self, min_line, max_line):
+        # type: (int, int) -> Dict[int, str]
+        line_range = range(min_line, max_line + 1)
+        joined = "".join(self.lines[min_line - 1 : max_line])
+        return dict.fromkeys(line_range, joined)
+
+    def noqa_line_for(self, line_number):  # type: (int) -> Optional[str]
+        """Retrieve the line which will be used to determine noqa."""
+        if self._noqa_line_mapping is None:
+            try:
+                file_tokens = self.file_tokens
+            except exceptions.InvalidSyntax:
+                # if we failed to parse the file tokens, we'll always fail in
+                # the future, so set this so the code does not try again
+                self._noqa_line_mapping = {}
+            else:
+                ret = {}
+
+                min_line = len(self.lines) + 2
+                max_line = -1
+                for tp, _, (s_line, _), (e_line, _), _ in file_tokens:
+                    if tp == tokenize.ENDMARKER:
+                        break
+
+                    min_line = min(min_line, s_line)
+                    max_line = max(max_line, e_line)
+
+                    if tp in (tokenize.NL, tokenize.NEWLINE):
+                        ret.update(self._noqa_line_range(min_line, max_line))
+
+                        min_line = len(self.lines) + 2
+                        max_line = -1
+
+                # in newer versions of python, a `NEWLINE` token is inserted
+                # at the end of the file even if it doesn't have one.
+                # on old pythons, they will not have hit a `NEWLINE`
+                if max_line != -1:
+                    ret.update(self._noqa_line_range(min_line, max_line))
+
+                self._noqa_line_mapping = ret
+
         # NOTE(sigmavirus24): Some plugins choose to report errors for empty
         # files on Line 1. In those cases, we shouldn't bother trying to
         # retrieve a physical line (since none exist).
-        if 0 <= adjusted_line_number < len(self.lines):
-            return self.lines[adjusted_line_number]
-        return None
+        return self._noqa_line_mapping.get(line_number)
 
     def next_line(self):  # type: () -> str
         """Get the next line from the list."""
diff --git a/tests/integration/test_checker.py b/tests/integration/test_checker.py
index 93cc239..096b350 100644
--- a/tests/integration/test_checker.py
+++ b/tests/integration/test_checker.py
@@ -15,7 +15,7 @@ EXPECTED_RESULT_PHYSICAL_LINE = (
     0,
     1,
     'Expected Message',
-    PHYSICAL_LINE,
+    None,
 )
 
 
@@ -153,11 +153,10 @@ def test_line_check_results(plugin_target, len_results):
     """Test the FileChecker class handling results from line checks."""
     file_checker = mock_file_checker_with_plugin(plugin_target)
 
-    # Results will be store in an internal array
+    # Results will be stored in an internal array
    file_checker.run_physical_checks(PHYSICAL_LINE)
-    assert file_checker.results == [
-        EXPECTED_RESULT_PHYSICAL_LINE
-    ] * len_results
+    expected = [EXPECTED_RESULT_PHYSICAL_LINE] * len_results
+    assert file_checker.results == expected
 
 
 PLACEHOLDER_CODE = 'some_line = "of" * code'
diff --git a/tests/integration/test_main.py b/tests/integration/test_main.py
index db307ba..b67992b 100644
--- a/tests/integration/test_main.py
+++ b/tests/integration/test_main.py
@@ -182,6 +182,25 @@ t.py:1:15: E711 comparison to None should be 'if cond is None:'
 '''
 
 
+def test_specific_noqa_on_line_with_continuation(tmpdir, capsys):
+    """See https://gitlab.com/pycqa/flake8/issues/375."""
+    t_py_src = '''\
+from os \\
+    import path  # noqa: F401
+
+x = """
+    trailing whitespace: \n
+"""  # noqa: W291
+'''
+
+    with tmpdir.as_cwd():
+        tmpdir.join('t.py').write(t_py_src)
+        _call_main(['t.py'], retv=0)
+
+    out, err = capsys.readouterr()
+    assert out == err == ''
+
+
 def test_obtaining_args_from_sys_argv_when_not_explicity_provided(capsys):
     """Test that arguments are obtained from 'sys.argv'."""
     with mock.patch('sys.argv', ['flake8', '--help']):
diff --git a/tests/unit/test_file_processor.py b/tests/unit/test_file_processor.py
index e5367a8..044d11b 100644
--- a/tests/unit/test_file_processor.py
+++ b/tests/unit/test_file_processor.py
@@ -122,16 +122,56 @@ def test_read_lines_ignores_empty_display_name(
     assert file_processor.filename == 'stdin'
 
 
-def test_line_for(default_options):
+def test_noqa_line_for(default_options):
     """Verify we grab the correct line from the cached lines."""
     file_processor = processor.FileProcessor('-', default_options, lines=[
-        'Line 1',
-        'Line 2',
-        'Line 3',
+        'Line 1\n',
+        'Line 2\n',
+        'Line 3\n',
     ])
 
     for i in range(1, 4):
-        assert file_processor.line_for(i) == 'Line {0}'.format(i)
+        assert file_processor.noqa_line_for(i) == 'Line {0}\n'.format(i)
+
+
+def test_noqa_line_for_continuation(default_options):
+    """Verify that the correct "line" is retrieved for continuation."""
+    src = '''\
+from foo \\
+    import bar  # 2
+
+x = """
+hello
+world
+"""  # 7
+'''
+    lines = src.splitlines(True)
+    file_processor = processor.FileProcessor('-', default_options, lines=lines)
+
+    assert file_processor.noqa_line_for(0) is None
+
+    l_1_2 = 'from foo \\\n    import bar  # 2\n'
+    assert file_processor.noqa_line_for(1) == l_1_2
+    assert file_processor.noqa_line_for(2) == l_1_2
+
+    assert file_processor.noqa_line_for(3) == '\n'
+
+    l_4_7 = 'x = """\nhello\nworld\n"""  # 7\n'
+    for i in (4, 5, 6, 7):
+        assert file_processor.noqa_line_for(i) == l_4_7
+
+    assert file_processor.noqa_line_for(8) is None
+
+
+def test_noqa_line_for_no_eol_at_end_of_file(default_options):
+    """Verify that we properly handle noqa line at the end of the file."""
+    src = 'from foo \\\nimport bar'  # no end of file newline
+    lines = src.splitlines(True)
+    file_processor = processor.FileProcessor('-', default_options, lines=lines)
+
+    l_1_2 = 'from foo \\\nimport bar'
+    assert file_processor.noqa_line_for(1) == l_1_2
+    assert file_processor.noqa_line_for(2) == l_1_2
 
 
 def test_next_line(default_options):
```
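
The heart of the change is the `_noqa_line_mapping` cache built by `noqa_line_for()`: the file's tokens are walked once, and every physical line between consecutive `NEWLINE`/`NL` tokens is mapped to the joined text of that whole span, so a `# noqa` anywhere in a continued statement or multi-line string is visible for any of its lines. Below is a simplified, standalone sketch of that mapping logic; the helper name `noqa_line_mapping` and the sample source are illustrative, and the real implementation additionally caches the result, handles `InvalidSyntax`, and fixes up files that lack a trailing newline.

```python
import io
import tokenize


def noqa_line_mapping(src):
    """Map each physical line number to the text searched for ``# noqa``.

    A simplified, standalone sketch of the mapping built by
    FileProcessor.noqa_line_for() in the diff above.
    """
    lines = src.splitlines(True)
    tokens = tokenize.generate_tokens(io.StringIO(src).readline)

    ret = {}
    min_line = len(lines) + 2
    max_line = -1
    for tp, _, (s_line, _), (e_line, _), _ in tokens:
        if tp == tokenize.ENDMARKER:
            break

        min_line = min(min_line, s_line)
        max_line = max(max_line, e_line)

        # NL / NEWLINE close out a line: every physical line seen since the
        # previous one maps to the joined source of the whole span.
        if tp in (tokenize.NL, tokenize.NEWLINE):
            joined = "".join(lines[min_line - 1:max_line])
            ret.update(dict.fromkeys(range(min_line, max_line + 1), joined))
            min_line = len(lines) + 2
            max_line = -1
    return ret


src = 'from os \\\n    import path  # noqa: F401\n'
mapping = noqa_line_mapping(src)
# both physical lines map to the full two-line statement, so the `# noqa`
# comment is found whether the error is reported on line 1 or line 2
assert mapping[1] == mapping[2] == src
```

Keying the dictionary by every line number in a span keeps `report()` to a single `dict.get` lookup, which naturally returns `None` for lines outside any span (line 0, or past the end of the file), exactly as the new unit tests expect.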
