Diffstat (limited to 'src')
-rw-r--r--  src/flake8/__init__.py                 |  23
-rw-r--r--  src/flake8/api/legacy.py               |  23
-rw-r--r--  src/flake8/checker.py                  | 193
-rw-r--r--  src/flake8/defaults.py                 |  47
-rw-r--r--  src/flake8/exceptions.py               |  72
-rw-r--r--  src/flake8/formatting/base.py          |  29
-rw-r--r--  src/flake8/formatting/default.py       |   8
-rw-r--r--  src/flake8/main/application.py         |  81
-rw-r--r--  src/flake8/main/debug.py               |  27
-rw-r--r--  src/flake8/main/git.py                 |  64
-rw-r--r--  src/flake8/main/mercurial.py           |  57
-rw-r--r--  src/flake8/main/options.py             | 235
-rw-r--r--  src/flake8/main/setuptools_command.py  |  23
-rw-r--r--  src/flake8/main/vcs.py                 |   7
-rw-r--r--  src/flake8/options/aggregator.py       |  30
-rw-r--r--  src/flake8/options/config.py           | 155
-rw-r--r--  src/flake8/options/manager.py          | 146
-rw-r--r--  src/flake8/plugins/_trie.py            |   8
-rw-r--r--  src/flake8/plugins/manager.py          | 109
-rw-r--r--  src/flake8/plugins/notifier.py         |   2
-rw-r--r--  src/flake8/plugins/pyflakes.py         | 154
-rw-r--r--  src/flake8/processor.py                | 110
-rw-r--r--  src/flake8/statistics.py               |  18
-rw-r--r--  src/flake8/style_guide.py              | 159
-rw-r--r--  src/flake8/utils.py                    |  62
25 files changed, 1035 insertions, 807 deletions
diff --git a/src/flake8/__init__.py b/src/flake8/__init__.py
index e9789ea..8a0219b 100644
--- a/src/flake8/__init__.py
+++ b/src/flake8/__init__.py
@@ -15,14 +15,16 @@ import sys
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
-__version__ = '3.5.0'
-__version_info__ = tuple(int(i) for i in __version__.split('.') if i.isdigit())
+__version__ = "3.5.0"
+__version_info__ = tuple(
+ int(i) for i in __version__.split(".") if i.isdigit()
+)
# There is nothing lower than logging.DEBUG (10) in the logging library,
# but we want an extra level to avoid being too verbose when using -vv.
_EXTRA_VERBOSE = 5
-logging.addLevelName(_EXTRA_VERBOSE, 'VERBOSE')
+logging.addLevelName(_EXTRA_VERBOSE, "VERBOSE")
_VERBOSITY_TO_LOG_LEVEL = {
# output more than warnings but not debugging info
@@ -33,8 +35,10 @@ _VERBOSITY_TO_LOG_LEVEL = {
3: _EXTRA_VERBOSE,
}
-LOG_FORMAT = ('%(name)-25s %(processName)-11s %(relativeCreated)6d '
- '%(levelname)-8s %(message)s')
+LOG_FORMAT = (
+ "%(name)-25s %(processName)-11s %(relativeCreated)6d "
+ "%(levelname)-8s %(message)s"
+)
def configure_logging(verbosity, filename=None, logformat=LOG_FORMAT):
@@ -55,8 +59,8 @@ def configure_logging(verbosity, filename=None, logformat=LOG_FORMAT):
log_level = _VERBOSITY_TO_LOG_LEVEL[verbosity]
- if not filename or filename in ('stderr', 'stdout'):
- fileobj = getattr(sys, filename or 'stderr')
+ if not filename or filename in ("stderr", "stdout"):
+ fileobj = getattr(sys, filename or "stderr")
handler_cls = logging.StreamHandler
else:
fileobj = filename
@@ -66,5 +70,6 @@ def configure_logging(verbosity, filename=None, logformat=LOG_FORMAT):
handler.setFormatter(logging.Formatter(logformat))
LOG.addHandler(handler)
LOG.setLevel(log_level)
- LOG.debug('Added a %s logging handler to logger root at %s',
- filename, __name__)
+ LOG.debug(
+ "Added a %s logging handler to logger root at %s", filename, __name__
+ )
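
[Editor's note] As a standalone illustration of the version parsing reformatted above (a minimal sketch, not part of the patch):

    # Recomputing __version_info__ exactly the way the module does.
    __version__ = "3.5.0"
    __version_info__ = tuple(
        int(i) for i in __version__.split(".") if i.isdigit()
    )
    print(__version_info__)  # (3, 5, 0)
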
diff --git a/src/flake8/api/legacy.py b/src/flake8/api/legacy.py
index b332860..2a3f10b 100644
--- a/src/flake8/api/legacy.py
+++ b/src/flake8/api/legacy.py
@@ -13,7 +13,7 @@ from flake8.main import application as app
LOG = logging.getLogger(__name__)
-__all__ = ('get_style_guide',)
+__all__ = ("get_style_guide",)
def get_style_guide(**kwargs):
@@ -29,7 +29,8 @@ def get_style_guide(**kwargs):
application = app.Application()
application.parse_preliminary_options_and_args([])
flake8.configure_logging(
- application.prelim_opts.verbose, application.prelim_opts.output_file)
+ application.prelim_opts.verbose, application.prelim_opts.output_file
+ )
application.make_config_finder()
application.find_plugins()
application.register_plugin_options()
@@ -113,18 +114,22 @@ class StyleGuide(object):
:rtype:
bool
"""
- return (self._file_checker_manager.is_path_excluded(filename) or
- (parent and
- self._file_checker_manager.is_path_excluded(
- os.path.join(parent, filename))))
+ return self._file_checker_manager.is_path_excluded(filename) or (
+ parent
+ and self._file_checker_manager.is_path_excluded(
+ os.path.join(parent, filename)
+ )
+ )
def init_report(self, reporter=None):
"""Set up a formatter for this run of Flake8."""
if reporter is None:
return
if not issubclass(reporter, formatter.BaseFormatter):
- raise ValueError("Report should be subclass of "
- "flake8.formatter.BaseFormatter.")
+ raise ValueError(
+ "Report should be subclass of "
+ "flake8.formatter.BaseFormatter."
+ )
self._application.formatter = None
self._application.make_formatter(reporter)
self._application.guide = None
@@ -197,6 +202,6 @@ class Report(object):
list
"""
return [
- '{} {} {}'.format(s.count, s.error_code, s.message)
+ "{} {} {}".format(s.count, s.error_code, s.message)
for s in self._stats.statistics_for(violation)
]
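
[Editor's note] For context, the legacy API touched above is typically driven like this (a hedged sketch; the file path is illustrative):

    from flake8.api import legacy

    # get_style_guide() builds and configures a full Application internally.
    style_guide = legacy.get_style_guide(ignore=["E501"])
    report = style_guide.check_files(["src/flake8/defaults.py"])
    print(report.total_errors)
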
diff --git a/src/flake8/checker.py b/src/flake8/checker.py
index b32fd3e..e8a71d4 100644
--- a/src/flake8/checker.py
+++ b/src/flake8/checker.py
@@ -77,17 +77,17 @@ class Manager(object):
self.processes = []
self.checkers = []
self.statistics = {
- 'files': 0,
- 'logical lines': 0,
- 'physical lines': 0,
- 'tokens': 0,
+ "files": 0,
+ "logical lines": 0,
+ "physical lines": 0,
+ "tokens": 0,
}
def _process_statistics(self):
for checker in self.checkers:
for statistic in defaults.STATISTIC_NAMES:
self.statistics[statistic] += checker.statistics[statistic]
- self.statistics['files'] += len(self.checkers)
+ self.statistics["files"] += len(self.checkers)
def _job_count(self):
# type: () -> int
@@ -101,40 +101,53 @@ class Manager(object):
# multiprocessing and which really shouldn't require multiprocessing
# - the user provided some awful input
if not multiprocessing:
- LOG.warning('The multiprocessing module is not available. '
- 'Ignoring --jobs arguments.')
+ LOG.warning(
+ "The multiprocessing module is not available. "
+ "Ignoring --jobs arguments."
+ )
return 0
- if (utils.is_windows() and
- not utils.can_run_multiprocessing_on_windows()):
- LOG.warning('The --jobs option is not available on Windows due to'
- ' a bug (https://bugs.python.org/issue27649) in '
- 'Python 2.7.11+ and 3.3+. We have detected that you '
- 'are running an unsupported version of Python on '
- 'Windows. Ignoring --jobs arguments.')
+ if (
+ utils.is_windows()
+ and not utils.can_run_multiprocessing_on_windows()
+ ):
+ LOG.warning(
+ "The --jobs option is not available on Windows due to"
+ " a bug (https://bugs.python.org/issue27649) in "
+ "Python 2.7.11+ and 3.3+. We have detected that you "
+ "are running an unsupported version of Python on "
+ "Windows. Ignoring --jobs arguments."
+ )
return 0
if utils.is_using_stdin(self.arguments):
- LOG.warning('The --jobs option is not compatible with supplying '
- 'input using - . Ignoring --jobs arguments.')
+ LOG.warning(
+ "The --jobs option is not compatible with supplying "
+ "input using - . Ignoring --jobs arguments."
+ )
return 0
if self.options.diff:
- LOG.warning('The --diff option was specified with --jobs but '
- 'they are not compatible. Ignoring --jobs arguments.')
+ LOG.warning(
+ "The --diff option was specified with --jobs but "
+ "they are not compatible. Ignoring --jobs arguments."
+ )
return 0
jobs = self.options.jobs
- if jobs != 'auto' and not jobs.isdigit():
- LOG.warning('"%s" is not a valid parameter to --jobs. Must be one '
- 'of "auto" or a numerical value, e.g., 4.', jobs)
+ if jobs != "auto" and not jobs.isdigit():
+ LOG.warning(
+ '"%s" is not a valid parameter to --jobs. Must be one '
+ 'of "auto" or a numerical value, e.g., 4.',
+ jobs,
+ )
return 0
# If the value is "auto", we want to let the multiprocessing library
# decide the number based on the number of CPUs. However, if that
# function is not implemented for this particular value of Python we
# default to 1
- if jobs == 'auto':
+ if jobs == "auto":
try:
return multiprocessing.cpu_count()
except NotImplementedError:
@@ -170,8 +183,8 @@ class Manager(object):
:rtype:
bool
"""
- if path == '-':
- if self.options.stdin_display_name == 'stdin':
+ if path == "-":
+ if self.options.stdin_display_name == "stdin":
return False
path = self.options.stdin_display_name
@@ -185,8 +198,9 @@ class Manager(object):
absolute_path = os.path.abspath(path)
match = utils.fnmatch(absolute_path, exclude)
- LOG.debug('"%s" has %sbeen excluded', absolute_path,
- '' if match else 'not ')
+ LOG.debug(
+ '"%s" has %sbeen excluded', absolute_path, "" if match else "not "
+ )
return match
def make_checkers(self, paths=None):
@@ -196,7 +210,7 @@ class Manager(object):
paths = self.arguments
if not paths:
- paths = ['.']
+ paths = ["."]
filename_patterns = self.options.filename
running_from_vcs = self.options._running_from_vcs
@@ -209,7 +223,7 @@ class Manager(object):
matches_filename_patterns = utils.fnmatch(
filename, filename_patterns
)
- is_stdin = filename == '-'
+ is_stdin = filename == "-"
# NOTE(sigmavirus24): If a user explicitly specifies something,
# e.g, ``flake8 bin/script`` then we should run Flake8 against
# that. Since should_create_file_checker looks to see if the
@@ -217,24 +231,28 @@ class Manager(object):
# the event that the argument and the filename are identical.
# If it was specified explicitly, the user intended for it to be
# checked.
- explicitly_provided = (not running_from_vcs and
- not running_from_diff and
- (argument == filename))
- return ((explicitly_provided or matches_filename_patterns) or
- is_stdin)
+ explicitly_provided = (
+ not running_from_vcs
+ and not running_from_diff
+ and (argument == filename)
+ )
+ return (
+ explicitly_provided or matches_filename_patterns
+ ) or is_stdin
checks = self.checks.to_dictionary()
checkers = (
FileChecker(filename, checks, self.options)
for argument in paths
- for filename in utils.filenames_from(argument,
- self.is_path_excluded)
+ for filename in utils.filenames_from(
+ argument, self.is_path_excluded
+ )
if should_create_file_checker(filename, argument)
)
self.checkers = [
checker for checker in checkers if checker.should_process
]
- LOG.info('Checking %d files', len(self.checkers))
+ LOG.info("Checking %d files", len(self.checkers))
def report(self):
# type: () -> (int, int)
@@ -250,7 +268,9 @@ class Manager(object):
"""
results_reported = results_found = 0
for checker in self.checkers:
- results = sorted(checker.results, key=lambda tup: (tup[1], tup[2]))
+ results = sorted(
+ checker.results, key=lambda tup: (tup[1], tup[2])
+ )
filename = checker.display_name
with self.style_guide.processing_file(filename):
results_reported += self._handle_results(filename, results)
@@ -276,8 +296,7 @@ class Manager(object):
_run_checks,
self.checkers,
chunksize=calculate_pool_chunksize(
- len(self.checkers),
- self.jobs,
+ len(self.checkers), self.jobs
),
)
for ret in pool_map:
@@ -294,8 +313,9 @@ class Manager(object):
for checker in self.checkers:
filename = checker.display_name
- checker.results = sorted(final_results[filename],
- key=lambda tup: (tup[2], tup[2]))
+ checker.results = sorted(
+ final_results[filename], key=lambda tup: (tup[2], tup[2])
+ )
checker.statistics = final_statistics[filename]
def run_serial(self):
@@ -322,11 +342,11 @@ class Manager(object):
if oserr.errno not in SERIAL_RETRY_ERRNOS:
LOG.exception(oserr)
raise
- LOG.warning('Running in serial after OS exception, %r', oserr)
+ LOG.warning("Running in serial after OS exception, %r", oserr)
self.run_serial()
except KeyboardInterrupt:
- LOG.warning('Flake8 was interrupted by the user')
- raise exceptions.EarlyQuit('Early quit while running checks')
+ LOG.warning("Flake8 was interrupted by the user")
+ raise exceptions.EarlyQuit("Early quit while running checks")
def start(self, paths=None):
"""Start checking files.
@@ -335,14 +355,14 @@ class Manager(object):
Path names to check. This is passed directly to
:meth:`~Manager.make_checkers`.
"""
- LOG.info('Making checkers')
+ LOG.info("Making checkers")
self.make_checkers(paths)
def stop(self):
"""Stop checking files."""
self._process_statistics()
for proc in self.processes:
- LOG.info('Joining %s to the main process', proc.name)
+ LOG.info("Joining %s to the main process", proc.name)
proc.join()
@@ -368,9 +388,9 @@ class FileChecker(object):
self.checks = checks
self.results = []
self.statistics = {
- 'tokens': 0,
- 'logical lines': 0,
- 'physical lines': 0,
+ "tokens": 0,
+ "logical lines": 0,
+ "physical lines": 0,
}
self.processor = self._make_processor()
self.display_name = filename
@@ -378,11 +398,11 @@ class FileChecker(object):
if self.processor is not None:
self.display_name = self.processor.filename
self.should_process = not self.processor.should_ignore_file()
- self.statistics['physical lines'] = len(self.processor.lines)
+ self.statistics["physical lines"] = len(self.processor.lines)
def __repr__(self):
"""Provide helpful debugging representation."""
- return 'FileChecker for {}'.format(self.filename)
+ return "FileChecker for {}".format(self.filename)
def _make_processor(self):
try:
@@ -395,20 +415,20 @@ class FileChecker(object):
# as an E902. We probably *want* a better error code for this
# going forward.
(exc_type, exception) = sys.exc_info()[:2]
- message = '{0}: {1}'.format(exc_type.__name__, exception)
- self.report('E902', 0, 0, message)
+ message = "{0}: {1}".format(exc_type.__name__, exception)
+ self.report("E902", 0, 0, message)
return None
def report(self, error_code, line_number, column, text, line=None):
# type: (str, int, int, str) -> str
"""Report an error by storing it in the results list."""
if error_code is None:
- error_code, text = text.split(' ', 1)
+ error_code, text = text.split(" ", 1)
physical_line = line
# If we're recovering from a problem in _make_processor, we will not
# have this attribute.
- if not physical_line and getattr(self, 'processor', None):
+ if not physical_line and getattr(self, "processor", None):
physical_line = self.processor.line_for(line_number)
error = (error_code, line_number, column, text, physical_line)
@@ -417,26 +437,24 @@ class FileChecker(object):
def run_check(self, plugin, **arguments):
"""Run the check in a single plugin."""
- LOG.debug('Running %r with %r', plugin, arguments)
+ LOG.debug("Running %r with %r", plugin, arguments)
try:
self.processor.keyword_arguments_for(
- plugin['parameters'],
- arguments,
+ plugin["parameters"], arguments
)
except AttributeError as ae:
- LOG.error('Plugin requested unknown parameters.')
+ LOG.error("Plugin requested unknown parameters.")
raise exceptions.PluginRequestedUnknownParameters(
- plugin=plugin,
- exception=ae,
+ plugin=plugin, exception=ae
)
try:
- return plugin['plugin'](**arguments)
+ return plugin["plugin"](**arguments)
except Exception as all_exc:
- LOG.critical('Plugin %s raised an unexpected exception',
- plugin['name'])
+ LOG.critical(
+ "Plugin %s raised an unexpected exception", plugin["name"]
+ )
raise exceptions.PluginExecutionFailed(
- plugin=plugin,
- excetion=all_exc,
+ plugin=plugin, excetion=all_exc
)
@staticmethod
@@ -466,7 +484,7 @@ class FileChecker(object):
# "physical" line so much as what was accumulated by the point
# tokenizing failed.
# See also: https://gitlab.com/pycqa/flake8/issues/237
- lines = physical_line.rstrip('\n').split('\n')
+ lines = physical_line.rstrip("\n").split("\n")
row_offset = len(lines) - 1
logical_line = lines[0]
logical_line_length = len(logical_line)
@@ -483,11 +501,15 @@ class FileChecker(object):
except (ValueError, SyntaxError, TypeError):
(exc_type, exception) = sys.exc_info()[:2]
row, column = self._extract_syntax_information(exception)
- self.report('E999', row, column, '%s: %s' %
- (exc_type.__name__, exception.args[0]))
+ self.report(
+ "E999",
+ row,
+ column,
+ "%s: %s" % (exc_type.__name__, exception.args[0]),
+ )
return
- for plugin in self.checks['ast_plugins']:
+ for plugin in self.checks["ast_plugins"]:
checker = self.run_check(plugin, tree=ast)
# If the plugin uses a class, call the run method of it, otherwise
# the call should return something iterable itself
@@ -512,7 +534,7 @@ class FileChecker(object):
LOG.debug('Logical line: "%s"', logical_line.rstrip())
- for plugin in self.checks['logical_line_plugins']:
+ for plugin in self.checks["logical_line_plugins"]:
self.processor.update_checker_state_for(plugin)
results = self.run_check(plugin, logical_line=logical_line) or ()
for offset, text in results:
@@ -529,7 +551,7 @@ class FileChecker(object):
def run_physical_checks(self, physical_line, override_error_line=None):
"""Run all checks for a given physical line."""
- for plugin in self.checks['physical_line_plugins']:
+ for plugin in self.checks["physical_line_plugins"]:
self.processor.update_checker_state_for(plugin)
result = self.run_check(plugin, physical_line=physical_line)
if result is not None:
@@ -555,7 +577,7 @@ class FileChecker(object):
statistics = self.statistics
file_processor = self.processor
for token in file_processor.generate_tokens():
- statistics['tokens'] += 1
+ statistics["tokens"] += 1
self.check_physical_eol(token)
token_type, text = token[0:2]
processor.log_token(LOG, token)
@@ -564,8 +586,10 @@ class FileChecker(object):
elif parens == 0:
if processor.token_is_newline(token):
self.handle_newline(token_type)
- elif (processor.token_is_comment(token) and
- len(file_processor.tokens) == 1):
+ elif (
+ processor.token_is_comment(token)
+ and len(file_processor.tokens) == 1
+ ):
self.handle_comment(token, text)
if file_processor.tokens:
@@ -578,20 +602,24 @@ class FileChecker(object):
try:
self.process_tokens()
except exceptions.InvalidSyntax as exc:
- self.report(exc.error_code, exc.line_number, exc.column_number,
- exc.error_message)
+ self.report(
+ exc.error_code,
+ exc.line_number,
+ exc.column_number,
+ exc.error_message,
+ )
self.run_ast_checks()
- logical_lines = self.processor.statistics['logical lines']
- self.statistics['logical lines'] = logical_lines
+ logical_lines = self.processor.statistics["logical lines"]
+ self.statistics["logical lines"] = logical_lines
return self.filename, self.results, self.statistics
def handle_comment(self, token, token_text):
"""Handle the logic when encountering a comment token."""
# The comment also ends a physical line
token = list(token)
- token[1] = token_text.rstrip('\r\n')
+ token[1] = token_text.rstrip("\r\n")
token[3] = (token[2][0], token[2][1] + len(token[1]))
self.processor.tokens = [tuple(token)]
self.run_logical_checks()
@@ -628,8 +656,9 @@ class FileChecker(object):
line_no = token[2][0]
with self.processor.inside_multiline(line_number=line_no):
for line in self.processor.split_line(token):
- self.run_physical_checks(line + '\n',
- override_error_line=token[4])
+ self.run_physical_checks(
+ line + "\n", override_error_line=token[4]
+ )
def _pool_init():
diff --git a/src/flake8/defaults.py b/src/flake8/defaults.py
index 3ad959b..61f2571 100644
--- a/src/flake8/defaults.py
+++ b/src/flake8/defaults.py
@@ -2,39 +2,26 @@
import re
EXCLUDE = (
- '.svn',
- 'CVS',
- '.bzr',
- '.hg',
- '.git',
- '__pycache__',
- '.tox',
- '.eggs',
- '*.egg',
+ ".svn",
+ "CVS",
+ ".bzr",
+ ".hg",
+ ".git",
+ "__pycache__",
+ ".tox",
+ ".eggs",
+ "*.egg",
)
-IGNORE = (
- 'E121',
- 'E123',
- 'E126',
- 'E226',
- 'E24',
- 'E704',
- 'W503',
- 'W504',
-)
-SELECT = ('E', 'F', 'W', 'C90')
+IGNORE = ("E121", "E123", "E126", "E226", "E24", "E704", "W503", "W504")
+SELECT = ("E", "F", "W", "C90")
MAX_LINE_LENGTH = 79
-TRUTHY_VALUES = {'true', '1', 't'}
+TRUTHY_VALUES = {"true", "1", "t"}
# Other constants
-WHITESPACE = frozenset(' \t')
+WHITESPACE = frozenset(" \t")
-STATISTIC_NAMES = (
- 'logical lines',
- 'physical lines',
- 'tokens',
-)
+STATISTIC_NAMES = ("logical lines", "physical lines", "tokens")
NOQA_INLINE_REGEXP = re.compile(
# We're looking for items that look like this:
@@ -46,8 +33,8 @@ NOQA_INLINE_REGEXP = re.compile(
# We do not care about the ``: `` that follows ``noqa``
# We do not care about the casing of ``noqa``
# We want a comma-separated list of errors
- r'# noqa(?:: (?P<codes>([A-Z][0-9]+(?:[,\s]+)?)+))?',
- re.IGNORECASE
+ r"# noqa(?:: (?P<codes>([A-Z][0-9]+(?:[,\s]+)?)+))?",
+ re.IGNORECASE,
)
-NOQA_FILE = re.compile(r'\s*# flake8[:=]\s*noqa', re.I)
+NOQA_FILE = re.compile(r"\s*# flake8[:=]\s*noqa", re.I)
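
[Editor's note] A quick check of what the inline noqa regular expression above captures (the sample line is invented):

    import re

    NOQA_INLINE_REGEXP = re.compile(
        r"# noqa(?:: (?P<codes>([A-Z][0-9]+(?:[,\s]+)?)+))?",
        re.IGNORECASE,
    )
    match = NOQA_INLINE_REGEXP.search("x = 1  # noqa: E731,E123")
    print(match.group("codes"))  # E731,E123
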
diff --git a/src/flake8/exceptions.py b/src/flake8/exceptions.py
index c7217f5..51c25d3 100644
--- a/src/flake8/exceptions.py
+++ b/src/flake8/exceptions.py
@@ -24,15 +24,17 @@ class FailedToLoadPlugin(Flake8Exception):
def __init__(self, *args, **kwargs):
"""Initialize our FailedToLoadPlugin exception."""
- self.plugin = kwargs.pop('plugin')
+ self.plugin = kwargs.pop("plugin")
self.ep_name = self.plugin.name
- self.original_exception = kwargs.pop('exception')
+ self.original_exception = kwargs.pop("exception")
super(FailedToLoadPlugin, self).__init__(*args, **kwargs)
def __str__(self):
"""Return a nice string for our exception."""
- return self.FORMAT % {'name': self.ep_name,
- 'exc': self.original_exception}
+ return self.FORMAT % {
+ "name": self.ep_name,
+ "exc": self.original_exception,
+ }
class InvalidSyntax(Flake8Exception):
@@ -40,19 +42,16 @@ class InvalidSyntax(Flake8Exception):
def __init__(self, *args, **kwargs):
"""Initialize our InvalidSyntax exception."""
- exception = kwargs.pop('exception', None)
+ exception = kwargs.pop("exception", None)
self.original_exception = exception
- self.error_message = '{0}: {1}'.format(
- exception.__class__.__name__,
- exception.args[0],
+ self.error_message = "{0}: {1}".format(
+ exception.__class__.__name__, exception.args[0]
)
- self.error_code = 'E902'
+ self.error_code = "E902"
self.line_number = 1
self.column_number = 0
super(InvalidSyntax, self).__init__(
- self.error_message,
- *args,
- **kwargs
+ self.error_message, *args, **kwargs
)
@@ -63,17 +62,18 @@ class PluginRequestedUnknownParameters(Flake8Exception):
def __init__(self, *args, **kwargs):
"""Pop certain keyword arguments for initialization."""
- self.original_exception = kwargs.pop('exception')
- self.plugin = kwargs.pop('plugin')
+ self.original_exception = kwargs.pop("exception")
+ self.plugin = kwargs.pop("plugin")
super(PluginRequestedUnknownParameters, self).__init__(
- *args,
- **kwargs
+ *args, **kwargs
)
def __str__(self):
"""Format our exception message."""
- return self.FORMAT % {'name': self.plugin['plugin_name'],
- 'exc': self.original_exception}
+ return self.FORMAT % {
+ "name": self.plugin["plugin_name"],
+ "exc": self.original_exception,
+ }
class PluginExecutionFailed(Flake8Exception):
@@ -83,16 +83,18 @@ class PluginExecutionFailed(Flake8Exception):
def __init__(self, *args, **kwargs):
"""Utilize keyword arguments for message generation."""
- self.original_exception = kwargs.pop('exception')
- self.plugin = kwargs.pop('plugin')
+ self.original_exception = kwargs.pop("exception")
+ self.plugin = kwargs.pop("plugin")
super(PluginExecutionFailed, self).__init__(
str(self), *args, **kwargs
)
def __str__(self):
"""Format our exception message."""
- return self.FORMAT % {'name': self.plugin['plugin_name'],
- 'exc': self.original_exception}
+ return self.FORMAT % {
+ "name": self.plugin["plugin_name"],
+ "exc": self.original_exception,
+ }
class HookInstallationError(Flake8Exception):
@@ -106,14 +108,16 @@ class GitHookAlreadyExists(HookInstallationError):
def __init__(self, *args, **kwargs):
"""Initialize the path attribute."""
- self.path = kwargs.pop('path')
+ self.path = kwargs.pop("path")
super(GitHookAlreadyExists, self).__init__(*args, **kwargs)
def __str__(self):
"""Provide a nice message regarding the exception."""
- msg = ('The Git pre-commit hook ({0}) already exists. To convince '
- 'Flake8 to install the hook, please remove the existing '
- 'hook.')
+ msg = (
+ "The Git pre-commit hook ({0}) already exists. To convince "
+ "Flake8 to install the hook, please remove the existing "
+ "hook."
+ )
return msg.format(self.path)
@@ -124,25 +128,27 @@ class MercurialHookAlreadyExists(HookInstallationError):
def __init__(self, *args, **kwargs):
"""Initialize the relevant attributes."""
- self.path = kwargs.pop('path')
- self.value = kwargs.pop('value')
+ self.path = kwargs.pop("path")
+ self.value = kwargs.pop("value")
super(MercurialHookAlreadyExists, self).__init__(*args, **kwargs)
def __str__(self):
"""Return a nicely formatted string for these errors."""
- msg = ('The Mercurial {0} hook already exists with "{1}" in {2}. '
- 'To convince Flake8 to install the hook, please remove the '
- '{0} configuration from the [hooks] section of your hgrc.')
+ msg = (
+ 'The Mercurial {0} hook already exists with "{1}" in {2}. '
+ "To convince Flake8 to install the hook, please remove the "
+ "{0} configuration from the [hooks] section of your hgrc."
+ )
return msg.format(self.hook_name, self.value, self.path)
class MercurialCommitHookAlreadyExists(MercurialHookAlreadyExists):
"""Exception raised when the hg commit hook is already configured."""
- hook_name = 'commit'
+ hook_name = "commit"
class MercurialQRefreshHookAlreadyExists(MercurialHookAlreadyExists):
"""Exception raised when the hg commit hook is already configured."""
- hook_name = 'qrefresh'
+ hook_name = "qrefresh"
diff --git a/src/flake8/formatting/base.py b/src/flake8/formatting/base.py
index 1443e4c..259d03f 100644
--- a/src/flake8/formatting/base.py
+++ b/src/flake8/formatting/base.py
@@ -37,7 +37,7 @@ class BaseFormatter(object):
self.options = options
self.filename = options.output_file
self.output_fd = None
- self.newline = '\n'
+ self.newline = "\n"
self.after_init()
def after_init(self):
@@ -68,7 +68,7 @@ class BaseFormatter(object):
This defaults to initializing :attr:`output_fd` if :attr:`filename`
"""
if self.filename:
- self.output_fd = open(self.filename, 'a')
+ self.output_fd = open(self.filename, "a")
def handle(self, error):
"""Handle an error reported by Flake8.
@@ -102,8 +102,9 @@ class BaseFormatter(object):
:rtype:
str
"""
- raise NotImplementedError('Subclass of BaseFormatter did not implement'
- ' format.')
+ raise NotImplementedError(
+ "Subclass of BaseFormatter did not implement" " format."
+ )
def show_statistics(self, statistics):
"""Format and print the statistics."""
@@ -112,11 +113,13 @@ class BaseFormatter(object):
statistic = next(stats_for_error_code)
count = statistic.count
count += sum(stat.count for stat in stats_for_error_code)
- self._write('{count:<5} {error_code} {message}'.format(
- count=count,
- error_code=error_code,
- message=statistic.message,
- ))
+ self._write(
+ "{count:<5} {error_code} {message}".format(
+ count=count,
+ error_code=error_code,
+ message=statistic.message,
+ )
+ )
def show_benchmarks(self, benchmarks):
"""Format and print the benchmarks."""
@@ -130,8 +133,8 @@ class BaseFormatter(object):
# the decimal point to be displayed. This is the precision and it
# can not be specified for integers which is why we need two separate
# format strings.
- float_format = '{value:<10.3} {statistic}'.format
- int_format = '{value:<10} {statistic}'.format
+ float_format = "{value:<10.3} {statistic}".format
+ int_format = "{value:<10} {statistic}".format
for statistic, value in benchmarks:
if isinstance(value, int):
benchmark = int_format(statistic=statistic, value=value)
@@ -158,11 +161,11 @@ class BaseFormatter(object):
str
"""
if not self.options.show_source or error.physical_line is None:
- return ''
+ return ""
# Because column numbers are 1-indexed, we need to remove one to get
# the proper number of space characters.
- pointer = (' ' * (error.column_number - 1)) + '^'
+ pointer = (" " * (error.column_number - 1)) + "^"
# Physical lines have a newline at the end, no need to add an extra
# one
return error.physical_line + pointer
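
[Editor's note] To make the BaseFormatter contract above concrete, a custom reporter only needs to override format() (a minimal sketch; the class name is hypothetical):

    from flake8.formatting import base

    class CodeOnly(base.BaseFormatter):
        """Report only the error code for each violation."""

        def format(self, error):
            # ``error`` is flake8's violation record; ``code`` holds e.g. "E501".
            return error.code
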
diff --git a/src/flake8/formatting/default.py b/src/flake8/formatting/default.py
index 8c91f9f..e1061f3 100644
--- a/src/flake8/formatting/default.py
+++ b/src/flake8/formatting/default.py
@@ -42,24 +42,24 @@ class Default(SimpleFormatter):
format string.
"""
- error_format = '%(path)s:%(row)d:%(col)d: %(code)s %(text)s'
+ error_format = "%(path)s:%(row)d:%(col)d: %(code)s %(text)s"
def after_init(self):
"""Check for a custom format string."""
- if self.options.format.lower() != 'default':
+ if self.options.format.lower() != "default":
self.error_format = self.options.format
class Pylint(SimpleFormatter):
"""Pylint formatter for Flake8."""
- error_format = '%(path)s:%(row)d: [%(code)s] %(text)s'
+ error_format = "%(path)s:%(row)d: [%(code)s] %(text)s"
class FilenameOnly(SimpleFormatter):
"""Only print filenames, e.g., flake8 -q."""
- error_format = '%(path)s'
+ error_format = "%(path)s"
def after_init(self):
"""Initialize our set of filenames."""
diff --git a/src/flake8/main/application.py b/src/flake8/main/application.py
index 9c15629..bd409d7 100644
--- a/src/flake8/main/application.py
+++ b/src/flake8/main/application.py
@@ -22,7 +22,7 @@ LOG = logging.getLogger(__name__)
class Application(object):
"""Abstract our application into a class."""
- def __init__(self, program='flake8', version=flake8.__version__):
+ def __init__(self, program="flake8", version=flake8.__version__):
# type: (str, str) -> NoneType
"""Initialize our application.
@@ -42,7 +42,7 @@ class Application(object):
#: The instance of :class:`flake8.options.manager.OptionManager` used
#: to parse and handle the options and arguments passed by the user
self.option_manager = manager.OptionManager(
- prog='flake8', version=flake8.__version__
+ prog="flake8", version=flake8.__version__
)
options.register_default_options(self.option_manager)
#: The preliminary options parsed from CLI before plugins are loaded,
@@ -118,21 +118,21 @@ class Application(object):
# Similarly we have to defer printing the help text until later.
args = (argv or sys.argv)[:]
try:
- args.remove('--version')
+ args.remove("--version")
except ValueError:
pass
try:
- args.remove('--help')
+ args.remove("--help")
except ValueError:
pass
try:
- args.remove('-h')
+ args.remove("-h")
except ValueError:
pass
opts, args = self.option_manager.parse_known_args(args)
# parse_known_args includes program name and unknown options as args
- args = [a for a in args[1:] if not a.startswith('-')]
+ args = [a for a in args[1:] if not a.startswith("-")]
self.prelim_opts, self.prelim_args = opts, args
def exit(self):
@@ -146,14 +146,16 @@ class Application(object):
print(self.result_count)
if not self.options.exit_zero:
- raise SystemExit((self.result_count > 0) or
- self.catastrophic_failure)
+ raise SystemExit(
+ (self.result_count > 0) or self.catastrophic_failure
+ )
def make_config_finder(self):
"""Make our ConfigFileFinder based on preliminary opts and args."""
if self.config_finder is None:
extra_config_files = utils.normalize_paths(
- self.prelim_opts.append_config)
+ self.prelim_opts.append_config
+ )
self.config_finder = config.ConfigFileFinder(
self.option_manager.program_name,
self.prelim_args,
@@ -181,14 +183,16 @@ class Application(object):
if self.check_plugins is None:
self.check_plugins = plugin_manager.Checkers(
- self.local_plugins.extension)
+ self.local_plugins.extension
+ )
if self.listening_plugins is None:
self.listening_plugins = plugin_manager.Listeners()
if self.formatting_plugins is None:
self.formatting_plugins = plugin_manager.ReportFormatters(
- self.local_plugins.report)
+ self.local_plugins.report
+ )
self.check_plugins.load_plugins()
self.listening_plugins.load_plugins()
@@ -222,19 +226,20 @@ class Application(object):
self.options._running_from_vcs = False
- self.check_plugins.provide_options(self.option_manager, self.options,
- self.args)
- self.listening_plugins.provide_options(self.option_manager,
- self.options,
- self.args)
- self.formatting_plugins.provide_options(self.option_manager,
- self.options,
- self.args)
+ self.check_plugins.provide_options(
+ self.option_manager, self.options, self.args
+ )
+ self.listening_plugins.provide_options(
+ self.option_manager, self.options, self.args
+ )
+ self.formatting_plugins.provide_options(
+ self.option_manager, self.options, self.args
+ )
def formatter_for(self, formatter_plugin_name):
"""Retrieve the formatter class by plugin name."""
try:
- default_formatter = self.formatting_plugins['default']
+ default_formatter = self.formatting_plugins["default"]
except KeyError:
raise exceptions.ExecutionError(
"The 'default' Flake8 formatting plugin is unavailable. "
@@ -259,9 +264,9 @@ class Application(object):
if self.formatter is None:
format_plugin = self.options.format
if 1 <= self.options.quiet < 2:
- format_plugin = 'quiet-filename'
+ format_plugin = "quiet-filename"
elif 2 <= self.options.quiet:
- format_plugin = 'quiet-nothing'
+ format_plugin = "quiet-nothing"
if formatter_class is None:
formatter_class = self.formatter_for(format_plugin)
@@ -313,9 +318,9 @@ class Application(object):
self.file_checker_manager.run()
except exceptions.PluginExecutionFailed as plugin_failed:
print(str(plugin_failed))
- print('Run flake8 with greater verbosity to see more details')
+ print("Run flake8 with greater verbosity to see more details")
self.catastrophic_failure = True
- LOG.info('Finished running')
+ LOG.info("Finished running")
self.file_checker_manager.stop()
self.end_time = time.time()
@@ -325,13 +330,13 @@ class Application(object):
return
time_elapsed = self.end_time - self.start_time
- statistics = [('seconds elapsed', time_elapsed)]
+ statistics = [("seconds elapsed", time_elapsed)]
add_statistic = statistics.append
- for statistic in (defaults.STATISTIC_NAMES + ('files',)):
+ for statistic in defaults.STATISTIC_NAMES + ("files",):
value = self.file_checker_manager.statistics[statistic]
- total_description = 'total ' + statistic + ' processed'
+ total_description = "total " + statistic + " processed"
add_statistic((total_description, value))
- per_second_description = statistic + ' processed per second'
+ per_second_description = statistic + " processed per second"
add_statistic((per_second_description, int(value / time_elapsed)))
self.formatter.show_benchmarks(statistics)
@@ -343,11 +348,14 @@ class Application(object):
This also updates the :attr:`result_count` attribute with the total
number of errors, warnings, and other messages found.
"""
- LOG.info('Reporting errors')
+ LOG.info("Reporting errors")
results = self.file_checker_manager.report()
self.total_result_count, self.result_count = results
- LOG.info('Found a total of %d violations and reported %d',
- self.total_result_count, self.result_count)
+ LOG.info(
+ "Found a total of %d violations and reported %d",
+ self.total_result_count,
+ self.result_count,
+ )
def report_statistics(self):
"""Aggregate and report statistics from this run."""
@@ -367,7 +375,8 @@ class Application(object):
# our legacy API calls to these same methods.
self.parse_preliminary_options_and_args(argv)
flake8.configure_logging(
- self.prelim_opts.verbose, self.prelim_opts.output_file)
+ self.prelim_opts.verbose, self.prelim_opts.output_file
+ )
self.make_config_finder()
self.find_plugins()
self.register_plugin_options()
@@ -402,15 +411,15 @@ class Application(object):
try:
self._run(argv)
except KeyboardInterrupt as exc:
- print('... stopped')
- LOG.critical('Caught keyboard interrupt from user')
+ print("... stopped")
+ LOG.critical("Caught keyboard interrupt from user")
LOG.exception(exc)
self.catastrophic_failure = True
except exceptions.ExecutionError as exc:
- print('There was a critical error during execution of Flake8:')
+ print("There was a critical error during execution of Flake8:")
print(exc.message)
LOG.exception(exc)
self.catastrophic_failure = True
except exceptions.EarlyQuit:
self.catastrophic_failure = True
- print('... stopped while processing files')
+ print("... stopped while processing files")
diff --git a/src/flake8/main/debug.py b/src/flake8/main/debug.py
index ac52494..51bac9a 100644
--- a/src/flake8/main/debug.py
+++ b/src/flake8/main/debug.py
@@ -5,8 +5,9 @@ import json
import platform
-def print_information(option, option_string, value, parser,
- option_manager=None):
+def print_information(
+ option, option_string, value, parser, option_manager=None
+):
"""Print debugging information used in bug reports.
:param option:
@@ -38,13 +39,13 @@ def print_information(option, option_string, value, parser,
def information(option_manager):
"""Generate the information to be printed for the bug report."""
return {
- 'version': option_manager.version,
- 'plugins': plugins_from(option_manager),
- 'dependencies': dependencies(),
- 'platform': {
- 'python_implementation': platform.python_implementation(),
- 'python_version': platform.python_version(),
- 'system': platform.system(),
+ "version": option_manager.version,
+ "plugins": plugins_from(option_manager),
+ "dependencies": dependencies(),
+ "platform": {
+ "python_implementation": platform.python_implementation(),
+ "python_version": platform.python_version(),
+ "system": platform.system(),
},
}
@@ -53,9 +54,9 @@ def plugins_from(option_manager):
"""Generate the list of plugins installed."""
return [
{
- 'plugin': plugin.name,
- 'version': plugin.version,
- 'is_local': plugin.local,
+ "plugin": plugin.name,
+ "version": plugin.version,
+ "is_local": plugin.local,
}
for plugin in sorted(option_manager.registered_plugins)
]
@@ -66,4 +67,4 @@ def dependencies():
# defer this expensive import, not used outside --bug-report
import setuptools
- return [{'dependency': 'setuptools', 'version': setuptools.__version__}]
+ return [{"dependency": "setuptools", "version": setuptools.__version__}]
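
[Editor's note] The information() dictionary above is what --bug-report serializes; roughly the following shape (all values illustrative):

    import json

    info = {
        "version": "3.5.0",
        "plugins": [{"plugin": "pyflakes", "version": "1.6.0", "is_local": False}],
        "dependencies": [{"dependency": "setuptools", "version": "40.0.0"}],
        "platform": {
            "python_implementation": "CPython",
            "python_version": "3.6.5",
            "system": "Linux",
        },
    }
    print(json.dumps(info, indent=2, sort_keys=True))
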
diff --git a/src/flake8/main/git.py b/src/flake8/main/git.py
index 3637feb..eed5c09 100644
--- a/src/flake8/main/git.py
+++ b/src/flake8/main/git.py
@@ -16,7 +16,7 @@ import tempfile
from flake8 import defaults
from flake8 import exceptions
-__all__ = ('hook', 'install')
+__all__ = ("hook", "install")
def hook(lazy=False, strict=False):
@@ -39,10 +39,11 @@ def hook(lazy=False, strict=False):
"""
# NOTE(sigmavirus24): Delay import of application until we need it.
from flake8.main import application
+
app = application.Application()
with make_temporary_directory() as tempdir:
filepaths = list(copy_indexed_files_to(tempdir, lazy))
- app.initialize(['.'])
+ app.initialize(["."])
app.options.exclude = update_excludes(app.options.exclude, tempdir)
app.options._running_from_vcs = True
# Apparently there are times when there are no files to check (e.g.,
@@ -81,22 +82,21 @@ def install():
if git_directory is None or not os.path.exists(git_directory):
return False
- hooks_directory = os.path.join(git_directory, 'hooks')
+ hooks_directory = os.path.join(git_directory, "hooks")
if not os.path.exists(hooks_directory):
os.mkdir(hooks_directory)
pre_commit_file = os.path.abspath(
- os.path.join(hooks_directory, 'pre-commit')
+ os.path.join(hooks_directory, "pre-commit")
)
if os.path.exists(pre_commit_file):
raise exceptions.GitHookAlreadyExists(
- 'File already exists',
- path=pre_commit_file,
+ "File already exists", path=pre_commit_file
)
executable = get_executable()
- with open(pre_commit_file, 'w') as fd:
+ with open(pre_commit_file, "w") as fd:
fd.write(_HOOK_TEMPLATE.format(executable=executable))
# NOTE(sigmavirus24): The following sets:
@@ -108,8 +108,8 @@ def install():
pre_commit_permissions = stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH
os.chmod(pre_commit_file, pre_commit_permissions)
- print('git pre-commit hook installed, for configuration options see')
- print('http://flake8.pycqa.org/en/latest/user/using-hooks.html')
+ print("git pre-commit hook installed, for configuration options see")
+ print("http://flake8.pycqa.org/en/latest/user/using-hooks.html")
return True
@@ -117,11 +117,11 @@ def install():
def get_executable():
if sys.executable is not None:
return sys.executable
- return '/usr/bin/env python'
+ return "/usr/bin/env python"
def find_git_directory():
- rev_parse = piped_process(['git', 'rev-parse', '--git-dir'])
+ rev_parse = piped_process(["git", "rev-parse", "--git-dir"])
(stdout, _) = rev_parse.communicate()
stdout = to_text(stdout)
@@ -146,12 +146,13 @@ def copy_indexed_files_to(temporary_directory, lazy):
def copy_file_to(destination_directory, filepath, contents):
directory, filename = os.path.split(os.path.abspath(filepath))
- temporary_directory = make_temporary_directory_from(destination_directory,
- directory)
+ temporary_directory = make_temporary_directory_from(
+ destination_directory, directory
+ )
if not os.path.exists(temporary_directory):
os.makedirs(temporary_directory)
temporary_filepath = os.path.join(temporary_directory, filename)
- with open(temporary_filepath, 'wb') as fd:
+ with open(temporary_filepath, "wb") as fd:
fd.write(contents)
return temporary_filepath
@@ -164,11 +165,15 @@ def make_temporary_directory_from(destination, directory):
def find_modified_files(lazy):
diff_index_cmd = [
- 'git', 'diff-index', '--cached', '--name-only',
- '--diff-filter=ACMRTUXB', 'HEAD'
+ "git",
+ "diff-index",
+ "--cached",
+ "--name-only",
+ "--diff-filter=ACMRTUXB",
+ "HEAD",
]
if lazy:
- diff_index_cmd.remove('--cached')
+ diff_index_cmd.remove("--cached")
diff_index = piped_process(diff_index_cmd)
(stdout, _) = diff_index.communicate()
@@ -177,11 +182,9 @@ def find_modified_files(lazy):
def find_setup_cfgs(lazy):
- setup_cfg_cmd = [
- 'git', 'ls-files', '--cached', '*setup.cfg'
- ]
+ setup_cfg_cmd = ["git", "ls-files", "--cached", "*setup.cfg"]
if lazy:
- setup_cfg_cmd.remove('--cached')
+ setup_cfg_cmd.remove("--cached")
extra_files = piped_process(setup_cfg_cmd)
(stdout, _) = extra_files.communicate()
stdout = to_text(stdout)
@@ -189,7 +192,7 @@ def find_setup_cfgs(lazy):
def get_staged_contents_from(filename):
- git_show = piped_process(['git', 'show', ':{0}'.format(filename)])
+ git_show = piped_process(["git", "show", ":{0}".format(filename)])
(stdout, _) = git_show.communicate()
return stdout
@@ -203,28 +206,26 @@ def make_temporary_directory():
def to_text(string):
"""Ensure that the string is text."""
- if callable(getattr(string, 'decode', None)):
- return string.decode('utf-8')
+ if callable(getattr(string, "decode", None)):
+ return string.decode("utf-8")
return string
def piped_process(command):
return subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
def git_config_for(parameter):
- config = piped_process(['git', 'config', '--get', '--bool', parameter])
+ config = piped_process(["git", "config", "--get", "--bool", parameter])
(stdout, _) = config.communicate()
return to_text(stdout).strip()
def config_for(parameter):
- environment_variable = 'flake8_{0}'.format(parameter).upper()
- git_variable = 'flake8.{0}'.format(parameter)
+ environment_variable = "flake8_{0}".format(parameter).upper()
+ git_variable = "flake8.{0}".format(parameter)
value = os.environ.get(environment_variable, git_config_for(git_variable))
return value.lower() in defaults.TRUTHY_VALUES
@@ -232,7 +233,8 @@ def config_for(parameter):
def update_excludes(exclude_list, temporary_directory_path):
return [
(temporary_directory_path + pattern)
- if os.path.isabs(pattern) else pattern
+ if os.path.isabs(pattern)
+ else pattern
for pattern in exclude_list
]
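
[Editor's note] The install() routine above backs the --install-hook flag registered in options.py further down; installing the git hook looks like (illustrative invocation):

    flake8 --install-hook git
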
diff --git a/src/flake8/main/mercurial.py b/src/flake8/main/mercurial.py
index 344c9f7..65ef8ce 100644
--- a/src/flake8/main/mercurial.py
+++ b/src/flake8/main/mercurial.py
@@ -10,7 +10,7 @@ import subprocess
from flake8 import exceptions as exc
-__all__ = ('hook', 'install')
+__all__ = ("hook", "install")
def hook(ui, repo, **kwargs):
@@ -24,13 +24,14 @@ def hook(ui, repo, **kwargs):
avoid using it all the same.
"""
from flake8.main import application
+
hgrc = find_hgrc(create_if_missing=False)
if hgrc is None:
- print('Cannot locate your root mercurial repository.')
+ print("Cannot locate your root mercurial repository.")
raise SystemExit(True)
hgconfig = configparser_for(hgrc)
- strict = hgconfig.get('flake8', 'strict', fallback=True)
+ strict = hgconfig.get("flake8", "strict", fallback=True)
filenames = list(get_filenames_from(repo, kwargs))
@@ -68,42 +69,40 @@ def install():
hgconfig = configparser_for(hgrc)
- if not hgconfig.has_section('hooks'):
- hgconfig.add_section('hooks')
+ if not hgconfig.has_section("hooks"):
+ hgconfig.add_section("hooks")
- if hgconfig.has_option('hooks', 'commit'):
+ if hgconfig.has_option("hooks", "commit"):
raise exc.MercurialCommitHookAlreadyExists(
- path=hgrc,
- value=hgconfig.get('hooks', 'commit'),
+ path=hgrc, value=hgconfig.get("hooks", "commit")
)
- if hgconfig.has_option('hooks', 'qrefresh'):
+ if hgconfig.has_option("hooks", "qrefresh"):
raise exc.MercurialQRefreshHookAlreadyExists(
- path=hgrc,
- value=hgconfig.get('hooks', 'qrefresh'),
+ path=hgrc, value=hgconfig.get("hooks", "qrefresh")
)
- hgconfig.set('hooks', 'commit', 'python:flake8.main.mercurial.hook')
- hgconfig.set('hooks', 'qrefresh', 'python:flake8.main.mercurial.hook')
+ hgconfig.set("hooks", "commit", "python:flake8.main.mercurial.hook")
+ hgconfig.set("hooks", "qrefresh", "python:flake8.main.mercurial.hook")
- if not hgconfig.has_section('flake8'):
- hgconfig.add_section('flake8')
+ if not hgconfig.has_section("flake8"):
+ hgconfig.add_section("flake8")
- if not hgconfig.has_option('flake8', 'strict'):
- hgconfig.set('flake8', 'strict', False)
+ if not hgconfig.has_option("flake8", "strict"):
+ hgconfig.set("flake8", "strict", False)
- with open(hgrc, 'w') as fd:
+ with open(hgrc, "w") as fd:
hgconfig.write(fd)
- print('mercurial hooks installed, for configuration options see')
- print('http://flake8.pycqa.org/en/latest/user/using-hooks.html')
+ print("mercurial hooks installed, for configuration options see")
+ print("http://flake8.pycqa.org/en/latest/user/using-hooks.html")
return True
def get_filenames_from(repository, kwargs):
seen_filenames = set()
- node = kwargs['node']
+ node = kwargs["node"]
for revision in range(repository[node], len(repository)):
for filename in repository[revision].files():
full_filename = os.path.join(repository.root, filename)
@@ -113,30 +112,26 @@ def get_filenames_from(repository, kwargs):
continue
seen_filenames.add(full_filename)
- if full_filename.endswith('.py'):
+ if full_filename.endswith(".py"):
yield full_filename
def find_hgrc(create_if_missing=False):
root = subprocess.Popen(
- ['hg', 'root'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
+ ["hg", "root"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
(hg_directory, _) = root.communicate()
- if callable(getattr(hg_directory, 'decode', None)):
- hg_directory = hg_directory.decode('utf-8')
+ if callable(getattr(hg_directory, "decode", None)):
+ hg_directory = hg_directory.decode("utf-8")
if not os.path.isdir(hg_directory):
return None
- hgrc = os.path.abspath(
- os.path.join(hg_directory, '.hg', 'hgrc')
- )
+ hgrc = os.path.abspath(os.path.join(hg_directory, ".hg", "hgrc"))
if not os.path.exists(hgrc):
if create_if_missing:
- open(hgrc, 'w').close()
+ open(hgrc, "w").close()
else:
return None
diff --git a/src/flake8/main/options.py b/src/flake8/main/options.py
index b13a4f3..73b3932 100644
--- a/src/flake8/main/options.py
+++ b/src/flake8/main/options.py
@@ -39,50 +39,66 @@ def register_default_options(option_manager):
# pep8 options
add_option(
- '-v', '--verbose', default=0, action='count',
+ "-v",
+ "--verbose",
+ default=0,
+ action="count",
parse_from_config=True,
- help='Print more information about what is happening in flake8.'
- ' This option is repeatable and will increase verbosity each '
- 'time it is repeated.',
+ help="Print more information about what is happening in flake8."
+ " This option is repeatable and will increase verbosity each "
+ "time it is repeated.",
)
add_option(
- '-q', '--quiet', default=0, action='count',
+ "-q",
+ "--quiet",
+ default=0,
+ action="count",
parse_from_config=True,
- help='Report only file names, or nothing. This option is repeatable.',
+ help="Report only file names, or nothing. This option is repeatable.",
)
add_option(
- '--count', action='store_true', parse_from_config=True,
- help='Print total number of errors and warnings to standard error and'
- ' set the exit code to 1 if total is not empty.',
+ "--count",
+ action="store_true",
+ parse_from_config=True,
+ help="Print total number of errors and warnings to standard error and"
+ " set the exit code to 1 if total is not empty.",
)
add_option(
- '--diff', action='store_true',
- help='Report changes only within line number ranges in the unified '
- 'diff provided on standard in by the user.',
+ "--diff",
+ action="store_true",
+ help="Report changes only within line number ranges in the unified "
+ "diff provided on standard in by the user.",
)
add_option(
- '--exclude', metavar='patterns', default=','.join(defaults.EXCLUDE),
- comma_separated_list=True, parse_from_config=True,
+ "--exclude",
+ metavar="patterns",
+ default=",".join(defaults.EXCLUDE),
+ comma_separated_list=True,
+ parse_from_config=True,
normalize_paths=True,
- help='Comma-separated list of files or directories to exclude.'
- ' (Default: %default)',
+ help="Comma-separated list of files or directories to exclude."
+ " (Default: %default)",
)
add_option(
- '--filename', metavar='patterns', default='*.py',
- parse_from_config=True, comma_separated_list=True,
- help='Only check for filenames matching the patterns in this comma-'
- 'separated list. (Default: %default)',
+ "--filename",
+ metavar="patterns",
+ default="*.py",
+ parse_from_config=True,
+ comma_separated_list=True,
+ help="Only check for filenames matching the patterns in this comma-"
+ "separated list. (Default: %default)",
)
add_option(
- '--stdin-display-name', default='stdin',
- help='The name used when reporting errors from code passed via stdin.'
- ' This is useful for editors piping the file contents to flake8.'
- ' (Default: %default)',
+ "--stdin-display-name",
+ default="stdin",
+ help="The name used when reporting errors from code passed via stdin."
+ " This is useful for editors piping the file contents to flake8."
+ " (Default: %default)",
)
# TODO(sigmavirus24): Figure out --first/--repeat
@@ -91,136 +107,183 @@ def register_default_options(option_manager):
# freely provide a format string and that will break if we restrict their
# choices.
add_option(
- '--format', metavar='format', default='default',
+ "--format",
+ metavar="format",
+ default="default",
parse_from_config=True,
- help='Format errors according to the chosen formatter.',
+ help="Format errors according to the chosen formatter.",
)
add_option(
- '--hang-closing', action='store_true', parse_from_config=True,
- help='Hang closing bracket instead of matching indentation of opening'
- " bracket's line.",
+ "--hang-closing",
+ action="store_true",
+ parse_from_config=True,
+ help="Hang closing bracket instead of matching indentation of opening"
+ " bracket's line.",
)
add_option(
- '--ignore', metavar='errors', default=','.join(defaults.IGNORE),
- parse_from_config=True, comma_separated_list=True,
- help='Comma-separated list of errors and warnings to ignore (or skip).'
- ' For example, ``--ignore=E4,E51,W234``. (Default: %default)',
+ "--ignore",
+ metavar="errors",
+ default=",".join(defaults.IGNORE),
+ parse_from_config=True,
+ comma_separated_list=True,
+ help="Comma-separated list of errors and warnings to ignore (or skip)."
+ " For example, ``--ignore=E4,E51,W234``. (Default: %default)",
)
add_option(
- '--extend-ignore', metavar='errors', default='',
- parse_from_config=True, comma_separated_list=True,
- help='Comma-separated list of errors and warnings to add to the list'
- ' of ignored ones. For example, ``--extend-ignore=E4,E51,W234``.',
+ "--extend-ignore",
+ metavar="errors",
+ default="",
+ parse_from_config=True,
+ comma_separated_list=True,
+ help="Comma-separated list of errors and warnings to add to the list"
+ " of ignored ones. For example, ``--extend-ignore=E4,E51,W234``.",
)
add_option(
- '--max-line-length', type='int', metavar='n',
- default=defaults.MAX_LINE_LENGTH, parse_from_config=True,
- help='Maximum allowed line length for the entirety of this run. '
- '(Default: %default)',
+ "--max-line-length",
+ type="int",
+ metavar="n",
+ default=defaults.MAX_LINE_LENGTH,
+ parse_from_config=True,
+ help="Maximum allowed line length for the entirety of this run. "
+ "(Default: %default)",
)
add_option(
- '--select', metavar='errors', default=','.join(defaults.SELECT),
- parse_from_config=True, comma_separated_list=True,
- help='Comma-separated list of errors and warnings to enable.'
- ' For example, ``--select=E4,E51,W234``. (Default: %default)',
+ "--select",
+ metavar="errors",
+ default=",".join(defaults.SELECT),
+ parse_from_config=True,
+ comma_separated_list=True,
+ help="Comma-separated list of errors and warnings to enable."
+ " For example, ``--select=E4,E51,W234``. (Default: %default)",
)
add_option(
- '--disable-noqa', default=False, parse_from_config=True,
- action='store_true',
+ "--disable-noqa",
+ default=False,
+ parse_from_config=True,
+ action="store_true",
help='Disable the effect of "# noqa". This will report errors on '
- 'lines with "# noqa" at the end.'
+ 'lines with "# noqa" at the end.',
)
# TODO(sigmavirus24): Decide what to do about --show-pep8
add_option(
- '--show-source', action='store_true', parse_from_config=True,
- help='Show the source generate each error or warning.',
+ "--show-source",
+ action="store_true",
+ parse_from_config=True,
+ help="Show the source generate each error or warning.",
)
add_option(
- '--statistics', action='store_true', parse_from_config=True,
- help='Count errors and warnings.',
+ "--statistics",
+ action="store_true",
+ parse_from_config=True,
+ help="Count errors and warnings.",
)
# Flake8 options
add_option(
- '--enable-extensions', default='', parse_from_config=True,
- comma_separated_list=True, type='string',
- help='Enable plugins and extensions that are otherwise disabled '
- 'by default',
+ "--enable-extensions",
+ default="",
+ parse_from_config=True,
+ comma_separated_list=True,
+ type="string",
+ help="Enable plugins and extensions that are otherwise disabled "
+ "by default",
)
add_option(
- '--exit-zero', action='store_true',
+ "--exit-zero",
+ action="store_true",
help='Exit with status code "0" even if there are errors.',
)
add_option(
- '--install-hook', action='callback', type='choice',
- choices=vcs.choices(), callback=vcs.install,
- help='Install a hook that is run prior to a commit for the supported '
- 'version control system.'
+ "--install-hook",
+ action="callback",
+ type="choice",
+ choices=vcs.choices(),
+ callback=vcs.install,
+ help="Install a hook that is run prior to a commit for the supported "
+ "version control system.",
)
add_option(
- '-j', '--jobs', type='string', default='auto', parse_from_config=True,
- help='Number of subprocesses to use to run checks in parallel. '
- 'This is ignored on Windows. The default, "auto", will '
- 'auto-detect the number of processors available to use.'
- ' (Default: %default)',
+ "-j",
+ "--jobs",
+ type="string",
+ default="auto",
+ parse_from_config=True,
+ help="Number of subprocesses to use to run checks in parallel. "
+ 'This is ignored on Windows. The default, "auto", will '
+ "auto-detect the number of processors available to use."
+ " (Default: %default)",
)
add_option(
- '--output-file', default=None, type='string', parse_from_config=True,
+ "--output-file",
+ default=None,
+ type="string",
+ parse_from_config=True,
# callback=callbacks.redirect_stdout,
- help='Redirect report to a file.',
+ help="Redirect report to a file.",
)
add_option(
- '--tee', default=False, parse_from_config=True, action='store_true',
- help='Write to stdout and output-file.',
+ "--tee",
+ default=False,
+ parse_from_config=True,
+ action="store_true",
+ help="Write to stdout and output-file.",
)
# Config file options
add_option(
- '--append-config', action='append',
- help='Provide extra config files to parse in addition to the files '
- 'found by Flake8 by default. These files are the last ones read '
- 'and so they take the highest precedence when multiple files '
- 'provide the same option.',
+ "--append-config",
+ action="append",
+ help="Provide extra config files to parse in addition to the files "
+ "found by Flake8 by default. These files are the last ones read "
+ "and so they take the highest precedence when multiple files "
+ "provide the same option.",
)
add_option(
- '--config', default=None,
- help='Path to the config file that will be the authoritative config '
- 'source. This will cause Flake8 to ignore all other '
- 'configuration files.'
+ "--config",
+ default=None,
+ help="Path to the config file that will be the authoritative config "
+ "source. This will cause Flake8 to ignore all other "
+ "configuration files.",
)
add_option(
- '--isolated', default=False, action='store_true',
- help='Ignore all configuration files.',
+ "--isolated",
+ default=False,
+ action="store_true",
+ help="Ignore all configuration files.",
)
# Benchmarking
add_option(
- '--benchmark', default=False, action='store_true',
- help='Print benchmark information about this run of Flake8',
+ "--benchmark",
+ default=False,
+ action="store_true",
+ help="Print benchmark information about this run of Flake8",
)
# Debugging
add_option(
- '--bug-report', action='callback', callback=debug.print_information,
- callback_kwargs={'option_manager': option_manager},
- help='Print information necessary when preparing a bug report',
+ "--bug-report",
+ action="callback",
+ callback=debug.print_information,
+ callback_kwargs={"option_manager": option_manager},
+ help="Print information necessary when preparing a bug report",
)
diff --git a/src/flake8/main/setuptools_command.py b/src/flake8/main/setuptools_command.py
index a5d23a5..0a76057 100644
--- a/src/flake8/main/setuptools_command.py
+++ b/src/flake8/main/setuptools_command.py
@@ -11,7 +11,7 @@ UNSET = object()
class Flake8(setuptools.Command):
"""Run Flake8 via setuptools/distutils for registered modules."""
- description = 'Run Flake8 on modules registered in setup.py'
+ description = "Run Flake8 on modules registered in setup.py"
# NOTE(sigmavirus24): If we populated this with a list of tuples, users
# could do something like ``python setup.py flake8 --ignore=E123,E234``
# but we would have to redefine it and we can't define it dynamically.
@@ -39,23 +39,26 @@ class Flake8(setuptools.Command):
value = getattr(self, name, UNSET)
if value is UNSET:
continue
- setattr(self.flake8.options,
- name,
- option.normalize_from_setuptools(value))
+ setattr(
+ self.flake8.options,
+ name,
+ option.normalize_from_setuptools(value),
+ )
def package_files(self):
"""Collect the files/dirs included in the registered modules."""
seen_package_directories = ()
directories = self.distribution.package_dir or {}
- empty_directory_exists = '' in directories
+ empty_directory_exists = "" in directories
packages = self.distribution.packages or []
for package in packages:
package_directory = package
if package in directories:
package_directory = directories[package]
elif empty_directory_exists:
- package_directory = os.path.join(directories[''],
- package_directory)
+ package_directory = os.path.join(
+ directories[""], package_directory
+ )
# NOTE(sigmavirus24): Do not collect submodules, e.g.,
# if we have:
@@ -66,13 +69,13 @@ class Flake8(setuptools.Command):
if package_directory.startswith(seen_package_directories):
continue
- seen_package_directories += (package_directory + '.',)
+ seen_package_directories += (package_directory + ".",)
yield package_directory
def module_files(self):
"""Collect the files listed as py_modules."""
modules = self.distribution.py_modules or []
- filename_from = '{0}.py'.format
+ filename_from = "{0}.py".format
for module in modules:
yield filename_from(module)
@@ -84,7 +87,7 @@ class Flake8(setuptools.Command):
for module in self.module_files():
yield module
- yield 'setup.py'
+ yield "setup.py"
def run(self):
"""Run the Flake8 application."""
diff --git a/src/flake8/main/vcs.py b/src/flake8/main/vcs.py
index 73dbaa9..f7f139a 100644
--- a/src/flake8/main/vcs.py
+++ b/src/flake8/main/vcs.py
@@ -8,10 +8,7 @@ from flake8.main import mercurial
# as plugins, e.g., adding a flake8.vcs entry-point. In that case, this
# dictionary should disappear, and this module might contain more code for
# managing those bits (in conjunction with flake8.plugins.manager).
-_INSTALLERS = {
- 'git': git.install,
- 'mercurial': mercurial.install,
-}
+_INSTALLERS = {"git": git.install, "mercurial": mercurial.install}
def install(option, option_string, value, parser):
@@ -30,7 +27,7 @@ def install(option, option_string, value, parser):
errored = True
if not successful:
- print('Could not find the {0} directory'.format(value))
+ print("Could not find the {0} directory".format(value))
raise SystemExit(not successful and errored)
diff --git a/src/flake8/options/aggregator.py b/src/flake8/options/aggregator.py
index 5b8ab9c..304f53c 100644
--- a/src/flake8/options/aggregator.py
+++ b/src/flake8/options/aggregator.py
@@ -37,26 +37,28 @@ def aggregate_options(manager, config_finder, arglist=None, values=None):
# Make our new configuration file mergerator
config_parser = config.MergedConfigParser(
- option_manager=manager,
- config_finder=config_finder,
+ option_manager=manager, config_finder=config_finder
)
# Get the parsed config
- parsed_config = config_parser.parse(original_values.config,
- original_values.isolated)
+ parsed_config = config_parser.parse(
+ original_values.config, original_values.isolated
+ )
# Extend the default ignore value with the extended default ignore list,
# registered by plugins.
extended_default_ignore = manager.extended_default_ignore.copy()
- LOG.debug('Extended default ignore list: %s',
- list(extended_default_ignore))
+ LOG.debug(
+ "Extended default ignore list: %s", list(extended_default_ignore)
+ )
extended_default_ignore.update(default_values.ignore)
default_values.ignore = list(extended_default_ignore)
- LOG.debug('Merged default ignore list: %s', default_values.ignore)
+ LOG.debug("Merged default ignore list: %s", default_values.ignore)
extended_default_select = manager.extended_default_select.copy()
- LOG.debug('Extended default select list: %s',
- list(extended_default_select))
+ LOG.debug(
+ "Extended default select list: %s", list(extended_default_select)
+ )
default_values.extended_default_select = extended_default_select
# Merge values parsed from config onto the default values returned
@@ -67,10 +69,12 @@ def aggregate_options(manager, config_finder, arglist=None, values=None):
if not hasattr(default_values, config_name):
dest_name = config_parser.config_options[config_name].dest
- LOG.debug('Overriding default value of (%s) for "%s" with (%s)',
- getattr(default_values, dest_name, None),
- dest_name,
- value)
+ LOG.debug(
+ 'Overriding default value of (%s) for "%s" with (%s)',
+ getattr(default_values, dest_name, None),
+ dest_name,
+ value,
+ )
# Override the default values with the config values
setattr(default_values, dest_name, value)
diff --git a/src/flake8/options/config.py b/src/flake8/options/config.py
index ba9442a..9325495 100644
--- a/src/flake8/options/config.py
+++ b/src/flake8/options/config.py
@@ -9,13 +9,13 @@ from flake8 import utils
LOG = logging.getLogger(__name__)
-__all__ = ('ConfigFileFinder', 'MergedConfigParser')
+__all__ = ("ConfigFileFinder", "MergedConfigParser")
class ConfigFileFinder(object):
"""Encapsulate the logic for finding and reading config files."""
- PROJECT_FILENAMES = ('setup.cfg', 'tox.ini')
+ PROJECT_FILENAMES = ("setup.cfg", "tox.ini")
def __init__(self, program_name, args, extra_config_files):
"""Initialize object to find config files.
@@ -31,25 +31,27 @@ class ConfigFileFinder(object):
extra_config_files = extra_config_files or []
self.extra_config_files = [
# Ensure the paths are absolute paths for local_config_files
- os.path.abspath(f) for f in extra_config_files
+ os.path.abspath(f)
+ for f in extra_config_files
]
# Platform specific settings
- self.is_windows = sys.platform == 'win32'
- self.xdg_home = os.environ.get('XDG_CONFIG_HOME',
- os.path.expanduser('~/.config'))
+ self.is_windows = sys.platform == "win32"
+ self.xdg_home = os.environ.get(
+ "XDG_CONFIG_HOME", os.path.expanduser("~/.config")
+ )
# Look for '.<program_name>' files
- self.program_config = '.' + program_name
+ self.program_config = "." + program_name
self.program_name = program_name
# List of filenames to find in the local/project directory
- self.project_filenames = ('setup.cfg', 'tox.ini', self.program_config)
+ self.project_filenames = ("setup.cfg", "tox.ini", self.program_config)
self.local_directory = os.path.abspath(os.curdir)
if not args:
- args = ['.']
+ args = ["."]
self.parent = self.tail = os.path.abspath(os.path.commonprefix(args))
# caches to avoid double-reading config files
@@ -61,7 +63,7 @@ class ConfigFileFinder(object):
@staticmethod
def _read_config(files):
config = configparser.RawConfigParser()
- if isinstance(files, (str, type(u''))):
+ if isinstance(files, (str, type(u""))):
files = [files]
found_files = []
@@ -69,13 +71,17 @@ class ConfigFileFinder(object):
try:
found_files.extend(config.read(filename))
except UnicodeDecodeError:
- LOG.exception("There was an error decoding a config file."
- "The file with a problem was %s.",
- filename)
+ LOG.exception(
+ "There was an error decoding a config file."
+ "The file with a problem was %s.",
+ filename,
+ )
except configparser.ParsingError:
- LOG.exception("There was an error trying to parse a config "
- "file. The file with a problem was %s.",
- filename)
+ LOG.exception(
+ "There was an error trying to parse a config "
+ "file. The file with a problem was %s.",
+ filename,
+ )
return (config, found_files)
def cli_config(self, files):
@@ -83,7 +89,7 @@ class ConfigFileFinder(object):
if files not in self._cli_configs:
config, found_files = self._read_config(files)
if found_files:
- LOG.debug('Found cli configuration files: %s', found_files)
+ LOG.debug("Found cli configuration files: %s", found_files)
self._cli_configs[files] = config
return self._cli_configs[files]
@@ -94,8 +100,9 @@ class ConfigFileFinder(object):
found_config_files = False
while tail and not found_config_files:
for project_filename in self.project_filenames:
- filename = os.path.abspath(os.path.join(parent,
- project_filename))
+ filename = os.path.abspath(
+ os.path.join(parent, project_filename)
+ )
if os.path.exists(filename):
yield filename
found_config_files = True
@@ -117,8 +124,7 @@ class ConfigFileFinder(object):
"""
exists = os.path.exists
return [
- filename
- for filename in self.generate_possible_local_files()
+ filename for filename in self.generate_possible_local_files()
] + [f for f in self.extra_config_files if exists(f)]
def local_configs_with_files(self):
@@ -129,7 +135,7 @@ class ConfigFileFinder(object):
if self._local_configs is None:
config, found_files = self._read_config(self.local_config_files())
if found_files:
- LOG.debug('Found local configuration files: %s', found_files)
+ LOG.debug("Found local configuration files: %s", found_files)
self._local_configs = config
self._local_found_files = found_files
return (self._local_configs, self._local_found_files)
@@ -141,7 +147,7 @@ class ConfigFileFinder(object):
def user_config_file(self):
"""Find the user-level config file."""
if self.is_windows:
- return os.path.expanduser('~\\' + self.program_config)
+ return os.path.expanduser("~\\" + self.program_config)
return os.path.join(self.xdg_home, self.program_name)
def user_config(self):
@@ -149,7 +155,7 @@ class ConfigFileFinder(object):
if self._user_config is None:
config, found_files = self._read_config(self.user_config_file())
if found_files:
- LOG.debug('Found user configuration files: %s', found_files)
+ LOG.debug("Found user configuration files: %s", found_files)
self._user_config = config
return self._user_config
@@ -164,10 +170,10 @@ class MergedConfigParser(object):
#: Set of types that should use the
#: :meth:`~configparser.RawConfigParser.getint` method.
- GETINT_TYPES = {'int', 'count'}
+ GETINT_TYPES = {"int", "count"}
#: Set of actions that should use the
#: :meth:`~configparser.RawConfigParser.getbool` method.
- GETBOOL_ACTIONS = {'store_true', 'store_false'}
+ GETBOOL_ACTIONS = {"store_true", "store_false"}
def __init__(self, option_manager, config_finder):
"""Initialize the MergedConfigParser instance.
@@ -189,26 +195,32 @@ class MergedConfigParser(object):
def _normalize_value(self, option, value):
final_value = option.normalize(
+ value, self.config_finder.local_directory
+ )
+ LOG.debug(
+ '%r has been normalized to %r for option "%s"',
value,
- self.config_finder.local_directory,
+ final_value,
+ option.config_name,
)
- LOG.debug('%r has been normalized to %r for option "%s"',
- value, final_value, option.config_name)
return final_value
def _parse_config(self, config_parser):
config_dict = {}
for option_name in config_parser.options(self.program_name):
if option_name not in self.config_options:
- LOG.debug('Option "%s" is not registered. Ignoring.',
- option_name)
+ LOG.debug(
+ 'Option "%s" is not registered. Ignoring.', option_name
+ )
continue
option = self.config_options[option_name]
# Use the appropriate method to parse the config value
method = config_parser.get
- if (option.type in self.GETINT_TYPES or
- option.action in self.GETINT_TYPES):
+ if (
+ option.type in self.GETINT_TYPES
+ or option.action in self.GETINT_TYPES
+ ):
method = config_parser.getint
elif option.action in self.GETBOOL_ACTIONS:
method = config_parser.getboolean
@@ -229,33 +241,39 @@ class MergedConfigParser(object):
"""Parse and return the local configuration files."""
config = self.config_finder.local_configs()
if not self.is_configured_by(config):
- LOG.debug('Local configuration files have no %s section',
- self.program_name)
+ LOG.debug(
+ "Local configuration files have no %s section",
+ self.program_name,
+ )
return {}
- LOG.debug('Parsing local configuration files.')
+ LOG.debug("Parsing local configuration files.")
return self._parse_config(config)
def parse_user_config(self):
"""Parse and return the user configuration files."""
config = self.config_finder.user_config()
if not self.is_configured_by(config):
- LOG.debug('User configuration files have no %s section',
- self.program_name)
+ LOG.debug(
+ "User configuration files have no %s section",
+ self.program_name,
+ )
return {}
- LOG.debug('Parsing user configuration files.')
+ LOG.debug("Parsing user configuration files.")
return self._parse_config(config)
def parse_cli_config(self, config_path):
"""Parse and return the file specified by --config."""
config = self.config_finder.cli_config(config_path)
if not self.is_configured_by(config):
- LOG.debug('CLI configuration files have no %s section',
- self.program_name)
+ LOG.debug(
+ "CLI configuration files have no %s section",
+ self.program_name,
+ )
return {}
- LOG.debug('Parsing CLI configuration files.')
+ LOG.debug("Parsing CLI configuration files.")
return self._parse_config(config)
def merge_user_and_local_config(self):
@@ -293,14 +311,19 @@ class MergedConfigParser(object):
dict
"""
if isolated:
- LOG.debug('Refusing to parse configuration files due to user-'
- 'requested isolation')
+ LOG.debug(
+ "Refusing to parse configuration files due to user-"
+ "requested isolation"
+ )
return {}
if cli_config:
- LOG.debug('Ignoring user and locally found configuration files. '
- 'Reading only configuration from "%s" specified via '
- '--config by the user', cli_config)
+ LOG.debug(
+ "Ignoring user and locally found configuration files. "
+ 'Reading only configuration from "%s" specified via '
+ "--config by the user",
+ cli_config,
+ )
return self.parse_cli_config(cli_config)
return self.merge_user_and_local_config()
@@ -325,13 +348,18 @@ def get_local_plugins(config_finder, cli_config=None, isolated=False):
"""
local_plugins = LocalPlugins(extension=[], report=[], paths=[])
if isolated:
- LOG.debug('Refusing to look for local plugins in configuration'
- 'files due to user-requested isolation')
+ LOG.debug(
+ "Refusing to look for local plugins in configuration"
+ "files due to user-requested isolation"
+ )
return local_plugins
if cli_config:
- LOG.debug('Reading local plugins only from "%s" specified via '
- '--config by the user', cli_config)
+ LOG.debug(
+ 'Reading local plugins only from "%s" specified via '
+ "--config by the user",
+ cli_config,
+ )
config = config_finder.cli_config(cli_config)
config_files = [cli_config]
else:
@@ -339,28 +367,31 @@ def get_local_plugins(config_finder, cli_config=None, isolated=False):
base_dirs = {os.path.dirname(cf) for cf in config_files}
- section = '%s:local-plugins' % config_finder.program_name
- for plugin_type in ['extension', 'report']:
+ section = "%s:local-plugins" % config_finder.program_name
+ for plugin_type in ["extension", "report"]:
if config.has_option(section, plugin_type):
local_plugins_string = config.get(section, plugin_type).strip()
plugin_type_list = getattr(local_plugins, plugin_type)
- plugin_type_list.extend(utils.parse_comma_separated_list(
- local_plugins_string,
- regexp=utils.LOCAL_PLUGIN_LIST_RE,
- ))
- if config.has_option(section, 'paths'):
+ plugin_type_list.extend(
+ utils.parse_comma_separated_list(
+ local_plugins_string, regexp=utils.LOCAL_PLUGIN_LIST_RE
+ )
+ )
+ if config.has_option(section, "paths"):
raw_paths = utils.parse_comma_separated_list(
- config.get(section, 'paths').strip()
+ config.get(section, "paths").strip()
)
norm_paths = []
for base_dir in base_dirs:
norm_paths.extend(
- path for path in
- utils.normalize_paths(raw_paths, parent=base_dir)
+ path
+ for path in utils.normalize_paths(raw_paths, parent=base_dir)
if os.path.exists(path)
)
local_plugins.paths.extend(norm_paths)
return local_plugins
-LocalPlugins = collections.namedtuple('LocalPlugins', 'extension report paths')
+LocalPlugins = collections.namedtuple(
+ "LocalPlugins", "extension report paths"
+)
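
For orientation, the precedence MergedConfigParser implements above is: the user-level file is read first, the project-level files (setup.cfg, tox.ini, .flake8) plus any --append-config extras override it, a file passed via --config replaces everything, and --isolated skips configuration entirely. A toy sketch of that layering, with purely illustrative names:

    def layered_config(user_opts, local_opts, cli_opts=None, isolated=False):
        # Later update() calls win: user < local/appended < --config.
        if isolated:
            return {}
        if cli_opts is not None:
            return dict(cli_opts)
        merged = {}
        merged.update(user_opts)
        merged.update(local_opts)
        return merged
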
diff --git a/src/flake8/options/manager.py b/src/flake8/options/manager.py
index 5b4796f..3f4e883 100644
--- a/src/flake8/options/manager.py
+++ b/src/flake8/options/manager.py
@@ -11,15 +11,28 @@ LOG = logging.getLogger(__name__)
class Option(object):
"""Our wrapper around an optparse.Option object to add features."""
- def __init__(self, short_option_name=None, long_option_name=None,
- # Options below here are taken from the optparse.Option class
- action=None, default=None, type=None, dest=None,
- nargs=None, const=None, choices=None, callback=None,
- callback_args=None, callback_kwargs=None, help=None,
- metavar=None,
- # Options below here are specific to Flake8
- parse_from_config=False, comma_separated_list=False,
- normalize_paths=False):
+ def __init__(
+ self,
+ short_option_name=None,
+ long_option_name=None,
+ # Options below here are taken from the optparse.Option class
+ action=None,
+ default=None,
+ type=None,
+ dest=None,
+ nargs=None,
+ const=None,
+ choices=None,
+ callback=None,
+ callback_args=None,
+ callback_kwargs=None,
+ help=None,
+ metavar=None,
+ # Options below here are specific to Flake8
+ parse_from_config=False,
+ comma_separated_list=False,
+ normalize_paths=False,
+ ):
"""Initialize an Option instance wrapping optparse.Option.
The following are all passed directly through to optparse.
@@ -73,18 +86,18 @@ class Option(object):
x for x in (short_option_name, long_option_name) if x is not None
]
self.option_kwargs = {
- 'action': action,
- 'default': default,
- 'type': type,
- 'dest': self._make_dest(dest),
- 'nargs': nargs,
- 'const': const,
- 'choices': choices,
- 'callback': callback,
- 'callback_args': callback_args,
- 'callback_kwargs': callback_kwargs,
- 'help': help,
- 'metavar': metavar,
+ "action": action,
+ "default": default,
+ "type": type,
+ "dest": self._make_dest(dest),
+ "nargs": nargs,
+ "const": const,
+ "choices": choices,
+ "callback": callback,
+ "callback_args": callback_args,
+ "callback_kwargs": callback_kwargs,
+ "help": help,
+ "metavar": metavar,
}
# Set attributes for our option arguments
for key, value in self.option_kwargs.items():
@@ -98,27 +111,32 @@ class Option(object):
self.config_name = None
if parse_from_config:
if not long_option_name:
- raise ValueError('When specifying parse_from_config=True, '
- 'a long_option_name must also be specified.')
- self.config_name = long_option_name[2:].replace('-', '_')
+ raise ValueError(
+ "When specifying parse_from_config=True, "
+ "a long_option_name must also be specified."
+ )
+ self.config_name = long_option_name[2:].replace("-", "_")
self._opt = None
def __repr__(self): # noqa: D105
return (
- 'Option({0}, {1}, action={action}, default={default}, '
- 'dest={dest}, type={type}, callback={callback}, help={help},'
- ' callback={callback}, callback_args={callback_args}, '
- 'callback_kwargs={callback_kwargs}, metavar={metavar})'
- ).format(self.short_option_name, self.long_option_name,
- **self.option_kwargs)
+ "Option({0}, {1}, action={action}, default={default}, "
+ "dest={dest}, type={type}, callback={callback}, help={help},"
+ " callback={callback}, callback_args={callback_args}, "
+ "callback_kwargs={callback_kwargs}, metavar={metavar})"
+ ).format(
+ self.short_option_name,
+ self.long_option_name,
+ **self.option_kwargs
+ )
def _make_dest(self, dest):
if dest:
return dest
if self.long_option_name:
- return self.long_option_name[2:].replace('-', '_')
+ return self.long_option_name[2:].replace("-", "_")
return self.short_option_name[1]
def normalize(self, value, *normalize_args):
@@ -136,33 +154,36 @@ class Option(object):
def normalize_from_setuptools(self, value):
"""Normalize the value received from setuptools."""
value = self.normalize(value)
- if self.type == 'int' or self.action == 'count':
+ if self.type == "int" or self.action == "count":
return int(value)
- if self.action in ('store_true', 'store_false'):
+ if self.action in ("store_true", "store_false"):
value = str(value).upper()
- if value in ('1', 'T', 'TRUE', 'ON'):
+ if value in ("1", "T", "TRUE", "ON"):
return True
- if value in ('0', 'F', 'FALSE', 'OFF'):
+ if value in ("0", "F", "FALSE", "OFF"):
return False
return value
def to_optparse(self):
"""Convert a Flake8 Option to an optparse Option."""
if self._opt is None:
- self._opt = optparse.Option(*self.option_args,
- **self.option_kwargs)
+ self._opt = optparse.Option(
+ *self.option_args, **self.option_kwargs
+ )
return self._opt
-PluginVersion = collections.namedtuple("PluginVersion",
- ["name", "version", "local"])
+PluginVersion = collections.namedtuple(
+ "PluginVersion", ["name", "version", "local"]
+)
class OptionManager(object):
"""Manage Options and OptionParser while adding post-processing."""
- def __init__(self, prog=None, version=None,
- usage='%prog [options] file file ...'):
+ def __init__(
+ self, prog=None, version=None, usage="%prog [options] file file ..."
+ ):
"""Initialize an instance of an OptionManager.
:param str prog:
@@ -172,8 +193,9 @@ class OptionManager(object):
:param str usage:
Basic usage string used by the OptionParser.
"""
- self.parser = optparse.OptionParser(prog=prog, version=version,
- usage=usage)
+ self.parser = optparse.OptionParser(
+ prog=prog, version=version, usage=usage
+ )
self.config_options_dict = {}
self.options = []
self.program_name = prog
@@ -198,7 +220,7 @@ class OptionManager(object):
``short_option_name`` and ``long_option_name`` may be specified
positionally as they are with optparse normally.
"""
- if len(args) == 1 and args[0].startswith('--'):
+ if len(args) == 1 and args[0].startswith("--"):
args = (None, args[0])
option = Option(*args, **kwargs)
self.parser.add_option(option.to_optparse())
@@ -206,7 +228,7 @@ class OptionManager(object):
if option.parse_from_config:
name = option.config_name
self.config_options_dict[name] = option
- self.config_options_dict[name.replace('_', '-')] = option
+ self.config_options_dict[name.replace("_", "-")] = option
LOG.debug('Registered option "%s".', option)
def remove_from_default_ignore(self, error_codes):
@@ -216,13 +238,16 @@ class OptionManager(object):
List of strings that are the error/warning codes to attempt to
remove from the extended default ignore list.
"""
- LOG.debug('Removing %r from the default ignore list', error_codes)
+ LOG.debug("Removing %r from the default ignore list", error_codes)
for error_code in error_codes:
try:
self.extended_default_ignore.remove(error_code)
except (ValueError, KeyError):
- LOG.debug('Attempted to remove %s from default ignore'
- ' but it was not a member of the list.', error_code)
+ LOG.debug(
+ "Attempted to remove %s from default ignore"
+ " but it was not a member of the list.",
+ error_code,
+ )
def extend_default_ignore(self, error_codes):
"""Extend the default ignore list with the error codes provided.
@@ -231,7 +256,7 @@ class OptionManager(object):
List of strings that are the error/warning codes with which to
extend the default ignore list.
"""
- LOG.debug('Extending default ignore list with %r', error_codes)
+ LOG.debug("Extending default ignore list with %r", error_codes)
self.extended_default_ignore.update(error_codes)
def extend_default_select(self, error_codes):
@@ -241,11 +266,12 @@ class OptionManager(object):
List of strings that are the error/warning codes with which
to extend the default select list.
"""
- LOG.debug('Extending default select list with %r', error_codes)
+ LOG.debug("Extending default select list with %r", error_codes)
self.extended_default_select.update(error_codes)
- def generate_versions(self, format_str='%(name)s: %(version)s',
- join_on=', '):
+ def generate_versions(
+ self, format_str="%(name)s: %(version)s", join_on=", "
+ ):
"""Generate a comma-separated list of versions of plugins."""
return join_on.join(
format_str % self.format_plugin(plugin)
@@ -255,14 +281,17 @@ class OptionManager(object):
def update_version_string(self):
"""Update the flake8 version string."""
self.parser.version = (
- self.version + ' (' + self.generate_versions() + ') ' +
- utils.get_python_version()
+ self.version
+ + " ("
+ + self.generate_versions()
+ + ") "
+ + utils.get_python_version()
)
def generate_epilog(self):
"""Create an epilog with the version and name of each of plugin."""
- plugin_version_format = '%(name)s: %(version)s'
- self.parser.epilog = 'Installed plugins: ' + self.generate_versions(
+ plugin_version_format = "%(name)s: %(version)s"
+ self.parser.epilog = "Installed plugins: " + self.generate_versions(
plugin_version_format
)
@@ -303,7 +332,10 @@ class OptionManager(object):
# Unfortunately, we need to rely on a private method here.
try:
self.parser._process_args(largs, rargs, values)
- except (optparse.BadOptionError, optparse.OptionValueError) as err:
+ except (
+ optparse.BadOptionError,
+ optparse.OptionValueError,
+ ) as err:
self.parser.largs.append(err.opt_str)
args = largs + rargs
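
The dest/config-name derivation reformatted above is a plain string transform on the long option name; both the underscore and the dash spellings are then registered so either form works in a config file. Roughly:

    long_option = "--enable-extensions"
    dest = long_option[2:].replace("-", "_")         # "enable_extensions"
    config_aliases = {dest, dest.replace("_", "-")}  # both spellings accepted
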
diff --git a/src/flake8/plugins/_trie.py b/src/flake8/plugins/_trie.py
index 17c226f..9a50b45 100644
--- a/src/flake8/plugins/_trie.py
+++ b/src/flake8/plugins/_trie.py
@@ -1,11 +1,11 @@
"""Independent implementation of a Trie tree."""
-__all__ = ('Trie', 'TrieNode')
+__all__ = ("Trie", "TrieNode")
def _iterate_stringlike_objects(string):
for i in range(len(string)):
- yield string[i:i + 1]
+ yield string[i : i + 1]
class Trie(object):
@@ -57,9 +57,7 @@ class TrieNode(object):
def __repr__(self):
"""Generate an easy to read representation of the node."""
- return 'TrieNode(prefix={0}, data={1})'.format(
- self.prefix, self.data
- )
+ return "TrieNode(prefix={0}, data={1})".format(self.prefix, self.data)
def find_prefix(self, prefix):
"""Find the prefix in the children of this node.
diff --git a/src/flake8/plugins/manager.py b/src/flake8/plugins/manager.py
index 503dfbb..1b9005e 100644
--- a/src/flake8/plugins/manager.py
+++ b/src/flake8/plugins/manager.py
@@ -11,11 +11,11 @@ from flake8.plugins import notifier
LOG = logging.getLogger(__name__)
__all__ = (
- 'Checkers',
- 'Listeners',
- 'Plugin',
- 'PluginManager',
- 'ReportFormatters',
+ "Checkers",
+ "Listeners",
+ "Plugin",
+ "PluginManager",
+ "ReportFormatters",
)
NO_GROUP_FOUND = object()
@@ -55,11 +55,11 @@ class Plugin(object):
def to_dictionary(self):
"""Convert this plugin to a dictionary."""
return {
- 'name': self.name,
- 'parameters': self.parameters,
- 'parameter_names': self.parameter_names,
- 'plugin': self.plugin,
- 'plugin_name': self.plugin_name,
+ "name": self.name,
+ "parameters": self.parameters,
+ "parameter_names": self.parameter_names,
+ "plugin": self.plugin,
+ "plugin_name": self.plugin_name,
}
def is_in_a_group(self):
@@ -75,7 +75,7 @@ class Plugin(object):
def group(self):
"""Find and parse the group the plugin is in."""
if self._group is None:
- name = self.name.split('.', 1)
+ name = self.name.split(".", 1)
if len(name) > 1:
self._group = name[0]
else:
@@ -132,7 +132,7 @@ class Plugin(object):
@property
def off_by_default(self):
"""Return whether the plugin is ignored by default."""
- return getattr(self.plugin, 'off_by_default', False)
+ return getattr(self.plugin, "off_by_default", False)
def execute(self, *args, **kwargs):
r"""Call the plugin with \*args and \*\*kwargs."""
@@ -140,22 +140,21 @@ class Plugin(object):
def _load(self, verify_requirements):
# Avoid relying on hasattr() here.
- resolve = getattr(self.entry_point, 'resolve', None)
- require = getattr(self.entry_point, 'require', None)
+ resolve = getattr(self.entry_point, "resolve", None)
+ require = getattr(self.entry_point, "require", None)
if resolve and require:
if verify_requirements:
- LOG.debug('Verifying plugin "%s"\'s requirements.',
- self.name)
+ LOG.debug('Verifying plugin "%s"\'s requirements.', self.name)
require()
self._plugin = resolve()
else:
- self._plugin = self.entry_point.load(
- require=verify_requirements
- )
+ self._plugin = self.entry_point.load(require=verify_requirements)
if not callable(self._plugin):
- msg = ('Plugin %r is not a callable. It might be written for an'
- ' older version of flake8 and might not work with this'
- ' version' % self._plugin)
+ msg = (
+ "Plugin %r is not a callable. It might be written for an"
+ " older version of flake8 and might not work with this"
+ " version" % self._plugin
+ )
LOG.critical(msg)
raise TypeError(msg)
@@ -179,8 +178,7 @@ class Plugin(object):
except Exception as load_exception:
LOG.exception(load_exception)
failed_to_load = exceptions.FailedToLoadPlugin(
- plugin=self,
- exception=load_exception,
+ plugin=self, exception=load_exception
)
LOG.critical(str(failed_to_load))
raise failed_to_load
@@ -194,8 +192,11 @@ class Plugin(object):
try:
options.ignore.remove(self.name)
except (ValueError, KeyError):
- LOG.debug('Attempted to remove %s from the ignore list but it was '
- 'not a member of the list.', self.name)
+ LOG.debug(
+ "Attempted to remove %s from the ignore list but it was "
+ "not a member of the list.",
+ self.name,
+ )
def disable(self, optmanager):
"""Add the plugin name to the default ignore list."""
@@ -203,7 +204,7 @@ class Plugin(object):
def provide_options(self, optmanager, options, extra_args):
"""Pass the parsed options and extra arguments to the plugin."""
- parse_options = getattr(self.plugin, 'parse_options', None)
+ parse_options = getattr(self.plugin, "parse_options", None)
if parse_options is not None:
LOG.debug('Providing options to plugin "%s".', self.name)
try:
@@ -224,11 +225,12 @@ class Plugin(object):
:returns:
Nothing
"""
- add_options = getattr(self.plugin, 'add_options', None)
+ add_options = getattr(self.plugin, "add_options", None)
if add_options is not None:
LOG.debug(
'Registering options from plugin "%s" on OptionManager %r',
- self.name, optmanager
+ self.name,
+ optmanager,
)
add_options(optmanager)
@@ -239,8 +241,9 @@ class Plugin(object):
class PluginManager(object): # pylint: disable=too-few-public-methods
"""Find and manage plugins consistently."""
- def __init__(self, namespace,
- verify_requirements=False, local_plugins=None):
+ def __init__(
+ self, namespace, verify_requirements=False, local_plugins=None
+ ):
"""Initialize the manager.
:param str namespace:
@@ -348,7 +351,7 @@ def version_for(plugin):
except ImportError:
return None
- return getattr(module, '__version__', None)
+ return getattr(module, "__version__", None)
class PluginTypeManager(object):
@@ -363,7 +366,8 @@ class PluginTypeManager(object):
Plugins from config file instead of entry-points
"""
self.manager = PluginManager(
- self.namespace, local_plugins=local_plugins)
+ self.namespace, local_plugins=local_plugins
+ )
self.plugins_loaded = False
def __contains__(self, name):
@@ -406,9 +410,11 @@ class PluginTypeManager(object):
def _generate_call_function(method_name, optmanager, *args, **kwargs):
def generated_function(plugin): # noqa: D105
method = getattr(plugin, method_name, None)
- if (method is not None and
- isinstance(method, collections.Callable)):
+ if method is not None and isinstance(
+ method, collections.Callable
+ ):
return method(optmanager, *args, **kwargs)
+
return generated_function
def load_plugins(self):
@@ -435,7 +441,7 @@ class PluginTypeManager(object):
"""Register all of the checkers' options to the OptionManager."""
self.load_plugins()
call_register_options = self._generate_call_function(
- 'register_options', optmanager,
+ "register_options", optmanager
)
list(self.manager.map(call_register_options))
@@ -443,7 +449,7 @@ class PluginTypeManager(object):
def provide_options(self, optmanager, options, extra_args):
"""Provide parsed options and extra arguments to the plugins."""
call_provide_options = self._generate_call_function(
- 'provide_options', optmanager, options, extra_args,
+ "provide_options", optmanager, options, extra_args
)
list(self.manager.map(call_provide_options))
@@ -470,7 +476,7 @@ class NotifierBuilderMixin(object): # pylint: disable=too-few-public-methods
class Checkers(PluginTypeManager):
"""All of the checkers registered through entry-points or config."""
- namespace = 'flake8.extension'
+ namespace = "flake8.extension"
def checks_expecting(self, argument_name):
"""Retrieve checks that expect an argument with the specified name.
@@ -484,14 +490,15 @@ class Checkers(PluginTypeManager):
def to_dictionary(self):
"""Return a dictionary of AST and line-based plugins."""
return {
- 'ast_plugins': [
+ "ast_plugins": [
plugin.to_dictionary() for plugin in self.ast_plugins
],
- 'logical_line_plugins': [
+ "logical_line_plugins": [
plugin.to_dictionary() for plugin in self.logical_line_plugins
],
- 'physical_line_plugins': [
- plugin.to_dictionary() for plugin in self.physical_line_plugins
+ "physical_line_plugins": [
+ plugin.to_dictionary()
+ for plugin in self.physical_line_plugins
],
}
@@ -508,7 +515,7 @@ class Checkers(PluginTypeManager):
# function to map over the plugins.
self.load_plugins()
call_register_options = self._generate_call_function(
- 'register_options', optmanager,
+ "register_options", optmanager
)
def register_and_enable(plugin):
@@ -521,27 +528,27 @@ class Checkers(PluginTypeManager):
@property
def ast_plugins(self):
"""List of plugins that expect the AST tree."""
- plugins = getattr(self, '_ast_plugins', [])
+ plugins = getattr(self, "_ast_plugins", [])
if not plugins:
- plugins = list(self.checks_expecting('tree'))
+ plugins = list(self.checks_expecting("tree"))
self._ast_plugins = plugins
return plugins
@property
def logical_line_plugins(self):
"""List of plugins that expect the logical lines."""
- plugins = getattr(self, '_logical_line_plugins', [])
+ plugins = getattr(self, "_logical_line_plugins", [])
if not plugins:
- plugins = list(self.checks_expecting('logical_line'))
+ plugins = list(self.checks_expecting("logical_line"))
self._logical_line_plugins = plugins
return plugins
@property
def physical_line_plugins(self):
"""List of plugins that expect the physical lines."""
- plugins = getattr(self, '_physical_line_plugins', [])
+ plugins = getattr(self, "_physical_line_plugins", [])
if not plugins:
- plugins = list(self.checks_expecting('physical_line'))
+ plugins = list(self.checks_expecting("physical_line"))
self._physical_line_plugins = plugins
return plugins
@@ -549,10 +556,10 @@ class Checkers(PluginTypeManager):
class Listeners(PluginTypeManager, NotifierBuilderMixin):
"""All of the listeners registered through entry-points or config."""
- namespace = 'flake8.listen'
+ namespace = "flake8.listen"
class ReportFormatters(PluginTypeManager):
"""All of the report formatters registered through entry-points/config."""
- namespace = 'flake8.report'
+ namespace = "flake8.report"
diff --git a/src/flake8/plugins/notifier.py b/src/flake8/plugins/notifier.py
index dc255c4..9efccd4 100644
--- a/src/flake8/plugins/notifier.py
+++ b/src/flake8/plugins/notifier.py
@@ -31,7 +31,7 @@ class Notifier(object):
path = error_code
while path:
node = self.listeners.find(path)
- listeners = getattr(node, 'data', [])
+ listeners = getattr(node, "data", [])
for listener in listeners:
yield listener
path = path[:-1]
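
The loop closing the hunk above walks every prefix of an error code, so a listener registered for a short code such as "E1" is also notified for "E123". The traversal, extracted as a standalone sketch:

    def code_prefixes(error_code):
        # Yields "E123", "E12", "E1", "E"; each prefix is looked up in the trie.
        path = error_code
        while path:
            yield path
            path = path[:-1]

    assert list(code_prefixes("E123")) == ["E123", "E12", "E1", "E"]
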
diff --git a/src/flake8/plugins/pyflakes.py b/src/flake8/plugins/pyflakes.py
index 9ef9e93..e2ef2c3 100644
--- a/src/flake8/plugins/pyflakes.py
+++ b/src/flake8/plugins/pyflakes.py
@@ -18,35 +18,35 @@ from flake8 import utils
FLAKE8_PYFLAKES_CODES = {
- 'UnusedImport': 'F401',
- 'ImportShadowedByLoopVar': 'F402',
- 'ImportStarUsed': 'F403',
- 'LateFutureImport': 'F404',
- 'ImportStarUsage': 'F405',
- 'ImportStarNotPermitted': 'F406',
- 'FutureFeatureNotDefined': 'F407',
- 'MultiValueRepeatedKeyLiteral': 'F601',
- 'MultiValueRepeatedKeyVariable': 'F602',
- 'TooManyExpressionsInStarredAssignment': 'F621',
- 'TwoStarredExpressions': 'F622',
- 'AssertTuple': 'F631',
- 'BreakOutsideLoop': 'F701',
- 'ContinueOutsideLoop': 'F702',
- 'ContinueInFinally': 'F703',
- 'YieldOutsideFunction': 'F704',
- 'ReturnWithArgsInsideGenerator': 'F705',
- 'ReturnOutsideFunction': 'F706',
- 'DefaultExceptNotLast': 'F707',
- 'DoctestSyntaxError': 'F721',
- 'ForwardAnnotationSyntaxError': 'F722',
- 'RedefinedWhileUnused': 'F811',
- 'RedefinedInListComp': 'F812',
- 'UndefinedName': 'F821',
- 'UndefinedExport': 'F822',
- 'UndefinedLocal': 'F823',
- 'DuplicateArgument': 'F831',
- 'UnusedVariable': 'F841',
- 'RaiseNotImplemented': 'F901',
+ "UnusedImport": "F401",
+ "ImportShadowedByLoopVar": "F402",
+ "ImportStarUsed": "F403",
+ "LateFutureImport": "F404",
+ "ImportStarUsage": "F405",
+ "ImportStarNotPermitted": "F406",
+ "FutureFeatureNotDefined": "F407",
+ "MultiValueRepeatedKeyLiteral": "F601",
+ "MultiValueRepeatedKeyVariable": "F602",
+ "TooManyExpressionsInStarredAssignment": "F621",
+ "TwoStarredExpressions": "F622",
+ "AssertTuple": "F631",
+ "BreakOutsideLoop": "F701",
+ "ContinueOutsideLoop": "F702",
+ "ContinueInFinally": "F703",
+ "YieldOutsideFunction": "F704",
+ "ReturnWithArgsInsideGenerator": "F705",
+ "ReturnOutsideFunction": "F706",
+ "DefaultExceptNotLast": "F707",
+ "DoctestSyntaxError": "F721",
+ "ForwardAnnotationSyntaxError": "F722",
+ "RedefinedWhileUnused": "F811",
+ "RedefinedInListComp": "F812",
+ "UndefinedName": "F821",
+ "UndefinedExport": "F822",
+ "UndefinedLocal": "F823",
+ "DuplicateArgument": "F831",
+ "UnusedVariable": "F841",
+ "RaiseNotImplemented": "F901",
}
@@ -54,8 +54,9 @@ def patch_pyflakes():
"""Add error codes to Pyflakes messages."""
for name, obj in vars(pyflakes.messages).items():
if name[0].isupper() and obj.message:
- obj.flake8_msg = '%s %s' % (
- FLAKE8_PYFLAKES_CODES.get(name, 'F999'), obj.message
+ obj.flake8_msg = "%s %s" % (
+ FLAKE8_PYFLAKES_CODES.get(name, "F999"),
+ obj.message,
)
@@ -65,7 +66,7 @@ patch_pyflakes()
class FlakesChecker(pyflakes.checker.Checker):
"""Subclass the Pyflakes checker to conform with the flake8 API."""
- name = 'pyflakes'
+ name = "pyflakes"
version = pyflakes.__version__
with_doctest = False
include_in_doctest = []
@@ -75,48 +76,65 @@ class FlakesChecker(pyflakes.checker.Checker):
"""Initialize the PyFlakes plugin with an AST tree and filename."""
filename = utils.normalize_paths(filename)[0]
with_doctest = self.with_doctest
- included_by = [include for include in self.include_in_doctest
- if include != '' and filename.startswith(include)]
+ included_by = [
+ include
+ for include in self.include_in_doctest
+ if include != "" and filename.startswith(include)
+ ]
if included_by:
with_doctest = True
for exclude in self.exclude_from_doctest:
- if exclude != '' and filename.startswith(exclude):
+ if exclude != "" and filename.startswith(exclude):
with_doctest = False
- overlaped_by = [include for include in included_by
- if include.startswith(exclude)]
+ overlaped_by = [
+ include
+ for include in included_by
+ if include.startswith(exclude)
+ ]
if overlaped_by:
with_doctest = True
- super(FlakesChecker, self).__init__(tree, filename,
- withDoctest=with_doctest)
+ super(FlakesChecker, self).__init__(
+ tree, filename, withDoctest=with_doctest
+ )
@classmethod
def add_options(cls, parser):
"""Register options for PyFlakes on the Flake8 OptionManager."""
parser.add_option(
- '--builtins', parse_from_config=True, comma_separated_list=True,
+ "--builtins",
+ parse_from_config=True,
+ comma_separated_list=True,
help="define more built-ins, comma separated",
)
parser.add_option(
- '--doctests', default=False, action='store_true',
+ "--doctests",
+ default=False,
+ action="store_true",
parse_from_config=True,
help="check syntax of the doctests",
)
parser.add_option(
- '--include-in-doctest', default='',
- dest='include_in_doctest', parse_from_config=True,
- comma_separated_list=True, normalize_paths=True,
- help='Run doctests only on these files',
- type='string',
+ "--include-in-doctest",
+ default="",
+ dest="include_in_doctest",
+ parse_from_config=True,
+ comma_separated_list=True,
+ normalize_paths=True,
+ help="Run doctests only on these files",
+ type="string",
)
parser.add_option(
- '--exclude-from-doctest', default='',
- dest='exclude_from_doctest', parse_from_config=True,
- comma_separated_list=True, normalize_paths=True,
- help='Skip these files when running doctests',
- type='string',
+ "--exclude-from-doctest",
+ default="",
+ dest="exclude_from_doctest",
+ parse_from_config=True,
+ comma_separated_list=True,
+ normalize_paths=True,
+ help="Skip these files when running doctests",
+ type="string",
)
@classmethod
@@ -128,20 +146,20 @@ class FlakesChecker(pyflakes.checker.Checker):
included_files = []
for included_file in options.include_in_doctest:
- if included_file == '':
+ if included_file == "":
continue
- if not included_file.startswith((os.sep, './', '~/')):
- included_files.append('./' + included_file)
+ if not included_file.startswith((os.sep, "./", "~/")):
+ included_files.append("./" + included_file)
else:
included_files.append(included_file)
cls.include_in_doctest = utils.normalize_paths(included_files)
excluded_files = []
for excluded_file in options.exclude_from_doctest:
- if excluded_file == '':
+ if excluded_file == "":
continue
- if not excluded_file.startswith((os.sep, './', '~/')):
- excluded_files.append('./' + excluded_file)
+ if not excluded_file.startswith((os.sep, "./", "~/")):
+ excluded_files.append("./" + excluded_file)
else:
excluded_files.append(excluded_file)
cls.exclude_from_doctest = utils.normalize_paths(excluded_files)
@@ -150,16 +168,20 @@ class FlakesChecker(pyflakes.checker.Checker):
cls.exclude_from_doctest
)
if inc_exc:
- raise ValueError('"%s" was specified in both the '
- 'include-in-doctest and exclude-from-doctest '
- 'options. You are not allowed to specify it in '
- 'both for doctesting.' % inc_exc)
+ raise ValueError(
+ '"%s" was specified in both the '
+ "include-in-doctest and exclude-from-doctest "
+ "options. You are not allowed to specify it in "
+ "both for doctesting." % inc_exc
+ )
def run(self):
"""Run the plugin."""
for message in self.messages:
- col = getattr(message, 'col', 0)
- yield (message.lineno,
- col,
- (message.flake8_msg % message.message_args),
- message.__class__)
+ col = getattr(message, "col", 0)
+ yield (
+ message.lineno,
+ col,
+ (message.flake8_msg % message.message_args),
+ message.__class__,
+ )
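
patch_pyflakes() above simply prefixes each pyflakes message template with its mapped code, falling back to F999 for unknown messages. For example, using the mapping shown and pyflakes' UnusedImport template (roughly "%r imported but unused"):

    code = FLAKE8_PYFLAKES_CODES.get("UnusedImport", "F999")  # "F401"
    flake8_msg = "%s %s" % (code, "%r imported but unused")
    print(flake8_msg % ("os",))  # F401 'os' imported but unused
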
diff --git a/src/flake8/processor.py b/src/flake8/processor.py
index d54a82f..fdf0a98 100644
--- a/src/flake8/processor.py
+++ b/src/flake8/processor.py
@@ -15,10 +15,11 @@ PyCF_ONLY_AST = 1024
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
-COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
+COMMENT_WITH_NL = tokenize.generate_tokens(["#\n"].pop).send(None)[1] == "#\n"
-SKIP_TOKENS = frozenset([tokenize.NL, tokenize.NEWLINE, tokenize.INDENT,
- tokenize.DEDENT])
+SKIP_TOKENS = frozenset(
+ [tokenize.NL, tokenize.NEWLINE, tokenize.INDENT, tokenize.DEDENT]
+)
class FileProcessor(object):
@@ -79,7 +80,7 @@ class FileProcessor(object):
#: Line number in the file
self.line_number = 0
#: Current logical line
- self.logical_line = ''
+ self.logical_line = ""
#: Maximum line length as configured by the user
self.max_line_length = options.max_line_length
#: Whether the current physical line is multiline
@@ -89,9 +90,9 @@ class FileProcessor(object):
#: Previous level of indentation
self.previous_indent_level = 0
#: Previous logical line
- self.previous_logical = ''
+ self.previous_logical = ""
#: Previous unindented (i.e. top-level) logical line
- self.previous_unindented_logical_line = ''
+ self.previous_unindented_logical_line = ""
#: Current set of tokens
self.tokens = []
#: Total number of lines in the file
@@ -99,9 +100,7 @@ class FileProcessor(object):
#: Verbosity level of Flake8
self.verbose = options.verbose
#: Statistics dictionary
- self.statistics = {
- 'logical lines': 0,
- }
+ self.statistics = {"logical lines": 0}
self._file_tokens = None
@property
@@ -115,9 +114,9 @@ class FileProcessor(object):
if self._file_tokens is None:
line_iter = iter(self.lines)
try:
- self._file_tokens = list(tokenize.generate_tokens(
- lambda: next(line_iter)
- ))
+ self._file_tokens = list(
+ tokenize.generate_tokens(lambda: next(line_iter))
+ )
except tokenize.TokenError as exc:
raise exceptions.InvalidSyntax(exc.message, exception=exc)
@@ -153,9 +152,9 @@ class FileProcessor(object):
def update_checker_state_for(self, plugin):
"""Update the checker_state attribute for the plugin."""
- if 'checker_state' in plugin['parameters']:
+ if "checker_state" in plugin["parameters"]:
self.checker_state = self._checker_states.setdefault(
- plugin['name'], {}
+ plugin["name"], {}
)
def next_logical_line(self):
@@ -194,10 +193,10 @@ class FileProcessor(object):
row_index = previous_row - 1
column_index = previous_column - 1
previous_text = self.lines[row_index][column_index]
- if (previous_text == ',' or
- (previous_text not in '{[(' and
- text not in '}])')):
- text = ' ' + text
+ if previous_text == "," or (
+ previous_text not in "{[(" and text not in "}])"
+ ):
+ text = " " + text
elif previous_column != start_column:
text = line[previous_column:start_column] + text
logical.append(text)
@@ -208,16 +207,16 @@ class FileProcessor(object):
def build_ast(self):
"""Build an abstract syntax tree from the list of lines."""
- return compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)
+ return compile("".join(self.lines), "", "exec", PyCF_ONLY_AST)
def build_logical_line(self):
"""Build a logical line from the current tokens list."""
comments, logical, mapping_list = self.build_logical_line_tokens()
- joined_comments = ''.join(comments)
- self.logical_line = ''.join(logical)
+ joined_comments = "".join(comments)
+ self.logical_line = "".join(logical)
if defaults.NOQA_INLINE_REGEXP.search(joined_comments):
self.noqa = True
- self.statistics['logical lines'] += 1
+ self.statistics["logical lines"] += 1
return joined_comments, self.logical_line, mapping_list
def split_line(self, token):
@@ -225,7 +224,7 @@ class FileProcessor(object):
This also auto-increments the line number for the caller.
"""
- for line in token[1].split('\n')[:-1]:
+ for line in token[1].split("\n")[:-1]:
yield line
self.line_number += 1
@@ -243,14 +242,16 @@ class FileProcessor(object):
LOG.exception(exc)
raise
else:
- LOG.warning('Plugin requested optional parameter "%s" '
- 'but this is not an available parameter.',
- param)
+ LOG.warning(
+ 'Plugin requested optional parameter "%s" '
+ "but this is not an available parameter.",
+ param,
+ )
return arguments
def check_physical_error(self, error_code, line):
"""Update attributes based on error code and line."""
- if error_code == 'E101':
+ if error_code == "E101":
self.indent_char = line[0]
def generate_tokens(self):
@@ -282,7 +283,7 @@ class FileProcessor(object):
def next_line(self):
"""Get the next line from the list."""
if self.line_number >= self.total_lines:
- return ''
+ return ""
line = self.lines[self.line_number]
self.line_number += 1
if self.indent_char is None and line[:1] in defaults.WHITESPACE:
@@ -292,8 +293,8 @@ class FileProcessor(object):
def read_lines(self):
# type: () -> List[str]
"""Read the lines for this file checker."""
- if self.filename is None or self.filename == '-':
- self.filename = self.options.stdin_display_name or 'stdin'
+ if self.filename is None or self.filename == "-":
+ self.filename = self.options.stdin_display_name or "stdin"
lines = self.read_lines_from_stdin()
else:
lines = self.read_lines_from_filename()
@@ -301,21 +302,20 @@ class FileProcessor(object):
def _readlines_py2(self):
# type: () -> List[str]
- with open(self.filename, 'rU') as fd:
+ with open(self.filename, "rU") as fd:
return fd.readlines()
def _readlines_py3(self):
# type: () -> List[str]
try:
- with open(self.filename, 'rb') as fd:
+ with open(self.filename, "rb") as fd:
(coding, lines) = tokenize.detect_encoding(fd.readline)
textfd = io.TextIOWrapper(fd, coding, line_buffering=True)
- return ([l.decode(coding) for l in lines] +
- textfd.readlines())
+ return [l.decode(coding) for l in lines] + textfd.readlines()
except (LookupError, SyntaxError, UnicodeError):
# If we can't detect the codec with tokenize.detect_encoding, or
# the detected encoding is incorrect, just fallback to latin-1.
- with open(self.filename, encoding='latin-1') as fd:
+ with open(self.filename, encoding="latin-1") as fd:
return fd.readlines()
def read_lines_from_filename(self):
@@ -346,8 +346,8 @@ class FileProcessor(object):
return True
elif any(defaults.NOQA_FILE.search(line) for line in self.lines):
LOG.warning(
- 'Detected `flake8: noqa` on line with code. To ignore an '
- 'error on a line use `noqa` instead.',
+ "Detected `flake8: noqa` on line with code. To ignore an "
+ "error on a line use `noqa` instead."
)
return False
else:
@@ -367,25 +367,27 @@ class FileProcessor(object):
# If the first byte of the file is a UTF-8 BOM, strip it
if first_byte == 0xFEFF:
self.lines[0] = self.lines[0][1:]
- elif self.lines[0][:3] == '\xEF\xBB\xBF':
+ elif self.lines[0][:3] == "\xEF\xBB\xBF":
self.lines[0] = self.lines[0][3:]
def is_eol_token(token):
"""Check if the token is an end-of-line token."""
- return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n'
+ return token[0] in NEWLINE or token[4][token[3][1] :].lstrip() == "\\\n"
if COMMENT_WITH_NL: # If on Python 2.6
+
def is_eol_token(token, _is_eol_token=is_eol_token):
"""Check if the token is an end-of-line token."""
- return (_is_eol_token(token) or
- (token[0] == tokenize.COMMENT and token[1] == token[4]))
+ return _is_eol_token(token) or (
+ token[0] == tokenize.COMMENT and token[1] == token[4]
+ )
def is_multiline_string(token):
"""Check if this is a multiline string."""
- return token[0] == tokenize.STRING and '\n' in token[1]
+ return token[0] == tokenize.STRING and "\n" in token[1]
def token_is_newline(token):
@@ -401,9 +403,9 @@ def token_is_comment(token):
def count_parentheses(current_parentheses_count, token_text):
"""Count the number of parentheses."""
current_parentheses_count = current_parentheses_count or 0
- if token_text in '([{':
+ if token_text in "([{":
return current_parentheses_count + 1
- elif token_text in '}])':
+ elif token_text in "}])":
return current_parentheses_count - 1
return current_parentheses_count
@@ -411,12 +413,14 @@ def count_parentheses(current_parentheses_count, token_text):
def log_token(log, token):
"""Log a token to a provided logging object."""
if token[2][0] == token[3][0]:
- pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
+ pos = "[%s:%s]" % (token[2][1] or "", token[3][1])
else:
- pos = 'l.%s' % token[3][0]
- log.log(flake8._EXTRA_VERBOSE, 'l.%s\t%s\t%s\t%r' %
- (token[2][0], pos, tokenize.tok_name[token[0]],
- token[1]))
+ pos = "l.%s" % token[3][0]
+ log.log(
+ flake8._EXTRA_VERBOSE,
+ "l.%s\t%s\t%s\t%r"
+ % (token[2][0], pos, tokenize.tok_name[token[0]], token[1]),
+ )
# NOTE(sigmavirus24): This was taken wholesale from
@@ -435,13 +439,13 @@ def expand_indent(line):
>>> expand_indent('        \t')
16
"""
- if '\t' not in line:
+ if "\t" not in line:
return len(line) - len(line.lstrip())
result = 0
for char in line:
- if char == '\t':
+ if char == "\t":
result = result // 8 * 8 + 8
- elif char == ' ':
+ elif char == " ":
result += 1
else:
break
@@ -470,4 +474,4 @@ def mutate_string(text):
if text[-3:] in ('"""', "'''"):
start += 2
end -= 2
- return text[:start] + 'x' * (end - start) + text[end:]
+ return text[:start] + "x" * (end - start) + text[end:]
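
expand_indent() above is self-contained enough to sanity-check in isolation: a line without tabs is measured by its leading spaces, and each tab expands to the next multiple of eight columns. Assuming ``from flake8.processor import expand_indent``:

    assert expand_indent("    ") == 4    # four spaces
    assert expand_indent("\t") == 8      # a single tab
    assert expand_indent("    \t") == 8  # the tab jumps to column 8
    assert expand_indent("        \t") == 16
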
diff --git a/src/flake8/statistics.py b/src/flake8/statistics.py
index d39750a..f2131b5 100644
--- a/src/flake8/statistics.py
+++ b/src/flake8/statistics.py
@@ -56,13 +56,14 @@ class Statistics(object):
:returns:
Generator of instances of :class:`Statistic`
"""
- matching_errors = sorted(key for key in self._store
- if key.matches(prefix, filename))
+ matching_errors = sorted(
+ key for key in self._store if key.matches(prefix, filename)
+ )
for error_code in matching_errors:
yield self._store[error_code]
-class Key(collections.namedtuple('Key', ['filename', 'code'])):
+class Key(collections.namedtuple("Key", ["filename", "code"])):
"""Simple key structure for the Statistics dictionary.
To make things clearer, easier to read, and more understandable, we use a
@@ -75,10 +76,7 @@ class Key(collections.namedtuple('Key', ['filename', 'code'])):
@classmethod
def create_from(cls, error):
"""Create a Key from :class:`flake8.style_guide.Violation`."""
- return cls(
- filename=error.filename,
- code=error.code,
- )
+ return cls(filename=error.filename, code=error.code)
def matches(self, prefix, filename):
"""Determine if this key matches some constraints.
@@ -94,9 +92,9 @@ class Key(collections.namedtuple('Key', ['filename', 'code'])):
:rtype:
bool
"""
- return (self.code.startswith(prefix) and
- (filename is None or
- self.filename == filename))
+ return self.code.startswith(prefix) and (
+ filename is None or self.filename == filename
+ )
class Statistic(object):
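
Key.matches(), reflowed above, is a prefix test on the error code plus an optional filename constraint. For example, assuming ``from flake8.statistics import Key``:

    key = Key(filename="app.py", code="E501")
    assert key.matches("E5", None)           # any file, code prefix "E5"
    assert key.matches("E501", "app.py")     # exact code in a specific file
    assert not key.matches("E501", "other.py")
    assert not key.matches("W2", None)
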
diff --git a/src/flake8/style_guide.py b/src/flake8/style_guide.py
index b50580b..7565b00 100644
--- a/src/flake8/style_guide.py
+++ b/src/flake8/style_guide.py
@@ -11,9 +11,7 @@ from flake8 import defaults
from flake8 import statistics
from flake8 import utils
-__all__ = (
- 'StyleGuide',
-)
+__all__ = ("StyleGuide",)
LOG = logging.getLogger(__name__)
@@ -21,34 +19,32 @@ LOG = logging.getLogger(__name__)
try:
lru_cache = functools.lru_cache
except AttributeError:
+
def lru_cache(maxsize=128, typed=False):
"""Stub for missing lru_cache."""
- def fake_decorator(func):
- return func
-
- return fake_decorator
+ return lambda func: func
# TODO(sigmavirus24): Determine if we need to use enum/enum34
class Selected(enum.Enum):
"""Enum representing an explicitly or implicitly selected code."""
- Explicitly = 'explicitly selected'
- Implicitly = 'implicitly selected'
+ Explicitly = "explicitly selected"
+ Implicitly = "implicitly selected"
class Ignored(enum.Enum):
"""Enum representing an explicitly or implicitly ignored code."""
- Explicitly = 'explicitly ignored'
- Implicitly = 'implicitly ignored'
+ Explicitly = "explicitly ignored"
+ Implicitly = "implicitly ignored"
class Decision(enum.Enum):
"""Enum representing whether a code should be ignored or selected."""
- Ignored = 'ignored error'
- Selected = 'selected error'
+ Ignored = "ignored error"
+ Selected = "selected error"
@lru_cache(maxsize=512)
@@ -57,14 +53,14 @@ def find_noqa(physical_line):
_Violation = collections.namedtuple(
- 'Violation',
+ "Violation",
[
- 'code',
- 'filename',
- 'line_number',
- 'column_number',
- 'text',
- 'physical_line',
+ "code",
+ "filename",
+ "line_number",
+ "column_number",
+ "text",
+ "physical_line",
],
)
@@ -89,26 +85,29 @@ class Violation(_Violation):
return False
if physical_line is None:
- physical_line = linecache.getline(self.filename,
- self.line_number)
+ physical_line = linecache.getline(self.filename, self.line_number)
noqa_match = find_noqa(physical_line)
if noqa_match is None:
- LOG.debug('%r is not inline ignored', self)
+ LOG.debug("%r is not inline ignored", self)
return False
- codes_str = noqa_match.groupdict()['codes']
+ codes_str = noqa_match.groupdict()["codes"]
if codes_str is None:
- LOG.debug('%r is ignored by a blanket ``# noqa``', self)
+ LOG.debug("%r is ignored by a blanket ``# noqa``", self)
return True
codes = set(utils.parse_comma_separated_list(codes_str))
if self.code in codes or self.code.startswith(tuple(codes)):
- LOG.debug('%r is ignored specifically inline with ``# noqa: %s``',
- self, codes_str)
+ LOG.debug(
+ "%r is ignored specifically inline with ``# noqa: %s``",
+ self,
+ codes_str,
+ )
return True
- LOG.debug('%r is not ignored inline with ``# noqa: %s``',
- self, codes_str)
+ LOG.debug(
+ "%r is not ignored inline with ``# noqa: %s``", self, codes_str
+ )
return False
def is_in(self, diff):
@@ -158,30 +157,29 @@ class DecisionEngine(object):
"""Initialize the engine."""
self.cache = {}
self.selected = tuple(options.select)
- self.extended_selected = tuple(sorted(
- options.extended_default_select,
- reverse=True,
- ))
+ self.extended_selected = tuple(
+ sorted(options.extended_default_select, reverse=True)
+ )
self.enabled_extensions = tuple(options.enable_extensions)
- self.all_selected = tuple(sorted(
- self.selected + self.enabled_extensions,
- reverse=True,
- ))
- self.ignored = tuple(sorted(
- itertools.chain(options.ignore, options.extend_ignore),
- reverse=True,
- ))
- self.using_default_ignore = set(self.ignored) == set(defaults.IGNORE)
- self.using_default_select = (
- set(self.selected) == set(defaults.SELECT)
+ self.all_selected = tuple(
+ sorted(self.selected + self.enabled_extensions, reverse=True)
)
+ self.ignored = tuple(
+ sorted(
+ itertools.chain(options.ignore, options.extend_ignore),
+ reverse=True,
+ )
+ )
+ self.using_default_ignore = set(self.ignored) == set(defaults.IGNORE)
+ self.using_default_select = set(self.selected) == set(defaults.SELECT)
def _in_all_selected(self, code):
return self.all_selected and code.startswith(self.all_selected)
def _in_extended_selected(self, code):
- return (self.extended_selected and
- code.startswith(self.extended_selected))
+ return self.extended_selected and code.startswith(
+ self.extended_selected
+ )
def was_selected(self, code):
# type: (str) -> Union[Selected, Ignored]
@@ -264,11 +262,13 @@ class DecisionEngine(object):
# default select list. In either case, we want the violation to be
# selected.
return Decision.Selected
- if (select is None and
- (extra_select is None or not self.using_default_ignore)):
+ if select is None and (
+ extra_select is None or not self.using_default_ignore
+ ):
return Decision.Ignored
- if ((select is None and not self.using_default_select) and
- (ignore is None and self.using_default_ignore)):
+ if (select is None and not self.using_default_select) and (
+ ignore is None and self.using_default_ignore
+ ):
return Decision.Ignored
return Decision.Selected
@@ -277,20 +277,24 @@ class DecisionEngine(object):
LOG.debug('Deciding if "%s" should be reported', code)
selected = self.was_selected(code)
ignored = self.was_ignored(code)
- LOG.debug('The user configured "%s" to be "%s", "%s"',
- code, selected, ignored)
+ LOG.debug(
+ 'The user configured "%s" to be "%s", "%s"',
+ code,
+ selected,
+ ignored,
+ )
- if ((selected is Selected.Explicitly or
- selected is Selected.Implicitly) and
- ignored is Selected.Implicitly):
+ if (
+ selected is Selected.Explicitly or selected is Selected.Implicitly
+ ) and ignored is Selected.Implicitly:
decision = Decision.Selected
- elif ((selected is Selected.Explicitly and
- ignored is Ignored.Explicitly) or
- (selected is Ignored.Implicitly and
- ignored is Selected.Implicitly)):
+ elif (
+ selected is Selected.Explicitly and ignored is Ignored.Explicitly
+ ) or (
+ selected is Ignored.Implicitly and ignored is Selected.Implicitly
+ ):
decision = self.more_specific_decision_for(code)
- elif (selected is Ignored.Implicitly or
- ignored is Ignored.Explicitly):
+ elif selected is Ignored.Implicitly or ignored is Ignored.Explicitly:
decision = Decision.Ignored # pylint: disable=R0204
return decision
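
The decision_for hunk above only re-wraps the conditionals, but the precedence it encodes is easy to lose in the noise: a code is reported when it is selected and not ignored, and when a code matches both a select prefix and an ignore prefix, the more specific (longer) prefix wins. A minimal, simplified model of that rule (not flake8's actual DecisionEngine; default lists and extension handling are omitted):

    def decide(code, select=(), ignore=()):
        # Longest matching prefix on each side; ties go to "select" here.
        sel = max((p for p in select if code.startswith(p)), key=len, default=None)
        ign = max((p for p in ignore if code.startswith(p)), key=len, default=None)
        if sel is None:
            return "ignored" if ign is not None else "selected"
        if ign is None:
            return "selected"
        return "selected" if len(sel) >= len(ign) else "ignored"

    print(decide("E501", select=("E",), ignore=("E501",)))   # ignored: E501 is more specific than E
    print(decide("E126", select=("E126",), ignore=("E1",)))  # selected: E126 is more specific than E1
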
@@ -354,8 +358,15 @@ class StyleGuide(object):
"""
return self.decider.decision_for(code)
- def handle_error(self, code, filename, line_number, column_number, text,
- physical_line=None):
+ def handle_error(
+ self,
+ code,
+ filename,
+ line_number,
+ column_number,
+ text,
+ physical_line=None,
+ ):
# type: (str, str, int, int, str) -> int
"""Handle an error reported by a check.
@@ -385,14 +396,24 @@ class StyleGuide(object):
# caught, column_number may be None.
if not column_number:
column_number = 0
- error = Violation(code, filename, line_number, column_number + 1,
- text, physical_line)
- error_is_selected = (self.should_report_error(error.code) is
- Decision.Selected)
+ error = Violation(
+ code,
+ filename,
+ line_number,
+ column_number + 1,
+ text,
+ physical_line,
+ )
+ error_is_selected = (
+ self.should_report_error(error.code) is Decision.Selected
+ )
is_not_inline_ignored = error.is_inline_ignored(disable_noqa) is False
is_included_in_diff = error.is_in(self._parsed_diff)
- if (error_is_selected and is_not_inline_ignored and
- is_included_in_diff):
+ if (
+ error_is_selected
+ and is_not_inline_ignored
+ and is_included_in_diff
+ ):
self.formatter.handle(error)
self.stats.record(error)
self.listener.notify(error.code, error)
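
The is_inline_ignored hunks above are formatting-only, but the suppression rule they touch is worth spelling out: a bare "# noqa" comment silences every violation on that physical line, while "# noqa: <codes>" silences only codes that match one of the listed codes exactly or by prefix. A standalone sketch of that matching rule (the regex and helper below are illustrative, not flake8's own find_noqa):

    import re

    NOQA_RE = re.compile(r"#\s*noqa(?::\s*(?P<codes>[A-Z0-9, ]+))?", re.IGNORECASE)

    def inline_ignored(code, physical_line):
        match = NOQA_RE.search(physical_line)
        if match is None:
            return False                      # no "# noqa" comment at all
        codes_str = match.group("codes")
        if codes_str is None:
            return True                       # blanket "# noqa"
        codes = {c.strip() for c in codes_str.split(",") if c.strip()}
        return code in codes or code.startswith(tuple(codes))

    assert inline_ignored("E501", "x = 1  # noqa")
    assert inline_ignored("E501", "x = 1  # noqa: E5")
    assert not inline_ignored("W605", "x = 1  # noqa: E501")
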
diff --git a/src/flake8/utils.py b/src/flake8/utils.py
index d28b810..a837577 100644
--- a/src/flake8/utils.py
+++ b/src/flake8/utils.py
@@ -9,9 +9,9 @@ import re
import sys
import tokenize
-DIFF_HUNK_REGEXP = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$')
-COMMA_SEPARATED_LIST_RE = re.compile(r'[,\s]')
-LOCAL_PLUGIN_LIST_RE = re.compile(r'[,\t\n\r\f\v]')
+DIFF_HUNK_REGEXP = re.compile(r"^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$")
+COMMA_SEPARATED_LIST_RE = re.compile(r"[,\s]")
+LOCAL_PLUGIN_LIST_RE = re.compile(r"[,\t\n\r\f\v]")
def parse_comma_separated_list(value, regexp=COMMA_SEPARATED_LIST_RE):
@@ -49,8 +49,9 @@ def normalize_paths(paths, parent=os.curdir):
:rtype:
[str]
"""
- return [normalize_path(p, parent)
- for p in parse_comma_separated_list(paths)]
+ return [
+ normalize_path(p, parent) for p in parse_comma_separated_list(paths)
+ ]
def normalize_path(path, parent=os.curdir):
@@ -67,9 +68,10 @@ def normalize_path(path, parent=os.curdir):
# Unix style paths (/foo/bar).
separator = os.path.sep
# NOTE(sigmavirus24): os.path.altsep may be None
- alternate_separator = os.path.altsep or ''
- if separator in path or (alternate_separator and
- alternate_separator in path):
+ alternate_separator = os.path.altsep or ""
+ if separator in path or (
+ alternate_separator and alternate_separator in path
+ ):
path = os.path.abspath(os.path.join(parent, path))
return path.rstrip(separator + alternate_separator)
@@ -81,13 +83,13 @@ def _stdin_get_value_py3():
(coding, lines) = tokenize.detect_encoding(fd.readline)
return io.StringIO(stdin_value.decode(coding))
except (LookupError, SyntaxError, UnicodeError):
- return io.StringIO(stdin_value.decode('utf-8'))
+ return io.StringIO(stdin_value.decode("utf-8"))
def stdin_get_value():
# type: () -> str
"""Get and cache it so plugins can use it."""
- cached_value = getattr(stdin_get_value, 'cached_stdin', None)
+ cached_value = getattr(stdin_get_value, "cached_stdin", None)
if cached_value is None:
if sys.version_info < (3, 0):
stdin_value = io.BytesIO(sys.stdin.read())
@@ -118,7 +120,7 @@ def parse_unified_diff(diff=None):
if number_of_rows:
# NOTE(sigmavirus24): Below we use a slice because stdin may be
# bytes instead of text on Python 3.
- if line[:1] != '-':
+ if line[:1] != "-":
number_of_rows -= 1
# We're in the part of the diff that has lines starting with +, -,
# and ' ' to show context and the changes made. We skip these
@@ -139,10 +141,10 @@ def parse_unified_diff(diff=None):
# +++ b/file.py\t100644
# Which is an example that has the new file permissions/mode.
# In this case we only care about the file name.
- if line[:3] == '+++':
- current_path = line[4:].split('\t', 1)[0]
+ if line[:3] == "+++":
+ current_path = line[4:].split("\t", 1)[0]
# NOTE(sigmavirus24): This check is for diff output from git.
- if current_path[:2] == 'b/':
+ if current_path[:2] == "b/":
current_path = current_path[2:]
# We don't need to do anything else. We have set up our local
# ``current_path`` variable. We can skip the rest of this loop.
@@ -179,7 +181,7 @@ def is_windows():
:rtype:
bool
"""
- return os.name == 'nt'
+ return os.name == "nt"
# NOTE(sigmavirus24): If and when https://bugs.python.org/issue27649 is fixed,
@@ -217,7 +219,7 @@ def is_using_stdin(paths):
:rtype:
bool
"""
- return '-' in paths
+ return "-" in paths
def _default_predicate(*args):
@@ -312,19 +314,23 @@ def parameters_for(plugin):
argspec = inspect.getargspec(func)
start_of_optional_args = len(argspec[0]) - len(argspec[-1] or [])
parameter_names = argspec[0]
- parameters = collections.OrderedDict([
- (name, position < start_of_optional_args)
- for position, name in enumerate(parameter_names)
- ])
+ parameters = collections.OrderedDict(
+ [
+ (name, position < start_of_optional_args)
+ for position, name in enumerate(parameter_names)
+ ]
+ )
else:
- parameters = collections.OrderedDict([
- (parameter.name, parameter.default is parameter.empty)
- for parameter in inspect.signature(func).parameters.values()
- if parameter.kind == parameter.POSITIONAL_OR_KEYWORD
- ])
+ parameters = collections.OrderedDict(
+ [
+ (parameter.name, parameter.default is parameter.empty)
+ for parameter in inspect.signature(func).parameters.values()
+ if parameter.kind == parameter.POSITIONAL_OR_KEYWORD
+ ]
+ )
if is_class:
- parameters.pop('self', None)
+ parameters.pop("self", None)
return parameters
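
The parameters_for hunk above keeps the same behaviour on both introspection paths (getargspec on Python 2, inspect.signature on Python 3): it maps each positional-or-keyword parameter name to whether that parameter is required. A small Python 3 sketch of that mapping for a plain function (the check() plugin below is hypothetical):

    import collections
    import inspect

    def required_map(func):
        return collections.OrderedDict(
            (param.name, param.default is param.empty)
            for param in inspect.signature(func).parameters.values()
            if param.kind == param.POSITIONAL_OR_KEYWORD
        )

    def check(logical_line, max_line_length=79, noqa=False):  # hypothetical plugin signature
        pass

    print(required_map(check))
    # OrderedDict([('logical_line', True), ('max_line_length', False), ('noqa', False)])
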
@@ -341,5 +347,5 @@ def get_python_version():
try:
impl = platform.python_implementation() + " "
except AttributeError: # Python 2.5
- impl = ''
- return '%s%s on %s' % (impl, platform.python_version(), platform.system())
+ impl = ""
+ return "%s%s on %s" % (impl, platform.python_version(), platform.system())