author     Tamas Szabo <szabtam@gmail.com>  2020-08-31 11:36:27 +0300
committer  Tamas Szabo <szabtam@gmail.com>  2020-08-31 11:36:27 +0300
commit     0767c5df22378cc1a30005f32dbb711cf3f32b4d (patch)
tree       2b95ec7600fc281f31954fa6a54686c0d676313a
parent     edd3aa0adff8ea8308c80e2315d1fc818c172a28 (diff)
parent     7ccb37aae8bf9c41386f0071960644c72206382a (diff)
download   isort-issue/1429/print-errors-to-stderr.tar.gz
Merge branch 'develop' into issue/1429/print-errors-to-stderr
-rw-r--r--  docs/configuration/options.md | 6
-rw-r--r--  docs/upgrade_guides/5.0.0.md | 2
-rw-r--r--  example_isort_formatting_plugin/example_isort_formatting_plugin.py | 3
-rw-r--r--  example_isort_formatting_plugin/pyproject.toml | 4
-rw-r--r--  isort/_future/_dataclasses.py | 7
-rw-r--r--  isort/api.py | 11
-rw-r--r--  isort/core.py | 2
-rw-r--r--  isort/format.py | 7
-rw-r--r--  isort/main.py | 8
-rw-r--r--  isort/output.py | 65
-rw-r--r--  isort/parse.py | 54
-rw-r--r--  isort/wrap_modes.py | 2
-rw-r--r--  poetry.lock | 117
-rw-r--r--  pyproject.toml | 4
-rw-r--r--  tests/integration/test_projects_using_isort.py | 19
-rw-r--r--  tests/integration/test_setting_combinations.py | 180
-rw-r--r--  tests/unit/profiles/__init__.py | 0
-rw-r--r--  tests/unit/profiles/test_attrs.py | 102
-rw-r--r--  tests/unit/profiles/test_black.py | 370
-rw-r--r--  tests/unit/profiles/test_django.py | 122
-rw-r--r--  tests/unit/profiles/test_google.py | 397
-rw-r--r--  tests/unit/profiles/test_hug.py | 112
-rw-r--r--  tests/unit/profiles/test_open_stack.py | 134
-rw-r--r--  tests/unit/profiles/test_plone.py | 75
-rw-r--r--  tests/unit/profiles/test_pycharm.py | 55
-rw-r--r--  tests/unit/test_isort.py | 15
-rw-r--r--  tests/unit/test_regressions.py | 42
-rw-r--r--  tests/unit/test_ticketed_features.py | 10
-rw-r--r--  tests/unit/utils.py | 14
29 files changed, 1813 insertions, 126 deletions
diff --git a/docs/configuration/options.md b/docs/configuration/options.md
index 712b14ae..10c1cfb8 100644
--- a/docs/configuration/options.md
+++ b/docs/configuration/options.md
@@ -8,7 +8,7 @@ Too busy to build your perfect isort configuration? For curated common configura
## Python Version
-Tells isort to set the known standard library based on the the specified Python version. Default is to assume any Python 3 version could be the target, and use a union off all stdlib modules across versions. If auto is specified, the version of the interpreter used to run isort (currently: 38) will be used.
+Tells isort to set the known standard library based on the specified Python version. Default is to assume any Python 3 version could be the target, and use a union of all stdlib modules across versions. If auto is specified, the version of the interpreter used to run isort (currently: 38) will be used.
**Type:** String
**Default:** `py3`
@@ -765,7 +765,7 @@ Tells isort to honor noqa comments to enforce skipping those comments.
## Src Paths
-Add an explicitly defined source path (modules within src paths have their imports automatically catorgorized as first_party).
+Add an explicitly defined source path (modules within src paths have their imports automatically categorized as first_party).
**Type:** Frozenset
**Default:** `frozenset()`
@@ -948,7 +948,7 @@ Number of files to process in parallel.
- -j
- --jobs
-## Dont Order By Type
+## Don't Order By Type
Don't order imports by type, which is determined by case, in addition to alphabetically.
diff --git a/docs/upgrade_guides/5.0.0.md b/docs/upgrade_guides/5.0.0.md
index 6a68acec..d31fb9dc 100644
--- a/docs/upgrade_guides/5.0.0.md
+++ b/docs/upgrade_guides/5.0.0.md
@@ -45,7 +45,7 @@ The `-v` (previously for version now for verbose) and `-V` (previously for verbo
## Migrating Config options
The first thing to keep in mind is how isort loads config options has changed in isort 5. It will no longer merge multiple config files, instead you must have 1 isort config per a project.
-If you have multiple configs, they will need to be merged into 1 single one. You can see the priority order of configuration files and the manor in which they are loaded on the
+If you have multiple configs, they will need to be merged into 1 single one. You can see the priority order of configuration files and the manner in which they are loaded on the
[config files documentation page](https://pycqa.github.io/isort/docs/configuration/config_files/).
### `not_skip`
diff --git a/example_isort_formatting_plugin/example_isort_formatting_plugin.py b/example_isort_formatting_plugin/example_isort_formatting_plugin.py
index ff65412f..f63b817e 100644
--- a/example_isort_formatting_plugin/example_isort_formatting_plugin.py
+++ b/example_isort_formatting_plugin/example_isort_formatting_plugin.py
@@ -15,7 +15,8 @@ def black_format_import_section(
contents,
fast=True,
mode=black.FileMode(
- is_pyi=extension.lower() == "pyi", line_length=config.line_length,
+ is_pyi=extension.lower() == "pyi",
+ line_length=config.line_length,
),
)
except black.NothingChanged:
diff --git a/example_isort_formatting_plugin/pyproject.toml b/example_isort_formatting_plugin/pyproject.toml
index 9e060ec1..31e3f928 100644
--- a/example_isort_formatting_plugin/pyproject.toml
+++ b/example_isort_formatting_plugin/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "example_isort_formatting_plugin"
-version = "0.0.1"
+version = "0.0.2"
description = "An example plugin that modifies isort formatting using black."
authors = ["Timothy Crosley <timothy.crosley@gmail.com>"]
license = "MIT"
@@ -11,7 +11,7 @@ example = "example_isort_formatting_plugin:black_format_import_section"
[tool.poetry.dependencies]
python = "^3.6"
isort = "^5.1.4"
-black = "^19.10b0"
+black = "^20.08b1"
[tool.poetry.dev-dependencies]
diff --git a/isort/_future/_dataclasses.py b/isort/_future/_dataclasses.py
index a7b113fe..87acb666 100644
--- a/isort/_future/_dataclasses.py
+++ b/isort/_future/_dataclasses.py
@@ -1137,7 +1137,10 @@ def make_dataclass(
name = item
tp = "typing.Any"
elif len(item) == 2:
- name, tp, = item
+ (
+ name,
+ tp,
+ ) = item
elif len(item) == 3:
name, tp, spec = item
namespace[name] = spec
@@ -1173,7 +1176,7 @@ def replace(obj, **changes):
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
- """
+ """
# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.
diff --git a/isort/api.py b/isort/api.py
index 8b0ca237..cbcc3e6e 100644
--- a/isort/api.py
+++ b/isort/api.py
@@ -298,6 +298,7 @@ def sort_file(
- ****config_kwargs**: Any config modifications.
"""
with io.File.read(filename) as source_file:
+ actual_file_path = file_path or source_file.path
changed: bool = False
try:
if write_to_stdout:
@@ -305,7 +306,7 @@ def sort_file(
input_stream=source_file.stream,
output_stream=sys.stdout,
config=config,
- file_path=file_path or source_file.path,
+ file_path=actual_file_path,
disregard_skip=disregard_skip,
extension=extension,
**config_kwargs,
@@ -321,7 +322,7 @@ def sort_file(
input_stream=source_file.stream,
output_stream=output_stream,
config=config,
- file_path=file_path or source_file.path,
+ file_path=actual_file_path,
disregard_skip=disregard_skip,
extension=extension,
**config_kwargs,
@@ -335,7 +336,7 @@ def sort_file(
show_unified_diff(
file_input=source_file.stream.read(),
file_output=tmp_out.read(),
- file_path=file_path or source_file.path,
+ file_path=actual_file_path,
output=None if show_diff is True else cast(TextIO, show_diff),
color_output=config.color_output,
)
@@ -356,9 +357,9 @@ def sort_file(
except FileNotFoundError:
pass
except ExistingSyntaxErrors:
- warn(f"{file_path} unable to sort due to existing syntax errors")
+ warn(f"{actual_file_path} unable to sort due to existing syntax errors")
except IntroducedSyntaxErrors: # pragma: no cover
- warn(f"{file_path} unable to sort as isort introduces new syntax errors")
+ warn(f"{actual_file_path} unable to sort as isort introduces new syntax errors")
return changed
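
Note on the isort/api.py hunk above: the resolved path is computed once as actual_file_path = file_path or source_file.path, so the syntax-error warnings name the file that was actually read even when no explicit file_path argument is passed. A minimal sketch of the effect (hypothetical file name; isort.file is the public alias for sort_file in isort 5):

    import isort

    # If example.py contains a syntax error, the emitted warning now reads
    # "example.py unable to sort due to existing syntax errors" instead of
    # "None unable to sort ..." when file_path is not supplied.
    isort.file("example.py")
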
diff --git a/isort/core.py b/isort/core.py
index 35738abd..908be4e1 100644
--- a/isort/core.py
+++ b/isort/core.py
@@ -215,9 +215,7 @@ def process(
if not import_section:
output_stream.write(line)
line = ""
- import_section += line_separator.join(add_imports) + line_separator
contains_imports = True
- add_imports = []
else:
not_imports = True
elif (
diff --git a/isort/format.py b/isort/format.py
index 0fe116bd..46bb1569 100644
--- a/isort/format.py
+++ b/isort/format.py
@@ -109,13 +109,14 @@ class BasicPrinter:
class ColoramaPrinter(BasicPrinter):
- ADDED_LINE = colorama.Fore.GREEN
- REMOVED_LINE = colorama.Fore.RED
-
def __init__(self, output: Optional[TextIO] = None):
self.output = output or sys.stdout
+ # Note: these constants are instance variables instead of class variables
+ # because they refer to colorama which might not be installed.
self.ERROR = self.style_text("ERROR", colorama.Fore.RED)
self.SUCCESS = self.style_text("SUCCESS", colorama.Fore.GREEN)
+ self.ADDED_LINE = colorama.Fore.GREEN
+ self.REMOVED_LINE = colorama.Fore.RED
@staticmethod
def style_text(text: str, style: Optional[str] = None) -> str:
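
Note on the isort/format.py hunk above: the color constants move from the class body into __init__ because class-level attributes are evaluated at import time, which would fail whenever the optional colorama dependency is absent. A minimal sketch of the pattern (illustrative names, not isort's actual module layout):

    try:
        import colorama  # optional dependency
    except ImportError:
        colorama = None

    class Printer:
        def __init__(self) -> None:
            # Touch colorama only when a Printer is actually constructed,
            # so importing this module still works without colorama installed.
            if colorama is not None:
                self.ADDED_LINE = colorama.Fore.GREEN
                self.REMOVED_LINE = colorama.Fore.RED
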
diff --git a/isort/main.py b/isort/main.py
index ed9517a6..f4328a8b 100644
--- a/isort/main.py
+++ b/isort/main.py
@@ -144,7 +144,7 @@ def _build_arg_parser() -> argparse.ArgumentParser:
"start guide, otherwise, one or more files/directories/stdin must be provided. "
"Use `-` as the first argument to represent stdin. Use --interactive to use the pre 5.0.0 "
"interactive behavior."
- ""
+ " "
"If you've used isort 4 but are new to isort 5, see the upgrading guide:"
"https://pycqa.github.io/isort/docs/upgrade_guides/5.0.0/."
)
@@ -155,7 +155,7 @@ def _build_arg_parser() -> argparse.ArgumentParser:
dest="src_paths",
action="append",
help="Add an explicitly defined source path "
- "(modules within src paths have their imports automatically catorgorized as first_party).",
+ "(modules within src paths have their imports automatically categorized as first_party).",
)
parser.add_argument(
"-a",
@@ -586,9 +586,9 @@ def _build_arg_parser() -> argparse.ArgumentParser:
action="store",
dest="py_version",
choices=tuple(VALID_PY_TARGETS) + ("auto",),
- help="Tells isort to set the known standard library based on the the specified Python "
+ help="Tells isort to set the known standard library based on the specified Python "
"version. Default is to assume any Python 3 version could be the target, and use a union "
- "off all stdlib modules across versions. If auto is specified, the version of the "
+ "of all stdlib modules across versions. If auto is specified, the version of the "
"interpreter used to run isort "
f"(currently: {sys.version_info.major}{sys.version_info.minor}) will be used.",
)
diff --git a/isort/output.py b/isort/output.py
index afb14ea2..8cf915f9 100644
--- a/isort/output.py
+++ b/isort/output.py
@@ -229,7 +229,11 @@ def _with_from_imports(
from_imports = sorting.naturally(
from_imports,
key=lambda key: sorting.module_key(
- key, config, True, ignore_case, section_name=section,
+ key,
+ config,
+ True,
+ ignore_case,
+ section_name=section,
),
)
if remove_imports:
@@ -268,7 +272,7 @@ def _with_from_imports(
if "*" in from_imports and config.combine_star:
import_statement = wrap.line(
with_comments(
- comments,
+ _with_star_comments(parsed, module, list(comments or ())),
f"{import_start}*",
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
@@ -324,13 +328,20 @@ def _with_from_imports(
while from_imports and from_imports[0] in as_imports:
from_import = from_imports.pop(0)
as_imports[from_import] = sorting.naturally(as_imports[from_import])
- from_comments = parsed.categorized_comments["straight"].get(
- f"{module}.{from_import}"
+ from_comments = (
+ parsed.categorized_comments["straight"].get(f"{module}.{from_import}") or []
)
if (
parsed.imports[section]["from"][module][from_import]
and not only_show_as_imports
):
+ specific_comment = (
+ parsed.categorized_comments["nested"]
+ .get(module, {})
+ .pop(from_import, None)
+ )
+ if specific_comment:
+ from_comments.append(specific_comment)
output.append(
wrap.line(
with_comments(
@@ -343,24 +354,36 @@ def _with_from_imports(
config,
)
)
- output.extend(
- wrap.line(
- with_comments(
- from_comments,
- import_start + as_import,
- removed=config.ignore_comments,
- comment_prefix=config.comment_prefix,
- ),
- parsed.line_separator,
- config,
+ from_comments = []
+
+ for as_import in as_imports[from_import]:
+ specific_comment = (
+ parsed.categorized_comments["nested"]
+ .get(module, {})
+ .pop(as_import, None)
)
- for as_import in as_imports[from_import]
- )
+ if specific_comment:
+ from_comments.append(specific_comment)
+
+ output.append(
+ wrap.line(
+ with_comments(
+ from_comments,
+ import_start + as_import,
+ removed=config.ignore_comments,
+ comment_prefix=config.comment_prefix,
+ ),
+ parsed.line_separator,
+ config,
+ )
+ )
+
+ from_comments = []
if "*" in from_imports:
output.append(
with_comments(
- comments,
+ _with_star_comments(parsed, module, list(comments or ())),
f"{import_start}*",
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
@@ -525,3 +548,11 @@ def _ensure_newline_before_comment(output):
new_output.append("")
new_output.append(line)
return new_output
+
+
+def _with_star_comments(parsed: parse.ParsedContent, module: str, comments: List[str]) -> List[str]:
+ star_comment = parsed.categorized_comments["nested"].get(module, {}).pop("*", None)
+ if star_comment:
+ return comments + [star_comment]
+ else:
+ return comments
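
Note on the isort/output.py hunk above: the new _with_star_comments helper pulls a comment stored under the nested "*" key back onto the emitted `from module import *` line, and the per-alias pops do the same for `as` imports. An illustrative check of the expected behavior (not a verbatim test from this change):

    import isort

    sorted_code = isort.code("from os import *  # star comment\n")
    assert "# star comment" in sorted_code
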
diff --git a/isort/parse.py b/isort/parse.py
index 1e71c6d2..86974234 100644
--- a/isort/parse.py
+++ b/isort/parse.py
@@ -222,12 +222,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
import_string, comment = parse_comments(line)
comments = [comment] if comment else []
line_parts = [part for part in _strip_syntax(import_string).strip().split(" ") if part]
- if (
- type_of_import == "from"
- and len(line_parts) == 2
- and line_parts[1] != "*"
- and comments
- ):
+ if type_of_import == "from" and len(line_parts) == 2 and comments:
nested_comments[line_parts[-1]] = comments[0]
if "(" in line.split("#", 1)[0] and index < line_count:
@@ -240,7 +235,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
if (
type_of_import == "from"
and stripped_line
- and " " not in stripped_line
+ and " " not in stripped_line.replace(" as ", "")
and new_comment
):
nested_comments[stripped_line] = comments[-1]
@@ -262,7 +257,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
if (
type_of_import == "from"
and stripped_line
- and " " not in stripped_line
+ and " " not in stripped_line.replace(" as ", "")
and new_comment
):
nested_comments[stripped_line] = comments[-1]
@@ -277,7 +272,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
if (
type_of_import == "from"
and stripped_line
- and " " not in stripped_line
+ and " " not in stripped_line.replace(" as ", "")
and new_comment
):
nested_comments[stripped_line] = comments[-1]
@@ -287,7 +282,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
if (
type_of_import == "from"
and stripped_line
- and " " not in stripped_line
+ and " " not in stripped_line.replace(" as ", "")
and new_comment
):
nested_comments[stripped_line] = comments[-1]
@@ -323,6 +318,7 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
for item in _strip_syntax(import_string).split()
]
straight_import = True
+ attach_comments_to: Optional[List[Any]] = None
if "as" in just_imports and (just_imports.index("as") + 1) < len(just_imports):
straight_import = False
while "as" in just_imports:
@@ -337,6 +333,15 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
pass
elif as_name not in as_map["from"][module]:
as_map["from"][module].append(as_name)
+
+ full_name = f"{nested_module} as {as_name}"
+ associated_comment = nested_comments.get(full_name)
+ if associated_comment:
+ categorized_comments["nested"].setdefault(top_level_module, {})[
+ full_name
+ ] = associated_comment
+ if associated_comment in comments:
+ comments.pop(comments.index(associated_comment))
else:
module = just_imports[as_index - 1]
as_name = just_imports[as_index + 1]
@@ -345,15 +350,17 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
elif as_name not in as_map["straight"][module]:
as_map["straight"][module].append(as_name)
- if config.combine_as_imports and nested_module:
- categorized_comments["from"].setdefault(
- f"{top_level_module}.__combined_as__", []
- ).extend(comments)
- comments = []
- else:
- categorized_comments["straight"][module] = comments
- comments = []
+ if comments and attach_comments_to is None:
+ if nested_module and config.combine_as_imports:
+ attach_comments_to = categorized_comments["from"].setdefault(
+ f"{top_level_module}.__combined_as__", []
+ )
+ else:
+ attach_comments_to = categorized_comments["straight"].setdefault(
+ module, []
+ )
del just_imports[as_index : as_index + 2]
+
if type_of_import == "from":
import_from = just_imports.pop(0)
placed_module = finder(import_from)
@@ -373,8 +380,8 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
] = associated_comment
if associated_comment in comments:
comments.pop(comments.index(associated_comment))
- if comments:
- categorized_comments["from"].setdefault(import_from, []).extend(comments)
+ if comments and attach_comments_to is None:
+ attach_comments_to = categorized_comments["from"].setdefault(import_from, [])
if len(out_lines) > max(import_index, 1) - 1:
last = out_lines and out_lines[-1].rstrip() or ""
@@ -408,7 +415,14 @@ def file_contents(contents: str, config: Config = DEFAULT_CONFIG) -> ParsedConte
(module, straight_import | root[import_from].get(module, False))
for module in just_imports
)
+
+ if comments and attach_comments_to is not None:
+ attach_comments_to.extend(comments)
else:
+ if attach_comments_to:
+ attach_comments_to.extend(comments)
+ comments = []
+
for module in just_imports:
if comments:
categorized_comments["straight"][module] = comments
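
Note on the isort/parse.py hunks above: comments attached to aliased imports are now keyed by the full "module as alias" form and re-attached via attach_comments_to, instead of being dropped or duplicated. An illustrative round trip, mirroring the commentnumberone case in the new integration test below:

    import isort

    code = "import ConfigParser as configparser  # commentnumberone\n"
    assert "commentnumberone" in isort.code(code)
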
diff --git a/isort/wrap_modes.py b/isort/wrap_modes.py
index 92a63c3f..8e10a947 100644
--- a/isort/wrap_modes.py
+++ b/isort/wrap_modes.py
@@ -34,7 +34,7 @@ def _wrap_mode_interface(
def _wrap_mode(function):
"""Registers an individual wrap mode. Function name and order are significant and used for
- creating enum.
+ creating enum.
"""
_wrap_modes[function.__name__.upper()] = function
function.__signature__ = signature(_wrap_mode_interface)
diff --git a/poetry.lock b/poetry.lock
index e7472fbd..ce01d351 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -21,10 +21,10 @@ description = "Better dates & times for Python"
name = "arrow"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "0.15.8"
+version = "0.16.0"
[package.dependencies]
-python-dateutil = "*"
+python-dateutil = ">=2.7.0"
[[package]]
category = "dev"
@@ -41,13 +41,12 @@ description = "Classes Without Boilerplate"
name = "attrs"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "19.3.0"
+version = "20.1.0"
[package.extras]
-azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
-dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
-docs = ["sphinx", "zope.interface"]
-tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
+dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
[[package]]
category = "dev"
@@ -89,18 +88,24 @@ description = "The uncompromising code formatter."
name = "black"
optional = false
python-versions = ">=3.6"
-version = "19.10b0"
+version = "20.8b1"
[package.dependencies]
appdirs = "*"
-attrs = ">=18.1.0"
-click = ">=6.5"
+click = ">=7.1.2"
+mypy-extensions = ">=0.4.3"
pathspec = ">=0.6,<1"
-regex = "*"
-toml = ">=0.9.4"
+regex = ">=2020.1.8"
+toml = ">=0.10.1"
typed-ast = ">=1.4.0"
+typing-extensions = ">=3.7.4"
+
+[package.dependencies.dataclasses]
+python = "<3.7"
+version = ">=0.6"
[package.extras]
+colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
@@ -202,7 +207,7 @@ description = "Allows you to maintain all the necessary cruft for packaging and
name = "cruft"
optional = false
python-versions = ">=3.6,<4.0"
-version = "2.2.0"
+version = "2.3.0"
[package.dependencies]
click = ">=7.1.2,<8.0.0"
@@ -269,10 +274,10 @@ description = "An example plugin that modifies isort formatting using black."
name = "example-isort-formatting-plugin"
optional = false
python-versions = ">=3.6,<4.0"
-version = "0.0.1"
+version = "0.0.2"
[package.dependencies]
-black = ">=19.10b0,<20.0"
+black = ">=20.08b1,<21.0"
isort = ">=5.1.4,<6.0.0"
[[package]]
@@ -417,7 +422,7 @@ description = "Chromium HSTS Preload list as a Python package and updated daily"
name = "hstspreload"
optional = false
python-versions = ">=3.6"
-version = "2020.8.12"
+version = "2020.8.25"
[[package]]
category = "dev"
@@ -475,17 +480,19 @@ description = "A library for property-based testing"
name = "hypothesis"
optional = false
python-versions = ">=3.5.2"
-version = "5.24.4"
+version = "5.29.3"
[package.dependencies]
attrs = ">=19.2.0"
sortedcontainers = ">=2.1.0,<3.0.0"
[package.extras]
-all = ["django (>=2.2)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "numpy (>=1.9.0)", "pandas (>=0.19)", "pytest (>=4.3)", "python-dateutil (>=1.4)", "pytz (>=2014.1)"]
+all = ["black (>=19.10b0)", "click (>=7.0)", "django (>=2.2)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "numpy (>=1.9.0)", "pandas (>=0.19)", "pytest (>=4.3)", "python-dateutil (>=1.4)", "pytz (>=2014.1)"]
+cli = ["click (>=7.0)", "black (>=19.10b0)"]
dateutil = ["python-dateutil (>=1.4)"]
django = ["pytz (>=2014.1)", "django (>=2.2)"]
dpcontracts = ["dpcontracts (>=0.4)"]
+ghostwriter = ["black (>=19.10b0)"]
lark = ["lark-parser (>=0.6.5)"]
numpy = ["numpy (>=1.9.0)"]
pandas = ["pandas (>=0.19)"]
@@ -513,7 +520,7 @@ description = "Hypothesis strategies for generating Python programs, something l
name = "hypothesmith"
optional = false
python-versions = ">=3.6"
-version = "0.1.3"
+version = "0.1.4"
[package.dependencies]
hypothesis = ">=5.23.7"
@@ -660,7 +667,7 @@ description = "A concrete syntax tree with AST-like properties for Python 3.5, 3
name = "libcst"
optional = false
python-versions = ">=3.6"
-version = "0.3.9"
+version = "0.3.10"
[package.dependencies]
pyyaml = ">=5.2"
@@ -680,7 +687,7 @@ description = "Python LiveReload is an awesome tool for web developers"
name = "livereload"
optional = false
python-versions = "*"
-version = "2.6.2"
+version = "2.6.3"
[package.dependencies]
six = "*"
@@ -782,7 +789,7 @@ description = "A Material Design theme for MkDocs"
name = "mkdocs-material"
optional = false
python-versions = "*"
-version = "5.5.6"
+version = "5.5.9"
[package.dependencies]
Pygments = ">=2.4"
@@ -1153,7 +1160,7 @@ description = "Python docstring style checker"
name = "pydocstyle"
optional = false
python-versions = ">=3.5"
-version = "5.0.2"
+version = "5.1.0"
[package.dependencies]
snowballstemmer = "*"
@@ -1525,7 +1532,7 @@ description = "Typer, build great CLIs. Easy to code. Based on Python type hints
name = "typer"
optional = false
python-versions = ">=3.6"
-version = "0.3.1"
+version = "0.3.2"
[package.dependencies]
click = ">=7.1.1,<7.2.0"
@@ -1534,7 +1541,7 @@ click = ">=7.1.1,<7.2.0"
all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)"]
dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)"]
doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=5.4.0,<6.0.0)", "markdown-include (>=0.5.1,<0.6.0)"]
-test = ["shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (0.782)", "black (>=19.10b0,<20.0b0)", "isort (>=5.0.6,<6.0.0)"]
+test = ["pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (0.782)", "black (>=19.10b0,<20.0b0)", "isort (>=5.0.6,<6.0.0)", "shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)"]
[[package]]
category = "dev"
@@ -1542,7 +1549,7 @@ description = "Backported and Experimental Type Hints for Python 3.5+"
name = "typing-extensions"
optional = false
python-versions = "*"
-version = "3.7.4.2"
+version = "3.7.4.3"
[[package]]
category = "dev"
@@ -1653,7 +1660,7 @@ pipfile_deprecated_finder = ["pipreqs", "tomlkit", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
[metadata]
-content-hash = "5463d8238a63216a861fd6d75b1a86842f2a1ab3574a5ecb8dbe8d9183b6a874"
+content-hash = "c366831acb4de815d36de7d6072c341ae3a1a1cca409490b5c81fe1159977597"
python-versions = "^3.6"
[metadata.files]
@@ -1666,16 +1673,16 @@ appnope = [
{file = "appnope-0.1.0.tar.gz", hash = "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"},
]
arrow = [
- {file = "arrow-0.15.8-py2.py3-none-any.whl", hash = "sha256:271b8e05174d48e50324ed0dc5d74796c839c7e579a4f21cf1a7394665f9e94f"},
- {file = "arrow-0.15.8.tar.gz", hash = "sha256:edc31dc051db12c95da9bac0271cd1027b8e36912daf6d4580af53b23e62721a"},
+ {file = "arrow-0.16.0-py2.py3-none-any.whl", hash = "sha256:98184d8dd3e5d30b96c2df4596526f7de679ccb467f358b82b0f686436f3a6b8"},
+ {file = "arrow-0.16.0.tar.gz", hash = "sha256:92aac856ea5175c804f7ccb96aca4d714d936f1c867ba59d747a8096ec30e90a"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
- {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
- {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
+ {file = "attrs-20.1.0-py2.py3-none-any.whl", hash = "sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff"},
+ {file = "attrs-20.1.0.tar.gz", hash = "sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a"},
]
backcall = [
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
@@ -1690,8 +1697,8 @@ binaryornot = [
{file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"},
]
black = [
- {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"},
- {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"},
+ {file = "black-20.8b1-py3-none-any.whl", hash = "sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b"},
+ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
]
cached-property = [
{file = "cached-property-1.5.1.tar.gz", hash = "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"},
@@ -1760,8 +1767,8 @@ coverage = [
{file = "coverage-5.2.1.tar.gz", hash = "sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b"},
]
cruft = [
- {file = "cruft-2.2.0-py3-none-any.whl", hash = "sha256:da3dc9ee84dea1a4ea161b5d0fa86b11ecb77480eb22b4d776572f95a96ae9fc"},
- {file = "cruft-2.2.0.tar.gz", hash = "sha256:9365ae8547bf2297c9e88dec3b28a16bd4491c4931f417b8c3c1fd8140b1e0fd"},
+ {file = "cruft-2.3.0-py3-none-any.whl", hash = "sha256:ca973c1ca9e4add9893483dbce02cd8930e105f8940afe0d087a14b70c6068de"},
+ {file = "cruft-2.3.0.tar.gz", hash = "sha256:7c0f7682765e76fcf31adf877ea6f74372a0ab9554d8f8d6766e8e0413730e52"},
]
dataclasses = [
{file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
@@ -1783,8 +1790,8 @@ dparse = [
{file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"},
]
example-isort-formatting-plugin = [
- {file = "example_isort_formatting_plugin-0.0.1-py3-none-any.whl", hash = "sha256:3c4bd66eb457480daa1320e2e1ef4160e639f2629315cfc54830b2613aa823bc"},
- {file = "example_isort_formatting_plugin-0.0.1.tar.gz", hash = "sha256:2666045920accaa0c3f7e60e85369c408658faef50c6c8971519562a14b7e7d8"},
+ {file = "example_isort_formatting_plugin-0.0.2-py3-none-any.whl", hash = "sha256:ce428ab5deb4719e4bec56eae63978ff2d9c20dc2c2aa7cc39ece61044153db7"},
+ {file = "example_isort_formatting_plugin-0.0.2.tar.gz", hash = "sha256:8cb6401c9efe2f97ba3e776439cb647ee964dc7880bd9790b0324be2c7a55907"},
]
example-shared-isort-profile = [
{file = "example_shared_isort_profile-0.0.1-py3-none-any.whl", hash = "sha256:3fa3e2d093e68285fc7893704b727791ed3e0969d07bdd2733e366303d1a2582"},
@@ -1850,8 +1857,8 @@ hpack = [
{file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"},
]
hstspreload = [
- {file = "hstspreload-2020.8.12-py3-none-any.whl", hash = "sha256:64f4441066d5544873faccf2e0b5757c6670217d34dc31d362ca2977f44604ff"},
- {file = "hstspreload-2020.8.12.tar.gz", hash = "sha256:3f5c324b1eb9d924e32ffeb5fe265b879806b6e346b765f57566410344f4b41e"},
+ {file = "hstspreload-2020.8.25-py3-none-any.whl", hash = "sha256:c96401eca4669340b423abd711d2d5d03ddf0685461f95e9cfe500d5e9acf3d2"},
+ {file = "hstspreload-2020.8.25.tar.gz", hash = "sha256:3129613419c13ea62411ec7375d79840e28004cbb6a585909ddcbeee401bea14"},
]
httpcore = [
{file = "httpcore-0.9.1-py3-none-any.whl", hash = "sha256:9850fe97a166a794d7e920590d5ec49a05488884c9fc8b5dba8561effab0c2a0"},
@@ -1870,16 +1877,16 @@ hyperframe = [
{file = "hyperframe-5.2.0.tar.gz", hash = "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"},
]
hypothesis = [
- {file = "hypothesis-5.24.4-py3-none-any.whl", hash = "sha256:4d86b1d7bbec9caffc49dbd0037fa549c456d08aa99e468dbce5871fdbf2167b"},
- {file = "hypothesis-5.24.4.tar.gz", hash = "sha256:c3ac78ae0cebe7098bc00d8b3e16b65640c97593cceb64c9eb2331ac282fa607"},
+ {file = "hypothesis-5.29.3-py3-none-any.whl", hash = "sha256:07b865184494a64cf2e18090ecfb876c97d303973c2f97139a07be361b0c3a28"},
+ {file = "hypothesis-5.29.3.tar.gz", hash = "sha256:e6cf92a94a5108d326e45df5a2b256dc0d57f9663d13efdebcadcfbad9accc31"},
]
hypothesis-auto = [
{file = "hypothesis-auto-1.1.4.tar.gz", hash = "sha256:5e2c2fb09dc09842512d80630bb792359a1d33d2c0473ad47ee23da0be9e32b1"},
{file = "hypothesis_auto-1.1.4-py3-none-any.whl", hash = "sha256:fea8560c4522c0fd490ed8cc17e420b95dabebb11b9b334c59bf2d768839015f"},
]
hypothesmith = [
- {file = "hypothesmith-0.1.3-py3-none-any.whl", hash = "sha256:aceb0feae6029eeaa4502cd763debec313b1aec43db8805958e5a81036c3e483"},
- {file = "hypothesmith-0.1.3.tar.gz", hash = "sha256:4cf1e2ce43407ad1c9c2ab5a940760db3ea8c3c29134435bc0600f33a4a32de4"},
+ {file = "hypothesmith-0.1.4-py3-none-any.whl", hash = "sha256:bc45f45808078d2bbe6c3806af3b3604bde35624964fcc6b849cecadf254d3a9"},
+ {file = "hypothesmith-0.1.4.tar.gz", hash = "sha256:5628fb1a06233c70751105635bc3cee789c82358041b4518c2cab5300e73cd65"},
]
idna = [
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
@@ -1931,11 +1938,11 @@ lark-parser = [
{file = "lark-parser-0.9.0.tar.gz", hash = "sha256:9e7589365d6b6de1cca40b0eaec31104a3fb96a37a11a9dfd5098e95b50aa6cd"},
]
libcst = [
- {file = "libcst-0.3.9-py3-none-any.whl", hash = "sha256:ca1744d9344f51c2c9226d0472a5a3096f8b39e4fe38441ebc2ba26babd00688"},
- {file = "libcst-0.3.9.tar.gz", hash = "sha256:b5185c84f0e4a38409aac59f53a71741bec8c1b1159c874996b3266daafe63e5"},
+ {file = "libcst-0.3.10-py3-none-any.whl", hash = "sha256:e9395d952a490e6fc160f2bea8df139bdf1fdcb3fe4c01b88893da279eff00de"},
+ {file = "libcst-0.3.10.tar.gz", hash = "sha256:b0dccbfc1cff7bfa3214980e1d2d90b4e00b2fed002d4b276a8a411217738df3"},
]
livereload = [
- {file = "livereload-2.6.2.tar.gz", hash = "sha256:d1eddcb5c5eb8d2ca1fa1f750e580da624c0f7fcb734aa5780dc81b7dcbd89be"},
+ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]
lunr = [
{file = "lunr-0.5.8-py2.py3-none-any.whl", hash = "sha256:aab3f489c4d4fab4c1294a257a30fec397db56f0a50273218ccc3efdbf01d6ca"},
@@ -1993,8 +2000,8 @@ mkdocs = [
{file = "mkdocs-1.1.2.tar.gz", hash = "sha256:f0b61e5402b99d7789efa032c7a74c90a20220a9c81749da06dbfbcbd52ffb39"},
]
mkdocs-material = [
- {file = "mkdocs-material-5.5.6.tar.gz", hash = "sha256:08af704cdfaf2a07fd5f135831df9106c589bfd422f9ef026929981433e80b9d"},
- {file = "mkdocs_material-5.5.6-py2.py3-none-any.whl", hash = "sha256:29f3637d5fb758d076344b026a67b8e316743d0c2da84b9303383f6cbeabfd5f"},
+ {file = "mkdocs-material-5.5.9.tar.gz", hash = "sha256:37d60947993b939318945c170c7b3a153646976badf57648fd70befc3b54c830"},
+ {file = "mkdocs_material-5.5.9-py2.py3-none-any.whl", hash = "sha256:c8cb3c8c44bf10ed7ac1eb568d93a4346efe03fee2994b6a80e96559421cec49"},
]
mkdocs-material-extensions = [
{file = "mkdocs-material-extensions-1.0.tar.gz", hash = "sha256:17d7491e189af75700310b7ec33c6c48a22060b8b445001deca040cb60471cde"},
@@ -2162,8 +2169,8 @@ pydantic = [
{file = "pydantic-1.6.1.tar.gz", hash = "sha256:54122a8ed6b75fe1dd80797f8251ad2063ea348a03b77218d73ea9fe19bd4e73"},
]
pydocstyle = [
- {file = "pydocstyle-5.0.2-py3-none-any.whl", hash = "sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586"},
- {file = "pydocstyle-5.0.2.tar.gz", hash = "sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5"},
+ {file = "pydocstyle-5.1.0-py3-none-any.whl", hash = "sha256:08374b9d4d2b7164bae50b71bb24eb0d74a56b309029d5d502264092fa7db0c3"},
+ {file = "pydocstyle-5.1.0.tar.gz", hash = "sha256:4ca3c7736d36f92bb215dd74ef84ac3d6c146edd795c7afc5154c10f1eb1f65a"},
]
pyflakes = [
{file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
@@ -2339,13 +2346,13 @@ typed-ast = [
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
]
typer = [
- {file = "typer-0.3.1-py3-none-any.whl", hash = "sha256:778a9695e68eb26a0a0321ca9d3f1a8809783f6f083549b84c67bc2385bf014e"},
- {file = "typer-0.3.1.tar.gz", hash = "sha256:85b1e5f6369750b4220ad548ea30b881a2c502504e5a0d849db9bdf6b487bdbf"},
+ {file = "typer-0.3.2-py3-none-any.whl", hash = "sha256:ba58b920ce851b12a2d790143009fa00ac1d05b3ff3257061ff69dbdfc3d161b"},
+ {file = "typer-0.3.2.tar.gz", hash = "sha256:5455d750122cff96745b0dec87368f56d023725a7ebc9d2e54dd23dc86816303"},
]
typing-extensions = [
- {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
- {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
- {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
+ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
+ {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
+ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
typing-inspect = [
{file = "typing_inspect-0.6.0-py2-none-any.whl", hash = "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0"},
diff --git a/pyproject.toml b/pyproject.toml
index 1e3e1e81..5f648f40 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,7 +54,7 @@ vulture = "^1.0"
bandit = "^1.6"
safety = "^1.8"
flake8-bugbear = "^19.8"
-black = {version = "^19.10b0", allow-prereleases = true}
+black = {version = "^20.08b1", allow-prereleases = true}
mypy = "^0.761.0"
ipython = "^7.7"
pytest = "^5.0"
@@ -79,7 +79,7 @@ smmap2 = "^3.0.1"
gitdb2 = "^4.0.2"
httpx = "^0.13.3"
example_shared_isort_profile = "^0.0.1"
-example_isort_formatting_plugin = "^0.0.1"
+example_isort_formatting_plugin = "^0.0.2"
[tool.poetry.scripts]
isort = "isort.main:main"
diff --git a/tests/integration/test_projects_using_isort.py b/tests/integration/test_projects_using_isort.py
index 036b604c..bb7d61de 100644
--- a/tests/integration/test_projects_using_isort.py
+++ b/tests/integration/test_projects_using_isort.py
@@ -151,3 +151,22 @@ def test_pillow(tmpdir):
["git", "clone", "--depth", "1", "https://github.com/python-pillow/Pillow.git", str(tmpdir)]
)
main(["--check-only", "--diff", str(tmpdir), "--skip", "tests"])
+
+
+def test_attrs(tmpdir):
+ check_call(
+ ["git", "clone", "--depth", "1", "https://github.com/python-attrs/attrs.git", str(tmpdir)]
+ )
+ main(
+ [
+ "--check-only",
+ "--diff",
+ str(tmpdir),
+ "--skip",
+ "tests",
+ "--ext",
+ "py",
+ "--skip",
+ "_compat.py",
+ ]
+ )
diff --git a/tests/integration/test_setting_combinations.py b/tests/integration/test_setting_combinations.py
new file mode 100644
index 00000000..7e236f9f
--- /dev/null
+++ b/tests/integration/test_setting_combinations.py
@@ -0,0 +1,180 @@
+from typing import get_type_hints
+
+import hypothesis
+from hypothesis import strategies as st
+
+import isort
+
+
+def _as_config(kw) -> isort.Config:
+ kw["atomic"] = False
+ if "wrap_length" in kw and "line_length" in kw:
+ kw["wrap_length"], kw["line_length"] = sorted([kw["wrap_length"], kw["line_length"]])
+ try:
+ return isort.Config(**kw)
+ except ValueError:
+ kw["wrap_length"] = 0
+ return isort.Config(**kw)
+
+
+def configs() -> st.SearchStrategy[isort.Config]:
+ """Generate arbitrary Config objects."""
+ skip = {
+ "line_ending",
+ "sections",
+ "known_standard_library",
+ "known_future_library",
+ "known_third_party",
+ "known_first_party",
+ "known_local_folder",
+ "extra_standard_library",
+ "forced_separate",
+ "lines_after_imports",
+ "add_imports",
+ "lines_between_sections",
+ "lines_between_types",
+ "sources",
+ "virtual_env",
+ "conda_env",
+ "directory",
+ "formatter",
+ "formatting_function",
+ "comment_prefix",
+ "atomic",
+ "skip",
+ "src_paths",
+ }
+ inferred_kwargs = {
+ k: st.from_type(v)
+ for k, v in get_type_hints(isort.settings._Config).items()
+ if k not in skip
+ }
+ specific = {
+ "line_length": st.integers(0, 200),
+ "wrap_length": st.integers(0, 200),
+ "indent": st.integers(0, 20).map(lambda n: n * " "),
+ "default_section": st.sampled_from(sorted(isort.settings.KNOWN_SECTION_MAPPING)),
+ "force_grid_wrap": st.integers(0, 20),
+ "profile": st.sampled_from(sorted(isort.settings.profiles)),
+ "py_version": st.sampled_from(("auto",) + isort.settings.VALID_PY_TARGETS),
+ }
+ kwargs = {**inferred_kwargs, **specific}
+ return st.fixed_dictionaries({}, optional=kwargs).map(_as_config)
+
+
+st.register_type_strategy(isort.Config, configs())
+
+CODE_SNIPPET = """
+'''Taken from bottle.py
+
+Copyright (c) 2009-2018, Marcel Hellkamp.
+License: MIT (see LICENSE for details)
+'''
+# Lots of stdlib and builtin differences.
+if py3k:
+ import http.client as httplib
+ import _thread as thread
+ from urllib.parse import urljoin, SplitResult as UrlSplitResult
+ from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
+ urlunquote = functools.partial(urlunquote, encoding='latin1')
+ from http.cookies import SimpleCookie, Morsel, CookieError
+ from collections.abc import MutableMapping as DictMixin
+ import pickle # comment number 2
+ from io import BytesIO
+ import configparser
+
+ basestring = str
+ unicode = str
+ json_loads = lambda s: json_lds(touni(s))
+ callable = lambda x: hasattr(x, '__call__')
+ imap = map
+
+ def _raise(*a):
+ raise a[0](a[1]).with_traceback(a[2])
+else: # 2.x
+ import httplib
+ import thread
+ from urlparse import urljoin, SplitResult as UrlSplitResult
+ from urllib import urlencode, quote as urlquote, unquote as urlunquote
+ from Cookie import SimpleCookie, Morsel, CookieError
+ from itertools import imap
+ import cPickle as pickle
+ from StringIO import StringIO as BytesIO
+ import ConfigParser as configparser # commentnumberone
+ from collections import MutableMapping as DictMixin
+ unicode = unicode
+ json_loads = json_lds
+ exec(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
+"""
+SHOULD_RETAIN = [
+ """'''Taken from bottle.py
+
+Copyright (c) 2009-2018, Marcel Hellkamp.
+License: MIT (see LICENSE for details)
+'''""",
+ "# Lots of stdlib and builtin differences.",
+ "if py3k:",
+ "http.client",
+ "_thread",
+ "urllib.parse",
+ "urlencode",
+ "urlunquote = functools.partial(urlunquote, encoding='latin1')",
+ "http.cookies",
+ "SimpleCookie",
+ "collections.abc",
+ "pickle",
+ "comment number 2",
+ "io",
+ "configparser",
+ """basestring = str
+ unicode = str
+ json_loads = lambda s: json_lds(touni(s))
+ callable = lambda x: hasattr(x, '__call__')
+ imap = map
+
+ def _raise(*a):
+ raise a[0](a[1]).with_traceback(a[2])
+else: # 2.x
+""",
+ "httplib",
+ "thread",
+ "urlparse",
+ "urllib",
+ "Cookie",
+ "itertools",
+ "cPickle",
+ "StringIO",
+ "ConfigParser",
+ "commentnumberone",
+ "collections",
+ """unicode = unicode
+ json_loads = json_lds
+ exec(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))""",
+]
+
+
+@hypothesis.given(
+ config=st.from_type(isort.Config),
+ disregard_skip=st.booleans(),
+)
+def test_isort_is_idempotent(config: isort.Config, disregard_skip: bool) -> None:
+ try:
+ result = isort.code(CODE_SNIPPET, config=config, disregard_skip=disregard_skip)
+ result = isort.code(result, config=config, disregard_skip=disregard_skip)
+ assert result == isort.code(result, config=config, disregard_skip=disregard_skip)
+ except ValueError:
+ pass
+
+
+@hypothesis.given(
+ config=st.from_type(isort.Config),
+ disregard_skip=st.booleans(),
+)
+def test_isort_doesnt_lose_imports_or_comments(config: isort.Config, disregard_skip: bool) -> None:
+ result = isort.code(CODE_SNIPPET, config=config, disregard_skip=disregard_skip)
+ for should_be_retained in SHOULD_RETAIN:
+ if should_be_retained not in result:
+ if config.ignore_comments and should_be_retained.startswith("comment"):
+ continue
+
+ assert should_be_retained in result
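
Note on the new integration test above: registering a strategy with st.register_type_strategy lets any Hypothesis test draw arbitrary isort.Config objects via st.from_type. A minimal sketch of reusing it (assumes the registration in test_setting_combinations.py has already been imported):

    import hypothesis
    from hypothesis import strategies as st

    import isort

    @hypothesis.given(config=st.from_type(isort.Config))
    def test_code_never_crashes(config: isort.Config) -> None:
        try:
            isort.code("import sys\nimport os\n", config=config)
        except ValueError:
            # Invalid setting combinations may raise ValueError,
            # matching the handling in the tests above.
            pass
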
diff --git a/tests/unit/profiles/__init__.py b/tests/unit/profiles/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/profiles/__init__.py
diff --git a/tests/unit/profiles/test_attrs.py b/tests/unit/profiles/test_attrs.py
new file mode 100644
index 00000000..c08f184e
--- /dev/null
+++ b/tests/unit/profiles/test_attrs.py
@@ -0,0 +1,102 @@
+from functools import partial
+
+from ..utils import isort_test
+
+attrs_isort_test = partial(isort_test, profile="attrs")
+
+
+def test_attrs_code_snippet_one():
+ attrs_isort_test(
+ """from __future__ import absolute_import, division, print_function
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "20.2.0.dev0"
+"""
+ )
+
+
+def test_attrs_code_snippet_two():
+ attrs_isort_test(
+ """from __future__ import absolute_import, division, print_function
+
+import copy
+import linecache
+import sys
+import threading
+import uuid
+import warnings
+
+from operator import itemgetter
+
+from . import _config, setters
+from ._compat import (
+ PY2,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+"""
+ )
+
+
+def test_attrs_code_snippet_three():
+ attrs_isort_test(
+ '''
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "matches_re",
+ "optional",
+ "provides",
+]
+'''
+ )
diff --git a/tests/unit/profiles/test_black.py b/tests/unit/profiles/test_black.py
new file mode 100644
index 00000000..0e54e706
--- /dev/null
+++ b/tests/unit/profiles/test_black.py
@@ -0,0 +1,370 @@
+import black
+
+import isort
+
+
+def black_format(code: str, is_pyi: bool = False, line_length: int = 88) -> str:
+ """Formats the provided code snippet using black"""
+ try:
+ return black.format_file_contents(
+ code,
+ fast=True,
+ mode=black.FileMode(
+ is_pyi=is_pyi,
+ line_length=line_length,
+ ),
+ )
+ except black.NothingChanged:
+ return code
+
+
+def black_test(code: str, expected_output: str = ""):
+ """Tests that the given code:
+ - Behaves the same when formatted multiple times with isort.
+ - Agrees with black formatting.
+ - Matches the desired output or itself if none is provided.
+ """
+ expected_output = expected_output or code
+
+ # output should stay consistent over multiple runs
+ output = isort.code(code, profile="black")
+ assert output == isort.code(code, profile="black")
+
+ # output should agree with black
+ black_output = black_format(output)
+ assert output == black_output
+
+ # output should match expected output
+ assert output == expected_output
+
+
+def test_black_snippet_one():
+ """Test consistent code formatting between isort and black for code snippet from black repository.
+ See: https://github.com/psf/black/blob/master/tests/test_black.py
+ """
+ black_test(
+ """#!/usr/bin/env python3
+import asyncio
+import logging
+from concurrent.futures import ThreadPoolExecutor
+from contextlib import contextmanager
+from dataclasses import replace
+from functools import partial
+import inspect
+from io import BytesIO, TextIOWrapper
+import os
+from pathlib import Path
+from platform import system
+import regex as re
+import sys
+from tempfile import TemporaryDirectory
+import types
+from typing import (
+ Any,
+ BinaryIO,
+ Callable,
+ Dict,
+ Generator,
+ List,
+ Tuple,
+ Iterator,
+ TypeVar,
+)
+import unittest
+from unittest.mock import patch, MagicMock
+
+import click
+from click import unstyle
+from click.testing import CliRunner
+
+import black
+from black import Feature, TargetVersion
+
+try:
+ import blackd
+ from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
+ from aiohttp import web
+except ImportError:
+ has_blackd_deps = False
+else:
+ has_blackd_deps = True
+
+from pathspec import PathSpec
+
+# Import other test classes
+from .test_primer import PrimerCLITests # noqa: F401
+
+
+DEFAULT_MODE = black.FileMode(experimental_string_processing=True)
+""",
+ """#!/usr/bin/env python3
+import asyncio
+import inspect
+import logging
+import os
+import sys
+import types
+import unittest
+from concurrent.futures import ThreadPoolExecutor
+from contextlib import contextmanager
+from dataclasses import replace
+from functools import partial
+from io import BytesIO, TextIOWrapper
+from pathlib import Path
+from platform import system
+from tempfile import TemporaryDirectory
+from typing import (
+ Any,
+ BinaryIO,
+ Callable,
+ Dict,
+ Generator,
+ Iterator,
+ List,
+ Tuple,
+ TypeVar,
+)
+from unittest.mock import MagicMock, patch
+
+import black
+import click
+import regex as re
+from black import Feature, TargetVersion
+from click import unstyle
+from click.testing import CliRunner
+
+try:
+ import blackd
+ from aiohttp import web
+ from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
+except ImportError:
+ has_blackd_deps = False
+else:
+ has_blackd_deps = True
+
+from pathspec import PathSpec
+
+# Import other test classes
+from .test_primer import PrimerCLITests # noqa: F401
+
+DEFAULT_MODE = black.FileMode(experimental_string_processing=True)
+""",
+ )
+
+
+def test_black_snippet_two():
+ """Test consistent code formatting between isort and black for code snippet from black repository.
+ See: https://github.com/psf/black/blob/master/tests/test_primer.py
+ """
+ black_test(
+ '''#!/usr/bin/env python3
+
+import asyncio
+import sys
+import unittest
+from contextlib import contextmanager
+from copy import deepcopy
+from io import StringIO
+from os import getpid
+from pathlib import Path
+from platform import system
+from subprocess import CalledProcessError
+from tempfile import TemporaryDirectory, gettempdir
+from typing import Any, Callable, Generator, Iterator, Tuple
+from unittest.mock import Mock, patch
+
+from click.testing import CliRunner
+
+from black_primer import cli, lib
+
+
+EXPECTED_ANALYSIS_OUTPUT = """\
+-- primer results 📊 --
+68 / 69 succeeded (98.55%) ✅
+1 / 69 FAILED (1.45%) 💩
+ - 0 projects disabled by config
+ - 0 projects skipped due to Python version
+ - 0 skipped due to long checkout
+Failed projects:
+## black:
+ - Returned 69
+ - stdout:
+Black didn't work
+"""
+''',
+ '''#!/usr/bin/env python3
+
+import asyncio
+import sys
+import unittest
+from contextlib import contextmanager
+from copy import deepcopy
+from io import StringIO
+from os import getpid
+from pathlib import Path
+from platform import system
+from subprocess import CalledProcessError
+from tempfile import TemporaryDirectory, gettempdir
+from typing import Any, Callable, Generator, Iterator, Tuple
+from unittest.mock import Mock, patch
+
+from black_primer import cli, lib
+from click.testing import CliRunner
+
+EXPECTED_ANALYSIS_OUTPUT = """-- primer results 📊 --
+68 / 69 succeeded (98.55%) ✅
+1 / 69 FAILED (1.45%) 💩
+ - 0 projects disabled by config
+ - 0 projects skipped due to Python version
+ - 0 skipped due to long checkout
+Failed projects:
+## black:
+ - Returned 69
+ - stdout:
+Black didn't work
+"""
+''',
+ )
+
+
+def test_black_snippet_three():
+ """Test consistent code formatting between isort and black for code snippet from black repository.
+ See: https://github.com/psf/black/blob/master/src/black/__init__.py
+ """
+ black_test(
+ """import ast
+import asyncio
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
+from contextlib import contextmanager
+from datetime import datetime
+from enum import Enum
+from functools import lru_cache, partial, wraps
+import io
+import itertools
+import logging
+from multiprocessing import Manager, freeze_support
+import os
+from pathlib import Path
+import pickle
+import regex as re
+import signal
+import sys
+import tempfile
+import tokenize
+import traceback
+from typing import (
+ Any,
+ Callable,
+ Collection,
+ Dict,
+ Generator,
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Sequence,
+ Set,
+ Sized,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ TYPE_CHECKING,
+)
+from typing_extensions import Final
+from mypy_extensions import mypyc_attr
+
+from appdirs import user_cache_dir
+from dataclasses import dataclass, field, replace
+import click
+import toml
+from typed_ast import ast3, ast27
+from pathspec import PathSpec
+
+# lib2to3 fork
+from blib2to3.pytree import Node, Leaf, type_repr
+from blib2to3 import pygram, pytree
+from blib2to3.pgen2 import driver, token
+from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pgen2.parse import ParseError
+
+from _black_version import version as __version__
+
+if TYPE_CHECKING:
+ import colorama # noqa: F401
+
+DEFAULT_LINE_LENGTH = 88
+""",
+ """import ast
+import asyncio
+import io
+import itertools
+import logging
+import os
+import pickle
+import signal
+import sys
+import tempfile
+import tokenize
+import traceback
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor
+from contextlib import contextmanager
+from dataclasses import dataclass, field, replace
+from datetime import datetime
+from enum import Enum
+from functools import lru_cache, partial, wraps
+from multiprocessing import Manager, freeze_support
+from pathlib import Path
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Collection,
+ Dict,
+ Generator,
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Sequence,
+ Set,
+ Sized,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+
+import click
+import regex as re
+import toml
+from _black_version import version as __version__
+from appdirs import user_cache_dir
+from blib2to3 import pygram, pytree
+from blib2to3.pgen2 import driver, token
+from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pgen2.parse import ParseError
+
+# lib2to3 fork
+from blib2to3.pytree import Leaf, Node, type_repr
+from mypy_extensions import mypyc_attr
+from pathspec import PathSpec
+from typed_ast import ast3, ast27
+from typing_extensions import Final
+
+if TYPE_CHECKING:
+ import colorama # noqa: F401
+
+DEFAULT_LINE_LENGTH = 88
+""",
+ )
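
Note on the black profile tests above: per the isort documentation, profile="black" is shorthand for a set of black-compatibility options. A hedged equivalent without the profile shortcut (settings as documented for isort 5; line_length may be overridden per project):

    import isort

    output = isort.code(
        "from typing import Any, Dict, List\n",
        multi_line_output=3,
        include_trailing_comma=True,
        force_grid_wrap=0,
        use_parentheses=True,
        ensure_newline_before_comments=True,
        line_length=88,
    )
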
diff --git a/tests/unit/profiles/test_django.py b/tests/unit/profiles/test_django.py
new file mode 100644
index 00000000..c2202717
--- /dev/null
+++ b/tests/unit/profiles/test_django.py
@@ -0,0 +1,122 @@
+from functools import partial
+
+from ..utils import isort_test
+
+django_isort_test = partial(isort_test, profile="django", known_first_party=["django"])
+
+
+def test_django_snippet_one():
+ django_isort_test(
+ """import copy
+import inspect
+import warnings
+from functools import partialmethod
+from itertools import chain
+
+from django.apps import apps
+from django.conf import settings
+from django.core import checks
+from django.core.exceptions import (
+ NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
+ ObjectDoesNotExist, ValidationError,
+)
+from django.db import (
+ DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection,
+ connections, router, transaction,
+)
+from django.db.models import (
+ NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value,
+)
+from django.db.models.constants import LOOKUP_SEP
+from django.db.models.constraints import CheckConstraint
+from django.db.models.deletion import CASCADE, Collector
+from django.db.models.fields.related import (
+ ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation,
+)
+from django.db.models.functions import Coalesce
+from django.db.models.manager import Manager
+from django.db.models.options import Options
+from django.db.models.query import Q
+from django.db.models.signals import (
+ class_prepared, post_init, post_save, pre_init, pre_save,
+)
+from django.db.models.utils import make_model_tuple
+from django.utils.encoding import force_str
+from django.utils.hashable import make_hashable
+from django.utils.text import capfirst, get_text_list
+from django.utils.translation import gettext_lazy as _
+from django.utils.version import get_version
+
+
+class Deferred:
+ def __repr__(self):
+ return '<Deferred field>'
+
+ def __str__(self):
+ return '<Deferred field>'"""
+ )
+
+
+def test_django_snippet_two():
+ django_isort_test(
+ '''from django.utils.version import get_version
+
+VERSION = (3, 2, 0, 'alpha', 0)
+
+__version__ = get_version(VERSION)
+
+
+def setup(set_prefix=True):
+ """
+ Configure the settings (this happens as a side effect of accessing the
+ first setting), configure logging and populate the app registry.
+ Set the thread-local urlresolvers script prefix if `set_prefix` is True.
+ """
+ from django.apps import apps
+ from django.conf import settings
+ from django.urls import set_script_prefix
+ from django.utils.log import configure_logging
+
+ configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
+ if set_prefix:
+ set_script_prefix(
+ '/' if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
+ )
+ apps.populate(settings.INSTALLED_APPS)'''
+ )
+
+
+def test_django_snippet_three():
+ django_isort_test(
+ """import cgi
+import codecs
+import copy
+import warnings
+from io import BytesIO
+from itertools import chain
+from urllib.parse import quote, urlencode, urljoin, urlsplit
+
+from django.conf import settings
+from django.core import signing
+from django.core.exceptions import (
+ DisallowedHost, ImproperlyConfigured, RequestDataTooBig,
+)
+from django.core.files import uploadhandler
+from django.http.multipartparser import MultiPartParser, MultiPartParserError
+from django.utils.datastructures import (
+ CaseInsensitiveMapping, ImmutableList, MultiValueDict,
+)
+from django.utils.deprecation import RemovedInDjango40Warning
+from django.utils.encoding import escape_uri_path, iri_to_uri
+from django.utils.functional import cached_property
+from django.utils.http import is_same_domain, limited_parse_qsl
+from django.utils.regex_helper import _lazy_re_compile
+
+from .multipartparser import parse_header
+
+RAISE_ERROR = object()
+
+
+class UnreadablePostError(OSError):
+ pass"""
+ )
diff --git a/tests/unit/profiles/test_google.py b/tests/unit/profiles/test_google.py
new file mode 100644
index 00000000..c558664d
--- /dev/null
+++ b/tests/unit/profiles/test_google.py
@@ -0,0 +1,397 @@
+from functools import partial
+
+from ..utils import isort_test
+
+google_isort_test = partial(isort_test, profile="google")
+
+
+def test_google_code_snippet_one():
+ google_isort_test(
+ '''# coding=utf-8
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""JAX user-facing transformations and utilities.
+The transformations here mostly wrap internal transformations, providing
+convenience flags to control behavior and handling Python containers of
+arguments and outputs. The Python containers handled are pytrees (see
+tree_util.py), which include nested tuples/lists/dicts, where the leaves are
+arrays.
+"""
+
+# flake8: noqa: F401
+import collections
+import functools
+import inspect
+import itertools as it
+import threading
+import weakref
+from typing import Any, Callable, Iterable, List, NamedTuple, Optional, Sequence, Tuple, TypeVar, Union
+from warnings import warn
+
+import numpy as np
+from contextlib import contextmanager, ExitStack
+
+from . import core
+from . import linear_util as lu
+from . import ad_util
+from . import dtypes
+from .core import eval_jaxpr
+from .api_util import (wraps, flatten_fun, apply_flat_fun, flatten_fun_nokwargs,
+ flatten_fun_nokwargs2, argnums_partial, flatten_axes,
+ donation_vector, rebase_donate_argnums)
+from .traceback_util import api_boundary
+from .tree_util import (tree_map, tree_flatten, tree_unflatten, tree_structure,
+ tree_transpose, tree_leaves, tree_multimap,
+ treedef_is_leaf, Partial)
+from .util import (unzip2, curry, partial, safe_map, safe_zip, prod, split_list,
+ extend_name_stack, wrap_name, cache)
+from .lib import xla_bridge as xb
+from .lib import xla_client as xc
+# Unused imports to be exported
+from .lib.xla_bridge import (device_count, local_device_count, devices,
+ local_devices, host_id, host_ids, host_count)
+from .abstract_arrays import ConcreteArray, ShapedArray, raise_to_shaped
+from .interpreters import partial_eval as pe
+from .interpreters import xla
+from .interpreters import pxla
+from .interpreters import ad
+from .interpreters import batching
+from .interpreters import masking
+from .interpreters import invertible_ad as iad
+from .interpreters.invertible_ad import custom_ivjp
+from .custom_derivatives import custom_jvp, custom_vjp
+from .config import flags, config, bool_env
+
+AxisName = Any
+
+# This TypeVar is used below to express the fact that function call signatures
+# are invariant under the jit, vmap, and pmap transformations.
+# Specifically, we statically assert that the return type is invariant.
+# Until PEP-612 is implemented, we cannot express the same invariance for
+# function arguments.
+# Note that the return type annotations will generally not strictly hold
+# in JIT internals, as Tracer values are passed through the function.
+# Should this raise any type errors for the tracing code in future, we can disable
+# type checking in parts of the tracing code, or remove these annotations.
+T = TypeVar("T")
+
+map = safe_map
+zip = safe_zip
+
+FLAGS = flags.FLAGS
+flags.DEFINE_bool("jax_disable_jit", bool_env("JAX_DISABLE_JIT", False),
+ "Disable JIT compilation and just call original Python.")
+
+''',
+ '''# coding=utf-8
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""JAX user-facing transformations and utilities.
+The transformations here mostly wrap internal transformations, providing
+convenience flags to control behavior and handling Python containers of
+arguments and outputs. The Python containers handled are pytrees (see
+tree_util.py), which include nested tuples/lists/dicts, where the leaves are
+arrays.
+"""
+
+# flake8: noqa: F401
+import collections
+from contextlib import ExitStack
+from contextlib import contextmanager
+import functools
+import inspect
+import itertools as it
+import threading
+from typing import (Any, Callable, Iterable, List, NamedTuple, Optional,
+ Sequence, Tuple, TypeVar, Union)
+from warnings import warn
+import weakref
+
+import numpy as np
+
+from . import ad_util
+from . import core
+from . import dtypes
+from . import linear_util as lu
+from .abstract_arrays import ConcreteArray
+from .abstract_arrays import ShapedArray
+from .abstract_arrays import raise_to_shaped
+from .api_util import apply_flat_fun
+from .api_util import argnums_partial
+from .api_util import donation_vector
+from .api_util import flatten_axes
+from .api_util import flatten_fun
+from .api_util import flatten_fun_nokwargs
+from .api_util import flatten_fun_nokwargs2
+from .api_util import rebase_donate_argnums
+from .api_util import wraps
+from .config import bool_env
+from .config import config
+from .config import flags
+from .core import eval_jaxpr
+from .custom_derivatives import custom_jvp
+from .custom_derivatives import custom_vjp
+from .interpreters import ad
+from .interpreters import batching
+from .interpreters import invertible_ad as iad
+from .interpreters.invertible_ad import custom_ivjp
+from .interpreters import masking
+from .interpreters import partial_eval as pe
+from .interpreters import pxla
+from .interpreters import xla
+from .lib import xla_bridge as xb
+# Unused imports to be exported
+from .lib.xla_bridge import device_count
+from .lib.xla_bridge import devices
+from .lib.xla_bridge import host_count
+from .lib.xla_bridge import host_id
+from .lib.xla_bridge import host_ids
+from .lib.xla_bridge import local_device_count
+from .lib.xla_bridge import local_devices
+from .lib import xla_client as xc
+from .traceback_util import api_boundary
+from .tree_util import Partial
+from .tree_util import tree_flatten
+from .tree_util import tree_leaves
+from .tree_util import tree_map
+from .tree_util import tree_multimap
+from .tree_util import tree_structure
+from .tree_util import tree_transpose
+from .tree_util import tree_unflatten
+from .tree_util import treedef_is_leaf
+from .util import cache
+from .util import curry
+from .util import extend_name_stack
+from .util import partial
+from .util import prod
+from .util import safe_map
+from .util import safe_zip
+from .util import split_list
+from .util import unzip2
+from .util import wrap_name
+
+AxisName = Any
+
+# This TypeVar is used below to express the fact that function call signatures
+# are invariant under the jit, vmap, and pmap transformations.
+# Specifically, we statically assert that the return type is invariant.
+# Until PEP-612 is implemented, we cannot express the same invariance for
+# function arguments.
+# Note that the return type annotations will generally not strictly hold
+# in JIT internals, as Tracer values are passed through the function.
+# Should this raise any type errors for the tracing code in future, we can disable
+# type checking in parts of the tracing code, or remove these annotations.
+T = TypeVar("T")
+
+map = safe_map
+zip = safe_zip
+
+FLAGS = flags.FLAGS
+flags.DEFINE_bool("jax_disable_jit", bool_env("JAX_DISABLE_JIT", False),
+ "Disable JIT compilation and just call original Python.")
+
+''',
+ )
+
+
+def test_google_code_snippet_two():
+ google_isort_test(
+ """#!/usr/bin/env python
+# In[ ]:
+# coding: utf-8
+
+###### Searching and Downloading Google Images to the local disk ######
+
+# Import Libraries
+import sys
+version = (3, 0)
+cur_version = sys.version_info
+if cur_version >= version: # If the Current Version of Python is 3.0 or above
+ import urllib.request
+ from urllib.request import Request, urlopen
+ from urllib.request import URLError, HTTPError
+ from urllib.parse import quote
+ import http.client
+ from http.client import IncompleteRead, BadStatusLine
+ http.client._MAXHEADERS = 1000
+else: # If the Current Version of Python is 2.x
+ import urllib2
+ from urllib2 import Request, urlopen
+ from urllib2 import URLError, HTTPError
+ from urllib import quote
+ import httplib
+ from httplib import IncompleteRead, BadStatusLine
+ httplib._MAXHEADERS = 1000
+import time # Importing the time library to check the time of code execution
+import os
+import argparse
+import ssl
+import datetime
+import json
+import re
+import codecs
+import socket""",
+ """#!/usr/bin/env python
+# In[ ]:
+# coding: utf-8
+
+###### Searching and Downloading Google Images to the local disk ######
+
+# Import Libraries
+import sys
+
+version = (3, 0)
+cur_version = sys.version_info
+if cur_version >= version: # If the Current Version of Python is 3.0 or above
+ import http.client
+ from http.client import BadStatusLine
+ from http.client import IncompleteRead
+ from urllib.parse import quote
+ import urllib.request
+ from urllib.request import HTTPError
+ from urllib.request import Request
+ from urllib.request import URLError
+ from urllib.request import urlopen
+ http.client._MAXHEADERS = 1000
+else: # If the Current Version of Python is 2.x
+ from urllib import quote
+
+ import httplib
+ from httplib import BadStatusLine
+ from httplib import IncompleteRead
+ import urllib2
+ from urllib2 import HTTPError
+ from urllib2 import Request
+ from urllib2 import URLError
+ from urllib2 import urlopen
+ httplib._MAXHEADERS = 1000
+import argparse
+import codecs
+import datetime
+import json
+import os
+import re
+import socket
+import ssl
+import time # Importing the time library to check the time of code execution
+""",
+ )
+
+
+def test_google_code_snippet_three():
+ google_isort_test(
+ '''# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Monitoring."""
+# pylint: disable=invalid-name
+# TODO(ochang): Remove V3 from names once all metrics are migrated to
+# stackdriver.
+
+from builtins import object
+from builtins import range
+from builtins import str
+
+import bisect
+import collections
+import functools
+import itertools
+import re
+import six
+import threading
+import time
+
+try:
+ from google.cloud import monitoring_v3
+except (ImportError, RuntimeError):
+ monitoring_v3 = None
+
+from google.api_core import exceptions
+from google.api_core import retry
+
+from base import errors
+from base import utils
+from config import local_config
+from google_cloud_utils import compute_metadata
+from google_cloud_utils import credentials
+from metrics import logs
+from system import environment''',
+ '''# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Monitoring."""
+# pylint: disable=invalid-name
+# TODO(ochang): Remove V3 from names once all metrics are migrated to
+# stackdriver.
+
+import bisect
+from builtins import object
+from builtins import range
+from builtins import str
+import collections
+import functools
+import itertools
+import re
+import threading
+import time
+
+import six
+
+try:
+ from google.cloud import monitoring_v3
+except (ImportError, RuntimeError):
+ monitoring_v3 = None
+
+from base import errors
+from base import utils
+from config import local_config
+from google.api_core import exceptions
+from google.api_core import retry
+from google_cloud_utils import compute_metadata
+from google_cloud_utils import credentials
+from metrics import logs
+from system import environment
+''',
+ )
diff --git a/tests/unit/profiles/test_hug.py b/tests/unit/profiles/test_hug.py
new file mode 100644
index 00000000..f10b3ee6
--- /dev/null
+++ b/tests/unit/profiles/test_hug.py
@@ -0,0 +1,112 @@
+from functools import partial
+
+from ..utils import isort_test
+
+hug_isort_test = partial(isort_test, profile="hug", known_first_party=["hug"])
+
+
+def test_hug_code_snippet_one():
+ hug_isort_test(
+ '''
+from __future__ import absolute_import
+
+import asyncio
+import sys
+from collections import OrderedDict, namedtuple
+from distutils.util import strtobool
+from functools import partial
+from itertools import chain
+from types import ModuleType
+from wsgiref.simple_server import make_server
+
+import falcon
+from falcon import HTTP_METHODS
+
+import hug.defaults
+import hug.output_format
+from hug import introspect
+from hug._version import current
+
+INTRO = """
+/#######################################################################\\
+ `.----``..-------..``.----.
+ :/:::::--:---------:--::::://.
+ .+::::----##/-/oo+:-##----:::://
+ `//::-------/oosoo-------::://. ## ## ## ## #####
+ .-:------./++o/o-.------::-` ``` ## ## ## ## ##
+ `----.-./+o+:..----. `.:///. ######## ## ## ##
+ ``` `----.-::::::------ `.-:::://. ## ## ## ## ## ####
+ ://::--.``` -:``...-----...` `:--::::::-.` ## ## ## ## ## ##
+ :/:::::::::-:- ````` .:::::-.` ## ## #### ######
+ ``.--:::::::. .:::.`
+ ``..::. .:: EMBRACE THE APIs OF THE FUTURE
+ ::- .:-
+ -::` ::- VERSION {0}
+ `::- -::`
+ -::-` -::-
+\\########################################################################/
+ Copyright (C) 2016 Timothy Edmund Crosley
+ Under the MIT License
+""".format(
+ current
+)'''
+ )
+
+
+def test_hug_code_snippet_two():
+ hug_isort_test(
+ """from __future__ import absolute_import
+
+import functools
+from collections import namedtuple
+
+from falcon import HTTP_METHODS
+
+import hug.api
+import hug.defaults
+import hug.output_format
+from hug import introspect
+from hug.format import underscore
+
+
+def default_output_format(
+ content_type="application/json", apply_globally=False, api=None, cli=False, http=True
+):
+"""
+ )
+
+
+def test_hug_code_snippet_three():
+ hug_isort_test(
+ """from __future__ import absolute_import
+
+import argparse
+import asyncio
+import os
+import sys
+from collections import OrderedDict
+from functools import lru_cache, partial, wraps
+
+import falcon
+from falcon import HTTP_BAD_REQUEST
+
+import hug._empty as empty
+import hug.api
+import hug.output_format
+import hug.types as types
+from hug import introspect
+from hug.exceptions import InvalidTypeData
+from hug.format import parse_content_type
+from hug.types import (
+ MarshmallowInputSchema,
+ MarshmallowReturnSchema,
+ Multiple,
+ OneOf,
+ SmartBoolean,
+ Text,
+ text,
+)
+
+DOC_TYPE_MAP = {str: "String", bool: "Boolean", list: "Multiple", int: "Integer", float: "Float"}
+"""
+ )
diff --git a/tests/unit/profiles/test_open_stack.py b/tests/unit/profiles/test_open_stack.py
new file mode 100644
index 00000000..2def240f
--- /dev/null
+++ b/tests/unit/profiles/test_open_stack.py
@@ -0,0 +1,134 @@
+from functools import partial
+
+from ..utils import isort_test
+
+open_stack_isort_test = partial(isort_test, profile="open_stack")
+
+
+def test_open_stack_code_snippet_one():
+ open_stack_isort_test(
+ """import httplib
+import logging
+import random
+import StringIO
+import time
+import unittest
+
+import eventlet
+import webob.exc
+
+import nova.api.ec2
+from nova.api import manager
+from nova.api import openstack
+from nova.auth import users
+from nova.endpoint import cloud
+import nova.flags
+from nova.i18n import _
+from nova.i18n import _LC
+from nova import test
+""",
+ known_first_party=["nova"],
+ py_version="2",
+ order_by_type=False,
+ )
+
+
+def test_open_stack_code_snippet_two():
+ open_stack_isort_test(
+ """# Copyright 2011 VMware, Inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import inspect
+import os
+import random
+
+from neutron_lib.callbacks import events
+from neutron_lib.callbacks import registry
+from neutron_lib.callbacks import resources
+from neutron_lib import context
+from neutron_lib.db import api as session
+from neutron_lib.plugins import directory
+from neutron_lib import rpc as n_rpc
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_log import log as logging
+from oslo_messaging import server as rpc_server
+from oslo_service import loopingcall
+from oslo_service import service as common_service
+from oslo_utils import excutils
+from oslo_utils import importutils
+import psutil
+
+from neutron.common import config
+from neutron.common import profiler
+from neutron.conf import service
+from neutron import worker as neutron_worker
+from neutron import wsgi
+
+service.register_service_opts(service.SERVICE_OPTS)
+""",
+ known_first_party=["neutron"],
+ )
+
+
+def test_open_stack_code_snippet_three():
+ open_stack_isort_test(
+ """
+# Copyright 2013 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from oslo_log import log as logging
+import oslo_messaging as messaging
+from oslo_messaging.rpc import dispatcher
+from oslo_serialization import jsonutils
+from oslo_service import periodic_task
+from oslo_utils import importutils
+import six
+
+import nova.conf
+import nova.context
+import nova.exception
+from nova.i18n import _
+
+__all__ = [
+ 'init',
+ 'cleanup',
+ 'set_defaults',
+ 'add_extra_exmods',
+ 'clear_extra_exmods',
+ 'get_allowed_exmods',
+ 'RequestContextSerializer',
+ 'get_client',
+ 'get_server',
+ 'get_notifier',
+]
+
+profiler = importutils.try_import("osprofiler.profiler")
+""",
+ known_first_party=["nova"],
+ )
diff --git a/tests/unit/profiles/test_plone.py b/tests/unit/profiles/test_plone.py
new file mode 100644
index 00000000..ecacc152
--- /dev/null
+++ b/tests/unit/profiles/test_plone.py
@@ -0,0 +1,75 @@
+from functools import partial
+
+from ..utils import isort_test
+
+plone_isort_test = partial(isort_test, profile="plone")
+
+
+def test_plone_code_snippet_one():
+ plone_isort_test(
+ """# -*- coding: utf-8 -*-
+from plone.app.multilingual.testing import PLONE_APP_MULTILINGUAL_PRESET_FIXTURE # noqa
+from plone.app.robotframework.testing import REMOTE_LIBRARY_BUNDLE_FIXTURE
+from plone.app.testing import FunctionalTesting
+from plone.app.testing import IntegrationTesting
+from plone.app.testing import PloneWithPackageLayer
+from plone.testing import z2
+
+import plone.app.multilingualindexes
+
+
+PAMI_FIXTURE = PloneWithPackageLayer(
+ bases=(PLONE_APP_MULTILINGUAL_PRESET_FIXTURE,),
+ name="PAMILayer:Fixture",
+ gs_profile_id="plone.app.multilingualindexes:default",
+ zcml_package=plone.app.multilingualindexes,
+ zcml_filename="configure.zcml",
+ additional_z2_products=["plone.app.multilingualindexes"],
+)
+"""
+ )
+
+
+def test_plone_code_snippet_two():
+ plone_isort_test(
+ """# -*- coding: utf-8 -*-
+from Acquisition import aq_base
+from App.class_init import InitializeClass
+from App.special_dtml import DTMLFile
+from BTrees.OOBTree import OOTreeSet
+from logging import getLogger
+from plone import api
+from plone.app.multilingual.events import ITranslationRegisteredEvent
+from plone.app.multilingual.interfaces import ITG
+from plone.app.multilingual.interfaces import ITranslatable
+from plone.app.multilingual.interfaces import ITranslationManager
+from plone.app.multilingualindexes.utils import get_configuration
+from plone.indexer.interfaces import IIndexableObject
+from Products.CMFPlone.utils import safe_hasattr
+from Products.DateRecurringIndex.index import DateRecurringIndex
+from Products.PluginIndexes.common.UnIndex import UnIndex
+from Products.ZCatalog.Catalog import Catalog
+from ZODB.POSException import ConflictError
+from zope.component import getMultiAdapter
+from zope.component import queryAdapter
+from zope.globalrequest import getRequest
+
+
+logger = getLogger(__name__)
+"""
+ )
+
+
+def test_plone_code_snippet_three():
+ plone_isort_test(
+ """# -*- coding: utf-8 -*-
+from plone.app.querystring.interfaces import IQueryModifier
+from zope.interface import provider
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+"""
+ )
diff --git a/tests/unit/profiles/test_pycharm.py b/tests/unit/profiles/test_pycharm.py
new file mode 100644
index 00000000..f3ce1fd3
--- /dev/null
+++ b/tests/unit/profiles/test_pycharm.py
@@ -0,0 +1,55 @@
+from functools import partial
+
+from ..utils import isort_test
+
+pycharm_isort_test = partial(isort_test, profile="pycharm")
+
+
+def test_pycharm_snippet_one():
+ pycharm_isort_test(
+ """import shutil
+import sys
+from io import StringIO
+from pathlib import Path
+from typing import (
+ Optional,
+ TextIO,
+ Union,
+ cast
+)
+from warnings import warn
+
+from isort import core
+
+from . import io
+from .exceptions import (
+ ExistingSyntaxErrors,
+ FileSkipComment,
+ FileSkipSetting,
+ IntroducedSyntaxErrors
+)
+from .format import (
+ ask_whether_to_apply_changes_to_file,
+ create_terminal_printer,
+ show_unified_diff
+)
+from .io import Empty
+from .place import module as place_module # noqa: F401
+from .place import module_with_reason as place_module_with_reason # noqa: F401
+from .settings import (
+ DEFAULT_CONFIG,
+ Config
+)
+
+
+def sort_code_string(
+ code: str,
+ extension: Optional[str] = None,
+ config: Config = DEFAULT_CONFIG,
+ file_path: Optional[Path] = None,
+ disregard_skip: bool = False,
+ show_diff: Union[bool, TextIO] = False,
+ **config_kwargs,
+):
+"""
+ )
diff --git a/tests/unit/test_isort.py b/tests/unit/test_isort.py
index bfa550f4..dc7f5303 100644
--- a/tests/unit/test_isort.py
+++ b/tests/unit/test_isort.py
@@ -2940,12 +2940,15 @@ def test_not_splitted_sections() -> None:
)
# in case when THIRDPARTY section is excluded from sections list,
# it's ok to merge STDLIB and FIRSTPARTY
- assert isort.code(
- code=test_input,
- sections=["STDLIB", "FIRSTPARTY", "LOCALFOLDER"],
- no_lines_before=["FIRSTPARTY"],
- known_first_party=["app"],
- ) == (stdlib_section + firstparty_section + whiteline + local_section + whiteline + statement)
+ assert (
+ isort.code(
+ code=test_input,
+ sections=["STDLIB", "FIRSTPARTY", "LOCALFOLDER"],
+ no_lines_before=["FIRSTPARTY"],
+ known_first_party=["app"],
+ )
+ == (stdlib_section + firstparty_section + whiteline + local_section + whiteline + statement)
+ )
# it doesn't change output, because stdlib packages don't have any whitelines before them
assert (
isort.code(test_input, no_lines_before=["STDLIB"], known_first_party=["app"]) == test_input
diff --git a/tests/unit/test_regressions.py b/tests/unit/test_regressions.py
index 98621545..9a44c967 100644
--- a/tests/unit/test_regressions.py
+++ b/tests/unit/test_regressions.py
@@ -617,3 +617,45 @@ from ..fileB import b_var
lines_after_imports=2,
no_lines_before="LOCALFOLDER",
)
+
+
+def test_isort_should_be_able_to_add_independent_of_doc_string_placement_issue_1420():
+ """isort should be able to know when an import requested to be added is sucesfully added,
+ independent of where the top doc string is located.
+ See: https://github.com/PyCQA/isort/issues/1420
+ """
+ assert isort.check_code(
+ '''"""module docstring"""
+
+import os
+''',
+ show_diff=True,
+ add_imports=["os"],
+ )
+
+
+def test_comments_should_never_be_moved_between_imports_issue_1427():
+ """isort should never move comments to different import statement.
+ See: https://github.com/PyCQA/isort/issues/1427
+ """
+ assert isort.check_code(
+ """from package import CONSTANT
+from package import * # noqa
+ """,
+ force_single_line=True,
+ show_diff=True,
+ )
+
+
+def test_isort_doesnt_misplace_comments_issue_1431():
+ """Test to ensure isort wont misplace comments.
+ See: https://github.com/PyCQA/isort/issues/1431
+ """
+ input_text = """from com.my_lovely_company.my_lovely_team.my_lovely_project.my_lovely_component import (
+ MyLovelyCompanyTeamProjectComponent, # NOT DRY
+)
+from com.my_lovely_company.my_lovely_team.my_lovely_project.my_lovely_component import (
+ MyLovelyCompanyTeamProjectComponent as component, # DRY
+)
+"""
+ assert isort.code(input_text, profile="black") == input_text
diff --git a/tests/unit/test_ticketed_features.py b/tests/unit/test_ticketed_features.py
index 86b39045..b387ec63 100644
--- a/tests/unit/test_ticketed_features.py
+++ b/tests/unit/test_ticketed_features.py
@@ -558,10 +558,16 @@ from a import b as y
from a import c
"""
assert (
- isort.code(test_input, combine_star=True,)
+ isort.code(
+ test_input,
+ combine_star=True,
+ )
== isort.code(test_input, combine_star=True, force_single_line=True)
== isort.code(
- test_input, combine_star=True, force_single_line=True, combine_as_imports=True,
+ test_input,
+ combine_star=True,
+ force_single_line=True,
+ combine_as_imports=True,
)
== """
from a import *
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
new file mode 100644
index 00000000..9c963d63
--- /dev/null
+++ b/tests/unit/utils.py
@@ -0,0 +1,14 @@
+import isort
+
+
+def isort_test(code: str, expected_output: str = "", **config):
+ """Runs isort against the given code snippet and ensures that it
+ gives consistent output accross multiple runs, and if an expected_output
+ is given - that it matches that.
+ """
+ expected_output = expected_output or code
+
+ output = isort.code(code, **config)
+ assert output == expected_output
+
+ assert output == isort.code(output, **config)
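
A minimal usage sketch of the new helper (illustrative only, not part of the commit): the profile test modules above bind a profile once with functools.partial and then push code snippets through isort_test, which checks the expected output and that a second pass is idempotent. The import path and the black profile below are assumptions made for the example; inside the test package the modules import the helper as "from ..utils import isort_test".

    from functools import partial

    from tests.unit.utils import isort_test  # assumed path; profile tests use "from ..utils import isort_test"

    # Bind the profile once, mirroring how the profile test modules do it.
    black_isort_test = partial(isort_test, profile="black")


    def test_black_example():
        # The input is already sorted for the black profile, so the helper
        # asserts isort leaves it unchanged and that re-running is a no-op.
        black_isort_test("import os\nimport sys\n")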