summaryrefslogtreecommitdiff
path: root/setuptools/tests
diff options
context:
space:
mode:
authorJason R. Coombs <jaraco@jaraco.com>2023-01-14 11:13:55 -0500
committerJason R. Coombs <jaraco@jaraco.com>2023-01-14 11:13:55 -0500
commit245da5441248eeb2d575034d04cbc241bf545161 (patch)
treed76526e1461252cc1119cd9482a64ef1e75f7838 /setuptools/tests
parentd7b783a4b8b01e58135e40bd9a1db8a82c090982 (diff)
parent82eee6a998251b33ab3984f39b25c27ca72ba8b0 (diff)
downloadpython-setuptools-git-245da5441248eeb2d575034d04cbc241bf545161.tar.gz
Merge branch 'main' into debt/remove-legacy-version
Diffstat (limited to 'setuptools/tests')
-rw-r--r--setuptools/tests/config/__init__.py0
-rw-r--r--setuptools/tests/config/downloads/.gitignore4
-rw-r--r--setuptools/tests/config/downloads/__init__.py57
-rw-r--r--setuptools/tests/config/downloads/preload.py18
-rw-r--r--setuptools/tests/config/setupcfg_examples.txt23
-rw-r--r--setuptools/tests/config/test_apply_pyprojecttoml.py407
-rw-r--r--setuptools/tests/config/test_expand.py224
-rw-r--r--setuptools/tests/config/test_pyprojecttoml.py402
-rw-r--r--setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py82
-rw-r--r--setuptools/tests/config/test_setupcfg.py (renamed from setuptools/tests/test_config.py)115
-rw-r--r--setuptools/tests/contexts.py23
-rw-r--r--setuptools/tests/environment.py13
-rw-r--r--setuptools/tests/fixtures.py21
-rw-r--r--setuptools/tests/integration/helpers.py14
-rw-r--r--setuptools/tests/integration/test_pip_install_sdist.py43
-rw-r--r--setuptools/tests/namespaces.py23
-rw-r--r--setuptools/tests/script-with-bom.py4
-rw-r--r--setuptools/tests/test_bdist_deprecations.py2
-rw-r--r--setuptools/tests/test_build.py63
-rw-r--r--setuptools/tests/test_build_clib.py31
-rw-r--r--setuptools/tests/test_build_ext.py139
-rw-r--r--setuptools/tests/test_build_meta.py417
-rw-r--r--setuptools/tests/test_build_py.py222
-rw-r--r--setuptools/tests/test_config_discovery.py637
-rw-r--r--setuptools/tests/test_develop.py44
-rw-r--r--setuptools/tests/test_dist.py146
-rw-r--r--setuptools/tests/test_dist_info.py121
-rw-r--r--setuptools/tests/test_distutils_adoption.py73
-rw-r--r--setuptools/tests/test_easy_install.py193
-rw-r--r--setuptools/tests/test_editable_install.py992
-rw-r--r--setuptools/tests/test_egg_info.py91
-rw-r--r--setuptools/tests/test_find_packages.py67
-rw-r--r--setuptools/tests/test_find_py_modules.py81
-rw-r--r--setuptools/tests/test_logging.py17
-rw-r--r--setuptools/tests/test_manifest.py117
-rw-r--r--setuptools/tests/test_msvc.py179
-rw-r--r--setuptools/tests/test_msvc14.py1
-rw-r--r--setuptools/tests/test_packageindex.py87
-rw-r--r--setuptools/tests/test_register.py5
-rw-r--r--setuptools/tests/test_sdist.py71
-rw-r--r--setuptools/tests/test_setuptools.py5
-rw-r--r--setuptools/tests/test_sphinx_upload_docs.py37
-rw-r--r--setuptools/tests/test_upload.py5
-rw-r--r--setuptools/tests/test_upload_docs.py64
-rw-r--r--setuptools/tests/test_virtualenv.py109
-rw-r--r--setuptools/tests/test_wheel.py89
-rw-r--r--setuptools/tests/test_windows_wrappers.py14
47 files changed, 4997 insertions, 595 deletions
diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/setuptools/tests/config/__init__.py
diff --git a/setuptools/tests/config/downloads/.gitignore b/setuptools/tests/config/downloads/.gitignore
new file mode 100644
index 00000000..df3779fc
--- /dev/null
+++ b/setuptools/tests/config/downloads/.gitignore
@@ -0,0 +1,4 @@
+*
+!.gitignore
+!__init__.py
+!preload.py
diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py
new file mode 100644
index 00000000..9fb9b14b
--- /dev/null
+++ b/setuptools/tests/config/downloads/__init__.py
@@ -0,0 +1,57 @@
+import re
+import time
+from pathlib import Path
+from urllib.error import HTTPError
+from urllib.request import urlopen
+
+__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]
+
+
+NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
+DOWNLOAD_DIR = Path(__file__).parent
+
+
+# ----------------------------------------------------------------------
+# Please update ./preload.py accordingly when modifying this file
+# ----------------------------------------------------------------------
+
+
+def output_file(url: str, download_dir: Path = DOWNLOAD_DIR):
+ file_name = url.strip()
+ for part in NAME_REMOVE:
+ file_name = file_name.replace(part, '').strip().strip('/:').strip()
+ return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name))
+
+
+def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5):
+ path = output_file(url, download_dir)
+ if path.exists():
+ print(f"Skipping {url} (already exists: {path})")
+ else:
+ download_dir.mkdir(exist_ok=True, parents=True)
+ print(f"Downloading {url} to {path}")
+ try:
+ download(url, path)
+ except HTTPError:
+ time.sleep(wait) # wait a few seconds and try again.
+ download(url, path)
+ return path
+
+
+def urls_from_file(list_file: Path):
+ """``list_file`` should be a text file where each line corresponds to a URL to
+ download.
+ """
+ print(f"file: {list_file}")
+ content = list_file.read_text(encoding="utf-8")
+ return [url for url in content.splitlines() if not url.startswith("#")]
+
+
+def download(url: str, dest: Path):
+ with urlopen(url) as f:
+ data = f.read()
+
+ with open(dest, "wb") as f:
+ f.write(data)
+
+ assert Path(dest).exists()
diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py
new file mode 100644
index 00000000..64b3f1c8
--- /dev/null
+++ b/setuptools/tests/config/downloads/preload.py
@@ -0,0 +1,18 @@
+"""This file can be used to preload files needed for testing.
+
+For example you can use::
+
+ cd setuptools/tests/config
+ python -m downloads.preload setupcfg_examples.txt
+
+to make sure the `setup.cfg` examples are downloaded before starting the tests.
+"""
+import sys
+from pathlib import Path
+
+from . import retrieve_file, urls_from_file
+
+
+if __name__ == "__main__":
+ urls = urls_from_file(Path(sys.argv[1]))
+ list(map(retrieve_file, urls))
diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt
new file mode 100644
index 00000000..5db35654
--- /dev/null
+++ b/setuptools/tests/config/setupcfg_examples.txt
@@ -0,0 +1,23 @@
+# ====================================================================
+# Some popular packages that use setup.cfg (and others not so popular)
+# Reference: https://hugovk.github.io/top-pypi-packages/
+# ====================================================================
+https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
+https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
+https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
+https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
+https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
+https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
+https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
+https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
+https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
+https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
+https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
+https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
+https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
+https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg
+https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
+https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
+https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
+https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
+https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
new file mode 100644
index 00000000..3a66d494
--- /dev/null
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -0,0 +1,407 @@
+"""Make sure that applying the configuration from pyproject.toml is equivalent to
+applying a similar configuration from setup.cfg
+
+To run these tests offline, please have a look on ``./downloads/preload.py``
+"""
+import io
+import re
+import tarfile
+from inspect import cleandoc
+from pathlib import Path
+from unittest.mock import Mock
+from zipfile import ZipFile
+
+import pytest
+from ini2toml.api import Translator
+
+import setuptools # noqa ensure monkey patch to metadata
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
+from setuptools.dist import Distribution
+from setuptools.config import setupcfg, pyprojecttoml
+from setuptools.config import expand
+from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter
+from setuptools.command.egg_info import write_requirements
+
+from .downloads import retrieve_file, urls_from_file
+
+
+HERE = Path(__file__).parent
+EXAMPLES_FILE = "setupcfg_examples.txt"
+
+
+def makedist(path, **attrs):
+ return Distribution({"src_root": path, **attrs})
+
+
+@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
+@pytest.mark.filterwarnings("ignore")
+@pytest.mark.uses_network
+def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
+ monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
+ setupcfg_example = retrieve_file(url)
+ pyproject_example = Path(tmp_path, "pyproject.toml")
+ toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg")
+ pyproject_example.write_text(toml_config)
+
+ dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
+ dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
+
+ pkg_info_toml = core_metadata(dist_toml)
+ pkg_info_cfg = core_metadata(dist_cfg)
+ assert pkg_info_toml == pkg_info_cfg
+
+ if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
+ assert set(dist_toml.license_files) == set(dist_cfg.license_files)
+
+ if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
+ print(dist_cfg.entry_points)
+ ep_toml = {(k, *sorted(i.replace(" ", "") for i in v))
+ for k, v in dist_toml.entry_points.items()}
+ ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v))
+ for k, v in dist_cfg.entry_points.items()}
+ assert ep_toml == ep_cfg
+
+ if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
+ pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
+ pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
+ assert pkg_data_toml == pkg_data_cfg
+
+ if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
+ data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
+ data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
+ assert data_files_toml == data_files_cfg
+
+ assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
+ if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
+ if (
+ "testing" in dist_toml.extras_require
+ and "testing" not in dist_cfg.extras_require
+ ):
+ # ini2toml can automatically convert `tests_require` to `testing` extra
+ dist_toml.extras_require.pop("testing")
+ extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
+ extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
+ assert extra_req_toml == extra_req_cfg
+
+
+PEP621_EXAMPLE = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+description = "Lovely Spam! Wonderful Spam!"
+readme = "README.rst"
+requires-python = ">=3.8"
+license = {file = "LICENSE.txt"}
+keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
+authors = [
+ {email = "hi@pradyunsg.me"},
+ {name = "Tzu-Ping Chung"}
+]
+maintainers = [
+ {name = "Brett Cannon", email = "brett@python.org"},
+ {name = "John X. Ãørçeč", email = "john@utf8.org"},
+ {name = "Γαμα קּ 東", email = "gama@utf8.org"},
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Programming Language :: Python"
+]
+
+dependencies = [
+ "httpx",
+ "gidgethub[httpx]>4.0.0",
+ "django>2.1; os_name != 'nt'",
+ "django>2.0; os_name == 'nt'"
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest < 5.0.0",
+ "pytest-cov[all]"
+]
+
+[project.urls]
+homepage = "http://example.com"
+documentation = "http://readthedocs.org"
+repository = "http://github.com"
+changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
+
+[project.scripts]
+spam-cli = "spam:main_cli"
+
+[project.gui-scripts]
+spam-gui = "spam:main_gui"
+
+[project.entry-points."spam.magical"]
+tomatoes = "spam:main_tomatoes"
+"""
+
+PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+authors = [
+ {email = "hi@pradyunsg.me"},
+ {name = "Tzu-Ping Chung"}
+]
+maintainers = [
+ {name = "Степан Бандера", email = "криївка@оун-упа.укр"},
+]
+"""
+
+PEP621_EXAMPLE_SCRIPT = """
+def main_cli(): pass
+def main_gui(): pass
+def main_tomatoes(): pass
+"""
+
+
+def _pep621_example_project(
+ tmp_path,
+ readme="README.rst",
+ pyproject_text=PEP621_EXAMPLE,
+):
+ pyproject = tmp_path / "pyproject.toml"
+ text = pyproject_text
+ replacements = {'readme = "README.rst"': f'readme = "{readme}"'}
+ for orig, subst in replacements.items():
+ text = text.replace(orig, subst)
+ pyproject.write_text(text, encoding="utf-8")
+
+ (tmp_path / readme).write_text("hello world")
+ (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---")
+ (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT)
+ return pyproject
+
+
+def test_pep621_example(tmp_path):
+ """Make sure the example in PEP 621 works"""
+ pyproject = _pep621_example_project(tmp_path)
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.license == "--- LICENSE stub ---"
+ assert set(dist.metadata.license_files) == {"LICENSE.txt"}
+
+
+@pytest.mark.parametrize(
+ "readme, ctype",
+ [
+ ("Readme.txt", "text/plain"),
+ ("readme.md", "text/markdown"),
+ ("text.rst", "text/x-rst"),
+ ]
+)
+def test_readme_content_type(tmp_path, readme, ctype):
+ pyproject = _pep621_example_project(tmp_path, readme)
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.long_description_content_type == ctype
+
+
+def test_undefined_content_type(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README.tex")
+ with pytest.raises(ValueError, match="Undefined content type for README.tex"):
+ pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
+def test_no_explicit_content_type_for_missing_extension(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README")
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.long_description_content_type is None
+
+
+@pytest.mark.parametrize(
+ ('pyproject_text', 'expected_maintainers_meta_value'),
+ (
+ pytest.param(
+ PEP621_EXAMPLE,
+ (
+ 'Brett Cannon <brett@python.org>, "John X. Ãørçeč" <john@utf8.org>, '
+ 'Γαμα קּ 東 <gama@utf8.org>'
+ ),
+ id='non-international-emails',
+ ),
+ pytest.param(
+ PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
+ 'Степан Бандера <криївка@оун-упа.укр>',
+ marks=pytest.mark.xfail(
+ reason="CPython's `email.headerregistry.Address` only supports "
+ 'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
+ strict=True,
+ ),
+ id='international-email',
+ ),
+ ),
+)
+def test_utf8_maintainer_in_metadata( # issue-3663
+ expected_maintainers_meta_value,
+ pyproject_text, tmp_path,
+):
+ pyproject = _pep621_example_project(
+ tmp_path, "README", pyproject_text=pyproject_text,
+ )
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.maintainer_email == expected_maintainers_meta_value
+ pkg_file = tmp_path / "PKG-FILE"
+ with open(pkg_file, "w", encoding="utf-8") as fh:
+ dist.metadata.write_pkg_file(fh)
+ content = pkg_file.read_text(encoding="utf-8")
+ assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
+
+
+# TODO: After PEP 639 is accepted, we have to move the license-files
+# to the `project` table instead of `tool.setuptools`
+def test_license_and_license_files(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README")
+ text = pyproject.read_text(encoding="utf-8")
+
+ # Sanity-check
+ assert 'license = {file = "LICENSE.txt"}' in text
+ assert "[tool.setuptools]" not in text
+
+ text += '\n[tool.setuptools]\nlicense-files = ["_FILE*"]\n'
+ pyproject.write_text(text, encoding="utf-8")
+ (tmp_path / "_FILE.txt").touch()
+ (tmp_path / "_FILE.rst").touch()
+
+ # Would normally match the `license_files` glob patterns, but we want to exclude it
+ # by being explicit. On the other hand, its contents should be added to `license`
+ (tmp_path / "LICENSE.txt").write_text("LicenseRef-Proprietary\n", encoding="utf-8")
+
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
+ assert dist.metadata.license == "LicenseRef-Proprietary\n"
+
+
+class TestDeprecatedFields:
+ def test_namespace_packages(self, tmp_path):
+ pyproject = tmp_path / "pyproject.toml"
+ config = """
+ [project]
+ name = "myproj"
+ version = "42"
+ [tool.setuptools]
+ namespace-packages = ["myproj.pkg"]
+ """
+ pyproject.write_text(cleandoc(config), encoding="utf-8")
+ with pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages"):
+ pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
+class TestPresetField:
+ def pyproject(self, tmp_path, dynamic, extra_content=""):
+ content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
+ if "version" not in dynamic:
+ content += "version = '42'\n"
+ file = tmp_path / "pyproject.toml"
+ file.write_text(content + extra_content, encoding="utf-8")
+ return file
+
+ @pytest.mark.parametrize(
+ "attr, field, value",
+ [
+ ("install_requires", "dependencies", ["six"]),
+ ("classifiers", "classifiers", ["Private :: Classifier"]),
+ ]
+ )
+ def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
+ """For the time being we just warn if the user pre-set values (e.g. via
+ ``setup.py``) but do not include them in ``dynamic``.
+ """
+ pyproject = self.pyproject(tmp_path, [])
+ dist = makedist(tmp_path, **{attr: value})
+ msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
+ with pytest.warns(_WouldIgnoreField, match=msg):
+ dist = pyprojecttoml.apply_configuration(dist, pyproject)
+
+ # TODO: Once support for pyproject.toml config stabilizes attr should be None
+ dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
+ assert dist_value == value
+
+ @pytest.mark.parametrize(
+ "attr, field, value",
+ [
+ ("install_requires", "dependencies", []),
+ ("extras_require", "optional-dependencies", {}),
+ ("install_requires", "dependencies", ["six"]),
+ ("classifiers", "classifiers", ["Private :: Classifier"]),
+ ]
+ )
+ def test_listed_in_dynamic(self, tmp_path, attr, field, value):
+ pyproject = self.pyproject(tmp_path, [field])
+ dist = makedist(tmp_path, **{attr: value})
+ dist = pyprojecttoml.apply_configuration(dist, pyproject)
+ dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
+ assert dist_value == value
+
+ def test_warning_overwritten_dependencies(self, tmp_path):
+ src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(src, encoding="utf-8")
+ dist = makedist(tmp_path, install_requires=["wheel"])
+ with pytest.warns(match="`install_requires` overwritten"):
+ dist = pyprojecttoml.apply_configuration(dist, pyproject)
+ assert "wheel" not in dist.install_requires
+
+ def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
+ """
+ Internally setuptools converts dependencies with markers to "extras".
+ If ``install_requires`` is given by ``setup.py``, we have to ensure that
+ applying ``optional-dependencies`` does not overwrite the mandatory
+ dependencies with markers (see #3204).
+ """
+ # If setuptools replace its internal mechanism that uses `requires.txt`
+ # this test has to be rewritten to adapt accordingly
+ extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
+ pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
+ install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
+ dist = makedist(tmp_path, install_requires=install_req)
+ dist = pyprojecttoml.apply_configuration(dist, pyproject)
+ assert "foo" in dist.extras_require
+ assert ':python_version < "3.7"' in dist.extras_require
+ egg_info = dist.get_command_obj("egg_info")
+ write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
+ reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
+ assert "importlib-resources" in reqs
+ assert "bar" in reqs
+
+
+class TestMeta:
+ def test_example_file_in_sdist(self, setuptools_sdist):
+ """Meta test to ensure tests can run from sdist"""
+ with tarfile.open(setuptools_sdist) as tar:
+ assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
+
+ def test_example_file_not_in_wheel(self, setuptools_wheel):
+ """Meta test to ensure auxiliary test files are not in wheel"""
+ with ZipFile(setuptools_wheel) as zipfile:
+ assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist())
+
+
+# --- Auxiliary Functions ---
+
+
+def core_metadata(dist) -> str:
+ with io.StringIO() as buffer:
+ dist.metadata.write_pkg_file(buffer)
+ pkg_file_txt = buffer.getvalue()
+
+ skip_prefixes = ()
+ skip_lines = set()
+ # ---- DIFF NORMALISATION ----
+ # PEP 621 is very particular about author/maintainer metadata conversion, so skip
+ skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:")
+ # May be redundant with Home-page
+ skip_prefixes += ("Project-URL: Homepage,", "Home-page:")
+ # May be missing in original (relying on default) but backfilled in the TOML
+ skip_prefixes += ("Description-Content-Type:",)
+ # ini2toml can automatically convert `tests_require` to `testing` extra
+ skip_lines.add("Provides-Extra: testing")
+ # Remove empty lines
+ skip_lines.add("")
+
+ result = []
+ for line in pkg_file_txt.splitlines():
+ if line.startswith(skip_prefixes) or line in skip_lines:
+ continue
+ result.append(line + "\n")
+
+ return "".join(result)
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
new file mode 100644
index 00000000..6af88efd
--- /dev/null
+++ b/setuptools/tests/config/test_expand.py
@@ -0,0 +1,224 @@
+import os
+from pathlib import Path
+
+import pytest
+
+from distutils.errors import DistutilsOptionError
+from setuptools.config import expand
+from setuptools.discovery import find_package_path
+
+
+def write_files(files, root_dir):
+ for file, content in files.items():
+ path = root_dir / file
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.write_text(content)
+
+
+def test_glob_relative(tmp_path, monkeypatch):
+ files = {
+ "dir1/dir2/dir3/file1.txt",
+ "dir1/dir2/file2.txt",
+ "dir1/file3.txt",
+ "a.ini",
+ "b.ini",
+ "dir1/c.ini",
+ "dir1/dir2/a.ini",
+ }
+
+ write_files({k: "" for k in files}, tmp_path)
+ patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
+ monkeypatch.chdir(tmp_path)
+ assert set(expand.glob_relative(patterns)) == files
+ # Make sure the same APIs work outside cwd
+ assert set(expand.glob_relative(patterns, tmp_path)) == files
+
+
+def test_read_files(tmp_path, monkeypatch):
+
+ dir_ = tmp_path / "dir_"
+ (tmp_path / "_dir").mkdir(exist_ok=True)
+ (tmp_path / "a.txt").touch()
+ files = {
+ "a.txt": "a",
+ "dir1/b.txt": "b",
+ "dir1/dir2/c.txt": "c"
+ }
+ write_files(files, dir_)
+
+ secrets = Path(str(dir_) + "secrets")
+ secrets.mkdir(exist_ok=True)
+ write_files({"secrets.txt": "secret keys"}, secrets)
+
+ with monkeypatch.context() as m:
+ m.chdir(dir_)
+ assert expand.read_files(list(files)) == "a\nb\nc"
+
+ cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
+ with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+ expand.read_files(["../a.txt"])
+
+ cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
+ with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
+ expand.read_files(["../dir_secrets/secrets.txt"])
+
+ # Make sure the same APIs work outside cwd
+ assert expand.read_files(list(files), dir_) == "a\nb\nc"
+ with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+ expand.read_files(["../a.txt"], dir_)
+
+
+class TestReadAttr:
+ @pytest.mark.parametrize(
+ "example",
+ [
+ # No cookie means UTF-8:
+ b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
+ # If a cookie is present, honor it:
+ b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
+ b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
+ ]
+ )
+ def test_read_attr_encoding_cookie(self, example, tmp_path):
+ (tmp_path / "mod.py").write_bytes(example)
+ assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'
+
+ def test_read_attr(self, tmp_path, monkeypatch):
+ files = {
+ "pkg/__init__.py": "",
+ "pkg/sub/__init__.py": "VERSION = '0.1.1'",
+ "pkg/sub/mod.py": (
+ "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n"
+ "raise SystemExit(1)"
+ ),
+ }
+ write_files(files, tmp_path)
+
+ with monkeypatch.context() as m:
+ m.chdir(tmp_path)
+ # Make sure it can read the attr statically without evaluating the module
+ assert expand.read_attr('pkg.sub.VERSION') == '0.1.1'
+ values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
+
+ assert values['a'] == 0
+ assert values['b'] == {42}
+
+ # Make sure the same APIs work outside cwd
+ assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+ values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
+ assert values['c'] == (0, 1, 1)
+
+ @pytest.mark.parametrize(
+ "example",
+ [
+ "VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
+ "VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
+ ]
+ )
+ def test_read_annotated_attr(self, tmp_path, example):
+ files = {
+ "pkg/__init__.py": "",
+ "pkg/sub/__init__.py": example,
+ }
+ write_files(files, tmp_path)
+ # Make sure this attribute can be read statically
+ assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+
+ def test_import_order(self, tmp_path):
+ """
+ Sometimes the import machinery will import the parent package of a nested
+ module, which triggers side-effects and might create problems (see issue #3176)
+
+ ``read_attr`` should bypass these limitations by resolving modules statically
+ (via ast.literal_eval).
+ """
+ files = {
+ "src/pkg/__init__.py": "from .main import func\nfrom .about import version",
+ "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
+ "src/pkg/about.py": "version = '42'",
+ }
+ write_files(files, tmp_path)
+ attr_desc = "pkg.about.version"
+ package_dir = {"": "src"}
+ # `import super_complicated_dep` should not run, otherwise the build fails
+ assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
+
+
+@pytest.mark.parametrize(
+ 'package_dir, file, module, return_value',
+ [
+ ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
+ ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
+ ({}, "single_module.py", "single_module", 70),
+ ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
+ ]
+)
+def test_resolve_class(tmp_path, package_dir, file, module, return_value):
+ files = {file: f"class Custom:\n def testing(self): return {return_value}"}
+ write_files(files, tmp_path)
+ cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
+ assert cls().testing() == return_value
+
+
+@pytest.mark.parametrize(
+ 'args, pkgs',
+ [
+ ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
+ ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
+ ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
+ ({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces`
+ ]
+)
+def test_find_packages(tmp_path, args, pkgs):
+ files = {
+ "pkg/__init__.py",
+ "other/__init__.py",
+ "dir1/dir2/__init__.py",
+ }
+ write_files({k: "" for k in files}, tmp_path)
+
+ package_dir = {}
+ kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
+ where = kwargs.get("where", ["."])
+ assert set(expand.find_packages(**kwargs)) == pkgs
+ for pkg in pkgs:
+ pkg_path = find_package_path(pkg, package_dir, tmp_path)
+ assert os.path.exists(pkg_path)
+
+ # Make sure the same APIs work outside cwd
+ where = [
+ str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths
+ for p in args.pop("where", ["."])
+ ]
+
+ assert set(expand.find_packages(where=where, **args)) == pkgs
+
+
+@pytest.mark.parametrize(
+ "files, where, expected_package_dir",
+ [
+ (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
+ (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
+ (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
+ (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
+ (
+ ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
+ ["src1", "src2"],
+ {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
+ ),
+ (
+ ["src/pkg1/__init__.py", "pkg2/__init__.py"],
+ ["src", "."],
+ {"pkg1": "src/pkg1"},
+ ),
+ ],
+)
+def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
+ write_files({k: "" for k in files}, tmp_path)
+ pkg_dir = {}
+ kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False}
+ pkgs = expand.find_packages(where=where, **kwargs)
+ assert set(pkg_dir.items()) == set(expected_package_dir.items())
+ for pkg in pkgs:
+ pkg_path = find_package_path(pkg, pkg_dir, tmp_path)
+ assert os.path.exists(pkg_path)
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
new file mode 100644
index 00000000..811328f5
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -0,0 +1,402 @@
+import re
+from configparser import ConfigParser
+from inspect import cleandoc
+
+import pytest
+import tomli_w
+from path import Path as _Path
+
+from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField
+from setuptools.config.pyprojecttoml import (
+ read_configuration,
+ expand_configuration,
+ apply_configuration,
+ validate,
+ _InvalidFile,
+)
+from setuptools.dist import Distribution
+from setuptools.errors import OptionError
+
+
+import setuptools # noqa -- force distutils.core to be patched
+import distutils.core
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+ 'importlib-metadata>=0.12;python_version<"3.8"',
+ 'importlib-resources>=1.0;python_version<"3.7"',
+ 'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+ "sphinx>=3",
+ "sphinx-argparse>=0.2.5",
+ "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+ "pytest>=1",
+ "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["_files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+def create_example(path, pkg_root):
+ pyproject = path / "pyproject.toml"
+
+ files = [
+ f"{pkg_root}/pkg/__init__.py",
+ "_files/file.txt",
+ ]
+ if pkg_root != ".": # flat-layout will raise error for multi-package dist
+ # Ensure namespaces are discovered
+ files.append(f"{pkg_root}/other/nested/__init__.py")
+
+ for file in files:
+ (path / file).parent.mkdir(exist_ok=True, parents=True)
+ (path / file).touch()
+
+ pyproject.write_text(EXAMPLE)
+ (path / "README.md").write_text("hello world")
+ (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass")
+ (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)")
+ (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+
+def verify_example(config, path, pkg_root):
+ pyproject = path / "pyproject.toml"
+ pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
+ expanded = expand_configuration(config, path)
+ expanded_project = expanded["project"]
+ assert read_configuration(pyproject, expand=True) == expanded
+ assert expanded_project["version"] == "3.10"
+ assert expanded_project["readme"]["text"] == "hello world"
+ assert "packages" in expanded["tool"]["setuptools"]
+ if pkg_root == ".":
+ # Auto-discovery will raise error for multi-package dist
+ assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
+ else:
+ assert set(expanded["tool"]["setuptools"]["packages"]) == {
+ "pkg",
+ "other",
+ "other.nested",
+ }
+ assert expanded["tool"]["setuptools"]["include-package-data"] is True
+ assert "" in expanded["tool"]["setuptools"]["package-data"]
+ assert "*" not in expanded["tool"]["setuptools"]["package-data"]
+ assert expanded["tool"]["setuptools"]["data-files"] == [
+ ("data", ["_files/file.txt"])
+ ]
+
+
+def test_read_configuration(tmp_path):
+ create_example(tmp_path, "src")
+ pyproject = tmp_path / "pyproject.toml"
+
+ config = read_configuration(pyproject, expand=False)
+ assert config["project"].get("version") is None
+ assert config["project"].get("readme") is None
+
+ verify_example(config, tmp_path, "src")
+
+
+@pytest.mark.parametrize(
+ "pkg_root, opts",
+ [
+ (".", {}),
+ ("src", {}),
+ ("lib", {"packages": {"find": {"where": ["lib"]}}}),
+ ],
+)
+def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
+ create_example(tmp_path, pkg_root)
+
+ pyproject = tmp_path / "pyproject.toml"
+
+ config = read_configuration(pyproject, expand=False)
+ assert config["project"].get("version") is None
+ assert config["project"].get("readme") is None
+ config["tool"]["setuptools"].pop("packages", None)
+ config["tool"]["setuptools"].pop("package-dir", None)
+
+ config["tool"]["setuptools"].update(opts)
+ verify_example(config, tmp_path, pkg_root)
+
+
+ENTRY_POINTS = {
+ "console_scripts": {"a": "mod.a:func"},
+ "gui_scripts": {"b": "mod.b:func"},
+ "other": {"c": "mod.c:func [extra]"},
+}
+
+
+class TestEntryPoints:
+ def write_entry_points(self, tmp_path):
+ entry_points = ConfigParser()
+ entry_points.read_dict(ENTRY_POINTS)
+ with open(tmp_path / "entry-points.txt", "w") as f:
+ entry_points.write(f)
+
+ def pyproject(self, dynamic=None):
+ project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]}
+ tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
+ return {"project": project, "tool": {"setuptools": tool}}
+
+ def test_all_listed_in_dynamic(self, tmp_path):
+ self.write_entry_points(tmp_path)
+ expanded = expand_configuration(self.pyproject(), tmp_path)
+ expanded_project = expanded["project"]
+ assert len(expanded_project["scripts"]) == 1
+ assert expanded_project["scripts"]["a"] == "mod.a:func"
+ assert len(expanded_project["gui-scripts"]) == 1
+ assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
+ assert len(expanded_project["entry-points"]) == 1
+ assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"
+
+ @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
+ def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
+ self.write_entry_points(tmp_path)
+ dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}
+
+ msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}"
+ with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)):
+ expanded = expand_configuration(self.pyproject(dynamic), tmp_path)
+
+ expanded_project = expanded["project"]
+ assert dynamic < set(expanded_project)
+ assert len(expanded_project["entry-points"]) == 1
+ # TODO: Test the following when pyproject.toml support stabilizes:
+ # >>> assert missing_dynamic not in expanded_project
+
+
+class TestClassifiers:
+ def test_dynamic(self, tmp_path):
+ # Let's create a project example that has dynamic classifiers
+ # coming from a txt file.
+ create_example(tmp_path, "src")
+ classifiers = """\
+ Framework :: Flask
+ Programming Language :: Haskell
+ """
+ (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers))
+
+ pyproject = tmp_path / "pyproject.toml"
+ config = read_configuration(pyproject, expand=False)
+ dynamic = config["project"]["dynamic"]
+ config["project"]["dynamic"] = list({*dynamic, "classifiers"})
+ dynamic_config = config["tool"]["setuptools"]["dynamic"]
+ dynamic_config["classifiers"] = {"file": "classifiers.txt"}
+
+ # When the configuration is expanded,
+    # each line of the file should be a different classifier.
+ validate(config, pyproject)
+ expanded = expand_configuration(config, tmp_path)
+
+ assert set(expanded["project"]["classifiers"]) == {
+ "Framework :: Flask",
+ "Programming Language :: Haskell",
+ }
+
+ def test_dynamic_without_config(self, tmp_path):
+ config = """
+ [project]
+ name = "myproj"
+ version = '42'
+ dynamic = ["classifiers"]
+ """
+
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(config))
+ with pytest.raises(OptionError, match="No configuration .* .classifiers."):
+ read_configuration(pyproject)
+
+ def test_dynamic_readme_from_setup_script_args(self, tmp_path):
+ config = """
+ [project]
+ name = "myproj"
+ version = '42'
+ dynamic = ["readme"]
+ """
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(config))
+ dist = Distribution(attrs={"long_description": "42"})
+ # No error should occur because of missing `readme`
+ dist = apply_configuration(dist, pyproject)
+ assert dist.metadata.long_description == "42"
+
+ def test_dynamic_without_file(self, tmp_path):
+ config = """
+ [project]
+ name = "myproj"
+ version = '42'
+ dynamic = ["classifiers"]
+
+ [tool.setuptools.dynamic]
+ classifiers = {file = ["classifiers.txt"]}
+ """
+
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(config))
+ with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
+ expanded = read_configuration(pyproject)
+ assert "classifiers" not in expanded["project"]
+
+
+@pytest.mark.parametrize(
+ "example",
+ (
+ """
+ [project]
+ name = "myproj"
+ version = "1.2"
+
+ [my-tool.that-disrespect.pep518]
+ value = 42
+ """,
+ ),
+)
+def test_ignore_unrelated_config(tmp_path, example):
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(example))
+
+ # Make sure no error is raised due to 3rd party configs in pyproject.toml
+ assert read_configuration(pyproject) is not None
+
+
+@pytest.mark.parametrize(
+ "example, error_msg",
+ [
+ (
+ """
+ [project]
+ name = "myproj"
+ version = "1.2"
+ requires = ['pywin32; platform_system=="Windows"' ]
+ """,
+ "configuration error: .project. must not contain ..requires.. properties",
+ ),
+ ],
+)
+def test_invalid_example(tmp_path, example, error_msg):
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(example))
+
+ pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
+ with pytest.raises(ValueError, match=pattern):
+ read_configuration(pyproject)
+
+
+@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
+def test_empty(tmp_path, config):
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(config)
+
+ # Make sure no error is raised
+ assert read_configuration(pyproject) == {}
+
+
+@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
+def test_include_package_data_by_default(tmp_path, config):
+ """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
+ default.
+ """
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(config)
+
+ config = read_configuration(pyproject)
+ assert config["tool"]["setuptools"]["include-package-data"] is True
+
+
+def test_include_package_data_in_setuppy(tmp_path):
+ """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
+ ``setup.py``.
+
+ See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
+ """
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n")
+ setuppy = tmp_path / "setup.py"
+ setuppy.write_text("__import__('setuptools').setup(include_package_data=False)")
+
+ with _Path(tmp_path):
+ dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
+
+ assert dist.get_name() == "myproj"
+ assert dist.get_version() == "42"
+ assert dist.include_package_data is False
+
+
+class TestSkipBadConfig:
+ @pytest.mark.parametrize(
+ "setup_attrs",
+ [
+ {"name": "myproj"},
+ {"install_requires": ["does-not-exist"]},
+ ],
+ )
+ @pytest.mark.parametrize(
+ "pyproject_content",
+ [
+ "[project]\nrequires-python = '>=3.7'\n",
+ "[project]\nversion = '42'\nrequires-python = '>=3.7'\n",
+ "[project]\nname='othername'\nrequires-python = '>=3.7'\n",
+ ],
+ )
+ def test_popular_config(self, tmp_path, pyproject_content, setup_attrs):
+ # See pypa/setuptools#3199 and pypa/cibuildwheel#1064
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(pyproject_content)
+ dist = Distribution(attrs=setup_attrs)
+
+ prev_name = dist.get_name()
+ prev_deps = dist.install_requires
+
+ with pytest.warns(_InvalidFile, match=r"DO NOT include.*\[project\].* table"):
+ dist = apply_configuration(dist, pyproject)
+
+ assert dist.get_name() != "othername"
+ assert dist.get_name() == prev_name
+ assert dist.python_requires is None
+ assert set(dist.install_requires) == set(prev_deps)
diff --git a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
new file mode 100644
index 00000000..5687cf1a
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
@@ -0,0 +1,82 @@
+import pytest
+
+from setuptools.config.pyprojecttoml import apply_configuration
+from setuptools.dist import Distribution
+from setuptools.tests.textwrap import DALS
+
+
+def test_dynamic_dependencies(tmp_path):
+ (tmp_path / "requirements.txt").write_text("six\n # comment\n")
+ pyproject = (tmp_path / "pyproject.toml")
+ pyproject.write_text(DALS("""
+ [project]
+ name = "myproj"
+ version = "1.0"
+ dynamic = ["dependencies"]
+
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [tool.setuptools.dynamic.dependencies]
+ file = ["requirements.txt"]
+ """))
+ dist = Distribution()
+ dist = apply_configuration(dist, pyproject)
+ assert dist.install_requires == ["six"]
+
+
+def test_dynamic_optional_dependencies(tmp_path):
+ (tmp_path / "requirements-docs.txt").write_text("sphinx\n # comment\n")
+ pyproject = (tmp_path / "pyproject.toml")
+ pyproject.write_text(DALS("""
+ [project]
+ name = "myproj"
+ version = "1.0"
+ dynamic = ["optional-dependencies"]
+
+ [tool.setuptools.dynamic.optional-dependencies.docs]
+ file = ["requirements-docs.txt"]
+
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+ """))
+ dist = Distribution()
+ dist = apply_configuration(dist, pyproject)
+ assert dist.extras_require == {"docs": ["sphinx"]}
+
+
+def test_mixed_dynamic_optional_dependencies(tmp_path):
+ """
+ Test that if PEP 621 was loosened to allow mixing of dynamic and static
+ configurations in the case of fields containing sub-fields (groups),
+ things would work out.
+ """
+ (tmp_path / "requirements-images.txt").write_text("pillow~=42.0\n # comment\n")
+ pyproject = (tmp_path / "pyproject.toml")
+ pyproject.write_text(DALS("""
+ [project]
+ name = "myproj"
+ version = "1.0"
+ dynamic = ["optional-dependencies"]
+
+ [project.optional-dependencies]
+ docs = ["sphinx"]
+
+ [tool.setuptools.dynamic.optional-dependencies.images]
+ file = ["requirements-images.txt"]
+
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+ """))
+ # Test that the mix-and-match doesn't currently validate.
+ with pytest.raises(ValueError, match="project.optional-dependencies"):
+ apply_configuration(Distribution(), pyproject)
+
+ # Explicitly disable the validation and try again, to see that the mix-and-match
+ # result would be correct.
+ dist = Distribution()
+ dist = apply_configuration(dist, pyproject, ignore_option_errors=True)
+ assert dist.extras_require == {"docs": ["sphinx"], "images": ["pillow~=42.0"]}
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/test_setupcfg.py
index 005742e4..d2964fda 100644
--- a/setuptools/tests/test_config.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -1,21 +1,21 @@
-import types
-import sys
-
-import contextlib
import configparser
+import contextlib
+import inspect
+from pathlib import Path
+from unittest.mock import Mock, patch
import pytest
from distutils.errors import DistutilsOptionError, DistutilsFileError
-from mock import patch
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution, _Distribution
-from setuptools.config import ConfigHandler, read_configuration
-from distutils.core import Command
-from .textwrap import DALS
+from setuptools.config.setupcfg import ConfigHandler, read_configuration
+from ..textwrap import DALS
class ErrConfigHandler(ConfigHandler):
"""Erroneous handler. Fails to implement required methods."""
+ section_prefix = "**err**"
def make_package_dir(name, base_dir, ns=False):
@@ -70,7 +70,7 @@ def get_dist(tmpdir, kwargs_initial=None, parse=True):
def test_parsers_implemented():
with pytest.raises(NotImplementedError):
- handler = ErrConfigHandler(None, {})
+ handler = ErrConfigHandler(None, {}, False, Mock())
handler.parsers
@@ -186,9 +186,12 @@ class TestMetadata:
def test_file_sandboxed(self, tmpdir):
- fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n')
+ tmpdir.ensure("README")
+ project = tmpdir.join('depth1', 'depth2')
+ project.ensure(dir=True)
+ fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n')
- with get_dist(tmpdir, parse=False) as dist:
+ with get_dist(project, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files() # file: out of sandbox
@@ -407,7 +410,7 @@ class TestMetadata:
'requires = some, requirement\n',
)
- with pytest.deprecated_call():
+ with pytest.warns(SetuptoolsDeprecationWarning, match="requires"):
with get_dist(tmpdir) as dist:
metadata = dist.metadata
@@ -516,7 +519,8 @@ class TestOptions:
'python_requires = >=1.0, !=2.8\n'
'py_modules = module1, module2\n',
)
- with get_dist(tmpdir) as dist:
+ deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
+ with deprec, get_dist(tmpdir) as dist:
assert dist.zip_safe
assert dist.include_package_data
assert dist.package_dir == {'': 'src', 'b': 'c'}
@@ -570,7 +574,8 @@ class TestOptions:
' http://some.com/here/1\n'
' http://some.com/there/2\n',
)
- with get_dist(tmpdir) as dist:
+ deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
+ with deprec, get_dist(tmpdir) as dist:
assert dist.package_dir == {'': 'src', 'b': 'c'}
assert dist.packages == ['pack_a', 'pack_b.subpack']
assert dist.namespace_packages == ['pack1', 'pack2']
@@ -711,6 +716,51 @@ class TestOptions:
}
assert dist.metadata.provides_extras == set(['pdf', 'rest'])
+ @pytest.mark.parametrize(
+ "config",
+ [
+ "[options.extras_require]\nfoo = bar;python_version<'3'",
+ "[options.extras_require]\nfoo = bar;os_name=='linux'",
+ "[options.extras_require]\nfoo = bar;python_version<'3'\n",
+ "[options.extras_require]\nfoo = bar;os_name=='linux'\n",
+ "[options]\ninstall_requires = bar;python_version<'3'",
+ "[options]\ninstall_requires = bar;os_name=='linux'",
+ "[options]\ninstall_requires = bar;python_version<'3'\n",
+ "[options]\ninstall_requires = bar;os_name=='linux'\n",
+ ],
+ )
+ def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
+ fake_env(tmpdir, config)
+ match = (
+ r"One of the parsed requirements in `(install_requires|extras_require.+)` "
+ "looks like a valid environment marker.*"
+ )
+ with pytest.warns(UserWarning, match=match):
+ with get_dist(tmpdir) as _:
+ pass
+
+ @pytest.mark.parametrize(
+ "config",
+ [
+ "[options.extras_require]\nfoo =\n bar;python_version<'3'",
+ "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
+ "[options.extras_require]\nfoo =\n bar;python_version<'3'\n",
+ "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
+ "[options.extras_require]\nfoo =\n bar\n python_version<'3'\n",
+ "[options]\ninstall_requires =\n bar;python_version<'3'",
+ "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
+ "[options]\ninstall_requires =\n bar;python_version<'3'\n",
+ "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
+ "[options]\ninstall_requires =\n bar\n python_version<'3'\n",
+ ],
+ )
+ def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
+ fake_env(tmpdir, config)
+ with get_dist(tmpdir) as _:
+ pass
+ # The examples are valid, no warnings shown
+ assert not any(w.category == UserWarning for w in recwarn)
+
def test_dash_preserved_extras_require(self, tmpdir):
fake_env(tmpdir, '[options.extras_require]\n' 'foo-a = foo\n' 'foo_b = test\n')
@@ -859,22 +909,43 @@ class TestOptions:
dist.parse_config_files()
def test_cmdclass(self, tmpdir):
- class CustomCmd(Command):
- pass
-
- m = types.ModuleType('custom_build', 'test package')
+ module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src
+ module_path.parent.mkdir(parents=True, exist_ok=True)
+ module_path.write_text(
+ "from distutils.core import Command\n"
+ "class CustomCmd(Command): pass\n"
+ )
- m.__dict__['CustomCmd'] = CustomCmd
+ setup_cfg = """
+ [options]
+ cmdclass =
+ customcmd = custom_build.CustomCmd
+ """
+ fake_env(tmpdir, inspect.cleandoc(setup_cfg))
- sys.modules['custom_build'] = m
+ with get_dist(tmpdir) as dist:
+ cmdclass = dist.cmdclass['customcmd']
+ assert cmdclass.__name__ == "CustomCmd"
+ assert cmdclass.__module__ == "custom_build"
+ assert module_path.samefile(inspect.getfile(cmdclass))
+ def test_requirements_file(self, tmpdir):
fake_env(
tmpdir,
- '[options]\n' 'cmdclass =\n' ' customcmd = custom_build.CustomCmd\n',
+ DALS("""
+ [options]
+ install_requires = file:requirements.txt
+ [options.extras_require]
+ colors = file:requirements-extra.txt
+ """)
)
+ tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
+ tmpdir.join('requirements-extra.txt').write('colorama')
+
with get_dist(tmpdir) as dist:
- assert dist.cmdclass == {'customcmd': CustomCmd}
+ assert dist.install_requires == ['docutils>=0.3']
+ assert dist.extras_require == {'colors': ['colorama']}
saved_dist_init = _Distribution.__init__
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
index 58948824..7ddbc780 100644
--- a/setuptools/tests/contexts.py
+++ b/setuptools/tests/contexts.py
@@ -123,3 +123,26 @@ def session_locked_tmp_dir(request, tmp_path_factory, name):
    # ^-- prevent multiple workers from accessing the directory at once
locked_dir.mkdir(exist_ok=True, parents=True)
yield locked_dir
+
+
+@contextlib.contextmanager
+def save_paths():
+ """Make sure ``sys.path``, ``sys.meta_path`` and ``sys.path_hooks`` are preserved"""
+ prev = sys.path[:], sys.meta_path[:], sys.path_hooks[:]
+
+ try:
+ yield
+ finally:
+ sys.path, sys.meta_path, sys.path_hooks = prev
+
+
+@contextlib.contextmanager
+def save_sys_modules():
+ """Make sure initial ``sys.modules`` is preserved"""
+ prev_modules = sys.modules
+
+ try:
+ sys.modules = sys.modules.copy()
+ yield
+ finally:
+ sys.modules = prev_modules
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index a0c0ec6e..bcf29601 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -18,6 +18,19 @@ class VirtualEnv(jaraco.envs.VirtualEnv):
def run(self, cmd, *args, **kwargs):
cmd = [self.exe(cmd[0])] + cmd[1:]
kwargs = {"cwd": self.root, **kwargs} # Allow overriding
+ # In some environments (eg. downstream distro packaging), where:
+ # - tox isn't used to run tests and
+ # - PYTHONPATH is set to point to a specific setuptools codebase and
+ # - no custom env is explicitly set by a test
+ # PYTHONPATH will leak into the spawned processes.
+        # In that case tests look for modules in the wrong place (on PYTHONPATH).
+ # Unless the test sets its own special env, pass a copy of the existing
+ # environment with removed PYTHONPATH to the subprocesses.
+ if "env" not in kwargs:
+ env = dict(os.environ)
+ if "PYTHONPATH" in env:
+ del env["PYTHONPATH"]
+ kwargs["env"] = env
return subprocess.check_output(cmd, *args, **kwargs)
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 7599e655..25ab49fd 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -1,6 +1,8 @@
+import os
import contextlib
import sys
import subprocess
+from pathlib import Path
import pytest
import path
@@ -64,6 +66,9 @@ def sample_project(tmp_path):
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
+ if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"):
+ return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve()
+
with contexts.session_locked_tmp_dir(
request, tmp_path_factory, "sdist_build") as tmp:
dist = next(tmp.glob("*.tar.gz"), None)
@@ -79,6 +84,9 @@ def setuptools_sdist(tmp_path_factory, request):
@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
+ if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"):
+ return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve()
+
with contexts.session_locked_tmp_dir(
request, tmp_path_factory, "wheel_build") as tmp:
dist = next(tmp.glob("*.whl"), None)
@@ -98,7 +106,18 @@ def venv(tmp_path, setuptools_wheel):
env = environment.VirtualEnv()
env.root = path.Path(tmp_path / 'venv')
env.req = str(setuptools_wheel)
- return env.create()
+ # In some environments (eg. downstream distro packaging),
+ # where tox isn't used to run tests and PYTHONPATH is set to point to
+ # a specific setuptools codebase, PYTHONPATH will leak into the spawned
+ # processes.
+ # env.create() should install the just created setuptools
+ # wheel, but it doesn't if it finds another existing matching setuptools
+ # installation present on PYTHONPATH:
+ # `setuptools is already installed with the same version as the provided
+ # wheel. Use --force-reinstall to force an installation of the wheel.`
+ # This prevents leaking PYTHONPATH to the created environment.
+ with contexts.environment(PYTHONPATH=None):
+ return env.create()
@pytest.fixture
diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py
index 43f43902..24c02be0 100644
--- a/setuptools/tests/integration/helpers.py
+++ b/setuptools/tests/integration/helpers.py
@@ -8,6 +8,7 @@ import os
import subprocess
import tarfile
from zipfile import ZipFile
+from pathlib import Path
def run(cmd, env=None):
@@ -59,3 +60,16 @@ class Archive:
raise ValueError(msg)
return str(content.read(), "utf-8")
return str(self._obj.read(zip_or_tar_info), "utf-8")
+
+
+def get_sdist_members(sdist_path):
+ with tarfile.open(sdist_path, "r:gz") as tar:
+ files = [Path(f) for f in tar.getnames()]
+ # remove root folder
+ relative_files = ("/".join(f.parts[1:]) for f in files)
+ return {f for f in relative_files if f}
+
+
+def get_wheel_members(wheel_path):
+ with ZipFile(wheel_path) as zipfile:
+ return set(zipfile.namelist())
diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index 86cc4235..b44e32fc 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -27,7 +27,7 @@ from .helpers import Archive, run
pytestmark = pytest.mark.integration
-LATEST, = list(Enum("v", "LATEST"))
+LATEST, = Enum("v", "LATEST")
"""Default version to be checked"""
# There are positive and negative aspects of checking the latest version of the
# packages.
@@ -53,7 +53,7 @@ EXAMPLES = [
("brotli", LATEST), # not in the list but used by urllib3
# When adding packages to this list, make sure they expose a `__version__`
- # attribute, or modify the tests bellow
+ # attribute, or modify the tests below
]
@@ -72,11 +72,11 @@ VIRTUALENV = (sys.executable, "-m", "virtualenv")
# means it will download the previous stable version of setuptools.
# `pip` flags can avoid that (the version of setuptools under test
# should be the one to be used)
-SDIST_OPTIONS = (
+INSTALL_OPTIONS = (
"--ignore-installed",
"--no-build-isolation",
- # We don't need "--no-binary :all:" since we specify the path to the sdist.
- # It also helps with performance, since dependencies can come from wheels.
+ # Omit "--no-binary :all:" the sdist is supplied directly.
+ # Allows dependencies as wheels.
)
# The downside of `--no-build-isolation` is that pip will not download build
# dependencies. The test script will have to also handle that.
@@ -112,6 +112,7 @@ ALREADY_LOADED = ("pytest", "mypy") # loaded by pytest/pytest-enabler
@pytest.mark.parametrize('package, version', EXAMPLES)
+@pytest.mark.uses_network
def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
venv_pip = (venv_python, "-m", "pip")
sdist = retrieve_sdist(package, version, tmp_path)
@@ -124,7 +125,7 @@ def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel
# Use a virtualenv to simulate PEP 517 isolation
# but install fresh setuptools wheel to ensure the version under development
run([*venv_pip, "install", "-I", setuptools_wheel])
- run([*venv_pip, "install", *SDIST_OPTIONS, sdist])
+ run([*venv_pip, "install", *INSTALL_OPTIONS, sdist])
# Execute a simple script to make sure the package was installed correctly
script = f"import {package}; print(getattr({package}, '__version__', 0))"
@@ -164,17 +165,9 @@ def retrieve_pypi_sdist_metadata(package, version):
raise ValueError(f"Release for {package} {version} was yanked")
version = metadata["info"]["version"]
- release = metadata["releases"][version]
- dists = [d for d in release if d["packagetype"] == "sdist"]
- if len(dists) == 0:
- raise ValueError(f"No sdist found for {package} {version}")
-
- for dist in dists:
- if dist["filename"].endswith(".tar.gz"):
- return dist
-
- # Not all packages are publishing tar.gz
- return dist
+ release = metadata["releases"][version] if version is LATEST else metadata["urls"]
+ sdist, = filter(lambda d: d["packagetype"] == "sdist", release)
+ return sdist
def download(url, dest, md5_digest):
@@ -192,7 +185,7 @@ def download(url, dest, md5_digest):
def build_deps(package, sdist_file):
"""Find out what are the build dependencies for a package.
- We need to "manually" install them, since pip will not install build
+ "Manually" install them, since pip will not install build
deps with `--no-build-isolation`.
"""
import tomli as toml
@@ -201,9 +194,7 @@ def build_deps(package, sdist_file):
# testenv without tomli
archive = Archive(sdist_file)
- pyproject = _read_pyproject(archive)
-
- info = toml.loads(pyproject)
+ info = toml.loads(_read_pyproject(archive))
deps = info.get("build-system", {}).get("requires", [])
deps += EXTRA_BUILD_DEPS.get(package, [])
# Remove setuptools from requirements (and deduplicate)
@@ -212,7 +203,9 @@ def build_deps(package, sdist_file):
def _read_pyproject(archive):
- for member in archive:
- if os.path.basename(archive.get_name(member)) == "pyproject.toml":
- return archive.get_content(member)
- return ""
+ contents = (
+ archive.get_content(member)
+ for member in archive
+ if os.path.basename(archive.get_name(member)) == "pyproject.toml"
+ )
+ return next(contents, "")
diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py
index 245cf8ea..34e916f5 100644
--- a/setuptools/tests/namespaces.py
+++ b/setuptools/tests/namespaces.py
@@ -28,6 +28,29 @@ def build_namespace_package(tmpdir, name):
return src_dir
+def build_pep420_namespace_package(tmpdir, name):
+ src_dir = tmpdir / name
+ src_dir.mkdir()
+ pyproject = src_dir / "pyproject.toml"
+ namespace, sep, rest = name.rpartition(".")
+ script = f"""\
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "{name}"
+ version = "3.14159"
+ """
+ pyproject.write_text(textwrap.dedent(script), encoding='utf-8')
+ ns_pkg_dir = src_dir / namespace.replace(".", "/")
+ ns_pkg_dir.mkdir(parents=True)
+ pkg_mod = ns_pkg_dir / (rest + ".py")
+ some_functionality = f"name = {rest!r}"
+ pkg_mod.write_text(some_functionality, encoding='utf-8')
+ return src_dir
+
+
def make_site_dir(target):
"""
Add a sitecustomize.py module in target to cause
diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py
index 22dee0d2..93d28f16 100644
--- a/setuptools/tests/script-with-bom.py
+++ b/setuptools/tests/script-with-bom.py
@@ -1,3 +1 @@
-# -*- coding: utf-8 -*-
-
-result = 'passed'
+result = 'passed'
diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py
index 1a900c67..1b69c418 100644
--- a/setuptools/tests/test_bdist_deprecations.py
+++ b/setuptools/tests/test_bdist_deprecations.py
@@ -1,7 +1,7 @@
"""develop tests
"""
-import mock
import sys
+from unittest import mock
import pytest
diff --git a/setuptools/tests/test_build.py b/setuptools/tests/test_build.py
new file mode 100644
index 00000000..cefb3d34
--- /dev/null
+++ b/setuptools/tests/test_build.py
@@ -0,0 +1,63 @@
+from contextlib import contextmanager
+from setuptools import Command, SetuptoolsDeprecationWarning
+from setuptools.dist import Distribution
+from setuptools.command.build import build
+from distutils.command.build import build as distutils_build
+
+import pytest
+
+
+def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
+ """
+ Check that the setuptools Distribution uses the
+ setuptools specific build object.
+ """
+
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['build'],
+ packages=[],
+ package_data={'': ['path/*']},
+ ))
+ assert isinstance(dist.get_command_obj("build"), build)
+
+
+@contextmanager
+def _restore_sub_commands():
+ orig = distutils_build.sub_commands[:]
+ try:
+ yield
+ finally:
+ distutils_build.sub_commands = orig
+
+
+class Subcommand(Command):
+ """Dummy command to be used in tests"""
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ raise NotImplementedError("just to check if the command runs")
+
+
+@_restore_sub_commands()
+def test_subcommand_in_distutils(tmpdir_cwd):
+ """
+ Ensure that sub commands registered in ``distutils`` run,
+ after instructing the users to migrate to ``setuptools``.
+ """
+ dist = Distribution(dict(
+ packages=[],
+ cmdclass={'subcommand': Subcommand},
+ ))
+ distutils_build.sub_commands.append(('subcommand', None))
+
+ warning_msg = "please use .setuptools.command.build."
+ with pytest.warns(SetuptoolsDeprecationWarning, match=warning_msg):
+ # For backward compatibility, the subcommand should run anyway:
+ with pytest.raises(NotImplementedError, match="the command runs"):
+ dist.run_command("build")
diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py
index 48bea2b4..2d9273cd 100644
--- a/setuptools/tests/test_build_clib.py
+++ b/setuptools/tests/test_build_clib.py
@@ -1,6 +1,8 @@
+from unittest import mock
+
import pytest
-import mock
+import random
from distutils.errors import DistutilsSetupError
from setuptools.command.build_clib import build_clib
from setuptools.dist import Distribution
@@ -55,3 +57,30 @@ class TestBuildCLib:
cmd.build_libraries(libs)
assert cmd.compiler.compile.call_count == 1
assert cmd.compiler.create_static_lib.call_count == 1
+
+ @mock.patch(
+ 'setuptools.command.build_clib.newer_pairwise_group')
+ def test_build_libraries_reproducible(self, mock_newer):
+ dist = Distribution()
+ cmd = build_clib(dist)
+
+ # with that out of the way, let's see if the crude dependency
+ # system works
+ cmd.compiler = mock.MagicMock(spec=cmd.compiler)
+ mock_newer.return_value = ([], [])
+
+ original_sources = ['a-example.c', 'example.c']
+ sources = original_sources
+
+ obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
+ libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
+
+ cmd.build_libraries(libs)
+ computed_call_args = mock_newer.call_args[0]
+
+ while sources == original_sources:
+ sources = random.sample(original_sources, len(original_sources))
+ libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
+
+ cmd.build_libraries(libs)
+ assert computed_call_args == mock_newer.call_args[0]
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 3177a2cd..62ba925e 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -2,16 +2,20 @@ import os
import sys
import distutils.command.build_ext as orig
from distutils.sysconfig import get_config_var
+from importlib.util import cache_from_source as _compiled_file_name
from jaraco import path
from setuptools.command.build_ext import build_ext, get_abi3_suffix
from setuptools.dist import Distribution
from setuptools.extension import Extension
+from setuptools.errors import CompileError
from . import environment
from .textwrap import DALS
+import pytest
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
@@ -83,6 +87,141 @@ class TestBuildExt:
finally:
del os.environ['SETUPTOOLS_EXT_SUFFIX']
+ def dist_with_example(self):
+ files = {
+ "src": {"mypkg": {"subpkg": {"ext2.c": ""}}},
+ "c-extensions": {"ext1": {"main.c": ""}},
+ }
+
+ ext1 = Extension("mypkg.ext1", ["c-extensions/ext1/main.c"])
+ ext2 = Extension("mypkg.subpkg.ext2", ["src/mypkg/subpkg/ext2.c"])
+ ext3 = Extension("ext3", ["c-extension/ext3.c"])
+
+ path.build(files)
+ dist = Distribution({
+ "script_name": "%test%",
+ "ext_modules": [ext1, ext2, ext3],
+ "package_dir": {"": "src"},
+ })
+ return dist
+
+ def test_get_outputs(self, tmpdir_cwd, monkeypatch):
+ monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3') # make test OS-independent
+ monkeypatch.setattr('setuptools.command.build_ext.use_stubs', False)
+ dist = self.dist_with_example()
+
+ # Regular build: get_outputs not empty, but get_output_mappings is empty
+ build_ext = dist.get_command_obj("build_ext")
+ build_ext.editable_mode = False
+ build_ext.ensure_finalized()
+ build_lib = build_ext.build_lib.replace(os.sep, "/")
+ outputs = [x.replace(os.sep, "/") for x in build_ext.get_outputs()]
+ assert outputs == [
+ f"{build_lib}/ext3.mp3",
+ f"{build_lib}/mypkg/ext1.mp3",
+ f"{build_lib}/mypkg/subpkg/ext2.mp3",
+ ]
+ assert build_ext.get_output_mapping() == {}
+
+ # Editable build: get_output_mappings should contain everything in get_outputs
+ dist.reinitialize_command("build_ext")
+ build_ext.editable_mode = True
+ build_ext.ensure_finalized()
+ mapping = {
+ k.replace(os.sep, "/"): v.replace(os.sep, "/")
+ for k, v in build_ext.get_output_mapping().items()
+ }
+ assert mapping == {
+ f"{build_lib}/ext3.mp3": "src/ext3.mp3",
+ f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
+ f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
+ }
+
+ def test_get_output_mapping_with_stub(self, tmpdir_cwd, monkeypatch):
+ monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3') # make test OS-independent
+ monkeypatch.setattr('setuptools.command.build_ext.use_stubs', True)
+ dist = self.dist_with_example()
+
+ # Editable build should create compiled stubs (.pyc files only, no .py)
+ build_ext = dist.get_command_obj("build_ext")
+ build_ext.editable_mode = True
+ build_ext.ensure_finalized()
+ for ext in build_ext.extensions:
+ monkeypatch.setattr(ext, "_needs_stub", True)
+
+ build_lib = build_ext.build_lib.replace(os.sep, "/")
+ mapping = {
+ k.replace(os.sep, "/"): v.replace(os.sep, "/")
+ for k, v in build_ext.get_output_mapping().items()
+ }
+
+ def C(file):
+ """Make it possible to do comparisons and tests in a OS-independent way"""
+ return _compiled_file_name(file).replace(os.sep, "/")
+
+ assert mapping == {
+ C(f"{build_lib}/ext3.py"): C("src/ext3.py"),
+ f"{build_lib}/ext3.mp3": "src/ext3.mp3",
+ C(f"{build_lib}/mypkg/ext1.py"): C("src/mypkg/ext1.py"),
+ f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
+ C(f"{build_lib}/mypkg/subpkg/ext2.py"): C("src/mypkg/subpkg/ext2.py"),
+ f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
+ }
+
+ # Ensure only the compiled stubs are present not the raw .py stub
+ assert f"{build_lib}/mypkg/ext1.py" not in mapping
+ assert f"{build_lib}/mypkg/subpkg/ext2.py" not in mapping
+
+ # Visualize what the cached stub files look like
+ example_stub = C(f"{build_lib}/mypkg/ext1.py")
+ assert example_stub in mapping
+ assert example_stub.startswith(f"{build_lib}/mypkg/__pycache__/ext1")
+ assert example_stub.endswith(".pyc")
+
+
+class TestBuildExtInplace:
+ def get_build_ext_cmd(self, optional: bool, **opts):
+ files = {
+ "eggs.c": "#include missingheader.h\n",
+ ".build": {"lib": {}, "tmp": {}},
+ }
+ path.build(files)
+ extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
+ dist = Distribution(dict(ext_modules=[extension]))
+ dist.script_name = 'setup.py'
+ cmd = build_ext(dist)
+ vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
+ cmd.ensure_finalized()
+ return cmd
+
+ def get_log_messages(self, caplog, capsys):
+ """
+ Historically, distutils "logged" by printing to sys.std*.
+ Later versions adopted the logging framework. Grab
+ messages regardless of how they were captured.
+ """
+ std = capsys.readouterr()
+ return std.out.splitlines() + std.err.splitlines() + caplog.messages
+
+ def test_optional(self, tmpdir_cwd, caplog, capsys):
+ """
+ If optional extensions fail to build, setuptools should show the error
+ in the logs but not fail to build
+ """
+ cmd = self.get_build_ext_cmd(optional=True, inplace=True)
+ cmd.run()
+        assert any(
+            'build_ext: building extension "spam.eggs" failed' in msg
+            for msg in self.get_log_messages(caplog, capsys)
+        )
+ # No compile error exception should be raised
+
+ def test_non_optional(self, tmpdir_cwd):
+ # Non-optional extensions should raise an exception
+ cmd = self.get_build_ext_cmd(optional=False, inplace=True)
+ with pytest.raises(CompileError):
+ cmd.run()
+
def test_build_ext_config_handling(tmpdir_cwd):
files = {
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 0f4a1a73..9e55a938 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -1,15 +1,33 @@
import os
+import sys
import shutil
+import signal
import tarfile
import importlib
+import contextlib
from concurrent import futures
import re
+from zipfile import ZipFile
+from pathlib import Path
import pytest
from jaraco import path
from .textwrap import DALS
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
+
+TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
+
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32" and IS_PYPY,
+ reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
+ "is flaky and problematic"
+)
+
class BuildBackendBase:
def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'):
@@ -26,15 +44,32 @@ class BuildBackend(BuildBackendBase):
self.pool = futures.ProcessPoolExecutor(max_workers=1)
def __getattr__(self, name):
- """Handles aribrary function invocations on the build backend."""
+ """Handles arbitrary function invocations on the build backend."""
def method(*args, **kw):
root = os.path.abspath(self.cwd)
caller = BuildBackendCaller(root, self.env, self.backend_name)
- return self.pool.submit(caller, name, *args, **kw).result()
+ pid = None
+ try:
+ pid = self.pool.submit(os.getpid).result(TIMEOUT)
+ return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
+ except futures.TimeoutError:
+ self.pool.shutdown(wait=False) # doesn't stop already running processes
+ self._kill(pid)
+ pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
+ except (futures.process.BrokenProcessPool, MemoryError, OSError):
+ if IS_PYPY:
+                pytest.xfail("PyPy frequently fails tests with ProcessPoolExecutor")
+ raise
return method
+ def _kill(self, pid):
+ if pid is None:
+ return
+ with contextlib.suppress(ProcessLookupError, OSError):
+ os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
+
class BuildBackendCaller(BuildBackendBase):
def __init__(self, *args, **kwargs):
@@ -44,7 +79,7 @@ class BuildBackendCaller(BuildBackendBase):
self.backend_obj) = self.backend_name.partition(':')
def __call__(self, name, *args, **kw):
- """Handles aribrary function invocations on the build backend."""
+ """Handles arbitrary function invocations on the build backend."""
os.chdir(self.cwd)
os.environ.update(self.env)
mod = importlib.import_module(self.backend_name)
@@ -58,7 +93,7 @@ class BuildBackendCaller(BuildBackendBase):
defns = [
- {
+ { # simple setup.py script
'setup.py': DALS("""
__import__('setuptools').setup(
name='foo',
@@ -72,7 +107,7 @@ defns = [
print('hello')
"""),
},
- {
+ { # setup.py that relies on __name__
'setup.py': DALS("""
assert __name__ == '__main__'
__import__('setuptools').setup(
@@ -87,7 +122,7 @@ defns = [
print('hello')
"""),
},
- {
+ { # setup.py script that runs arbitrary code
'setup.py': DALS("""
variable = True
def function():
@@ -105,7 +140,45 @@ defns = [
print('hello')
"""),
},
- {
+ { # setup.py script that constructs temp files to be included in the distribution
+ 'setup.py': DALS("""
+ # Some packages construct files on the fly, include them in the package,
+ # and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
+ # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
+ # to obtain a distribution object first, and then run the distutils
+ # commands later, because these files will be removed in the meantime.
+
+ with open('world.py', 'w') as f:
+ f.write('x = 42')
+
+ try:
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['world'],
+ setup_requires=['six'],
+ )
+ finally:
+ # Some packages will clean temporary files
+ __import__('os').unlink('world.py')
+ """),
+ },
+ { # setup.cfg only
+ 'setup.cfg': DALS("""
+ [metadata]
+ name = foo
+ version = 0.0.0
+
+ [options]
+ py_modules=hello
+ setup_requires=six
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """)
+ },
+ { # setup.cfg and setup.py
'setup.cfg': DALS("""
[metadata]
name = foo
@@ -115,6 +188,7 @@ defns = [
py_modules=hello
setup_requires=six
"""),
+ 'setup.py': "__import__('setuptools').setup()",
'hello.py': DALS("""
def run():
print('hello')
@@ -150,7 +224,20 @@ class TestBuildMetaBackend:
os.makedirs(dist_dir)
wheel_name = build_backend.build_wheel(dist_dir)
- assert os.path.isfile(os.path.join(dist_dir, wheel_name))
+ wheel_file = os.path.join(dist_dir, wheel_name)
+ assert os.path.isfile(wheel_file)
+
+ # Temporary files should be removed
+ assert not os.path.isfile('world.py')
+
+ with ZipFile(wheel_file) as zipfile:
+ wheel_contents = set(zipfile.namelist())
+
+ # Each one of the examples have a single module
+ # that should be included in the distribution
+ python_scripts = (f for f in wheel_contents if f.endswith('.py'))
+ modules = [f for f in python_scripts if not f.endswith('setup.py')]
+ assert len(modules) == 1
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
@@ -199,6 +286,190 @@ class TestBuildMetaBackend:
assert third_result == second_result
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
+ @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
+ def test_build_with_pyproject_config(self, tmpdir, setup_script):
+ files = {
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "foo"
+ license = {text = "MIT"}
+ description = "This is a Python package"
+ dynamic = ["version", "readme"]
+ classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers"
+ ]
+ urls = {Homepage = "http://github.com"}
+ dependencies = [
+ "appdirs",
+ ]
+
+ [project.optional-dependencies]
+ all = [
+ "tomli>=1",
+ "pyscaffold>=4,<5",
+ 'importlib; python_version == "2.6"',
+ ]
+
+ [project.scripts]
+ foo = "foo.cli:main"
+
+ [tool.setuptools]
+ zip-safe = false
+ package-dir = {"" = "src"}
+ packages = {find = {where = ["src"]}}
+ license-files = ["LICENSE*"]
+
+ [tool.setuptools.dynamic]
+ version = {attr = "foo.__version__"}
+ readme = {file = "README.rst"}
+
+ [tool.distutils.sdist]
+ formats = "gztar"
+
+ [tool.distutils.bdist_wheel]
+ universal = true
+ """),
+ "MANIFEST.in": DALS("""
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ """),
+ "README.rst": "This is a ``README``",
+ "LICENSE.txt": "---- placeholder MIT license ----",
+ "src": {
+ "foo": {
+ "__init__.py": "__version__ = '0.1'",
+ "cli.py": "def main(): print('hello world')",
+ "data.txt": "def main(): print('hello world')",
+ }
+ }
+ }
+ if setup_script:
+ files["setup.py"] = setup_script
+
+ build_backend = self.get_build_backend()
+ with tmpdir.as_cwd():
+ path.build(files)
+ sdist_path = build_backend.build_sdist("temp")
+ wheel_file = build_backend.build_wheel("temp")
+
+ with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+ sdist_contents = set(tar.getnames())
+
+ with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+ wheel_contents = set(zipfile.namelist())
+ metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
+ license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+ epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
+
+ assert sdist_contents - {"foo-0.1/setup.py"} == {
+ 'foo-0.1',
+ 'foo-0.1/LICENSE.txt',
+ 'foo-0.1/MANIFEST.in',
+ 'foo-0.1/PKG-INFO',
+ 'foo-0.1/README.rst',
+ 'foo-0.1/pyproject.toml',
+ 'foo-0.1/setup.cfg',
+ 'foo-0.1/src',
+ 'foo-0.1/src/foo',
+ 'foo-0.1/src/foo/__init__.py',
+ 'foo-0.1/src/foo/cli.py',
+ 'foo-0.1/src/foo/data.txt',
+ 'foo-0.1/src/foo.egg-info',
+ 'foo-0.1/src/foo.egg-info/PKG-INFO',
+ 'foo-0.1/src/foo.egg-info/SOURCES.txt',
+ 'foo-0.1/src/foo.egg-info/dependency_links.txt',
+ 'foo-0.1/src/foo.egg-info/entry_points.txt',
+ 'foo-0.1/src/foo.egg-info/requires.txt',
+ 'foo-0.1/src/foo.egg-info/top_level.txt',
+ 'foo-0.1/src/foo.egg-info/not-zip-safe',
+ }
+ assert wheel_contents == {
+ "foo/__init__.py",
+ "foo/cli.py",
+ "foo/data.txt", # include_package_data defaults to True
+ "foo-0.1.dist-info/LICENSE.txt",
+ "foo-0.1.dist-info/METADATA",
+ "foo-0.1.dist-info/WHEEL",
+ "foo-0.1.dist-info/entry_points.txt",
+ "foo-0.1.dist-info/top_level.txt",
+ "foo-0.1.dist-info/RECORD",
+ }
+ assert license == "---- placeholder MIT license ----"
+ for line in (
+ "Summary: This is a Python package",
+ "License: MIT",
+ "Classifier: Intended Audience :: Developers",
+ "Requires-Dist: appdirs",
+ "Requires-Dist: tomli (>=1) ; extra == 'all'",
+ "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'"
+ ):
+ assert line in metadata
+
+ assert metadata.strip().endswith("This is a ``README``")
+ assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
+
+ def test_static_metadata_in_pyproject_config(self, tmpdir):
+ # Make sure static metadata in pyproject.toml is not overwritten by setup.py
+ # as required by PEP 621
+ files = {
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "foo"
+ description = "This is a Python package"
+ version = "42"
+ dependencies = ["six"]
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='bar',
+ version='13',
+ )
+ """),
+ }
+ build_backend = self.get_build_backend()
+ with tmpdir.as_cwd():
+ path.build(files)
+ sdist_path = build_backend.build_sdist("temp")
+ wheel_file = build_backend.build_wheel("temp")
+
+ assert (tmpdir / "temp/foo-42.tar.gz").exists()
+ assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/bar-13.tar.gz").exists()
+ assert not (tmpdir / "temp/bar-42.tar.gz").exists()
+ assert not (tmpdir / "temp/foo-13.tar.gz").exists()
+ assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
+
+ with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+ pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
+ members = tar.getnames()
+ assert "bar-13/PKG-INFO" not in members
+
+ with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+ metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
+ members = zipfile.namelist()
+ assert "bar-13.dist-info/METADATA" not in members
+
+ for file in pkg_info, metadata:
+ for line in ("Name: foo", "Version: 42"):
+ assert line in file
+ for line in ("Name: bar", "Version: 13"):
+ assert line not in file
+
def test_build_sdist(self, build_backend):
dist_dir = os.path.abspath('pip-sdist')
os.makedirs(dist_dir)
@@ -214,6 +485,23 @@ class TestBuildMetaBackend:
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
+ def test_prepare_metadata_inplace(self, build_backend):
+ """
+ Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
+ See issue #3523.
+ """
+ for pre_existing in [
+ ".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
+ ".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
+ ".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
+ ".venv/python3.10/site-packages/click-8.1.3.dist-info",
+ "venv/python3.10/site-packages/distlib-0.3.5.dist-info",
+ "env/python3.10/site-packages/docutils-0.19.dist-info",
+ ]:
+ os.makedirs(pre_existing, exist_ok=True)
+ dist_info = build_backend.prepare_metadata_for_build_wheel(".")
+ assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
+
def test_build_sdist_explicit_dist(self, build_backend):
# explicitly specifying the dist folder should work
# the folder sdist_directory and the ``--dist-dir`` can be the same
@@ -341,6 +629,71 @@ class TestBuildMetaBackend:
with pytest.raises(ImportError, match="^No module named 'hello'$"):
build_backend.build_sdist("temp")
+ _simple_pyproject_example = {
+ "pyproject.toml": DALS("""
+ [project]
+ name = "proj"
+ version = "42"
+ """),
+ "src": {
+ "proj": {"__init__.py": ""}
+ }
+ }
+
+ def _assert_link_tree(self, parent_dir):
+ """All files in the directory should be either links or hard links"""
+ files = list(Path(parent_dir).glob("**/*"))
+ assert files # Should not be empty
+ for file in files:
+            assert file.is_symlink() or os.stat(file).st_nlink > 1
+
+ @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+ # Since the backend is running via a process pool, in some operating systems
+ # we may have problems to make assertions based on warnings/stdout/stderr...
+ # So the best is to ignore them for the time being.
+ def test_editable_with_global_option_still_works(self, tmpdir_cwd):
+ """The usage of --global-option is now discouraged in favour of --build-option.
+ This is required to make more sense of the provided scape hatch and align with
+ previous pip behaviour. See pypa/setuptools#1928.
+ """
+ path.build({**self._simple_pyproject_example, '_meta': {}})
+ build_backend = self.get_build_backend()
+ assert not Path("build").exists()
+
+ cfg = {"--global-option": ["--mode", "strict"]}
+ build_backend.prepare_metadata_for_build_editable("_meta", cfg)
+ build_backend.build_editable("temp", cfg, "_meta")
+
+ self._assert_link_tree(next(Path("build").glob("__editable__.*")))
+
+ def test_editable_without_config_settings(self, tmpdir_cwd):
+ """
+ Sanity check to ensure tests with --mode=strict are different from the ones
+ without --mode.
+
+ --mode=strict should create a local directory with a package tree.
+ The directory should not get created otherwise.
+ """
+ path.build(self._simple_pyproject_example)
+ build_backend = self.get_build_backend()
+ assert not Path("build").exists()
+ build_backend.build_editable("temp")
+ assert not Path("build").exists()
+
+ @pytest.mark.parametrize(
+ "config_settings", [
+ {"--build-option": ["--mode", "strict"]},
+ {"editable-mode": "strict"},
+ ]
+ )
+ def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
+ path.build({**self._simple_pyproject_example, '_meta': {}})
+ assert not Path("build").exists()
+ build_backend = self.get_build_backend()
+ build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
+ build_backend.build_editable("temp", config_settings, "_meta")
+ self._assert_link_tree(next(Path("build").glob("__editable__.*")))
+
@pytest.mark.parametrize('setup_literal, requirements', [
("'foo'", ['foo']),
("['foo']", ['foo']),
@@ -392,6 +745,30 @@ class TestBuildMetaBackend:
assert expected == sorted(actual)
+ def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
+ # Make sure patches introduced to retrieve setup_requires don't accidentally
+ # activate auto-discovery and cause problems due to the incomplete set of
+ # attributes passed to MinimalDistribution
+ files = {
+ 'pyproject.toml': DALS("""
+ [project]
+ name = "proj"
+ version = "42"
+ """),
+ "setup.py": DALS("""
+ __import__('setuptools').setup(
+ setup_requires=["foo"],
+ py_modules = ["hello", "world"]
+ )
+ """),
+ 'hello.py': "'hello'",
+ 'world.py': "'world'",
+ }
+ path.build(files)
+ build_backend = self.get_build_backend()
+ setup_requires = build_backend.get_requires_for_build_wheel()
+ assert setup_requires == ["wheel", "foo"]
+
def test_dont_install_setup_requires(self, tmpdir_cwd):
files = {
'setup.py': DALS("""
@@ -470,3 +847,27 @@ class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
+
+
+def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
+ pyproject = """
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+ [project]
+ name = "myproj"
+ version = "42"
+ """
+ path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
+
+ # First: sanity check
+ cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+ output = str(venv.run(cmd, cwd=tmpdir), "utf-8").lower()
+ assert "running setup.py develop for myproj" not in output
+ assert "created wheel for myproj" in output
+
+ # Then: real test
+ env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
+ cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+ output = str(venv.run(cmd, cwd=tmpdir, env=env), "utf-8").lower()
+ assert "running setup.py develop for myproj" in output
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index 19c8b780..77738f23 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -1,11 +1,17 @@
import os
import stat
import shutil
+from pathlib import Path
+from unittest.mock import Mock
import pytest
+import jaraco.path
+from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution
+from .textwrap import DALS
+
def test_directories_in_package_data_glob(tmpdir_cwd):
"""
@@ -25,6 +31,29 @@ def test_directories_in_package_data_glob(tmpdir_cwd):
dist.run_commands()
+def test_recursive_in_package_data_glob(tmpdir_cwd):
+ """
+ Files matching recursive globs (**) in package_data should
+ be included in the package data.
+
+ #1806
+ """
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['build_py'],
+ packages=[''],
+ package_data={'': ['path/**/data']},
+ ))
+ os.makedirs('path/subpath/subsubpath')
+ open('path/subpath/subsubpath/data', 'w').close()
+
+ dist.parse_command_line()
+ dist.run_commands()
+
+ assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), \
+ "File is not included"
+
+
def test_read_only(tmpdir_cwd):
"""
Ensure read-only flag is not preserved in copy
@@ -79,3 +108,196 @@ def test_executable_data(tmpdir_cwd):
assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, \
"Script is not executable"
+
+
+EXAMPLE_WITH_MANIFEST = {
+ "setup.cfg": DALS("""
+ [metadata]
+ name = mypkg
+ version = 42
+
+ [options]
+ include_package_data = True
+ packages = find:
+
+ [options.packages.find]
+ exclude = *.tests*
+ """),
+ "mypkg": {
+ "__init__.py": "",
+ "resource_file.txt": "",
+ "tests": {
+ "__init__.py": "",
+ "test_mypkg.py": "",
+ "test_file.txt": "",
+ }
+ },
+ "MANIFEST.in": DALS("""
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ prune dist
+ prune build
+ prune *.egg-info
+ """)
+}
+
+
+def test_excluded_subpackages(tmpdir_cwd):
+ jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+ dist = Distribution({"script_name": "%PEP 517%"})
+ dist.parse_config_files()
+
+ build_py = dist.get_command_obj("build_py")
+ msg = r"Python recognizes 'mypkg\.tests' as an importable package"
+ with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+ # TODO: To fix #3260 we need some transition period to deprecate the
+ # existing behavior of `include_package_data`. After the transition, we
+ # should remove the warning and fix the behaviour.
+ build_py.finalize_options()
+ build_py.run()
+
+ build_dir = Path(dist.get_command_obj("build_py").build_lib)
+ assert (build_dir / "mypkg/__init__.py").exists()
+ assert (build_dir / "mypkg/resource_file.txt").exists()
+
+ # Setuptools is configured to ignore `mypkg.tests`, therefore the following
+ # files/dirs should not be included in the distribution.
+ for f in [
+ "mypkg/tests/__init__.py",
+ "mypkg/tests/test_mypkg.py",
+ "mypkg/tests/test_file.txt",
+ "mypkg/tests",
+ ]:
+ with pytest.raises(AssertionError):
+ # TODO: Enforce the following assertion once #3260 is fixed
+ # (remove context manager and the following xfail).
+ assert not (build_dir / f).exists()
+
+ pytest.xfail("#3260")
+
+
+@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+def test_existing_egg_info(tmpdir_cwd, monkeypatch):
+ """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
+ attempt to run egg_info again.
+ """
+ # == Pre-condition ==
+ # Generate an egg-info dir
+ jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+ dist = Distribution({"script_name": "%PEP 517%"})
+ dist.parse_config_files()
+ assert dist.include_package_data
+
+ egg_info = dist.get_command_obj("egg_info")
+ dist.run_command("egg_info")
+ egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
+ assert egg_info_dir.is_dir()
+
+ # == Setup ==
+ build_py = dist.get_command_obj("build_py")
+ build_py.finalize_options()
+ egg_info = dist.get_command_obj("egg_info")
+ egg_info_run = Mock(side_effect=egg_info.run)
+ monkeypatch.setattr(egg_info, "run", egg_info_run)
+
+ # == Remove caches ==
+ # egg_info is called when build_py looks for data_files, which gets cached.
+ # We need to ensure it is not cached yet, otherwise it may impact on the tests
+ build_py.__dict__.pop('data_files', None)
+ dist.reinitialize_command(egg_info)
+
+ # == Sanity check ==
+ # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
+ build_py.existing_egg_info_dir = None
+ build_py.run()
+ egg_info_run.assert_called()
+
+ # == Remove caches ==
+ egg_info_run.reset_mock()
+ build_py.__dict__.pop('data_files', None)
+ dist.reinitialize_command(egg_info)
+
+ # == Actual test ==
+ # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
+ build_py.existing_egg_info_dir = egg_info_dir
+ build_py.run()
+ egg_info_run.assert_not_called()
+ assert build_py.data_files
+
+ # Make sure the list of outputs is actually OK
+    outputs = [x.replace(os.sep, "/") for x in build_py.get_outputs()]
+    assert outputs
+ example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
+ assert example in outputs
+
+
+EXAMPLE_ARBITRARY_MAPPING = {
+ "pyproject.toml": DALS("""
+ [project]
+ name = "mypkg"
+ version = "42"
+
+ [tool.setuptools]
+ packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
+
+ [tool.setuptools.package-dir]
+ "" = "src"
+ "mypkg.sub2" = "src/mypkg/_sub2"
+ "mypkg.sub2.nested" = "other"
+ """),
+ "src": {
+ "mypkg": {
+ "__init__.py": "",
+ "resource_file.txt": "",
+ "sub1": {
+ "__init__.py": "",
+ "mod1.py": "",
+ },
+ "_sub2": {
+ "mod2.py": "",
+ },
+ },
+ },
+ "other": {
+ "__init__.py": "",
+ "mod3.py": "",
+ },
+ "MANIFEST.in": DALS("""
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ """)
+}
+
+
+def test_get_outputs(tmpdir_cwd):
+ jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
+ dist = Distribution({"script_name": "%test%"})
+ dist.parse_config_files()
+
+ build_py = dist.get_command_obj("build_py")
+ build_py.editable_mode = True
+ build_py.ensure_finalized()
+ build_lib = build_py.build_lib.replace(os.sep, "/")
+ outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
+ assert outputs == {
+ f"{build_lib}/mypkg/__init__.py",
+ f"{build_lib}/mypkg/resource_file.txt",
+ f"{build_lib}/mypkg/sub1/__init__.py",
+ f"{build_lib}/mypkg/sub1/mod1.py",
+ f"{build_lib}/mypkg/sub2/mod2.py",
+ f"{build_lib}/mypkg/sub2/nested/__init__.py",
+ f"{build_lib}/mypkg/sub2/nested/mod3.py",
+ }
+ mapping = {
+ k.replace(os.sep, "/"): v.replace(os.sep, "/")
+ for k, v in build_py.get_output_mapping().items()
+ }
+ assert mapping == {
+ f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
+ f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
+ f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
+ f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
+ f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
+ f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
+ f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
+ }
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
new file mode 100644
index 00000000..f65b00b6
--- /dev/null
+++ b/setuptools/tests/test_config_discovery.py
@@ -0,0 +1,637 @@
+import os
+import sys
+from configparser import ConfigParser
+from itertools import product
+
+from setuptools.command.sdist import sdist
+from setuptools.dist import Distribution
+from setuptools.discovery import find_package_path, find_parent_package
+from setuptools.errors import PackageDiscoveryError
+
+import setuptools # noqa -- force distutils.core to be patched
+import distutils.core
+
+import pytest
+import jaraco.path
+from path import Path as _Path
+
+from .contexts import quiet
+from .integration.helpers import get_sdist_members, get_wheel_members, run
+from .textwrap import DALS
+
+
+class TestFindParentPackage:
+ def test_single_package(self, tmp_path):
+ # find_parent_package should find a non-namespace parent package
+ (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True)
+ (tmp_path / "src/namespace/pkg/nested/__init__.py").touch()
+ (tmp_path / "src/namespace/pkg/__init__.py").touch()
+ packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
+ assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"
+
+ def test_multiple_toplevel(self, tmp_path):
+ # find_parent_package should return null if the given list of packages does not
+ # have a single parent package
+ multiple = ["pkg", "pkg1", "pkg2"]
+ for name in multiple:
+ (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True)
+ (tmp_path / f"src/{name}/__init__.py").touch()
+ assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
+
+
+class TestDiscoverPackagesAndPyModules:
+ """Make sure discovered values for ``packages`` and ``py_modules`` work
+ similarly to explicit configuration for the simple scenarios.
+ """
+ OPTIONS = {
+ # Different options according to the circumstance being tested
+ "explicit-src": {
+ "package_dir": {"": "src"},
+ "packages": ["pkg"]
+ },
+ "variation-lib": {
+ "package_dir": {"": "lib"}, # variation of the source-layout
+ },
+ "explicit-flat": {
+ "packages": ["pkg"]
+ },
+ "explicit-single_module": {
+ "py_modules": ["pkg"]
+ },
+ "explicit-namespace": {
+ "packages": ["ns", "ns.pkg"]
+ },
+ "automatic-src": {},
+ "automatic-flat": {},
+ "automatic-single_module": {},
+ "automatic-namespace": {}
+ }
+ FILES = {
+ "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
+ "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
+ "flat": ["pkg/__init__.py", "pkg/main.py"],
+ "single_module": ["pkg.py"],
+ "namespace": ["ns/pkg/__init__.py"]
+ }
+
+ def _get_info(self, circumstance):
+ _, _, layout = circumstance.partition("-")
+ files = self.FILES[layout]
+ options = self.OPTIONS[circumstance]
+ return files, options
+
+ @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+ def test_sdist_filelist(self, tmp_path, circumstance):
+ files, options = self._get_info(circumstance)
+ _populate_project_dir(tmp_path, files, options)
+
+ _, cmd = _run_sdist_programatically(tmp_path, options)
+
+ manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
+ for file in files:
+ assert any(f.endswith(file) for f in manifest)
+
+ @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+ def test_project(self, tmp_path, circumstance):
+ files, options = self._get_info(circumstance)
+ _populate_project_dir(tmp_path, files, options)
+
+ # Simulate a pre-existing `build` directory
+ (tmp_path / "build").mkdir()
+ (tmp_path / "build/lib").mkdir()
+ (tmp_path / "build/bdist.linux-x86_64").mkdir()
+ (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
+ (tmp_path / "build/lib/__init__.py").touch()
+ (tmp_path / "build/lib/file.py").touch()
+ (tmp_path / "dist").mkdir()
+ (tmp_path / "dist/file.py").touch()
+
+ _run_build(tmp_path)
+
+ sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+ print("~~~~~ sdist_members ~~~~~")
+ print('\n'.join(sdist_files))
+ assert sdist_files >= set(files)
+
+ wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+ print("~~~~~ wheel_members ~~~~~")
+ print('\n'.join(wheel_files))
+ orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
+ assert wheel_files >= orig_files
+
+ # Make sure build files are not included by mistake
+ for file in wheel_files:
+ assert "build" not in file
+ assert "dist" not in file
+
+ PURPOSEFULLY_EMPY = {
+ "setup.cfg": DALS(
+ """
+ [metadata]
+ name = myproj
+ version = 0.0.0
+
+ [options]
+ {param} =
+ """
+ ),
+ "setup.py": DALS(
+ """
+ __import__('setuptools').setup(
+ name="myproj",
+ version="0.0.0",
+ {param}=[]
+ )
+ """
+ ),
+ "pyproject.toml": DALS(
+ """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+
+ [project]
+ name = "myproj"
+ version = "0.0.0"
+
+ [tool.setuptools]
+ {param} = []
+ """
+ ),
+ "template-pyproject.toml": DALS(
+ """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+ """
+ )
+ }
+
+ @pytest.mark.parametrize(
+ "config_file, param, circumstance",
+ product(
+ ["setup.cfg", "setup.py", "pyproject.toml"],
+ ["packages", "py_modules"],
+ FILES.keys()
+ )
+ )
+ def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
+ files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
+ _populate_project_dir(tmp_path, files, {})
+
+ if config_file == "pyproject.toml":
+ template_param = param.replace("_", "-")
+ else:
+ # Make sure build works with or without setup.cfg
+ pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
+ (tmp_path / "pyproject.toml").write_text(pyproject)
+ template_param = param
+
+ config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
+ (tmp_path / config_file).write_text(config)
+
+ dist = _get_dist(tmp_path, {})
+ # When either parameter package or py_modules is an empty list,
+ # then there should be no discovery
+ assert getattr(dist, param) == []
+ other = {"py_modules": "packages", "packages": "py_modules"}[param]
+ assert getattr(dist, other) is None
+
+ @pytest.mark.parametrize(
+ "extra_files, pkgs",
+ [
+ (["venv/bin/simulate_venv"], {"pkg"}),
+ (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
+ (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
+ (
+ # Type stubs can also be namespaced
+ ["namespace-stubs/pkg/__init__.pyi"],
+ {"pkg", "namespace-stubs", "namespace-stubs.pkg"},
+ ),
+ (
+ # Just the top-level package can have `-stubs`, ignore nested ones
+ ["namespace-stubs/pkg-stubs/__init__.pyi"],
+ {"pkg", "namespace-stubs"}
+ ),
+ (["_hidden/file.py"], {"pkg"}),
+ (["news/finalize.py"], {"pkg"}),
+ ]
+ )
+ def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
+ files = self.FILES["flat"] + extra_files
+ _populate_project_dir(tmp_path, files, {})
+ dist = _get_dist(tmp_path, {})
+ assert set(dist.packages) == pkgs
+
+ @pytest.mark.parametrize(
+ "extra_files",
+ [
+ ["other/__init__.py"],
+ ["other/finalize.py"],
+ ]
+ )
+ def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
+ files = self.FILES["flat"] + extra_files
+ _populate_project_dir(tmp_path, files, {})
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+ def test_flat_layout_with_single_module(self, tmp_path):
+ files = self.FILES["single_module"] + ["invalid-module-name.py"]
+ _populate_project_dir(tmp_path, files, {})
+ dist = _get_dist(tmp_path, {})
+ assert set(dist.py_modules) == {"pkg"}
+
+ def test_flat_layout_with_multiple_modules(self, tmp_path):
+ files = self.FILES["single_module"] + ["valid_module_name.py"]
+ _populate_project_dir(tmp_path, files, {})
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+ def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
+ """Regression for issue 3692"""
+ from setuptools import build_meta
+
+ pyproject = '[project]\nname = "test"\nversion = "1"'
+ (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
+ (tmp_path / "foo.py").touch()
+ with jaraco.path.DirectoryStack().context(tmp_path):
+ build_meta.build_wheel(".")
+ # Ensure py_modules are found
+ wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
+ assert "foo.py" in wheel_files
+
+
+class TestNoConfig:
+ DEFAULT_VERSION = "0.0.0" # Default version given by setuptools
+
+ EXAMPLES = {
+ "pkg1": ["src/pkg1.py"],
+ "pkg2": ["src/pkg2/__init__.py"],
+ "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
+ "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
+ "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
+ "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_discover_name(self, tmp_path, example):
+ _populate_project_dir(tmp_path, self.EXAMPLES[example], {})
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == example
+
+ def test_build_with_discovered_name(self, tmp_path):
+ files = ["src/ns/nested/pkg/__init__.py"]
+ _populate_project_dir(tmp_path, files, {})
+ _run_build(tmp_path, "--sdist")
+ # Expected distribution file
+ dist_file = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz"
+ assert dist_file.is_file()
+
+
+class TestWithAttrDirective:
+ @pytest.mark.parametrize(
+ "folder, opts",
+ [
+ ("src", {}),
+ ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
+ ]
+ )
+ def test_setupcfg_metadata(self, tmp_path, folder, opts):
+ files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
+ _populate_project_dir(tmp_path, files, opts)
+ (tmp_path / folder / "pkg/__init__.py").write_text("version = 42")
+ (tmp_path / "setup.cfg").write_text(
+ "[metadata]\nversion = attr: pkg.version\n"
+ + (tmp_path / "setup.cfg").read_text()
+ )
+
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == "pkg"
+ assert dist.get_version() == "42"
+ assert dist.package_dir
+ package_path = find_package_path("pkg", dist.package_dir, tmp_path)
+ assert os.path.exists(package_path)
+ assert folder in _Path(package_path).parts()
+
+ _run_build(tmp_path, "--sdist")
+ dist_file = tmp_path / "dist/pkg-42.tar.gz"
+ assert dist_file.is_file()
+
+ def test_pyproject_metadata(self, tmp_path):
+ _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
+ (tmp_path / "src/pkg/__init__.py").write_text("version = 42")
+ (tmp_path / "pyproject.toml").write_text(
+ "[project]\nname = 'pkg'\ndynamic = ['version']\n"
+ "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
+ )
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_version() == "42"
+ assert dist.package_dir == {"": "src"}
+
+
+class TestWithCExtension:
+ def _simulate_package_with_extension(self, tmp_path):
+ # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
+ files = [
+ "benchmarks/file.py",
+ "docs/Makefile",
+ "docs/requirements.txt",
+ "docs/source/conf.py",
+ "proj/header.h",
+ "proj/file.py",
+ "py/proj.cpp",
+ "py/other.cpp",
+ "py/file.py",
+ "py/py.typed",
+ "py/tests/test_proj.py",
+ "README.rst",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+
+ setup_script = """
+ from setuptools import Extension, setup
+
+ ext_modules = [
+ Extension(
+ "proj",
+ ["py/proj.cpp", "py/other.cpp"],
+ include_dirs=["."],
+ language="c++",
+ ),
+ ]
+ setup(ext_modules=ext_modules)
+ """
+ (tmp_path / "setup.py").write_text(DALS(setup_script))
+
+ def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
+ """Ensure that auto-discovery is not triggered when the project is based on
+ C-extensions only, for backward compatibility.
+ """
+ self._simulate_package_with_extension(tmp_path)
+
+ pyproject = """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+ """
+ (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+
+ setupcfg = """
+ [metadata]
+ name = proj
+ version = 42
+ """
+ (tmp_path / "setup.cfg").write_text(DALS(setupcfg))
+
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == "proj"
+ assert dist.get_version() == "42"
+ assert dist.py_modules is None
+ assert dist.packages is None
+ assert len(dist.ext_modules) == 1
+ assert dist.ext_modules[0].name == "proj"
+
+ def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
+ """When opting-in to pyproject.toml metadata, auto-discovery will be active if
+ the package lists C-extensions, but does not configure py-modules or packages.
+
+ This way we ensure users with complex package layouts that would lead to the
+ discovery of multiple top-level modules/packages see errors and are forced to
+ explicitly set ``packages`` or ``py-modules``.
+ """
+ self._simulate_package_with_extension(tmp_path)
+
+ pyproject = """
+ [project]
+ name = 'proj'
+ version = '42'
+ """
+ (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+
+class TestWithPackageData:
+ def _simulate_package_with_data_files(self, tmp_path, src_root):
+ files = [
+ f"{src_root}/proj/__init__.py",
+ f"{src_root}/proj/file1.txt",
+ f"{src_root}/proj/nested/file2.txt",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+
+ manifest = """
+ global-include *.py *.txt
+ """
+ (tmp_path / "MANIFEST.in").write_text(DALS(manifest))
+
+ EXAMPLE_SETUPCFG = """
+ [metadata]
+ name = proj
+ version = 42
+
+ [options]
+ include_package_data = True
+ """
+ EXAMPLE_PYPROJECT = """
+ [project]
+ name = "proj"
+ version = "42"
+ """
+
+ PYPROJECT_PACKAGE_DIR = """
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ """
+
+ @pytest.mark.parametrize(
+ "src_root, files",
+ [
+ (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+ (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+ ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+ ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+ (
+ "src",
+ {
+ "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS(
+ """
+ packages = find:
+ package_dir =
+ =src
+
+ [options.packages.find]
+ where = src
+ """
+ )
+ }
+ ),
+ (
+ "src",
+ {
+ "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS(
+ """
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ """
+ )
+ },
+ ),
+ ]
+ )
+ def test_include_package_data(self, tmp_path, src_root, files):
+ """
+ Make sure auto-discovery does not affect package include_package_data.
+ See issue #3196.
+ """
+ jaraco.path.build(files, prefix=str(tmp_path))
+ self._simulate_package_with_data_files(tmp_path, src_root)
+
+ expected = {
+ os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
+ os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
+ }
+
+ _run_build(tmp_path)
+
+ sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+ print("~~~~~ sdist_members ~~~~~")
+ print('\n'.join(sdist_files))
+ assert sdist_files >= expected
+
+ wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+ print("~~~~~ wheel_members ~~~~~")
+ print('\n'.join(wheel_files))
+ orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
+ assert wheel_files >= orig_files
+
+
+def test_compatible_with_numpy_configuration(tmp_path):
+ files = [
+ "dir1/__init__.py",
+ "dir2/__init__.py",
+ "file.py",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+ dist = Distribution({})
+ dist.configuration = object()
+ dist.set_defaults()
+ assert dist.py_modules is None
+ assert dist.packages is None
+
+
+def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
+ jaraco.path.build({"pkg.py": ""})
+ dist = Distribution({})
+ dist.script_args = ["--name"]
+ dist.set_defaults()
+ dist.parse_command_line() # <-- no exception should be raised here.
+ assert dist.get_name() == "pkg"
+
+
+def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
+ """According to #3545 it seems that ``name`` discovery is running,
+ even when the project already explicitly sets it.
+ This seems to be related to parsing of dynamic versions (via ``attr`` directive),
+ which requires the auto-discovery of ``package_dir``.
+ """
+ files = {
+ "src": {
+ "pkg": {"__init__.py": "__version__ = 42\n"},
+ },
+ "pyproject.toml": DALS("""
+ [project]
+ name = "myproj" # purposefully different from package name
+ dynamic = ["version"]
+ [tool.setuptools.dynamic]
+ version = {"attr" = "pkg.__version__"}
+ """)
+ }
+ jaraco.path.build(files)
+ dist = Distribution({})
+ orig_analyse_name = dist.set_defaults.analyse_name
+
+ def spy_analyse_name():
+ # We can check if name discovery was triggered by ensuring the original
+ # name remains instead of the package name.
+ orig_analyse_name()
+ assert dist.get_name() == "myproj"
+
+ monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
+ dist.parse_config_files()
+ assert dist.get_version() == "42"
+ assert set(dist.packages) == {"pkg"}
+
+
+def _populate_project_dir(root, files, options):
+ # NOTE: Currently pypa/build will refuse to build the project if no
+ # `pyproject.toml` or `setup.py` is found. So it is impossible to do
+ # completely "config-less" projects.
+ (root / "setup.py").write_text("import setuptools\nsetuptools.setup()")
+ (root / "README.md").write_text("# Example Package")
+ (root / "LICENSE").write_text("Copyright (c) 2018")
+ _write_setupcfg(root, options)
+ paths = (root / f for f in files)
+ for path in paths:
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.touch()
+
+
+def _write_setupcfg(root, options):
+ if not options:
+ print("~~~~~ **NO** setup.cfg ~~~~~")
+ return
+ setupcfg = ConfigParser()
+ setupcfg.add_section("options")
+ for key, value in options.items():
+ if key == "packages.find":
+ setupcfg.add_section(f"options.{key}")
+ setupcfg[f"options.{key}"].update(value)
+ elif isinstance(value, list):
+ setupcfg["options"][key] = ", ".join(value)
+ elif isinstance(value, dict):
+ str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items())
+ setupcfg["options"][key] = "\n" + str_value
+ else:
+ setupcfg["options"][key] = str(value)
+ with open(root / "setup.cfg", "w") as f:
+ setupcfg.write(f)
+ print("~~~~~ setup.cfg ~~~~~")
+ print((root / "setup.cfg").read_text())
+
+
+def _run_build(path, *flags):
+ cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
+ return run(cmd, env={'DISTUTILS_DEBUG': ''})
+
+
+def _get_dist(dist_path, attrs):
+ root = "/".join(os.path.split(dist_path)) # POSIX-style
+
+ script = dist_path / 'setup.py'
+ if script.exists():
+ with _Path(dist_path):
+ dist = distutils.core.run_setup("setup.py", {}, stop_after="init")
+ else:
+ dist = Distribution(attrs)
+
+ dist.src_root = root
+ dist.script_name = "setup.py"
+ with _Path(dist_path):
+ dist.parse_config_files()
+
+ dist.set_defaults()
+ return dist
+
+
+def _run_sdist_programatically(dist_path, attrs):
+ dist = _get_dist(dist_path, attrs)
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+ assert cmd.distribution.packages or cmd.distribution.py_modules
+
+ with quiet(), _Path(dist_path):
+ cmd.run()
+
+ return dist, cmd
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index c52072ac..0dd60342 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -5,12 +5,10 @@ import os
import sys
import subprocess
import platform
-import pathlib
from setuptools.command import test
import pytest
-import pip_run.launch
from setuptools.command.develop import develop
from setuptools.dist import Distribution
@@ -165,45 +163,3 @@ class TestNamespaces:
]
with test.test.paths_on_pythonpath([str(target)]):
subprocess.check_call(pkg_resources_imp)
-
- @pytest.mark.xfail(
- platform.python_implementation() == 'PyPy',
- reason="Workaround fails on PyPy (why?)",
- )
- def test_editable_prefix(self, tmp_path, sample_project):
- """
- Editable install to a prefix should be discoverable.
- """
- prefix = tmp_path / 'prefix'
-
- # figure out where pip will likely install the package
- site_packages = prefix / next(
- pathlib.Path(path).relative_to(sys.prefix)
- for path in sys.path
- if 'site-packages' in path and path.startswith(sys.prefix)
- )
- site_packages.mkdir(parents=True)
-
- # install workaround
- pip_run.launch.inject_sitecustomize(str(site_packages))
-
- env = dict(os.environ, PYTHONPATH=str(site_packages))
- cmd = [
- sys.executable,
- '-m',
- 'pip',
- 'install',
- '--editable',
- str(sample_project),
- '--prefix',
- str(prefix),
- '--no-build-isolation',
- ]
- subprocess.check_call(cmd, env=env)
-
- # now run 'sample' with the prefix on the PYTHONPATH
- bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
- exe = prefix / bin / 'sample'
- if sys.version_info < (3, 8) and platform.system() == 'Windows':
- exe = str(exe)
- subprocess.check_call([exe], env=env)
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 4980f2c3..e7d2f5ca 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -2,6 +2,7 @@ import io
import collections
import re
import functools
+import os
import urllib.request
import urllib.parse
from distutils.errors import DistutilsSetupError
@@ -18,6 +19,7 @@ from setuptools import Distribution
from .textwrap import DALS
from .test_easy_install import make_nspkg_sdist
+from .test_find_packages import ensure_files
import pytest
@@ -69,16 +71,19 @@ def test_dist__get_unpatched_deprecated():
pytest.warns(DistDeprecationWarning, _get_unpatched, [""])
+EXAMPLE_BASE_INFO = dict(
+ name="package",
+ version="0.0.1",
+ author="Foo Bar",
+ author_email="foo@bar.net",
+ long_description="Long\ndescription",
+ description="Short description",
+ keywords=["one", "two"],
+)
+
+
def __read_test_cases():
- base = dict(
- name="package",
- version="0.0.1",
- author="Foo Bar",
- author_email="foo@bar.net",
- long_description="Long\ndescription",
- description="Short description",
- keywords=["one", "two"],
- )
+ base = EXAMPLE_BASE_INFO
params = functools.partial(dict, base)
@@ -379,3 +384,126 @@ def test_rfc822_unescape(content, result):
def test_metadata_name():
with pytest.raises(DistutilsSetupError, match='missing.*name'):
Distribution()._validate_metadata()
+
+
+@pytest.mark.parametrize(
+ "dist_name, py_module",
+ [
+ ("my.pkg", "my_pkg"),
+ ("my-pkg", "my_pkg"),
+ ("my_pkg", "my_pkg"),
+ ("pkg", "pkg"),
+ ]
+)
+def test_dist_default_py_modules(tmp_path, dist_name, py_module):
+ (tmp_path / f"{py_module}.py").touch()
+
+ (tmp_path / "setup.py").touch()
+ (tmp_path / "noxfile.py").touch()
+ # ^-- make sure common tool files are ignored
+
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "name": dist_name,
+ "src_root": str(tmp_path)
+ }
+ # Find `py_modules` corresponding to dist_name if not given
+ dist = Distribution(attrs)
+ dist.set_defaults()
+ assert dist.py_modules == [py_module]
+ # When `py_modules` is given, don't do anything
+ dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
+ dist.set_defaults()
+ assert dist.py_modules == ["explicity_py_module"]
+ # When `packages` is given, don't do anything
+ dist = Distribution({**attrs, "packages": ["explicity_package"]})
+ dist.set_defaults()
+ assert not dist.py_modules
+
+
+@pytest.mark.parametrize(
+ "dist_name, package_dir, package_files, packages",
+ [
+ ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
+ (
+ "my_pkg",
+ None,
+ ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
+ ["my_pkg", "my_pkg2"]
+ ),
+ (
+ "my_pkg",
+ {"pkg": "lib", "pkg2": "lib2"},
+ ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+ ["pkg", "pkg.nested", "pkg2"]
+ ),
+ ]
+)
+def test_dist_default_packages(
+ tmp_path, dist_name, package_dir, package_files, packages
+):
+ ensure_files(tmp_path, package_files)
+
+ (tmp_path / "setup.py").touch()
+ (tmp_path / "noxfile.py").touch()
+ # ^-- should not be included by default
+
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "name": dist_name,
+ "src_root": str(tmp_path),
+ "package_dir": package_dir
+ }
+ # Find `packages` either corresponding to dist_name or inside src
+ dist = Distribution(attrs)
+ dist.set_defaults()
+ assert not dist.py_modules
+ assert set(dist.packages) == set(packages)
+ # When `py_modules` is given, don't do anything
+ dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
+ dist.set_defaults()
+ assert not dist.packages
+ assert set(dist.py_modules) == {"explicit_py_module"}
+ # When `packages` is given, don't do anything
+ dist = Distribution({**attrs, "packages": ["explicit_package"]})
+ dist.set_defaults()
+ assert not dist.py_modules
+ assert set(dist.packages) == {"explicit_package"}
+
+
+@pytest.mark.parametrize(
+ "dist_name, package_dir, package_files",
+ [
+ ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
+ ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
+ ("my_pkg", None, ["my_pkg.py"]),
+ ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
+ ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
+ (
+ "my_pkg",
+ {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
+ ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+ ),
+ # Should not try to guess a name from multiple py_modules/packages
+ ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
+ ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
+ ]
+)
+def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
+ """Make sure dist.name is discovered from packages/py_modules"""
+ ensure_files(tmp_path, package_files)
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "src_root": "/".join(os.path.split(tmp_path)), # POSIX-style
+ "package_dir": package_dir
+ }
+ del attrs["name"]
+
+ dist = Distribution(attrs)
+ dist.set_defaults()
+ assert dist.py_modules or dist.packages
+ assert dist.get_name() == dist_name
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 29fbd09d..350e6429 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -1,12 +1,22 @@
"""Test .dist-info style distributions.
"""
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+from functools import partial
import pytest
import pkg_resources
+from setuptools.archive_util import unpack_archive
from .textwrap import DALS
+read = partial(pathlib.Path.read_text, encoding="utf-8")
+
+
class TestDistInfo:
metadata_base = DALS("""
@@ -72,3 +82,114 @@ class TestDistInfo:
pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
]
assert d.extras == ['baz']
+
+ def test_invalid_version(self, tmp_path):
+ config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
+ (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+ msg = re.compile("invalid version", re.M | re.I)
+ output = run_command("dist_info", cwd=tmp_path)
+ assert msg.search(output)
+ dist_info = next(tmp_path.glob("*.dist-info"))
+ assert dist_info.name.startswith("proj-42")
+
+ def test_tag_arguments(self, tmp_path):
+ config = """
+ [metadata]
+ name=proj
+ version=42
+ [egg_info]
+ tag_date=1
+ tag_build=.post
+ """
+ (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+
+ print(run_command("dist_info", "--no-date", cwd=tmp_path))
+ dist_info = next(tmp_path.glob("*.dist-info"))
+ assert dist_info.name.startswith("proj-42")
+ shutil.rmtree(dist_info)
+
+ print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
+ dist_info = next(tmp_path.glob("*.dist-info"))
+ assert dist_info.name.startswith("proj-42a")
+
+ @pytest.mark.parametrize("keep_egg_info", (False, True))
+ def test_output_dir(self, tmp_path, keep_egg_info):
+ config = "[metadata]\nname=proj\nversion=42\n"
+ (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
+ out = (tmp_path / "__out")
+ out.mkdir()
+ opts = ["--keep-egg-info"] if keep_egg_info else []
+ run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
+ assert len(list(out.glob("*.dist-info"))) == 1
+ assert len(list(tmp_path.glob("*.dist-info"))) == 0
+ expected_egg_info = 1 if keep_egg_info else 0
+ assert len(list(out.glob("*.egg-info"))) == expected_egg_info
+ assert len(list(tmp_path.glob("*.egg-info"))) == 0
+ assert len(list(out.glob("*.__bkp__"))) == 0
+ assert len(list(tmp_path.glob("*.__bkp__"))) == 0
+
+
+class TestWheelCompatibility:
+ """Make sure the .dist-info directory produced with the ``dist_info`` command
+ is the same as the one produced by ``bdist_wheel``.
+ """
+ SETUPCFG = DALS("""
+ [metadata]
+ name = {name}
+ version = {version}
+
+ [options]
+ install_requires = foo>=12; sys_platform != "linux"
+
+ [options.extras_require]
+ test = pytest
+
+ [options.entry_points]
+ console_scripts =
+ executable-name = my_package.module:function
+ discover =
+ myproj = my_package.other_module:function
+ """)
+
+ EGG_INFO_OPTS = [
+ # Related: #3088 #2872
+ ("", ""),
+ (".post", "[egg_info]\ntag_build = post\n"),
+ (".post", "[egg_info]\ntag_build = .post\n"),
+ (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
+ (".dev", "[egg_info]\ntag_build = .dev\n"),
+ (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
+ ("a1", "[egg_info]\ntag_build = .a1\n"),
+ ("+local", "[egg_info]\ntag_build = +local\n"),
+ ]
+
+ @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
+ @pytest.mark.parametrize("version", ["0.42.13"])
+ @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS)
+ def test_dist_info_is_the_same_as_in_wheel(
+ self, name, version, tmp_path, suffix, cfg
+ ):
+ config = self.SETUPCFG.format(name=name, version=version) + cfg
+
+ for i in "dir_wheel", "dir_dist":
+ (tmp_path / i).mkdir()
+ (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")
+
+ run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
+ wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
+ unpack_archive(wheel, tmp_path / "unpack")
+ wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))
+
+ run_command("dist_info", cwd=tmp_path / "dir_dist")
+ dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
+
+ assert dist_info.name == wheel_dist_info.name
+ assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
+ for file in "METADATA", "entry_points.txt":
+ assert read(dist_info / file) == read(wheel_dist_info / file)
+
+
+def run_command(*cmd, **kwargs):
+ opts = {"stderr": subprocess.STDOUT, "text": True, **kwargs}
+ cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *map(str, cmd)]
+ return subprocess.check_output(cmd, **opts)
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
index 366f2928..3f07e9a1 100644
--- a/setuptools/tests/test_distutils_adoption.py
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -49,6 +49,13 @@ def count_meta_path(venv, env=None):
return int(popen_text(venv.run)(cmd, env=win_sr(env)))
+skip_without_stdlib_distutils = pytest.mark.skipif(
+ sys.version_info >= (3, 12),
+ reason='stdlib distutils is removed from Python 3.12+',
+)
+
+
+@skip_without_stdlib_distutils
def test_distutils_stdlib(venv):
"""
Ensure stdlib distutils is used when appropriate.
@@ -93,3 +100,69 @@ def test_distutils_has_origin():
Distutils module spec should have an origin. #2990.
"""
assert __import__('distutils').__spec__.origin
+
+
+ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r"""
+# Depending on the importlib machinery and _distutils_hack, some imports are
+# duplicated resulting in different module objects being loaded, which prevents
+# patches as shown in #3042.
+# This script provides a way of verifying if this duplication is happening.
+
+from distutils import cmd
+import distutils.command.sdist as sdist
+
+# import last to prevent caching
+from distutils import {imported_module}
+
+for mod in (cmd, sdist):
+ assert mod.{imported_module} == {imported_module}, (
+ f"\n{{mod.dir_util}}\n!=\n{{{imported_module}}}"
+ )
+
+print("success")
+"""
+
+
+@pytest.mark.parametrize(
+ "distutils_version, imported_module",
+ [
+ pytest.param("stdlib", "dir_util", marks=skip_without_stdlib_distutils),
+ pytest.param("stdlib", "file_util", marks=skip_without_stdlib_distutils),
+ pytest.param("stdlib", "archive_util", marks=skip_without_stdlib_distutils),
+ ("local", "dir_util"),
+ ("local", "file_util"),
+ ("local", "archive_util"),
+ ]
+)
+def test_modules_are_not_duplicated_on_import(
+ distutils_version, imported_module, tmpdir_cwd, venv
+):
+ env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
+ script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module)
+ cmd = ['python', '-c', script]
+ output = popen_text(venv.run)(cmd, env=win_sr(env)).strip()
+ assert output == "success"
+
+
+ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r"""
+import types
+import distutils.dist as dist
+from distutils import log
+if isinstance(dist.log, types.ModuleType):
+ assert dist.log == log, f"\n{dist.log}\n!=\n{log}"
+print("success")
+"""
+
+
+@pytest.mark.parametrize(
+ "distutils_version",
+ [
+ "local",
+ pytest.param("stdlib", marks=skip_without_stdlib_distutils),
+ ]
+)
+def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv):
+ env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
+ cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED]
+ output = popen_text(venv.run)(cmd, env=win_sr(env)).strip()
+ assert output == "success"
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 5831b267..bca86066 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -12,13 +12,14 @@ import itertools
import distutils.errors
import io
import zipfile
-import mock
import time
import re
import subprocess
import pathlib
import warnings
from collections import namedtuple
+from pathlib import Path
+from unittest import mock
import pytest
from jaraco import path
@@ -448,6 +449,68 @@ class TestDistutilsPackage:
run_setup('setup.py', ['bdist_egg'])
+class TestInstallRequires:
+ def test_setup_install_includes_dependencies(self, tmp_path, mock_index):
+ """
+ When ``python setup.py install`` is called directly, it will use easy_install
+ to fetch dependencies.
+ """
+ # TODO: Remove these tests once `setup.py install` is completely removed
+ project_root = tmp_path / "project"
+ project_root.mkdir(exist_ok=True)
+ install_root = tmp_path / "install"
+ install_root.mkdir(exist_ok=True)
+
+ self.create_project(project_root)
+ cmd = [
+ sys.executable,
+ '-c', '__import__("setuptools").setup()',
+ 'install',
+ '--install-base', str(install_root),
+ '--install-lib', str(install_root),
+ '--install-headers', str(install_root),
+ '--install-scripts', str(install_root),
+ '--install-data', str(install_root),
+ '--install-purelib', str(install_root),
+ '--install-platlib', str(install_root),
+ ]
+ env = {**os.environ, "__EASYINSTALL_INDEX": mock_index.url}
+ cp = subprocess.run(
+ cmd,
+ cwd=str(project_root),
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ text=True,
+ )
+ assert cp.returncode != 0
+ try:
+ assert '/does-not-exist/' in {r.path for r in mock_index.requests}
+ assert next(
+ line
+ for line in cp.stdout.splitlines()
+ if "not find suitable distribution for" in line
+ and "does-not-exist" in line
+ )
+ except Exception:
+ if "failed to get random numbers" in cp.stdout:
+ pytest.xfail(f"{sys.platform} failure - {cp.stdout}")
+ raise
+
+ def create_project(self, root):
+ config = """
+ [metadata]
+ name = project
+ version = 42
+
+ [options]
+ install_requires = does-not-exist
+ py_modules = mod
+ """
+ (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8")
+ (root / 'mod.py').touch()
+
+
class TestSetupRequires:
def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
@@ -466,7 +529,7 @@ class TestSetupRequires:
with contexts.environment(PYTHONPATH=temp_install_dir):
cmd = [
sys.executable,
- '-m', 'setup',
+ '-c', '__import__("setuptools").setup()',
'easy_install',
'--index-url', mock_index.url,
'--exclude-scripts',
@@ -783,9 +846,11 @@ class TestSetupRequires:
def test_setup_requires_with_transitive_extra_dependency(
self, monkeypatch):
- # Use case: installing a package with a build dependency on
- # an already installed `dep[extra]`, which in turn depends
- # on `extra_dep` (whose is not already installed).
+ '''
+ Use case: installing a package with a build dependency on
+ an already installed `dep[extra]`, which in turn depends
+ on `extra_dep` (which is not already installed).
+ '''
with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir:
# Create source distribution for `extra_dep`.
@@ -827,6 +892,75 @@ class TestSetupRequires:
monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
run_setup(test_setup_py, [str('--version')])
+ def test_setup_requires_with_distutils_command_dep(self, monkeypatch):
+ '''
+ Use case: ensure build requirements' extras
+ are properly installed and activated.
+ '''
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ # Create source distribution for `extra_dep`.
+ make_sdist(os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), [
+ ('setup.py',
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name='extra_dep',
+ version='1.0',
+ py_modules=['extra_dep'],
+ )
+ """)),
+ ('setup.cfg', ''),
+ ('extra_dep.py', ''),
+ ])
+ # Create source tree for `epdep`.
+ dep_pkg = os.path.join(temp_dir, 'epdep')
+ os.mkdir(dep_pkg)
+ path.build({
+ 'setup.py':
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name='dep', version='2.0',
+ py_modules=['epcmd'],
+ extras_require={'extra': ['extra_dep']},
+ entry_points='''
+ [distutils.commands]
+ epcmd = epcmd:epcmd [extra]
+ ''',
+ )
+ """),
+ 'setup.cfg': '',
+ 'epcmd.py': DALS("""
+ from distutils.command.build_py import build_py
+
+ import extra_dep
+
+ class epcmd(build_py):
+ pass
+ """),
+ }, prefix=dep_pkg)
+ # "Install" dep.
+ run_setup(
+ os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
+ working_set.add_entry(dep_pkg)
+ # Create source tree for test package.
+ test_pkg = os.path.join(temp_dir, 'test_pkg')
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ os.mkdir(test_pkg)
+ with open(test_setup_py, 'w') as fp:
+ fp.write(DALS(
+ '''
+ from setuptools import installer, setup
+ setup(setup_requires='dep[extra]')
+ '''))
+ # Check...
+ monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir))
+ monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ run_setup(test_setup_py, ['epcmd'])
+
def make_trivial_sdist(dist_path, distname, version):
"""
@@ -1109,3 +1243,52 @@ def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
assert cmd.config_vars['py_version'] == '3.10.1'
assert cmd.config_vars['py_version_short'] == '3.10'
assert cmd.config_vars['py_version_nodot'] == '310'
+
+
+def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path):
+ ''' `setup.py develop` should honor `--user` even under build isolation'''
+
+ # == Arrange ==
+ # Pretend that build isolation was enabled
+ # e.g pip sets the environment variable PYTHONNOUSERSITE=1
+ monkeypatch.setattr('site.ENABLE_USER_SITE', False)
+
+ # Patching $HOME for 2 reasons:
+ # 1. setuptools/command/easy_install.py:create_home_path
+ # tries creating directories in $HOME
+ # given `self.config_vars['DESTDIRS'] = "/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload"`` # noqa: E501
+ # it will `makedirs("/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")`` # noqa: E501
+ # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE
+ # To point inside our new home
+ monkeypatch.setenv('HOME', str(tmp_path / '.home'))
+ monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home'))
+ monkeypatch.setenv('APPDATA', str(tmp_path / '.home'))
+ monkeypatch.setattr('site.USER_BASE', None)
+ monkeypatch.setattr('site.USER_SITE', None)
+ user_site = Path(site.getusersitepackages())
+ user_site.mkdir(parents=True, exist_ok=True)
+
+ sys_prefix = (tmp_path / '.sys_prefix')
+ sys_prefix.mkdir(parents=True, exist_ok=True)
+ monkeypatch.setattr('sys.prefix', str(sys_prefix))
+
+ setup_script = (
+ "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n"
+ )
+ (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8")
+
+ # == Sanity check ==
+ assert list(sys_prefix.glob("*")) == []
+ assert list(user_site.glob("*")) == []
+
+ # == Act ==
+ run_setup('setup.py', ['develop', '--user'])
+
+ # == Assert ==
+ # Should not install to sys.prefix
+ assert list(sys_prefix.glob("*")) == []
+ # Should install to user site
+ installed = {f.name for f in user_site.glob("*")}
+ # sometimes easy-install.pth is created and sometimes not
+ installed = installed - {"easy-install.pth"}
+ assert installed == {'aproj.egg-link'}
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
new file mode 100644
index 00000000..4406eda5
--- /dev/null
+++ b/setuptools/tests/test_editable_install.py
@@ -0,0 +1,992 @@
+import os
+import stat
+import sys
+import subprocess
+import platform
+from copy import deepcopy
+from importlib import import_module
+from importlib.machinery import EXTENSION_SUFFIXES
+from pathlib import Path
+from textwrap import dedent
+from unittest.mock import Mock
+from uuid import uuid4
+
+import jaraco.envs
+import jaraco.path
+import pip_run.launch
+import pytest
+from path import Path as _Path
+
+from . import contexts, namespaces
+
+from setuptools._importlib import resources as importlib_resources
+from setuptools.command.editable_wheel import (
+ _LinkTree,
+ _find_virtual_namespaces,
+ _find_namespaces,
+ _find_package_roots,
+ _finder_template,
+ editable_wheel,
+)
+from setuptools.dist import Distribution
+from setuptools.extension import Extension
+
+
+@pytest.fixture(params=["strict", "lenient"])
+def editable_opts(request):
+ if request.param == "strict":
+ return ["--config-settings", "editable-mode=strict"]
+ return []
+
+
+EXAMPLE = {
+ 'pyproject.toml': dedent("""\
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "mypkg"
+ version = "3.14159"
+ license = {text = "MIT"}
+ description = "This is a Python package"
+ dynamic = ["readme"]
+ classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers"
+ ]
+ urls = {Homepage = "http://github.com"}
+ dependencies = ['importlib-metadata; python_version<"3.8"']
+
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ packages = {find = {where = ["src"]}}
+ license-files = ["LICENSE*"]
+
+ [tool.setuptools.dynamic]
+ readme = {file = "README.rst"}
+
+ [tool.distutils.egg_info]
+ tag-build = ".post0"
+ """),
+ "MANIFEST.in": dedent("""\
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ prune dist
+ prune build
+ """).strip(),
+ "README.rst": "This is a ``README``",
+ "LICENSE.txt": "---- placeholder MIT license ----",
+ "src": {
+ "mypkg": {
+ "__init__.py": dedent("""\
+ import sys
+
+ if sys.version_info[:2] >= (3, 8):
+ from importlib.metadata import PackageNotFoundError, version
+ else:
+ from importlib_metadata import PackageNotFoundError, version
+
+ try:
+ __version__ = version(__name__)
+ except PackageNotFoundError:
+ __version__ = "unknown"
+ """),
+ "__main__.py": dedent("""\
+ from importlib.resources import read_text
+ from . import __version__, __name__ as parent
+ from .mod import x
+
+ data = read_text(parent, "data.txt")
+ print(__version__, data, x)
+ """),
+ "mod.py": "x = ''",
+ "data.txt": "Hello World",
+ }
+ }
+}
+
+
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
+
+@pytest.mark.parametrize(
+ "files",
+ [
+ {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB}, # type: ignore
+ EXAMPLE, # No setup.py script
+ ]
+)
+def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
+ project = tmp_path / "mypkg"
+ project.mkdir()
+ jaraco.path.build(files, prefix=project)
+
+ cmd = [venv.exe(), "-m", "pip", "install",
+ "--no-build-isolation", # required to force current version of setuptools
+ "-e", str(project), *editable_opts]
+ print(str(subprocess.check_output(cmd), "utf-8"))
+
+ cmd = [venv.exe(), "-m", "mypkg"]
+ assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World"
+
+ (project / "src/mypkg/data.txt").write_text("foobar")
+ (project / "src/mypkg/mod.py").write_text("x = 42")
+ assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42"
+
+
+def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
+ files = {
+ "mypkg": {
+ "pyproject.toml": dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "mypkg"
+ version = "3.14159"
+
+ [tool.setuptools]
+ packages = ["pkg"]
+ py-modules = ["mod"]
+ """),
+ "pkg": {"__init__.py": "a = 4"},
+ "mod.py": "b = 2",
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ project = tmp_path / "mypkg"
+
+ cmd = [venv.exe(), "-m", "pip", "install",
+ "--no-build-isolation", # required to force current version of setuptools
+ "-e", str(project), *editable_opts]
+ print(str(subprocess.check_output(cmd), "utf-8"))
+ cmd = [venv.exe(), "-c", "import pkg, mod; print(pkg.a, mod.b)"]
+ assert subprocess.check_output(cmd).strip() == b"4 2"
+
+
+def test_editable_with_single_module(tmp_path, venv, editable_opts):
+ files = {
+ "mypkg": {
+ "pyproject.toml": dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "mod"
+ version = "3.14159"
+
+ [tool.setuptools]
+ py-modules = ["mod"]
+ """),
+ "mod.py": "b = 2",
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ project = tmp_path / "mypkg"
+
+ cmd = [venv.exe(), "-m", "pip", "install",
+ "--no-build-isolation", # required to force current version of setuptools
+ "-e", str(project), *editable_opts]
+ print(str(subprocess.check_output(cmd), "utf-8"))
+ cmd = [venv.exe(), "-c", "import mod; print(mod.b)"]
+ assert subprocess.check_output(cmd).strip() == b"2"
+
+
+class TestLegacyNamespaces:
+ """Ported from test_develop"""
+
+ def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
+ """
+ Installing two packages sharing the same namespace, one installed
+ naturally using pip or `--single-version-externally-managed`
+ and the other installed in editable mode should leave the namespace
+ intact and both packages reachable by import.
+ """
+ build_system = """\
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+ """
+ pkg_A = namespaces.build_namespace_package(tmp_path, 'myns.pkgA')
+ pkg_B = namespaces.build_namespace_package(tmp_path, 'myns.pkgB')
+ (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8")
+ (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8")
+ # use pip to install to the target directory
+ opts = editable_opts[:]
+ opts.append("--no-build-isolation") # force current version of setuptools
+ venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+ venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+ venv.run(["python", "-c", "import myns.pkgA; import myns.pkgB"])
+ # additionally ensure that pkg_resources import works
+ venv.run(["python", "-c", "import pkg_resources"])
+
+
+class TestPep420Namespaces:
+ def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
+ """
+ Installing two packages sharing the same namespace, one installed
+ normally using pip and the other installed in editable mode
+ should allow importing both packages.
+ """
+ pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA')
+ pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
+ # use pip to install to the target directory
+ opts = editable_opts[:]
+ opts.append("--no-build-isolation") # force current version of setuptools
+ venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+ venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+ venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"])
+
+ def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts):
+ """Currently users can create a namespace by tweaking `package_dir`"""
+ files = {
+ "pkgA": {
+ "pyproject.toml": dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "pkgA"
+ version = "3.14159"
+
+ [tool.setuptools]
+ package-dir = {"myns.n.pkgA" = "src"}
+ """),
+ "src": {"__init__.py": "a = 1"},
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ pkg_A = tmp_path / "pkgA"
+ pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
+ pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC')
+
+ # use pip to install to the target directory
+ opts = editable_opts[:]
+ opts.append("--no-build-isolation") # force current version of setuptools
+ venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+ venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+ venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts])
+ venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"])
+
+ def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
+ """Sometimes users might specify an ``include`` pattern that ignores parent
+ packages. In a normal installation this would ignore all modules inside the
+ parent packages, and make them namespaces (reported in issue #3504),
+ so the editable mode should preserve this behaviour.
+ """
+ files = {
+ "pkgA": {
+ "pyproject.toml": dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "pkgA"
+ version = "3.14159"
+
+ [tool.setuptools]
+ packages.find.include = ["mypkg.*"]
+ """),
+ "mypkg": {
+ "__init__.py": "",
+ "other.py": "b = 1",
+ "n": {
+ "__init__.py": "",
+ "pkgA.py": "a = 1",
+ },
+ },
+ "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ pkg_A = tmp_path / "pkgA"
+
+ # use pip to install to the target directory
+ opts = ["--no-build-isolation"] # force current version of setuptools
+ venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
+ out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
+ assert str(out, "utf-8").strip() == "1"
+ cmd = """\
+ try:
+ import mypkg.other
+ except ImportError:
+ print("mypkg.other not defined")
+ """
+ out = venv.run(["python", "-c", dedent(cmd)])
+ assert "mypkg.other not defined" in str(out, "utf-8")
+
+
+# Moved here from test_develop:
+@pytest.mark.xfail(
+ platform.python_implementation() == 'PyPy',
+ reason="Workaround fails on PyPy (why?)",
+)
+def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
+ """
+ Editable install to a prefix should be discoverable.
+ """
+ prefix = tmp_path / 'prefix'
+
+ # figure out where pip will likely install the package
+ site_packages = prefix / next(
+ Path(path).relative_to(sys.prefix)
+ for path in sys.path
+ if 'site-packages' in path and path.startswith(sys.prefix)
+ )
+ site_packages.mkdir(parents=True)
+
+ # install workaround
+ pip_run.launch.inject_sitecustomize(site_packages)
+
+ env = dict(os.environ, PYTHONPATH=str(site_packages))
+ cmd = [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'install',
+ '--editable',
+ str(sample_project),
+ '--prefix',
+ str(prefix),
+ '--no-build-isolation',
+ *editable_opts,
+ ]
+ subprocess.check_call(cmd, env=env)
+
+ # now run 'sample' with the prefix on the PYTHONPATH
+ bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
+ exe = prefix / bin / 'sample'
+ if sys.version_info < (3, 8) and platform.system() == 'Windows':
+ exe = str(exe)
+ subprocess.check_call([exe], env=env)
+
+
+class TestFinderTemplate:
+ """This test focus in getting a particular implementation detail right.
+ If at some point in time the implementation is changed for something different,
+ this test can be modified or even excluded.
+ """
+ def install_finder(self, finder):
+ loc = {}
+ exec(finder, loc, loc)
+ loc["install"]()
+
+ def test_packages(self, tmp_path):
+ files = {
+ "src1": {
+ "pkg1": {
+ "__init__.py": "",
+ "subpkg": {"mod1.py": "a = 42"},
+ },
+ },
+ "src2": {"mod2.py": "a = 43"},
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+
+ mapping = {
+ "pkg1": str(tmp_path / "src1/pkg1"),
+ "mod2": str(tmp_path / "src2/mod2")
+ }
+ template = _finder_template(str(uuid4()), mapping, {})
+
+ with contexts.save_paths(), contexts.save_sys_modules():
+ for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"):
+ sys.modules.pop(mod, None)
+
+ self.install_finder(template)
+ mod1 = import_module("pkg1.subpkg.mod1")
+ mod2 = import_module("mod2")
+ subpkg = import_module("pkg1.subpkg")
+
+ assert mod1.a == 42
+ assert mod2.a == 43
+ expected = str((tmp_path / "src1/pkg1/subpkg").resolve())
+ assert_path(subpkg, expected)
+
+ def test_namespace(self, tmp_path):
+ files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}}
+ jaraco.path.build(files, prefix=tmp_path)
+
+ mapping = {"ns.othername": str(tmp_path / "pkg")}
+ namespaces = {"ns": []}
+
+ template = _finder_template(str(uuid4()), mapping, namespaces)
+ with contexts.save_paths(), contexts.save_sys_modules():
+ for mod in ("ns", "ns.othername"):
+ sys.modules.pop(mod, None)
+
+ self.install_finder(template)
+ pkg = import_module("ns.othername")
+ text = importlib_resources.files(pkg) / "text.txt"
+
+ expected = str((tmp_path / "pkg").resolve())
+ assert_path(pkg, expected)
+ assert pkg.a == 13
+
+ # Make sure resources can also be found
+ assert text.read_text(encoding="utf-8") == "abc"
+
+ def test_combine_namespaces(self, tmp_path):
+ files = {
+ "src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}},
+ "src2": {"ns": {"mod2.py": "b = 37"}},
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+
+ mapping = {
+ "ns.pkgA": str(tmp_path / "src1/ns/pkg1"),
+ "ns": str(tmp_path / "src2/ns"),
+ }
+ namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]}
+ template = _finder_template(str(uuid4()), mapping, namespaces_)
+
+ with contexts.save_paths(), contexts.save_sys_modules():
+ for mod in ("ns", "ns.pkgA", "ns.mod2"):
+ sys.modules.pop(mod, None)
+
+ self.install_finder(template)
+ pkgA = import_module("ns.pkgA")
+ mod2 = import_module("ns.mod2")
+
+ expected = str((tmp_path / "src1/ns/pkg1").resolve())
+ assert_path(pkgA, expected)
+ assert pkgA.a == 13
+ assert mod2.b == 37
+
+ def test_dynamic_path_computation(self, tmp_path):
+ # Follows the example in PEP 420
+ files = {
+ "project1": {"parent": {"child": {"one.py": "x = 1"}}},
+ "project2": {"parent": {"child": {"two.py": "x = 2"}}},
+ "project3": {"parent": {"child": {"three.py": "x = 3"}}},
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ mapping = {}
+ namespaces_ = {"parent": [str(tmp_path / "project1/parent")]}
+ template = _finder_template(str(uuid4()), mapping, namespaces_)
+
+ mods = (f"parent.child.{name}" for name in ("one", "two", "three"))
+ with contexts.save_paths(), contexts.save_sys_modules():
+ for mod in ("parent", "parent.child", "parent.child", *mods):
+ sys.modules.pop(mod, None)
+
+ self.install_finder(template)
+
+ one = import_module("parent.child.one")
+ assert one.x == 1
+
+ with pytest.raises(ImportError):
+ import_module("parent.child.two")
+
+ sys.path.append(str(tmp_path / "project2"))
+ two = import_module("parent.child.two")
+ assert two.x == 2
+
+ with pytest.raises(ImportError):
+ import_module("parent.child.three")
+
+ sys.path.append(str(tmp_path / "project3"))
+ three = import_module("parent.child.three")
+ assert three.x == 3
+
+ def test_no_recursion(self, tmp_path):
+ # See issue #3550
+ files = {
+ "pkg": {
+ "__init__.py": "from . import pkg",
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+
+ mapping = {
+ "pkg": str(tmp_path / "pkg"),
+ }
+ template = _finder_template(str(uuid4()), mapping, {})
+
+ with contexts.save_paths(), contexts.save_sys_modules():
+ sys.modules.pop("pkg", None)
+
+ self.install_finder(template)
+ with pytest.raises(ImportError, match="pkg"):
+ import_module("pkg")
+
+ def test_similar_name(self, tmp_path):
+ files = {
+ "foo": {
+ "__init__.py": "",
+ "bar": {
+ "__init__.py": "",
+ }
+ },
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+
+ mapping = {
+ "foo": str(tmp_path / "foo"),
+ }
+ template = _finder_template(str(uuid4()), mapping, {})
+
+ with contexts.save_paths(), contexts.save_sys_modules():
+ sys.modules.pop("foo", None)
+ sys.modules.pop("foo.bar", None)
+
+ self.install_finder(template)
+ with pytest.raises(ImportError, match="foobar"):
+ import_module("foobar")
+
+
+def test_pkg_roots(tmp_path):
+ """This test focus in getting a particular implementation detail right.
+ If at some point in time the implementation is changed for something different,
+ this test can be modified or even excluded.
+ """
+ files = {
+ "a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"},
+ "d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}},
+ "f": {"g": {"h": {"__init__.py": "fgh = 1"}}},
+ "other": {"__init__.py": "abc = 1"},
+ "another": {"__init__.py": "abcxyz = 1"},
+ "yet_another": {"__init__.py": "mnopq = 1"},
+ }
+ jaraco.path.build(files, prefix=tmp_path)
+ package_dir = {
+ "a.b.c": "other",
+ "a.b.c.x.y.z": "another",
+ "m.n.o.p.q": "yet_another"
+ }
+ packages = [
+ "a",
+ "a.b",
+ "a.b.c",
+ "a.b.c.x.y",
+ "a.b.c.x.y.z",
+ "d",
+ "d.e",
+ "f",
+ "f.g",
+ "f.g.h",
+ "m.n.o.p.q",
+ ]
+ roots = _find_package_roots(packages, package_dir, tmp_path)
+ assert roots == {
+ "a": str(tmp_path / "a"),
+ "a.b.c": str(tmp_path / "other"),
+ "a.b.c.x.y.z": str(tmp_path / "another"),
+ "d": str(tmp_path / "d"),
+ "f": str(tmp_path / "f"),
+ "m.n.o.p.q": str(tmp_path / "yet_another"),
+ }
+
+ ns = set(dict(_find_namespaces(packages, roots)))
+ assert ns == {"f", "f.g"}
+
+ ns = set(_find_virtual_namespaces(roots))
+ assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"}
+
+
+class TestOverallBehaviour:
+ PYPROJECT = """\
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "mypkg"
+ version = "3.14159"
+ """
+
+ FLAT_LAYOUT = {
+ "pyproject.toml": dedent(PYPROJECT),
+ "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+ "otherfile.py": "",
+ "mypkg": {
+ "__init__.py": "",
+ "mod1.py": "var = 42",
+ "subpackage": {
+ "__init__.py": "",
+ "mod2.py": "var = 13",
+ "resource_file.txt": "resource 39",
+ },
+ },
+ }
+
+ EXAMPLES = {
+ "flat-layout": FLAT_LAYOUT,
+ "src-layout": {
+ "pyproject.toml": dedent(PYPROJECT),
+ "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+ "otherfile.py": "",
+ "src": {"mypkg": FLAT_LAYOUT["mypkg"]},
+ },
+ "custom-layout": {
+ "pyproject.toml": dedent(PYPROJECT) + dedent("""\
+ [tool.setuptools]
+ packages = ["mypkg", "mypkg.subpackage"]
+
+ [tool.setuptools.package-dir]
+ "mypkg.subpackage" = "other"
+ """),
+ "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+ "otherfile.py": "",
+ "mypkg": {
+ "__init__.py": "",
+ "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"], # type: ignore
+ },
+ "other": FLAT_LAYOUT["mypkg"]["subpackage"], # type: ignore
+ },
+ "namespace": {
+ "pyproject.toml": dedent(PYPROJECT),
+ "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+ "otherfile.py": "",
+ "src": {
+ "mypkg": {
+ "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"], # type: ignore
+ "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"], # type: ignore
+ },
+ },
+ },
+ }
+
+ @pytest.mark.parametrize("layout", EXAMPLES.keys())
+ def test_editable_install(self, tmp_path, venv, layout, editable_opts):
+ project, _ = install_project(
+ "mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts
+ )
+
+ # Ensure stray files are not importable
+ cmd_import_error = """\
+ try:
+ import otherfile
+ except ImportError as ex:
+ print(ex)
+ """
+ out = venv.run(["python", "-c", dedent(cmd_import_error)])
+ assert b"No module named 'otherfile'" in out
+
+ # Ensure the modules are importable
+ cmd_get_vars = """\
+ import mypkg, mypkg.mod1, mypkg.subpackage.mod2
+ print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
+ """
+ out = venv.run(["python", "-c", dedent(cmd_get_vars)])
+ assert b"42 13" in out
+
+ # Ensure resources are reachable
+ cmd_get_resource = """\
+ import mypkg.subpackage
+ from setuptools._importlib import resources as importlib_resources
+ text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt"
+ print(text.read_text(encoding="utf-8"))
+ """
+ out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+ assert b"resource 39" in out
+
+ # Ensure files are editable
+ mod1 = next(project.glob("**/mod1.py"))
+ mod2 = next(project.glob("**/mod2.py"))
+ resource_file = next(project.glob("**/resource_file.txt"))
+
+ mod1.write_text("var = 17", encoding="utf-8")
+ mod2.write_text("var = 781", encoding="utf-8")
+ resource_file.write_text("resource 374", encoding="utf-8")
+
+ out = venv.run(["python", "-c", dedent(cmd_get_vars)])
+ assert b"42 13" not in out
+ assert b"17 781" in out
+
+ out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+ assert b"resource 39" not in out
+ assert b"resource 374" in out
+
+
+class TestLinkTree:
+ FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"])
+ FILES["pyproject.toml"] += dedent("""\
+ [tool.setuptools]
+ # Temporary workaround: both `include-package-data` and `package-data` configs
+ # can be removed after #3260 is fixed.
+ include-package-data = false
+ package-data = {"*" = ["*.txt"]}
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
+ exclude = ["*.subpackage*"]
+ """)
+ FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc"
+
+ def test_generated_tree(self, tmp_path):
+ jaraco.path.build(self.FILES, prefix=tmp_path)
+
+ with _Path(tmp_path):
+ name = "mypkg-3.14159"
+ dist = Distribution({"script_name": "%PEP 517%"})
+ dist.parse_config_files()
+
+ wheel = Mock()
+ aux = tmp_path / ".aux"
+ build = tmp_path / ".build"
+ aux.mkdir()
+ build.mkdir()
+
+ build_py = dist.get_command_obj("build_py")
+ build_py.editable_mode = True
+ build_py.build_lib = str(build)
+ build_py.ensure_finalized()
+ outputs = build_py.get_outputs()
+ output_mapping = build_py.get_output_mapping()
+
+ make_tree = _LinkTree(dist, name, aux, build)
+ make_tree(wheel, outputs, output_mapping)
+
+ mod1 = next(aux.glob("**/mod1.py"))
+ expected = tmp_path / "src/mypkg/mod1.py"
+ assert_link_to(mod1, expected)
+
+ assert next(aux.glob("**/subpackage"), None) is None
+ assert next(aux.glob("**/mod2.py"), None) is None
+ assert next(aux.glob("**/resource_file.txt"), None) is None
+
+ assert next(aux.glob("**/resource.not_in_manifest"), None) is None
+
+ def test_strict_install(self, tmp_path, venv):
+ opts = ["--config-settings", "editable-mode=strict"]
+ install_project("mypkg", venv, tmp_path, self.FILES, *opts)
+
+ out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+ assert b"42" in out
+
+ # Ensure packages excluded from distribution are not importable
+ cmd_import_error = """\
+ try:
+ from mypkg import subpackage
+ except ImportError as ex:
+ print(ex)
+ """
+ out = venv.run(["python", "-c", dedent(cmd_import_error)])
+ assert b"cannot import name 'subpackage'" in out
+
+ # Ensure resource files excluded from distribution are not reachable
+ cmd_get_resource = """\
+ import mypkg
+ from setuptools._importlib import resources as importlib_resources
+ try:
+ text = importlib_resources.files(mypkg) / "resource.not_in_manifest"
+ print(text.read_text(encoding="utf-8"))
+ except FileNotFoundError as ex:
+ print(ex)
+ """
+ out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+ assert b"No such file or directory" in out
+ assert b"resource.not_in_manifest" in out
+
+
+@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
+def test_compat_install(tmp_path, venv):
+ # TODO: Remove `compat` after Dec/2022.
+ opts = ["--config-settings", "editable-mode=compat"]
+ files = TestOverallBehaviour.EXAMPLES["custom-layout"]
+ install_project("mypkg", venv, tmp_path, files, *opts)
+
+ out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+ assert b"42" in out
+
+ expected_path = comparable_path(str(tmp_path))
+
+ # Compatible behaviour will make spurious modules and excluded
+ # files importable directly from the original path
+ for cmd in (
+ "import otherfile; print(otherfile)",
+ "import other; print(other)",
+ "import mypkg; print(mypkg)",
+ ):
+ out = comparable_path(str(venv.run(["python", "-c", cmd]), "utf-8"))
+ assert expected_path in out
+
+ # Compatible behaviour will not consider custom mappings
+ cmd = """\
+ try:
+            from mypkg import subpackage
+ except ImportError as ex:
+ print(ex)
+ """
+ out = str(venv.run(["python", "-c", dedent(cmd)]), "utf-8")
+ assert "cannot import name 'subpackage'" in out
+
+
+def test_pbr_integration(tmp_path, venv, editable_opts):
+ """Ensure editable installs work with pbr, issue #3500"""
+ files = {
+ "pyproject.toml": dedent("""\
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+ """),
+ "setup.py": dedent("""\
+ __import__('setuptools').setup(
+ pbr=True,
+ setup_requires=["pbr"],
+ )
+ """),
+ "setup.cfg": dedent("""\
+ [metadata]
+ name = mypkg
+
+ [files]
+ packages =
+ mypkg
+ """),
+ "mypkg": {
+ "__init__.py": "",
+ "hello.py": "print('Hello world!')",
+ },
+ "other": {"test.txt": "Another file in here."},
+ }
+ venv.run(["python", "-m", "pip", "install", "pbr"])
+
+ with contexts.environment(PBR_VERSION="0.42"):
+ install_project("mypkg", venv, tmp_path, files, *editable_opts)
+
+ out = venv.run(["python", "-c", "import mypkg.hello"])
+ assert b"Hello world!" in out
+
+
+class TestCustomBuildPy:
+ """
+ Issue #3501 indicates that some plugins/customizations might rely on:
+
+ 1. ``build_py`` not running
+ 2. ``build_py`` always copying files to ``build_lib``
+
+ During the transition period setuptools should prevent potential errors from
+ happening due to those assumptions.
+ """
+ # TODO: Remove tests after _run_build_steps is removed.
+
+ FILES = {
+ **TestOverallBehaviour.EXAMPLES["flat-layout"],
+ "setup.py": dedent("""\
+ import pathlib
+ from setuptools import setup
+ from setuptools.command.build_py import build_py as orig
+
+ class my_build_py(orig):
+ def run(self):
+ super().run()
+ raise ValueError("TEST_RAISE")
+
+ setup(cmdclass={"build_py": my_build_py})
+ """),
+ }
+
+ def test_safeguarded_from_errors(self, tmp_path, venv):
+ """Ensure that errors in custom build_py are reported as warnings"""
+ # Warnings should show up
+ _, out = install_project("mypkg", venv, tmp_path, self.FILES)
+ assert b"SetuptoolsDeprecationWarning" in out
+ assert b"ValueError: TEST_RAISE" in out
+ # but installation should be successful
+ out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+ assert b"42" in out
+
+
+class TestCustomBuildWheel:
+ def install_custom_build_wheel(self, dist):
+ bdist_wheel_cls = dist.get_command_class("bdist_wheel")
+
+ class MyBdistWheel(bdist_wheel_cls):
+ def get_tag(self):
+ # In issue #3513, we can see that some extensions may try to access
+ # the `plat_name` property in bdist_wheel
+ if self.plat_name.startswith("macosx-"):
+ _ = "macOS platform"
+ return super().get_tag()
+
+ dist.cmdclass["bdist_wheel"] = MyBdistWheel
+
+ def test_access_plat_name(self, tmpdir_cwd):
+ # Even when a custom bdist_wheel tries to access plat_name the build should
+ # be successful
+ jaraco.path.build({"module.py": "x = 42"})
+ dist = Distribution()
+ dist.script_name = "setup.py"
+ dist.set_defaults()
+ self.install_custom_build_wheel(dist)
+ cmd = editable_wheel(dist)
+ cmd.ensure_finalized()
+ cmd.run()
+ wheel_file = str(next(Path().glob('dist/*.whl')))
+ assert "editable" in wheel_file
+
+
+class TestCustomBuildExt:
+ def install_custom_build_ext_distutils(self, dist):
+ from distutils.command.build_ext import build_ext as build_ext_cls
+
+ class MyBuildExt(build_ext_cls):
+ pass
+
+ dist.cmdclass["build_ext"] = MyBuildExt
+
+ @pytest.mark.skipif(
+ sys.platform != "linux", reason="compilers may fail without correct setup"
+ )
+ def test_distutils_leave_inplace_files(self, tmpdir_cwd):
+ jaraco.path.build({"module.c": ""})
+ attrs = {
+ "ext_modules": [Extension("module", ["module.c"])],
+ }
+ dist = Distribution(attrs)
+ dist.script_name = "setup.py"
+ dist.set_defaults()
+ self.install_custom_build_ext_distutils(dist)
+ cmd = editable_wheel(dist)
+ cmd.ensure_finalized()
+ cmd.run()
+ wheel_file = str(next(Path().glob('dist/*.whl')))
+ assert "editable" in wheel_file
+ files = [p for p in Path().glob("module.*") if p.suffix != ".c"]
+ assert len(files) == 1
+ name = files[0].name
+ assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES)
+
+
+def install_project(name, venv, tmp_path, files, *opts):
+ project = tmp_path / name
+ project.mkdir()
+ jaraco.path.build(files, prefix=project)
+ opts = [*opts, "--no-build-isolation"] # force current version of setuptools
+ out = venv.run(
+ ["python", "-m", "pip", "-v", "install", "-e", str(project), *opts],
+ stderr=subprocess.STDOUT,
+ )
+ return project, out
+
+
+# ---- Assertion Helpers ----
+
+
+def assert_path(pkg, expected):
+ # __path__ is not guaranteed to exist, so we have to account for that
+ if pkg.__path__:
+ path = next(iter(pkg.__path__), None)
+ if path:
+ assert str(Path(path).resolve()) == expected
+
+
+def assert_link_to(file: Path, other: Path):
+ if file.is_symlink():
+ assert str(file.resolve()) == str(other.resolve())
+ else:
+ file_stat = file.stat()
+ other_stat = other.stat()
+ assert file_stat[stat.ST_INO] == other_stat[stat.ST_INO]
+ assert file_stat[stat.ST_DEV] == other_stat[stat.ST_DEV]
+
+
+def comparable_path(str_with_path: str) -> str:
+ return str_with_path.lower().replace(os.sep, "/").replace("//", "/")
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index ee07b5a1..6a2a9893 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -6,12 +6,19 @@ import re
import stat
import time
from typing import List, Tuple
+from pathlib import Path
+from unittest import mock
import pytest
from jaraco import path
+from setuptools import errors
from setuptools.command.egg_info import (
- egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision,
+ EggInfoDeprecationWarning,
+ egg_info,
+ get_pkg_info_revision,
+ manifest_maker,
+ write_entries,
)
from setuptools.dist import Distribution
@@ -24,6 +31,28 @@ class Environment(str):
pass
+@pytest.fixture
+def env():
+ with contexts.tempdir(prefix='setuptools-test.') as env_dir:
+ env = Environment(env_dir)
+ os.chmod(env_dir, stat.S_IRWXU)
+ subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
+ env.paths = dict(
+ (dirname, os.path.join(env_dir, dirname))
+ for dirname in subs
+ )
+ list(map(os.mkdir, env.paths.values()))
+ path.build({
+ env.paths['home']: {
+ '.pydistutils.cfg': DALS("""
+ [egg_info]
+ egg-base = %(egg-base)s
+ """ % env.paths)
+ }
+ })
+ yield env
+
+
class TestEggInfo:
setup_script = DALS("""
@@ -51,27 +80,6 @@ class TestEggInfo:
version_str = pkg_info_lines[0].split(' ')[1]
return tuple(map(int, version_str.split('.')[:2]))
- @pytest.fixture
- def env(self):
- with contexts.tempdir(prefix='setuptools-test.') as env_dir:
- env = Environment(env_dir)
- os.chmod(env_dir, stat.S_IRWXU)
- subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
- env.paths = dict(
- (dirname, os.path.join(env_dir, dirname))
- for dirname in subs
- )
- list(map(os.mkdir, env.paths.values()))
- path.build({
- env.paths['home']: {
- '.pydistutils.cfg': DALS("""
- [egg_info]
- egg-base = %(egg-base)s
- """ % env.paths)
- }
- })
- yield env
-
def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env):
"""
When the egg_info section is empty or not present, running
@@ -151,6 +159,21 @@ class TestEggInfo:
]
assert sorted(actual) == expected
+ def test_handling_utime_error(self, tmpdir_cwd, env):
+ dist = Distribution()
+ ei = egg_info(dist)
+ utime_patch = mock.patch('os.utime', side_effect=OSError("TEST"))
+ mkpath_patch = mock.patch(
+            'setuptools.command.egg_info.egg_info.mkpath', return_value=None
+ )
+
+ with utime_patch, mkpath_patch:
+ import distutils.errors
+
+ msg = r"Cannot update time stamp of directory 'None'"
+ with pytest.raises(distutils.errors.DistutilsFileError, match=msg):
+ ei.run()
+
def test_license_is_a_string(self, tmpdir_cwd, env):
setup_config = DALS("""
[metadata]
@@ -1084,3 +1107,27 @@ class TestEggInfo:
def test_get_pkg_info_revision_deprecated(self):
pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision)
+
+
+class TestWriteEntries:
+
+ def test_invalid_entry_point(self, tmpdir_cwd, env):
+ dist = Distribution({"name": "foo", "version": "0.0.1"})
+ dist.entry_points = {"foo": "foo = invalid-identifier:foo"}
+ cmd = dist.get_command_obj("egg_info")
+ expected_msg = r"Problems to parse .*invalid-identifier.*"
+ with pytest.raises(errors.OptionError, match=expected_msg) as ex:
+ write_entries(cmd, "entry_points", "entry_points.txt")
+ assert "ensure entry-point follows the spec" in ex.value.args[0]
+
+ def test_valid_entry_point(self, tmpdir_cwd, env):
+ dist = Distribution({"name": "foo", "version": "0.0.1"})
+ dist.entry_points = {
+ "abc": "foo = bar:baz",
+ "def": ["faa = bor:boz"],
+ }
+ cmd = dist.get_command_obj("egg_info")
+ write_entries(cmd, "entry_points", "entry_points.txt")
+ content = Path("entry_points.txt").read_text(encoding="utf-8")
+ assert "[abc]\nfoo = bar:baz\n" in content
+ assert "[def]\nfaa = bor:boz\n" in content
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index 906713f6..efcce924 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -1,4 +1,4 @@
-"""Tests for setuptools.find_packages()."""
+"""Tests for automatic package discovery"""
import os
import sys
import shutil
@@ -9,6 +9,7 @@ import pytest
from setuptools import find_packages
from setuptools import find_namespace_packages
+from setuptools.discovery import FlatLayoutPackageFinder
# modeled after CPython's test.support.can_symlink
@@ -178,3 +179,67 @@ class TestFindPackages:
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
packages = find_namespace_packages(self.dist_dir)
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+
+class TestFlatLayoutPackageFinder:
+ EXAMPLES = {
+ "hidden-folders": (
+ [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"],
+ ["pkg", "pkg.nested"]
+ ),
+ "private-packages": (
+ ["_pkg/__init__.py", "pkg/_private/__init__.py"],
+ ["pkg", "pkg._private"]
+ ),
+ "invalid-name": (
+ ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"],
+ []
+ ),
+ "docs": (
+ ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"],
+ ["pkg"]
+ ),
+ "tests": (
+ ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"],
+ ["pkg"]
+ ),
+ "examples": (
+ [
+ "pkg/__init__.py",
+ "examples/__init__.py",
+                "examples/file.py",
+ "example/other_file.py",
+ # Sub-packages should always be fine
+ "pkg/example/__init__.py",
+ "pkg/examples/__init__.py",
+ ],
+ ["pkg", "pkg.examples", "pkg.example"]
+ ),
+ "tool-specific": (
+ [
+ "pkg/__init__.py",
+ "tasks/__init__.py",
+ "tasks/subpackage/__init__.py",
+ "fabfile/__init__.py",
+ "fabfile/subpackage/__init__.py",
+ # Sub-packages should always be fine
+ "pkg/tasks/__init__.py",
+ "pkg/fabfile/__init__.py",
+ ],
+ ["pkg", "pkg.tasks", "pkg.fabfile"]
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_unwanted_directories_not_included(self, tmp_path, example):
+ files, expected_packages = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ found_packages = FlatLayoutPackageFinder.find(str(tmp_path))
+ assert set(found_packages) == set(expected_packages)
+
+
+def ensure_files(root_path, files):
+ for file in files:
+ path = root_path / file
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.touch()
diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py
new file mode 100644
index 00000000..4ef68801
--- /dev/null
+++ b/setuptools/tests/test_find_py_modules.py
@@ -0,0 +1,81 @@
+"""Tests for automatic discovery of modules"""
+import os
+
+import pytest
+
+from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder
+
+from .test_find_packages import ensure_files, has_symlink
+
+
+class TestModuleFinder:
+ def find(self, path, *args, **kwargs):
+ return set(ModuleFinder.find(str(path), *args, **kwargs))
+
+ EXAMPLES = {
+ # circumstance: (files, kwargs, expected_modules)
+ "simple_folder": (
+ ["file.py", "other.py"],
+ {}, # kwargs
+ ["file", "other"],
+ ),
+ "exclude": (
+ ["file.py", "other.py"],
+ {"exclude": ["f*"]},
+ ["other"],
+ ),
+ "include": (
+ ["file.py", "fole.py", "other.py"],
+ {"include": ["f*"], "exclude": ["fo*"]},
+ ["file"],
+ ),
+ "invalid-name": (
+ ["my-file.py", "other.file.py"],
+ {},
+ []
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_finder(self, tmp_path, example):
+ files, kwargs, expected_modules = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ assert self.find(tmp_path, **kwargs) == set(expected_modules)
+
+ @pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
+ def test_symlinked_packages_are_included(self, tmp_path):
+ src = "_myfiles/file.py"
+ ensure_files(tmp_path, [src])
+ os.symlink(tmp_path / src, tmp_path / "link.py")
+ assert self.find(tmp_path) == {"link"}
+
+
+class TestFlatLayoutModuleFinder:
+ def find(self, path, *args, **kwargs):
+ return set(FlatLayoutModuleFinder.find(str(path)))
+
+ EXAMPLES = {
+ # circumstance: (files, expected_modules)
+ "hidden-files": (
+ [".module.py"],
+ []
+ ),
+ "private-modules": (
+ ["_module.py"],
+ []
+ ),
+ "common-names": (
+ ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"],
+ ["mod"]
+ ),
+ "tool-specific": (
+ ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"],
+ ["mod"]
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_unwanted_files_not_included(self, tmp_path, example):
+ files, expected_modules = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ assert self.find(tmp_path) == set(expected_modules)
diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py
index a5ddd56d..aa2b502b 100644
--- a/setuptools/tests/test_logging.py
+++ b/setuptools/tests/test_logging.py
@@ -1,4 +1,6 @@
+import inspect
import logging
+import os
import pytest
@@ -34,3 +36,18 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
log_level = logger.getEffectiveLevel()
log_level_name = logging.getLevelName(log_level)
assert log_level_name == expected_level
+
+
+def test_patching_does_not_cause_problems():
+ # Ensure `dist.log` is only patched if necessary
+
+ import setuptools.logging
+ from distutils import dist
+
+ setuptools.logging.configure()
+
+ if os.getenv("SETUPTOOLS_USE_DISTUTILS", "local").lower() == "local":
+ # Modern logging infra, no problematic patching.
+ assert isinstance(dist.log, logging.Logger)
+ else:
+ assert inspect.ismodule(dist.log)
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index 82bdb9c6..3a973b01 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""sdist tests"""
import contextlib
@@ -8,6 +7,7 @@ import sys
import tempfile
import itertools
import io
+import logging
from distutils import log
from distutils.errors import DistutilsTemplateError
@@ -18,6 +18,9 @@ from setuptools.tests.textwrap import DALS
import pytest
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
+
def make_local_path(s):
"""Converts '/' in a string to os.sep"""
return s.replace('/', os.sep)
@@ -321,41 +324,29 @@ class TestFileListTest(TempDirTestCase):
to ensure setuptools' version of FileList keeps parity with distutils.
"""
- def setup_method(self, method):
- super(TestFileListTest, self).setup_method(method)
- self.threshold = log.set_threshold(log.FATAL)
- self._old_log = log.Log._log
- log.Log._log = self._log
- self.logs = []
+ @pytest.fixture(autouse=os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib")
+ def _compat_record_logs(self, monkeypatch, caplog):
+ """Account for stdlib compatibility"""
+ def _log(_logger, level, msg, args):
+ exc = sys.exc_info()
+ rec = logging.LogRecord("distutils", level, "", 0, msg, args, exc)
+ caplog.records.append(rec)
- def teardown_method(self, method):
- log.set_threshold(self.threshold)
- log.Log._log = self._old_log
- super(TestFileListTest, self).teardown_method(method)
-
- def _log(self, level, msg, args):
- if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL):
- raise ValueError('%s wrong log level' % str(level))
- self.logs.append((level, msg, args))
-
- def get_logs(self, *levels):
- def _format(msg, args):
- if len(args) == 0:
- return msg
- return msg % args
- return [_format(msg, args) for level, msg, args
- in self.logs if level in levels]
-
- def clear_logs(self):
- self.logs = []
-
- def assertNoWarnings(self):
- assert self.get_logs(log.WARN) == []
- self.clear_logs()
-
- def assertWarnings(self):
- assert len(self.get_logs(log.WARN)) > 0
- self.clear_logs()
+ monkeypatch.setattr(log.Log, "_log", _log)
+
+ def get_records(self, caplog, *levels):
+ return [r for r in caplog.records if r.levelno in levels]
+
+ def assertNoWarnings(self, caplog):
+ assert self.get_records(caplog, log.WARN) == []
+ caplog.clear()
+
+ def assertWarnings(self, caplog):
+ if IS_PYPY and not caplog.records:
+ pytest.xfail("caplog checks may not work well in PyPy")
+ else:
+ assert len(self.get_records(caplog, log.WARN)) > 0
+ caplog.clear()
def make_files(self, files):
for file in files:
@@ -472,7 +463,8 @@ class TestFileListTest(TempDirTestCase):
else:
assert False, "Should have thrown an error"
- def test_include(self):
+ def test_include(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# include
file_list = FileList()
@@ -481,14 +473,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('include *.py')
file_list.sort()
assert file_list.files == ['a.py']
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('include *.rb')
file_list.sort()
assert file_list.files == ['a.py']
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_exclude(self):
+ def test_exclude(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# exclude
file_list = FileList()
@@ -497,14 +490,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('exclude *.py')
file_list.sort()
assert file_list.files == ['b.txt', ml('d/c.py')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('exclude *.rb')
file_list.sort()
assert file_list.files == ['b.txt', ml('d/c.py')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_global_include(self):
+ def test_global_include(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# global-include
file_list = FileList()
@@ -513,14 +507,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('global-include *.py')
file_list.sort()
assert file_list.files == ['a.py', ml('d/c.py')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('global-include *.rb')
file_list.sort()
assert file_list.files == ['a.py', ml('d/c.py')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_global_exclude(self):
+ def test_global_exclude(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# global-exclude
file_list = FileList()
@@ -529,14 +524,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('global-exclude *.py')
file_list.sort()
assert file_list.files == ['b.txt']
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('global-exclude *.rb')
file_list.sort()
assert file_list.files == ['b.txt']
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_recursive_include(self):
+ def test_recursive_include(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# recursive-include
file_list = FileList()
@@ -545,14 +541,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('recursive-include d *.py')
file_list.sort()
assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('recursive-include e *.py')
file_list.sort()
assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_recursive_exclude(self):
+ def test_recursive_exclude(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# recursive-exclude
file_list = FileList()
@@ -561,14 +558,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('recursive-exclude d *.py')
file_list.sort()
assert file_list.files == ['a.py', ml('d/c.txt')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('recursive-exclude e *.py')
file_list.sort()
assert file_list.files == ['a.py', ml('d/c.txt')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_graft(self):
+ def test_graft(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# graft
file_list = FileList()
@@ -577,14 +575,15 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('graft d')
file_list.sort()
assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('graft e')
file_list.sort()
assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
- def test_prune(self):
+ def test_prune(self, caplog):
+ caplog.set_level(logging.DEBUG)
ml = make_local_path
# prune
file_list = FileList()
@@ -593,9 +592,9 @@ class TestFileListTest(TempDirTestCase):
file_list.process_template_line('prune d')
file_list.sort()
assert file_list.files == ['a.py', ml('f/f.py')]
- self.assertNoWarnings()
+ self.assertNoWarnings(caplog)
file_list.process_template_line('prune e')
file_list.sort()
assert file_list.files == ['a.py', ml('f/f.py')]
- self.assertWarnings()
+ self.assertWarnings(caplog)
diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py
deleted file mode 100644
index d1527bfa..00000000
--- a/setuptools/tests/test_msvc.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""
-Tests for msvc support module.
-"""
-
-import os
-import contextlib
-import distutils.errors
-import mock
-
-import pytest
-
-from . import contexts
-
-# importing only setuptools should apply the patch
-__import__('setuptools')
-
-pytest.importorskip("distutils.msvc9compiler")
-
-
-def mock_reg(hkcu=None, hklm=None):
- """
- Return a mock for distutils.msvc9compiler.Reg, patched
- to mock out the functions that access the registry.
- """
-
- _winreg = getattr(distutils.msvc9compiler, '_winreg', None)
- winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
-
- hives = {
- winreg.HKEY_CURRENT_USER: hkcu or {},
- winreg.HKEY_LOCAL_MACHINE: hklm or {},
- }
-
- @classmethod
- def read_keys(cls, base, key):
- """Return list of registry keys."""
- hive = hives.get(base, {})
- return [
- k.rpartition('\\')[2]
- for k in hive if k.startswith(key.lower())
- ]
-
- @classmethod
- def read_values(cls, base, key):
- """Return dict of registry keys and values."""
- hive = hives.get(base, {})
- return dict(
- (k.rpartition('\\')[2], hive[k])
- for k in hive if k.startswith(key.lower())
- )
-
- return mock.patch.multiple(
- distutils.msvc9compiler.Reg,
- read_keys=read_keys, read_values=read_values)
-
-
-class TestModulePatch:
- """
- Ensure that importing setuptools is sufficient to replace
- the standard find_vcvarsall function with a version that
- recognizes the "Visual C++ for Python" package.
- """
-
- key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
- key_64 = key_32.replace(r'\microsoft', r'\wow6432node\microsoft')
-
- def test_patched(self):
- "Test the module is actually patched"
- mod_name = distutils.msvc9compiler.find_vcvarsall.__module__
- assert mod_name == "setuptools.msvc", "find_vcvarsall unpatched"
-
- def test_no_registry_entries_means_nothing_found(self):
- """
- No registry entries or environment variable should lead to an error
- directing the user to download vcpython27.
- """
- find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
- query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
-
- with contexts.environment(VS90COMNTOOLS=None):
- with mock_reg():
- assert find_vcvarsall(9.0) is None
-
- try:
- query_vcvarsall(9.0)
- except Exception as exc:
- expected = distutils.errors.DistutilsPlatformError
- assert isinstance(exc, expected)
- assert 'aka.ms/vcpython27' in str(exc)
-
- @pytest.fixture
- def user_preferred_setting(self):
- """
- Set up environment with different install dirs for user vs. system
- and yield the user_install_dir for the expected result.
- """
- with self.mock_install_dir() as user_install_dir:
- with self.mock_install_dir() as system_install_dir:
- reg = mock_reg(
- hkcu={
- self.key_32: user_install_dir,
- },
- hklm={
- self.key_32: system_install_dir,
- self.key_64: system_install_dir,
- },
- )
- with reg:
- yield user_install_dir
-
- def test_prefer_current_user(self, user_preferred_setting):
- """
- Ensure user's settings are preferred.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(user_preferred_setting, 'vcvarsall.bat')
- assert expected == result
-
- @pytest.fixture
- def local_machine_setting(self):
- """
- Set up environment with only the system environment configured.
- """
- with self.mock_install_dir() as system_install_dir:
- reg = mock_reg(
- hklm={
- self.key_32: system_install_dir,
- },
- )
- with reg:
- yield system_install_dir
-
- def test_local_machine_recognized(self, local_machine_setting):
- """
- Ensure machine setting is honored if user settings are not present.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(local_machine_setting, 'vcvarsall.bat')
- assert expected == result
-
- @pytest.fixture
- def x64_preferred_setting(self):
- """
- Set up environment with 64-bit and 32-bit system settings configured
- and yield the canonical location.
- """
- with self.mock_install_dir() as x32_dir:
- with self.mock_install_dir() as x64_dir:
- reg = mock_reg(
- hklm={
- # This *should* only exist on 32-bit machines
- self.key_32: x32_dir,
- # This *should* only exist on 64-bit machines
- self.key_64: x64_dir,
- },
- )
- with reg:
- yield x32_dir
-
- def test_ensure_64_bit_preferred(self, x64_preferred_setting):
- """
- Ensure 64-bit system key is preferred.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat')
- assert expected == result
-
- @staticmethod
- @contextlib.contextmanager
- def mock_install_dir():
- """
- Make a mock install dir in a unique location so that tests can
- distinguish which dir was detected in a given scenario.
- """
- with contexts.tempdir() as result:
- vcvarsall = os.path.join(result, 'vcvarsall.bat')
- with open(vcvarsall, 'w'):
- pass
- yield result
diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py
index 1aca12dd..271d6be5 100644
--- a/setuptools/tests/test_msvc14.py
+++ b/setuptools/tests/test_msvc14.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
Tests for msvc support module (msvc14 unit tests).
"""
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index 5f09e1bd..8b5356dc 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -5,8 +5,8 @@ import platform
import urllib.request
import urllib.error
import http.client
+from unittest import mock
-import mock
import pytest
import setuptools.package_index
@@ -21,7 +21,9 @@ class TestPackageIndex:
<a href="http://some_url">Name</a>
(<a title="MD5 hash"
href="{hash_url}">md5</a>)
- """.lstrip().format(**locals())
+ """.lstrip().format(
+ **locals()
+ )
assert setuptools.package_index.PYPI_MD5.match(doc)
def test_bad_url_bad_port(self):
@@ -38,9 +40,7 @@ class TestPackageIndex:
# issue 16
# easy_install inquant.contentmirror.plone breaks because of a typo
# in its home URL
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
+ index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
url = (
'url:%20https://svn.plone.org/svn'
@@ -54,9 +54,7 @@ class TestPackageIndex:
assert isinstance(v, urllib.error.HTTPError)
def test_bad_url_bad_status_line(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
+ index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
def _urlopen(*args):
raise http.client.BadStatusLine('line')
@@ -74,9 +72,7 @@ class TestPackageIndex:
"""
A bad URL with a double scheme should raise a DistutilsError.
"""
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
+ index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
# issue 20
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
@@ -93,22 +89,17 @@ class TestPackageIndex:
raise RuntimeError("Did not raise")
def test_bad_url_screwy_href(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
+ index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
# issue #160
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
# this should not fail
url = 'http://example.com'
- page = ('<a href="http://www.famfamfam.com]('
- 'http://www.famfamfam.com/">')
+ page = '<a href="http://www.famfamfam.com](' 'http://www.famfamfam.com/">'
index.process_index(url, page)
def test_url_ok(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
+ index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
url = 'file:///tmp/test_package_index'
assert index.url_ok(url, True)
@@ -169,9 +160,7 @@ class TestPackageIndex:
'b0',
'rc0',
]
- post = [
- '.post0'
- ]
+ post = ['.post0']
dev = [
'.dev0',
]
@@ -186,10 +175,14 @@ class TestPackageIndex:
for e in epoch
for r in releases
for p in sum([pre, post, dev], [''])
- for locs in local]
+ for locs in local
+ ]
for v, vc in versions:
- dists = list(setuptools.package_index.distros_for_url(
- 'http://example.com/example-foo.zip#egg=example-foo-' + v))
+ dists = list(
+ setuptools.package_index.distros_for_url(
+ 'http://example.com/example-foo.zip#egg=example-foo-' + v
+ )
+ )
assert dists[0].version == ''
assert dists[1].version == vc
@@ -204,8 +197,7 @@ class TestPackageIndex:
expected_dir = str(tmpdir / 'project@master')
expected = (
- 'git clone --quiet '
- 'https://github.example/group/project {expected_dir}'
+ 'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
).format(**locals())
first_call_args = os_system_mock.call_args_list[0][0]
assert first_call_args == (expected,)
@@ -226,8 +218,7 @@ class TestPackageIndex:
expected_dir = str(tmpdir / 'project')
expected = (
- 'git clone --quiet '
- 'https://github.example/group/project {expected_dir}'
+ 'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
).format(**locals())
os_system_mock.assert_called_once_with(expected)
@@ -243,8 +234,7 @@ class TestPackageIndex:
expected_dir = str(tmpdir / 'project')
expected = (
- 'svn checkout -q '
- 'svn+https://svn.example/project {expected_dir}'
+ 'svn checkout -q ' 'svn+https://svn.example/project {expected_dir}'
).format(**locals())
os_system_mock.assert_called_once_with(expected)
@@ -252,7 +242,8 @@ class TestPackageIndex:
class TestContentCheckers:
def test_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
+ )
checker.feed('You should probably not be using MD5'.encode('ascii'))
assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
assert checker.is_valid()
@@ -260,25 +251,27 @@ class TestContentCheckers:
def test_other_fragment(self):
"Content checks should succeed silently if no hash is present"
checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#something%20completely%20different')
+ 'http://foo/bar#something%20completely%20different'
+ )
checker.feed('anything'.encode('ascii'))
assert checker.is_valid()
def test_blank_md5(self):
"Content checks should succeed if a hash is empty"
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=')
+ checker = setuptools.package_index.HashChecker.from_url('http://foo/bar#md5=')
checker.feed('anything'.encode('ascii'))
assert checker.is_valid()
def test_get_hash_name_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
+ )
assert checker.hash_name == 'md5'
def test_report(self):
checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
+ )
rep = checker.report(lambda x: x, 'My message about %s')
assert rep == 'My message about md5'
@@ -287,8 +280,8 @@ class TestContentCheckers:
def temp_home(tmpdir, monkeypatch):
key = (
'USERPROFILE'
- if platform.system() == 'Windows' and sys.version_info > (3, 8) else
- 'HOME'
+ if platform.system() == 'Windows' and sys.version_info > (3, 8)
+ else 'HOME'
)
monkeypatch.setitem(os.environ, key, str(tmpdir))
@@ -298,13 +291,25 @@ def temp_home(tmpdir, monkeypatch):
class TestPyPIConfig:
def test_percent_in_password(self, temp_home):
pypirc = temp_home / '.pypirc'
- pypirc.write(DALS("""
+ pypirc.write(
+ DALS(
+ """
[pypi]
repository=https://pypi.org
username=jaraco
password=pity%
- """))
+ """
+ )
+ )
cfg = setuptools.package_index.PyPIConfig()
cred = cfg.creds_by_repository['https://pypi.org']
assert cred.username == 'jaraco'
assert cred.password == 'pity%'
+
+
+@pytest.mark.timeout(1)
+def test_REL_DoS():
+ """
+ REL should not hang on a contrived attack string.
+ """
+ setuptools.package_index.REL.search('< rel=' + ' ' * 2**12)
diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py
index 98605806..ed85e9bb 100644
--- a/setuptools/tests/test_register.py
+++ b/setuptools/tests/test_register.py
@@ -2,10 +2,7 @@ from setuptools.command.register import register
from setuptools.dist import Distribution
from setuptools.errors import RemovedCommandError
-try:
- from unittest import mock
-except ImportError:
- import mock
+from unittest import mock
import pytest
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 66f46ad0..30631c24 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -10,7 +10,8 @@ from unittest import mock
import pytest
-import pkg_resources
+from setuptools import Command
+from setuptools._importlib import metadata
from setuptools import SetuptoolsDeprecationWarning
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
@@ -27,11 +28,14 @@ SETUP_ATTRS = {
'data_files': [("data", [os.path.join("d", "e.dat")])],
}
-SETUP_PY = """\
+SETUP_PY = (
+ """\
from setuptools import setup
setup(**%r)
-""" % SETUP_ATTRS
+"""
+ % SETUP_ATTRS
+)
@contextlib.contextmanager
@@ -85,6 +89,12 @@ fail_on_latin1_encoded_filenames = pytest.mark.xfail(
)
+skip_under_xdist = pytest.mark.skipif(
+ "os.environ.get('PYTEST_XDIST_WORKER')",
+ reason="pytest-dev/pytest-xdist#843",
+)
+
+
def touch(path):
path.write_text('', encoding='utf-8')
@@ -318,6 +328,7 @@ class TestSdistTest:
# The filelist should have been updated as well
assert u_filename in mm.filelist.files
+ @skip_under_xdist
def test_write_manifest_skips_non_utf8_filenames(self):
"""
Files that cannot be encoded to UTF-8 (specifically, those that
@@ -450,13 +461,13 @@ class TestSdistTest:
@classmethod
def make_strings(cls, item):
if isinstance(item, dict):
- return {
- key: cls.make_strings(value) for key, value in item.items()}
+ return {key: cls.make_strings(value) for key, value in item.items()}
if isinstance(item, list):
return list(map(cls.make_strings, item))
return str(item)
@fail_on_latin1_encoded_filenames
+ @skip_under_xdist
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
dist = Distribution(self.make_strings(SETUP_ATTRS))
@@ -517,6 +528,46 @@ class TestSdistTest:
manifest = cmd.filelist.files
assert 'pyproject.toml' not in manifest
+ def test_build_subcommand_source_files(self, tmpdir):
+ touch(tmpdir / '.myfile~')
+
+ # Sanity check: without custom commands file list should not be affected
+ dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+ with quiet():
+ cmd.run()
+ manifest = cmd.filelist.files
+ assert '.myfile~' not in manifest
+
+ # Test: custom command should be able to augment file list
+ dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
+ build = dist.get_command_obj("build")
+ build.sub_commands = [*build.sub_commands, ("build_custom", None)]
+
+ class build_custom(Command):
+ def initialize_options(self):
+ ...
+
+ def finalize_options(self):
+ ...
+
+ def run(self):
+ ...
+
+ def get_source_files(self):
+ return ['.myfile~']
+
+ dist.cmdclass.update(build_custom=build_custom)
+
+ cmd = sdist(dist)
+ cmd.use_defaults = True
+ cmd.ensure_finalized()
+ with quiet():
+ cmd.run()
+ manifest = cmd.filelist.files
+ assert '.myfile~' in manifest
+
def test_default_revctrl():
"""
@@ -529,7 +580,11 @@ def test_default_revctrl():
This interface must be maintained until Ubuntu 12.04 is no longer
supported (by Setuptools).
"""
- ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
- ep = pkg_resources.EntryPoint.parse(ep_def)
- res = ep.resolve()
+ (ep,) = metadata.EntryPoints._from_text(
+ """
+ [setuptools.file_finders]
+ svn_cvs = setuptools.command.sdist:_default_revctrl
+ """
+ )
+ res = ep.load()
assert hasattr(res, '__iter__')
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index b97faf17..0640f49d 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -303,3 +303,8 @@ def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
for member in contents:
assert '/tests/' not in member
+
+
+def test_convert_path_deprecated():
+ with pytest.warns(setuptools.SetuptoolsDeprecationWarning):
+ setuptools.convert_path('setuptools/tests')
diff --git a/setuptools/tests/test_sphinx_upload_docs.py b/setuptools/tests/test_sphinx_upload_docs.py
deleted file mode 100644
index f24077fd..00000000
--- a/setuptools/tests/test_sphinx_upload_docs.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import pytest
-
-from jaraco import path
-
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-
-@pytest.fixture
-def sphinx_doc_sample_project(tmpdir_cwd):
- path.build({
- 'setup.py': 'from setuptools import setup; setup()',
- 'build': {
- 'docs': {
- 'conf.py': 'project="test"',
- 'index.rst': ".. toctree::\
- :maxdepth: 2\
- :caption: Contents:",
- },
- },
- })
-
-
-@pytest.mark.usefixtures('sphinx_doc_sample_project')
-class TestSphinxUploadDocs:
- def test_sphinx_doc(self):
- params = dict(
- packages=['test'],
- )
- dist = Distribution(params)
-
- cmd = upload_docs(dist)
-
- cmd.initialize_options()
- assert cmd.upload_dir is None
- assert cmd.has_sphinx() is True
- cmd.finalize_options()
diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py
index 7586cb26..4ed59bc2 100644
--- a/setuptools/tests/test_upload.py
+++ b/setuptools/tests/test_upload.py
@@ -2,10 +2,7 @@ from setuptools.command.upload import upload
from setuptools.dist import Distribution
from setuptools.errors import RemovedCommandError
-try:
- from unittest import mock
-except ImportError:
- import mock
+from unittest import mock
import pytest
diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py
deleted file mode 100644
index 68977a5d..00000000
--- a/setuptools/tests/test_upload_docs.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import os
-import zipfile
-import contextlib
-
-import pytest
-from jaraco import path
-
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-from .textwrap import DALS
-from . import contexts
-
-
-@pytest.fixture
-def sample_project(tmpdir_cwd):
- path.build({
- 'setup.py': DALS("""
- from setuptools import setup
-
- setup()
- """),
- 'build': {
- 'index.html': 'Hello world.',
- 'empty': {},
- }
- })
-
-
-@pytest.mark.usefixtures('sample_project')
-@pytest.mark.usefixtures('user_override')
-class TestUploadDocsTest:
- def test_create_zipfile(self):
- """
- Ensure zipfile creation handles common cases, including a folder
- containing an empty folder.
- """
-
- dist = Distribution()
-
- cmd = upload_docs(dist)
- cmd.target_dir = cmd.upload_dir = 'build'
- with contexts.tempdir() as tmp_dir:
- tmp_file = os.path.join(tmp_dir, 'foo.zip')
- zip_file = cmd.create_zipfile(tmp_file)
-
- assert zipfile.is_zipfile(tmp_file)
-
- with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
- assert zip_file.namelist() == ['index.html']
-
- def test_build_multipart(self):
- data = dict(
- a="foo",
- b="bar",
- file=('file.txt', b'content'),
- )
- body, content_type = upload_docs._build_multipart(data)
- assert 'form-data' in content_type
- assert "b'" not in content_type
- assert 'b"' not in content_type
- assert isinstance(body, bytes)
- assert b'foo' in body
- assert b'content' in body
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index 0ba89643..acfe04e9 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -1,7 +1,8 @@
import os
import sys
-import itertools
import subprocess
+from urllib.request import urlopen
+from urllib.error import URLError
import pathlib
@@ -18,7 +19,7 @@ def pytest_virtualenv_works(venv):
pytest_virtualenv may not work. if it doesn't, skip these
tests. See #1284.
"""
- venv_prefix = venv.run(["python" , "-c", "import sys; print(sys.prefix)"]).strip()
+ venv_prefix = venv.run(["python", "-c", "import sys; print(sys.prefix)"]).strip()
if venv_prefix == sys.prefix:
pytest.skip("virtualenv is broken (see pypa/setuptools#1284)")
@@ -31,58 +32,60 @@ def test_clean_env_install(venv_without_setuptools, setuptools_wheel):
venv_without_setuptools.run(cmd)
-def _get_pip_versions():
- # This fixture will attempt to detect if tests are being run without
- # network connectivity and if so skip some tests
-
- network = True
+def access_pypi():
+ # Detect if tests are being run without connectivity
if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover
try:
- from urllib.request import urlopen
- from urllib.error import URLError
- except ImportError:
- from urllib2 import urlopen, URLError # Python 2.7 compat
-
- try:
urlopen('https://pypi.org', timeout=1)
except URLError:
# No network, disable most of these tests
- network = False
-
- def mark(param, *marks):
- if not isinstance(param, type(pytest.param(''))):
- param = pytest.param(param)
- return param._replace(marks=param.marks + marks)
-
- def skip_network(param):
- return param if network else mark(param, pytest.mark.skip(reason="no network"))
-
- network_versions = [
- mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')),
- 'pip<20.1',
- 'pip<21',
- 'pip<22',
- mark(
- 'https://github.com/pypa/pip/archive/main.zip',
- pytest.mark.xfail(reason='#2975'),
- ),
- ]
+ return False
- versions = itertools.chain(
- [None],
- map(skip_network, network_versions)
- )
-
- return list(versions)
+ return True
@pytest.mark.skipif(
'platform.python_implementation() == "PyPy"',
reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995",
)
-@pytest.mark.parametrize('pip_version', _get_pip_versions())
-def test_pip_upgrade_from_source(pip_version, venv_without_setuptools,
- setuptools_wheel, setuptools_sdist):
+@pytest.mark.skipif(not access_pypi(), reason="no network")
+# ^-- Even when it is not necessary to install a different version of `pip`
+# the build process will still try to download `wheel`, see #3147 and #2986.
+@pytest.mark.parametrize(
+ 'pip_version',
+ [
+ None,
+ pytest.param(
+ 'pip<20.1',
+ marks=pytest.mark.xfail(
+ 'sys.version_info > (3, 12)',
+                reason="pip 22 required for Python 3.12 and later",
+ ),
+ ),
+ pytest.param(
+ 'pip<21',
+ marks=pytest.mark.xfail(
+ 'sys.version_info > (3, 12)',
+                reason="pip 22 required for Python 3.12 and later",
+ ),
+ ),
+ pytest.param(
+ 'pip<22',
+ marks=pytest.mark.xfail(
+ 'sys.version_info > (3, 12)',
+                reason="pip 22 required for Python 3.12 and later",
+ ),
+ ),
+ 'pip<23',
+ pytest.param(
+ 'https://github.com/pypa/pip/archive/main.zip',
+ marks=pytest.mark.xfail(reason='#2975'),
+ ),
+ ],
+)
+def test_pip_upgrade_from_source(
+ pip_version, venv_without_setuptools, setuptools_wheel, setuptools_sdist
+):
"""
Check pip can upgrade setuptools from source.
"""
@@ -106,10 +109,12 @@ def _check_test_command_install_requirements(venv, tmpdir):
"""
Check the test command will install all required dependencies.
"""
+
def sdist(distname, version):
dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version))
make_nspkg_sdist(str(dist_path), distname, version)
return dist_path
+
dependency_links = [
pathlib.Path(str(dist_path)).as_uri()
for dist_path in (
@@ -120,8 +125,9 @@ def _check_test_command_install_requirements(venv, tmpdir):
)
]
with tmpdir.join('setup.py').open('w') as fp:
- fp.write(DALS(
- '''
+ fp.write(
+ DALS(
+ '''
from setuptools import setup
setup(
@@ -143,17 +149,24 @@ def _check_test_command_install_requirements(venv, tmpdir):
""",
}}
)
- '''.format(dependency_links=dependency_links)))
+ '''.format(
+ dependency_links=dependency_links
+ )
+ )
+ )
with tmpdir.join('test.py').open('w') as fp:
- fp.write(DALS(
- '''
+ fp.write(
+ DALS(
+ '''
import foobar
import bits
import bobs
import pieces
open('success', 'w').close()
- '''))
+ '''
+ )
+ )
cmd = ["python", 'setup.py', 'test', '-s', 'test']
venv.run(cmd, cwd=str(tmpdir))
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index a15c3a46..b2bbdfae 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -1,11 +1,11 @@
-# -*- coding: utf-8 -*-
-
"""wheel tests
"""
from distutils.sysconfig import get_config_var
from distutils.util import get_platform
import contextlib
+import pathlib
+import stat
import glob
import inspect
import os
@@ -614,3 +614,88 @@ def test_wheel_is_compatible(monkeypatch):
monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags)
assert Wheel(
'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
+
+
+def test_wheel_mode():
+ @contextlib.contextmanager
+ def build_wheel(extra_file_defs=None, **kwargs):
+ file_defs = {
+ 'setup.py': (DALS(
+ '''
+ # -*- coding: utf-8 -*-
+ from setuptools import setup
+ import setuptools
+ setup(**%r)
+ '''
+ ) % kwargs).encode('utf-8'),
+ }
+ if extra_file_defs:
+ file_defs.update(extra_file_defs)
+ with tempdir() as source_dir:
+ path.build(file_defs, source_dir)
+ runsh = pathlib.Path(source_dir) / "script.sh"
+ os.chmod(runsh, 0o777)
+ subprocess.check_call((sys.executable, 'setup.py',
+ '-q', 'bdist_wheel'), cwd=source_dir)
+ yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
+
+ params = dict(
+ id='script',
+ file_defs={
+ 'script.py': DALS(
+ '''
+ #/usr/bin/python
+ print('hello world!')
+ '''
+ ),
+ 'script.sh': DALS(
+ '''
+ #/bin/sh
+ echo 'hello world!'
+ '''
+ ),
+ },
+ setup_kwargs=dict(
+ scripts=['script.py', 'script.sh'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt',
+ {'scripts': [
+ 'script.py',
+ 'script.sh'
+ ]}
+
+ ]
+ }
+ })
+ )
+
+ project_name = params.get('name', 'foo')
+ version = params.get('version', '1.0')
+ install_tree = params.get('install_tree')
+ file_defs = params.get('file_defs', {})
+ setup_kwargs = params.get('setup_kwargs', {})
+
+ with build_wheel(
+ name=project_name,
+ version=version,
+ install_requires=[],
+ extras_require={},
+ extra_file_defs=file_defs,
+ **setup_kwargs
+ ) as filename, tempdir() as install_dir:
+ _check_wheel_install(filename, install_dir,
+ install_tree, project_name,
+ version, None)
+ w = Wheel(filename)
+ base = pathlib.Path(install_dir) / w.egg_name()
+ script_sh = base / "EGG-INFO" / "scripts" / "script.sh"
+ assert script_sh.exists()
+ if sys.platform != 'win32':
+ # Editable file mode has no effect on Windows
+ assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777"
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index 8ac9bd07..f8b82fcc 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -107,9 +107,9 @@ class TestCLI(WrapperTester):
'arg5 a\\\\b',
]
proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
- stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
- actual = stdout.decode('ascii').replace('\r\n', '\n')
+ cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, text=True)
+ stdout, stderr = proc.communicate('hello\nworld\n')
+ actual = stdout.replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
@@ -148,9 +148,11 @@ class TestCLI(WrapperTester):
cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ stderr=subprocess.STDOUT,
+ text=True,
+ )
stdout, stderr = proc.communicate()
- actual = stdout.decode('ascii').replace('\r\n', '\n')
+ actual = stdout.replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
[]
@@ -188,7 +190,7 @@ class TestGUI(WrapperTester):
]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ stderr=subprocess.STDOUT, text=True)
stdout, stderr = proc.communicate()
assert not stdout
assert not stderr