I am done

This commit is contained in:
2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions

View File

@ -0,0 +1,13 @@
import locale
import sys
import pytest

__all__ = ['fail_on_ascii']

# Determine the encoding of the current locale.
# ``locale.getencoding`` only exists on Python >= 3.11; earlier versions get
# an equivalent answer from ``getpreferredencoding(False)``.
locale_encoding = (
    locale.getencoding()
    if sys.version_info >= (3, 11)
    else locale.getpreferredencoding(False)
)

# 'ANSI_X3.4-1968' is glibc's name for plain ASCII (e.g. the "C" locale).
is_ascii = locale_encoding == 'ANSI_X3.4-1968'

# Marker for tests known to break when the locale cannot represent non-ASCII.
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")

View File

@ -0,0 +1,3 @@
# Compatibility shim for CPython's test-support helpers.
# NOTE(review): assumes ``jaraco.test.cpython`` either imports the modern
# ``test.support.os_helper`` module directly (``try_import``) or synthesises a
# stand-in from the legacy flat ``test.support`` namespace, probing for the
# ``can_symlink`` attribute — confirm against jaraco.test docs.
from jaraco.test.cpython import from_test_support, try_import
os_helper = try_import('os_helper') or from_test_support('can_symlink')

View File

@ -0,0 +1,57 @@
import re
import time
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import urlopen
__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]
NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
DOWNLOAD_DIR = Path(__file__).parent
# ----------------------------------------------------------------------
# Please update ./preload.py accordingly when modifying this file
# ----------------------------------------------------------------------
def output_file(url: str, download_dir: Path = DOWNLOAD_DIR):
    """Derive a deterministic local file path for ``url`` under ``download_dir``.

    Scheme/host boilerplate (see ``NAME_REMOVE``) is dropped and any remaining
    unsafe characters are collapsed into underscores.
    """
    cleaned = url.strip()
    for fragment in NAME_REMOVE:
        cleaned = cleaned.replace(fragment, '').strip().strip('/:').strip()
    safe_name = re.sub(r"[^\-_\.\w\d]+", "_", cleaned)
    return Path(download_dir, safe_name)
def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5):
    """Download ``url`` into ``download_dir``, skipping files already present.

    On an ``HTTPError`` the download is retried exactly once after ``wait``
    seconds.  Returns the local path of the (possibly pre-existing) file.
    """
    path = output_file(url, download_dir)
    if path.exists():
        print(f"Skipping {url} (already exists: {path})")
        return path

    download_dir.mkdir(exist_ok=True, parents=True)
    print(f"Downloading {url} to {path}")
    try:
        download(url, path)
    except HTTPError:
        # Possibly a transient server problem: wait and retry once.
        time.sleep(wait)
        download(url, path)
    return path
def urls_from_file(list_file: Path):
    """Return the list of URLs in ``list_file``.

    ``list_file`` should be a text file where each line corresponds to a URL to
    download.  Lines are stripped of surrounding whitespace; blank lines and
    comment lines (starting with ``#``) are ignored — previously a blank line
    or trailing whitespace would leak into the result and produce a bogus URL
    for ``retrieve_file``/``download`` to choke on.
    """
    print(f"file: {list_file}")
    content = list_file.read_text(encoding="utf-8")
    lines = (line.strip() for line in content.splitlines())
    return [url for url in lines if url and not url.startswith("#")]
def download(url: str, dest: Path):
    """Fetch ``url`` and write its raw bytes to ``dest``."""
    with urlopen(url) as response:
        payload = response.read()
    Path(dest).write_bytes(payload)
    # Sanity check: a successful write must leave the file on disk.
    assert Path(dest).exists()

View File

@ -0,0 +1,18 @@
"""This file can be used to preload files needed for testing.
For example you can use::
cd setuptools/tests/config
python -m downloads.preload setupcfg_examples.txt
to make sure the `setup.cfg` examples are downloaded before starting the tests.
"""
import sys
from pathlib import Path
from . import retrieve_file, urls_from_file
if __name__ == "__main__":
urls = urls_from_file(Path(sys.argv[1]))
list(map(retrieve_file, urls))

View File

@ -0,0 +1,22 @@
# ====================================================================
# Some popular packages that use setup.cfg (and others not so popular)
# Reference: https://hugovk.github.io/top-pypi-packages/
# ====================================================================
https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg

View File

@ -0,0 +1,511 @@
"""Make sure that applying the configuration from pyproject.toml is equivalent to
applying a similar configuration from setup.cfg
To run these tests offline, please have a look on ``./downloads/preload.py``
"""
from __future__ import annotations
import io
import re
import tarfile
from inspect import cleandoc
from pathlib import Path
from unittest.mock import Mock
import pytest
from ini2toml.api import LiteTranslator
from packaging.metadata import Metadata
import setuptools # noqa ensure monkey patch to metadata
from setuptools.command.egg_info import write_requirements
from setuptools.config import expand, pyprojecttoml, setupcfg
from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
from setuptools.dist import Distribution
from setuptools.errors import RemovedConfigError
from .downloads import retrieve_file, urls_from_file
HERE = Path(__file__).parent
EXAMPLES_FILE = "setupcfg_examples.txt"
def makedist(path, **attrs):
    """Create a ``Distribution`` rooted at ``path`` with extra attributes."""
    options = {"src_root": path}
    options.update(attrs)  # explicit attrs may override src_root
    return Distribution(options)
@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
@pytest.mark.filterwarnings("ignore")
@pytest.mark.uses_network
def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
    """Download a real-world ``setup.cfg``, translate it to ``pyproject.toml``
    via ini2toml, and check both configuration paths yield an equivalent dist.
    """
    # Avoid executing arbitrary ``attr:`` directives from downloaded configs.
    monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
    setupcfg_example = retrieve_file(url)
    pyproject_example = Path(tmp_path, "pyproject.toml")
    setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
    toml_config = LiteTranslator().translate(setupcfg_text, "setup.cfg")
    pyproject_example.write_text(toml_config, encoding="utf-8")

    dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
    dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)

    # Core metadata must match after normalisation (see ``core_metadata``).
    pkg_info_toml = core_metadata(dist_toml)
    pkg_info_cfg = core_metadata(dist_cfg)
    assert pkg_info_toml == pkg_info_cfg

    if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
        assert set(dist_toml.license_files) == set(dist_cfg.license_files)

    if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
        print(dist_cfg.entry_points)
        # Compare entry-points ignoring internal whitespace and ordering.
        ep_toml = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_toml.entry_points.items()
        }
        ep_cfg = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_cfg.entry_points.items()
        }
        assert ep_toml == ep_cfg

    if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
        pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
        pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
        assert pkg_data_toml == pkg_data_cfg

    if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
        data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
        data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
        assert data_files_toml == data_files_cfg

    assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
    if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
        extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
        extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
        assert extra_req_toml == extra_req_cfg
# Example ``pyproject.toml`` as given in PEP 621 (project metadata table).
PEP621_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
description = "Lovely Spam! Wonderful Spam!"
readme = "README.rst"
requires-python = ">=3.8"
license = {file = "LICENSE.txt"}
keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
maintainers = [
{name = "Brett Cannon", email = "brett@python.org"},
{name = "John X. Ãørçeč", email = "john@utf8.org"},
{name = "Γαμα קּ 東", email = "gama@utf8.org"},
]
classifiers = [
"Development Status :: 4 - Beta",
"Programming Language :: Python"
]
dependencies = [
"httpx",
"gidgethub[httpx]>4.0.0",
"django>2.1; os_name != 'nt'",
"django>2.0; os_name == 'nt'"
]
[project.optional-dependencies]
test = [
"pytest < 5.0.0",
"pytest-cov[all]"
]
[project.urls]
homepage = "http://example.com"
documentation = "http://readthedocs.org"
repository = "http://github.com"
changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
[project.scripts]
spam-cli = "spam:main_cli"
[project.gui-scripts]
spam-gui = "spam:main_gui"
[project.entry-points."spam.magical"]
tomatoes = "spam:main_tomatoes"
"""

# Variant whose maintainer uses a fully internationalised e-mail address
# (exercised by ``test_utf8_maintainer_in_metadata``).
PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
maintainers = [
{name = "Степан Бандера", email = "криївка@оун-упа.укр"},
]
"""

# Minimal module providing the entry-point callables referenced above.
PEP621_EXAMPLE_SCRIPT = """
def main_cli(): pass
def main_gui(): pass
def main_tomatoes(): pass
"""
def _pep621_example_project(
    tmp_path,
    readme="README.rst",
    pyproject_text=PEP621_EXAMPLE,
):
    """Materialise the PEP 621 example project inside ``tmp_path``.

    Writes ``pyproject.toml`` (with its readme reference rewritten to point at
    ``readme``), the readme itself, a LICENSE stub, and the example module.
    Returns the path of the generated ``pyproject.toml``.
    """
    text = pyproject_text.replace('readme = "README.rst"', f'readme = "{readme}"')
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(text, encoding="utf-8")

    (tmp_path / readme).write_text("hello world", encoding="utf-8")
    (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---", encoding="utf-8")
    (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT, encoding="utf-8")
    return pyproject
def test_pep621_example(tmp_path):
    """Make sure the example in PEP 621 works"""
    pyproject = _pep621_example_project(tmp_path)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    # ``license = {file = ...}`` fills the license *text*; the file itself is
    # also recorded in ``license_files``.
    assert dist.metadata.license == "--- LICENSE stub ---"
    assert set(dist.metadata.license_files) == {"LICENSE.txt"}
@pytest.mark.parametrize(
    "readme, ctype",
    [
        ("Readme.txt", "text/plain"),
        ("readme.md", "text/markdown"),
        ("text.rst", "text/x-rst"),
    ],
)
def test_readme_content_type(tmp_path, readme, ctype):
    """The readme's content-type is inferred from its file extension."""
    pyproject = _pep621_example_project(tmp_path, readme)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type == ctype
def test_undefined_content_type(tmp_path):
    """A readme with an unrecognised extension is an error."""
    pyproject = _pep621_example_project(tmp_path, "README.tex")
    with pytest.raises(ValueError, match="Undefined content type for README.tex"):
        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
def test_no_explicit_content_type_for_missing_extension(tmp_path):
    """A readme without any extension gets no content-type (and no error)."""
    pyproject = _pep621_example_project(tmp_path, "README")
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type is None
@pytest.mark.parametrize(
    ('pyproject_text', 'expected_maintainers_meta_value'),
    (
        pytest.param(
            PEP621_EXAMPLE,
            (
                'Brett Cannon <brett@python.org>, "John X. Ãørçeč" <john@utf8.org>, '
                'Γαμα קּ 東 <gama@utf8.org>'
            ),
            id='non-international-emails',
        ),
        pytest.param(
            PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
            'Степан Бандера <криївка@оун-упа.укр>',
            marks=pytest.mark.xfail(
                reason="CPython's `email.headerregistry.Address` only supports "
                'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
                strict=True,
            ),
            id='international-email',
        ),
    ),
)
def test_utf8_maintainer_in_metadata(  # issue-3663
    expected_maintainers_meta_value,
    pyproject_text,
    tmp_path,
):
    """Non-ASCII maintainer names/e-mails must survive into core metadata."""
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        pyproject_text=pyproject_text,
    )
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.maintainer_email == expected_maintainers_meta_value
    # Also make sure the serialised PKG-INFO file round-trips the value.
    pkg_file = tmp_path / "PKG-FILE"
    with open(pkg_file, "w", encoding="utf-8") as fh:
        dist.metadata.write_pkg_file(fh)
    content = pkg_file.read_text(encoding="utf-8")
    assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
class TestLicenseFiles:
    # TODO: After PEP 639 is accepted, we have to move the license-files
    # to the `project` table instead of `tool.setuptools`

    def base_pyproject(self, tmp_path, additional_text):
        """Write the PEP 621 example project, append ``additional_text`` to
        its ``pyproject.toml`` and return the path to that file."""
        pyproject = _pep621_example_project(tmp_path, "README")
        text = pyproject.read_text(encoding="utf-8")
        # Sanity-check
        assert 'license = {file = "LICENSE.txt"}' in text
        assert "[tool.setuptools]" not in text
        text = f"{text}\n{additional_text}\n"
        pyproject.write_text(text, encoding="utf-8")
        return pyproject

    def test_both_license_and_license_files_defined(self, tmp_path):
        """An explicit ``license-files`` list takes precedence, while
        ``license.file`` still fills the license *text*."""
        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        (tmp_path / "_FILE.txt").touch()
        (tmp_path / "_FILE.rst").touch()

        # Would normally match the `license_files` patterns, but we want to exclude it
        # by being explicit. On the other hand, contents should be added to `license`
        license = tmp_path / "LICENSE.txt"
        license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
        assert dist.metadata.license == "LicenseRef-Proprietary\n"

    def test_default_patterns(self, tmp_path):
        """With no explicit ``license-files``, the default glob patterns pick
        up the LICEN[CS]E*/COPYING*/AUTHORS*/NOTICE* files created below."""
        setuptools_config = '[tool.setuptools]\nzip-safe = false'
        # ^ used just to trigger section validation
        pyproject = self.base_pyproject(tmp_path, setuptools_config)

        license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()
        for fname in license_files:
            (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")

        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert (tmp_path / "LICENSE.txt").exists()  # from base example
        assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
class TestPyModules:
    # https://github.com/pypa/setuptools/issues/4316

    def dist(self, name):
        """Apply a pyproject.toml declaring ``name`` as the only py-module."""
        toml_config = f"""
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        py-modules = [{name!r}]
        """
        pyproject = Path("pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        return pyprojecttoml.apply_configuration(Distribution({}), pyproject)

    @pytest.mark.parametrize("module", ["pip-run", "abc-d.λ-xyz-e"])
    def test_valid_module_name(self, tmp_path, monkeypatch, module):
        """Dashed and non-ASCII module names are accepted."""
        monkeypatch.chdir(tmp_path)
        assert module in self.dist(module).py_modules

    @pytest.mark.parametrize("module", ["pip run", "-pip-run", "pip-run-stubs"])
    def test_invalid_module_name(self, tmp_path, monkeypatch, module):
        """These names are rejected when ``py_modules`` is accessed."""
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="py-modules"):
            self.dist(module).py_modules
class TestExtModules:
    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
        """``[tool.setuptools] ext-modules`` populates ``dist.ext_modules``."""
        monkeypatch.chdir(tmp_path)
        pyproject = Path("pyproject.toml")
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        ext-modules = [
            {name = "my.ext", sources = ["hello.c", "world.c"]}
        ]
        """
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        # The setting is still experimental, hence the expected warning.
        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "my.ext"
        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
class TestDeprecatedFields:
    def test_namespace_packages(self, tmp_path):
        """``namespace-packages`` was removed and must raise an error."""
        pyproject = tmp_path / "pyproject.toml"
        config = """
        [project]
        name = "myproj"
        version = "42"
        [tool.setuptools]
        namespace-packages = ["myproj.pkg"]
        """
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(RemovedConfigError, match="namespace-packages"):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
class TestPresetField:
    """Interplay between attributes pre-set on the distribution (e.g. via
    ``setup.py``) and the ``project.dynamic`` list in ``pyproject.toml``."""

    def pyproject(self, tmp_path, dynamic, extra_content=""):
        # Minimal pyproject.toml declaring the given ``dynamic`` list; a
        # static version is included whenever "version" itself is not dynamic.
        content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
        if "version" not in dynamic:
            content += "version = '42'\n"
        file = tmp_path / "pyproject.toml"
        file.write_text(content + extra_content, encoding="utf-8")
        return file

    @pytest.mark.parametrize(
        "attr, field, value",
        [
            ("classifiers", "classifiers", ["Private :: Classifier"]),
            ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
            ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}),
            pytest.param(
                *("install_requires", "dependencies", ["six"]),
                marks=[
                    pytest.mark.filterwarnings("ignore:.*install_requires. overwritten")
                ],
            ),
        ],
    )
    def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
        """Setuptools cannot set a field if not listed in ``dynamic``"""
        pyproject = self.pyproject(tmp_path, [])
        dist = makedist(tmp_path, **{attr: value})
        msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
        with pytest.warns(_MissingDynamic, match=msg):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        # The pre-set value must end up discarded.
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert not dist_value

    @pytest.mark.parametrize(
        "attr, field, value",
        [
            ("install_requires", "dependencies", []),
            ("extras_require", "optional-dependencies", {}),
            ("install_requires", "dependencies", ["six"]),
            ("classifiers", "classifiers", ["Private :: Classifier"]),
        ],
    )
    def test_listed_in_dynamic(self, tmp_path, attr, field, value):
        """Fields listed in ``dynamic`` keep the pre-set value."""
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, **{attr: value})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert dist_value == value

    def test_warning_overwritten_dependencies(self, tmp_path):
        """A static ``dependencies`` table overwrites ``install_requires``
        pre-set elsewhere — with a warning."""
        src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(src, encoding="utf-8")
        dist = makedist(tmp_path, install_requires=["wheel"])
        with pytest.warns(match="`install_requires` overwritten"):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "wheel" not in dist.install_requires

    def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
        """
        Internally setuptools converts dependencies with markers to "extras".
        If ``install_requires`` is given by ``setup.py``, we have to ensure that
        applying ``optional-dependencies`` does not overwrite the mandatory
        dependencies with markers (see #3204).
        """
        # If setuptools replace its internal mechanism that uses `requires.txt`
        # this test has to be rewritten to adapt accordingly
        extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
        pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
        install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
        dist = makedist(tmp_path, install_requires=install_req)
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "foo" in dist.extras_require
        egg_info = dist.get_command_obj("egg_info")
        write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
        reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
        assert "importlib-resources" in reqs
        assert "bar" in reqs
        # The environment marker must survive into the requires.txt output.
        assert ':python_version < "3.7"' in reqs

    @pytest.mark.parametrize(
        "field,group", [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")]
    )
    @pytest.mark.filterwarnings("error")
    def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
        # Issue 3862
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, entry_points={group: ["foobar=foobar:main"]})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert group in dist.entry_points
class TestMeta:
    def test_example_file_in_sdist(self, setuptools_sdist):
        """Meta test to ensure tests can run from sdist"""
        # The examples list file must be shipped inside the sdist archive.
        with tarfile.open(setuptools_sdist) as tar:
            assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
class TestInteropCommandLineParsing:
    def test_version(self, tmp_path, monkeypatch, capsys):
        """``--version`` on the CLI must print the pyproject-configured version."""
        # See pypa/setuptools#4047
        # This test can be removed once the CLI interface of setup.py is removed
        monkeypatch.chdir(tmp_path)
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        """
        pyproject = Path(tmp_path, "pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        opts = {"script_args": ["--version"]}
        dist = pyprojecttoml.apply_configuration(Distribution(opts), pyproject)
        dist.parse_command_line()  # <-- there should be no exception here.
        captured = capsys.readouterr()
        assert "42.0" in captured.out
# --- Auxiliary Functions ---
def core_metadata(dist) -> str:
    """Render ``dist``'s PKG-INFO, validate it, and normalise it for diffing.

    Lines whose representation legitimately differs between the ``setup.cfg``
    and ``pyproject.toml`` code paths are stripped so the two renderings can
    be compared directly:
    - author/maintainer fields (PEP 621 is very particular about their
      conversion);
    - ``Project-URL: Homepage`` / ``Home-page`` (may be redundant);
    - ``Description-Content-Type`` (may rely on a default in the original but
      be backfilled in the TOML version);
    - blank lines.
    """
    buffer = io.StringIO()
    try:
        dist.metadata.write_pkg_file(buffer)
        pkg_file_txt = buffer.getvalue()
    finally:
        buffer.close()

    # Make sure core metadata is valid (raises on invalid input).
    Metadata.from_email(pkg_file_txt, validate=True)

    # ---- DIFF NORMALISATION ----
    skip_prefixes: tuple[str, ...] = (
        "Author:",
        "Author-email:",
        "Maintainer:",
        "Maintainer-email:",
        "Project-URL: Homepage,",
        "Home-page:",
        "Description-Content-Type:",
    )
    kept = [
        f"{line}\n"
        for line in pkg_file_txt.splitlines()
        if line and not line.startswith(skip_prefixes)
    ]
    return "".join(kept)

View File

@ -0,0 +1,221 @@
import os
import sys
from pathlib import Path
import pytest
from setuptools.config import expand
from setuptools.discovery import find_package_path
from distutils.errors import DistutilsOptionError
def write_files(files, root_dir):
    """Create ``files`` (mapping of relative path -> text) under ``root_dir``."""
    for rel_path, text in files.items():
        target = root_dir / rel_path
        # Create intermediate directories on demand.
        target.parent.mkdir(exist_ok=True, parents=True)
        target.write_text(text, encoding="utf-8")
def test_glob_relative(tmp_path, monkeypatch):
    """``expand.glob_relative`` resolves recursive/character-class globs
    relative to the cwd or to an explicit root directory."""
    files = {
        "dir1/dir2/dir3/file1.txt",
        "dir1/dir2/file2.txt",
        "dir1/file3.txt",
        "a.ini",
        "b.ini",
        "dir1/c.ini",
        "dir1/dir2/a.ini",
    }
    write_files({k: "" for k in files}, tmp_path)
    patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
    monkeypatch.chdir(tmp_path)
    assert set(expand.glob_relative(patterns)) == files
    # Make sure the same APIs work outside cwd
    assert set(expand.glob_relative(patterns, tmp_path)) == files
def test_read_files(tmp_path, monkeypatch):
    """``expand.read_files`` concatenates file contents but must refuse to
    read files located outside the root directory (path-traversal guard)."""
    dir_ = tmp_path / "dir_"
    (tmp_path / "_dir").mkdir(exist_ok=True)
    (tmp_path / "a.txt").touch()
    files = {"a.txt": "a", "dir1/b.txt": "b", "dir1/dir2/c.txt": "c"}
    write_files(files, dir_)

    # A sibling directory sharing the name prefix (``dir_secrets`` next to
    # ``dir_``) must not slip past the containment check.
    secrets = Path(str(dir_) + "secrets")
    secrets.mkdir(exist_ok=True)
    write_files({"secrets.txt": "secret keys"}, secrets)

    with monkeypatch.context() as m:
        m.chdir(dir_)
        assert expand.read_files(list(files)) == "a\nb\nc"

        cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
        with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
            expand.read_files(["../a.txt"])

        cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
        with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
            expand.read_files(["../dir_secrets/secrets.txt"])

    # Make sure the same APIs work outside cwd
    assert expand.read_files(list(files), dir_) == "a\nb\nc"
    with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
        expand.read_files(["../a.txt"], dir_)
class TestReadAttr:
    """Tests for ``expand.read_attr`` (static resolution of ``attr:`` values
    without importing/executing the target module)."""

    @pytest.mark.parametrize(
        "example",
        [
            # No cookie means UTF-8:
            b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
            # If a cookie is present, honor it:
            b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
            b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
        ],
    )
    def test_read_attr_encoding_cookie(self, example, tmp_path):
        # The ``raise SystemExit`` line proves the module is never executed.
        (tmp_path / "mod.py").write_bytes(example)
        assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'

    def test_read_attr(self, tmp_path, monkeypatch):
        files = {
            "pkg/__init__.py": "",
            "pkg/sub/__init__.py": "VERSION = '0.1.1'",
            "pkg/sub/mod.py": (
                "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\nraise SystemExit(1)"
            ),
        }
        write_files(files, tmp_path)

        with monkeypatch.context() as m:
            m.chdir(tmp_path)
            # Make sure it can read the attr statically without evaluating the module
            assert expand.read_attr('pkg.sub.VERSION') == '0.1.1'
            values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
            assert values['a'] == 0
            assert values['b'] == {42}

        # Make sure the same APIs work outside cwd
        assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
        values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
        assert values['c'] == (0, 1, 1)

    @pytest.mark.parametrize(
        "example",
        [
            "VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
            "VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
        ],
    )
    def test_read_annotated_attr(self, tmp_path, example):
        files = {
            "pkg/__init__.py": "",
            "pkg/sub/__init__.py": example,
        }
        write_files(files, tmp_path)
        # Make sure this attribute can be read statically
        assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'

    def test_import_order(self, tmp_path):
        """
        Sometimes the import machinery will import the parent package of a nested
        module, which triggers side-effects and might create problems (see issue #3176)
        ``read_attr`` should bypass these limitations by resolving modules statically
        (via ast.literal_eval).
        """
        files = {
            "src/pkg/__init__.py": "from .main import func\nfrom .about import version",
            "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
            "src/pkg/about.py": "version = '42'",
        }
        write_files(files, tmp_path)
        attr_desc = "pkg.about.version"
        package_dir = {"": "src"}
        # `import super_complicated_dep` should not run, otherwise the build fails
        assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
@pytest.mark.parametrize(
    'package_dir, file, module, return_value',
    [
        ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
        ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
        ({}, "single_module.py", "single_module", 70),
        ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
    ],
)
def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_value):
    """``expand.resolve_class`` imports a class from its dotted name, honouring
    ``package_dir`` mappings across different project layouts."""
    monkeypatch.setattr(sys, "modules", {})  # reproducibility
    files = {file: f"class Custom:\n def testing(self): return {return_value}"}
    write_files(files, tmp_path)
    cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
    assert cls().testing() == return_value
@pytest.mark.parametrize(
    'args, pkgs',
    [
        ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
        ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
        ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
        ({}, {"pkg", "other", "dir1", "dir1.dir2"}),  # default value for `namespaces`
    ],
)
def test_find_packages(tmp_path, args, pkgs):
    """``expand.find_packages`` discovers packages relative to ``root_dir`` and
    fills ``fill_package_dir`` so each package can be located on disk.

    (A dead ``where = kwargs.get(...)`` assignment was removed: the value was
    never read before being unconditionally reassigned below.)
    """
    files = {
        "pkg/__init__.py",
        "other/__init__.py",
        "dir1/dir2/__init__.py",
    }
    write_files({k: "" for k in files}, tmp_path)
    package_dir = {}
    kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
    assert set(expand.find_packages(**kwargs)) == pkgs
    # Every discovered package must resolve to an existing path.
    for pkg in pkgs:
        pkg_path = find_package_path(pkg, package_dir, tmp_path)
        assert os.path.exists(pkg_path)

    # Make sure the same APIs work outside cwd
    where = [
        str((tmp_path / p).resolve()).replace(os.sep, "/")  # ensure posix-style paths
        for p in args.pop("where", ["."])
    ]
    assert set(expand.find_packages(where=where, **args)) == pkgs
@pytest.mark.parametrize(
    "files, where, expected_package_dir",
    [
        (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
        (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
        (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
        (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
        (
            ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
            ["src1", "src2"],
            {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
        ),
        (
            ["src/pkg1/__init__.py", "pkg2/__init__.py"],
            ["src", "."],
            {"pkg1": "src/pkg1"},
        ),
    ],
)
def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
    """``find_packages(fill_package_dir=...)`` records package -> directory
    mappings that match the discovered layout."""
    write_files({k: "" for k in files}, tmp_path)
    pkg_dir = {}
    kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False}
    pkgs = expand.find_packages(where=where, **kwargs)
    assert set(pkg_dir.items()) == set(expected_package_dir.items())
    # Every discovered package must resolve to an existing path.
    for pkg in pkgs:
        pkg_path = find_package_path(pkg, pkg_dir, tmp_path)
        assert os.path.exists(pkg_path)

View File

@ -0,0 +1,396 @@
import re
from configparser import ConfigParser
from inspect import cleandoc
import jaraco.path
import pytest
import tomli_w
from path import Path
import setuptools # noqa: F401 # force distutils.core to be patched
from setuptools.config.pyprojecttoml import (
_ToolsTypoInMetadata,
apply_configuration,
expand_configuration,
read_configuration,
validate,
)
from setuptools.dist import Distribution
from setuptools.errors import OptionError
import distutils.core
# A fairly complete ``pyproject.toml`` fixture exercising PEP 621 metadata,
# dynamic fields, ``[tool.setuptools]`` options and ``[tool.distutils]``
# command defaults (paired with the project built by ``create_example``).
EXAMPLE = """
[project]
name = "myproj"
keywords = ["some", "key", "words"]
dynamic = ["version", "readme"]
requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
dependencies = [
'importlib-metadata>=0.12;python_version<"3.8"',
'importlib-resources>=1.0;python_version<"3.7"',
'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
]
[project.optional-dependencies]
docs = [
"sphinx>=3",
"sphinx-argparse>=0.2.5",
"sphinx-rtd-theme>=0.4.3",
]
testing = [
"pytest>=1",
"coverage>=3,<5",
]
[project.scripts]
exec = "pkg.__main__:exec"
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
package-dir = {"" = "src"}
zip-safe = true
platforms = ["any"]
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.cmdclass]
sdist = "pkg.mod.CustomSdist"
[tool.setuptools.dynamic.version]
attr = "pkg.__version__.VERSION"
[tool.setuptools.dynamic.readme]
file = ["README.md"]
content-type = "text/markdown"
[tool.setuptools.package-data]
"*" = ["*.txt"]
[tool.setuptools.data-files]
"data" = ["_files/*.txt"]
[tool.distutils.sdist]
formats = "gztar"
[tool.distutils.bdist_wheel]
universal = true
"""
def create_example(path, pkg_root):
    """Create an example project at ``path`` with packages under ``pkg_root``.

    ``pkg_root == "."`` produces a flat layout; any other value nests the
    packages (plus an extra namespace package) inside that directory.
    """
    files = {
        "pyproject.toml": EXAMPLE,
        "README.md": "hello world",
        "_files": {
            "file.txt": "",
        },
    }
    packages = {
        "pkg": {
            "__init__.py": "",
            "mod.py": "class CustomSdist: pass",
            "__version__.py": "VERSION = (3, 10)",
            "__main__.py": "def exec(): print('hello')",
        },
    }

    assert pkg_root  # Meta-test: cannot be empty string.
    if pkg_root == ".":
        files = {**files, **packages}
        # skip other files: flat-layout will raise error for multi-package dist
    else:
        # Use this opportunity to ensure namespaces are discovered
        files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}}

    jaraco.path.build(files, prefix=path)
def verify_example(config, path, pkg_root):
    """Write ``config`` as ``pyproject.toml`` inside ``path`` and check that
    its expanded form matches the project generated by ``create_example``."""
    pyproject = path / "pyproject.toml"
    pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
    expanded = expand_configuration(config, path)
    expanded_project = expanded["project"]
    # ``read_configuration(expand=True)`` must agree with manual expansion.
    assert read_configuration(pyproject, expand=True) == expanded
    assert expanded_project["version"] == "3.10"
    assert expanded_project["readme"]["text"] == "hello world"
    assert "packages" in expanded["tool"]["setuptools"]
    if pkg_root == ".":
        # Auto-discovery will raise error for multi-package dist
        assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
    else:
        assert set(expanded["tool"]["setuptools"]["packages"]) == {
            "pkg",
            "other",
            "other.nested",
        }
    assert expanded["tool"]["setuptools"]["include-package-data"] is True
    assert "" in expanded["tool"]["setuptools"]["package-data"]
    assert "*" not in expanded["tool"]["setuptools"]["package-data"]
    assert expanded["tool"]["setuptools"]["data-files"] == [
        ("data", ["_files/file.txt"])
    ]
def test_read_configuration(tmp_path):
    """Reading without expansion leaves dynamic fields unset; the expanded
    form is then validated via ``verify_example``."""
    create_example(tmp_path, "src")
    pyproject = tmp_path / "pyproject.toml"

    config = read_configuration(pyproject, expand=False)
    assert config["project"].get("version") is None
    assert config["project"].get("readme") is None

    verify_example(config, tmp_path, "src")
@pytest.mark.parametrize(
    "pkg_root, opts",
    [
        (".", {}),
        ("src", {}),
        ("lib", {"packages": {"find": {"where": ["lib"]}}}),
    ],
)
def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
    """``attr:``/``file:`` dynamic directives must still resolve when the
    ``packages``/``package-dir`` configuration is auto-discovered."""
    create_example(tmp_path, pkg_root)

    pyproject = tmp_path / "pyproject.toml"
    config = read_configuration(pyproject, expand=False)
    assert config["project"].get("version") is None
    assert config["project"].get("readme") is None

    # Remove the explicit configuration to force auto-discovery.
    config["tool"]["setuptools"].pop("packages", None)
    config["tool"]["setuptools"].pop("package-dir", None)
    config["tool"]["setuptools"].update(opts)
    verify_example(config, tmp_path, pkg_root)
# Sample entry-points data shared by TestEntryPoints below; written to an
# INI-style ``entry-points.txt`` via ConfigParser in ``write_entry_points``.
ENTRY_POINTS = {
    "console_scripts": {"a": "mod.a:func"},
    "gui_scripts": {"b": "mod.b:func"},
    "other": {"c": "mod.c:func [extra]"},
}
class TestEntryPoints:
    """Dynamic entry-points loaded from a file via ``tool.setuptools.dynamic``."""

    def write_entry_points(self, tmp_path):
        # Serialize ENTRY_POINTS as an INI file consumable by setuptools.
        parser = ConfigParser()
        parser.read_dict(ENTRY_POINTS)
        with open(tmp_path / "entry-points.txt", "w", encoding="utf-8") as fh:
            parser.write(fh)

    def pyproject(self, dynamic=None):
        # Minimal config marking entry-points (and friends) as dynamic.
        dynamic = dynamic or ["scripts", "gui-scripts", "entry-points"]
        tool_table = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
        return {"project": {"dynamic": dynamic}, "tool": {"setuptools": tool_table}}

    def test_all_listed_in_dynamic(self, tmp_path):
        self.write_entry_points(tmp_path)
        expanded = expand_configuration(self.pyproject(), tmp_path)
        project = expanded["project"]
        scripts = project["scripts"]
        gui_scripts = project["gui-scripts"]
        entry_points = project["entry-points"]
        # console/gui scripts are split out; everything else stays grouped.
        assert len(scripts) == 1 and scripts["a"] == "mod.a:func"
        assert len(gui_scripts) == 1 and gui_scripts["b"] == "mod.b:func"
        assert len(entry_points) == 1
        assert entry_points["other"]["c"] == "mod.c:func [extra]"

    @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
    def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
        self.write_entry_points(tmp_path)
        dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}
        pattern = re.compile(
            f"defined outside of `pyproject.toml`:.*{missing_dynamic}", re.S
        )
        with pytest.raises(OptionError, match=pattern):
            expand_configuration(self.pyproject(dynamic), tmp_path)
class TestClassifiers:
    """Behavior of dynamic ``classifiers`` in ``pyproject.toml``."""

    def test_dynamic(self, tmp_path):
        # Let's create a project example that has dynamic classifiers
        # coming from a txt file.
        create_example(tmp_path, "src")
        classifiers = cleandoc(
            """
            Framework :: Flask
            Programming Language :: Haskell
            """
        )
        (tmp_path / "classifiers.txt").write_text(classifiers, encoding="utf-8")
        pyproject = tmp_path / "pyproject.toml"
        config = read_configuration(pyproject, expand=False)
        dynamic = config["project"]["dynamic"]
        # Ensure "classifiers" is listed as dynamic (deduplicated via set).
        config["project"]["dynamic"] = list({*dynamic, "classifiers"})
        dynamic_config = config["tool"]["setuptools"]["dynamic"]
        dynamic_config["classifiers"] = {"file": "classifiers.txt"}
        # When the configuration is expanded,
        # each line of the file should be a different classifier.
        validate(config, pyproject)
        expanded = expand_configuration(config, tmp_path)
        assert set(expanded["project"]["classifiers"]) == {
            "Framework :: Flask",
            "Programming Language :: Haskell",
        }

    def test_dynamic_without_config(self, tmp_path):
        # "classifiers" is dynamic but no tool.setuptools.dynamic entry exists.
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["classifiers"]
        """
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(OptionError, match="No configuration .* .classifiers."):
            read_configuration(pyproject)

    def test_dynamic_readme_from_setup_script_args(self, tmp_path):
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["readme"]
        """
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        dist = Distribution(attrs={"long_description": "42"})
        # No error should occur because of missing `readme`
        dist = apply_configuration(dist, pyproject)
        assert dist.metadata.long_description == "42"

    def test_dynamic_without_file(self, tmp_path):
        # The referenced classifiers file does not exist: expect a warning,
        # and the field is simply left out of the expanded project table.
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["classifiers"]

        [tool.setuptools.dynamic]
        classifiers = {file = ["classifiers.txt"]}
        """
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
            expanded = read_configuration(pyproject)
        assert "classifiers" not in expanded["project"]
@pytest.mark.parametrize(
    "example",
    (
        """
        [project]
        name = "myproj"
        version = "1.2"

        [my-tool.that-disrespect.pep518]
        value = 42
        """,
    ),
)
def test_ignore_unrelated_config(tmp_path, example):
    """Third-party tables in ``pyproject.toml`` must not break parsing."""
    config_file = tmp_path / "pyproject.toml"
    config_file.write_text(cleandoc(example), encoding="utf-8")
    # Make sure no error is raised due to 3rd party configs in pyproject.toml
    assert read_configuration(config_file) is not None
@pytest.mark.parametrize(
    "example, error_msg",
    [
        (
            """
            [project]
            name = "myproj"
            version = "1.2"
            requires = ['pywin32; platform_system=="Windows"' ]
            """,
            "configuration error: .project. must not contain ..requires.. properties",
        ),
    ],
)
def test_invalid_example(tmp_path, example, error_msg):
    """Schema violations should surface as ValueError with a helpful message."""
    config_file = tmp_path / "pyproject.toml"
    config_file.write_text(cleandoc(example), encoding="utf-8")
    expected = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
    with pytest.raises(ValueError, match=expected):
        read_configuration(config_file)
@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
def test_empty(tmp_path, config):
    """Files without relevant tables should read back as an empty dict."""
    config_file = tmp_path / "pyproject.toml"
    config_file.write_text(config, encoding="utf-8")
    # Make sure no error is raised
    assert read_configuration(config_file) == {}
@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
def test_include_package_data_by_default(tmp_path, config):
    """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
    default.
    """
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(config, encoding="utf-8")
    # Bind the parsed result to a new name instead of shadowing the ``config``
    # parameter (the raw TOML text) with the parsed dict — the original
    # rebinding made the two meanings of ``config`` easy to confuse.
    parsed = read_configuration(pyproject)
    assert parsed["tool"]["setuptools"]["include-package-data"] is True
def test_include_package_data_in_setuppy(tmp_path):
    """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
    ``setup.py``.

    See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
    """
    files = {
        "pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n",
        "setup.py": "__import__('setuptools').setup(include_package_data=False)",
    }
    jaraco.path.build(files, prefix=tmp_path)
    # NOTE(review): ``Path`` is used as a chdir context manager here, so it is
    # presumably ``path.Path`` (the third-party ``path`` package), not
    # ``pathlib.Path`` — confirm against this module's imports.
    with Path(tmp_path):
        dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
    assert dist.get_name() == "myproj"
    assert dist.get_version() == "42"
    # The explicit ``False`` from setup.py wins over the pyproject default.
    assert dist.include_package_data is False
def test_warn_tools_typo(tmp_path):
    """A ``[tools.setuptools]`` table (typo for ``[tool.setuptools]``) should warn.

    See https://github.com/pypa/setuptools/issues/4150
    """
    contents = cleandoc(
        """
        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"

        [project]
        name = "myproj"
        version = '42'

        [tools.setuptools]
        packages = ["package"]
        """
    )
    config_file = tmp_path / "pyproject.toml"
    config_file.write_text(contents, encoding="utf-8")
    with pytest.warns(_ToolsTypoInMetadata):
        read_configuration(config_file)

View File

@ -0,0 +1,109 @@
from inspect import cleandoc
import pytest
from jaraco import path
from setuptools.config.pyprojecttoml import apply_configuration
from setuptools.dist import Distribution
from setuptools.warnings import SetuptoolsWarning
def test_dynamic_dependencies(tmp_path):
    """Dynamic ``dependencies`` should be read from the referenced requirements file."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["dependencies"]

        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"

        [tool.setuptools.dynamic.dependencies]
        file = ["requirements.txt"]
        """
    )
    path.build(
        {"requirements.txt": "six\n # comment\n", "pyproject.toml": pyproject_toml},
        prefix=tmp_path,
    )
    dist = apply_configuration(Distribution(), tmp_path / "pyproject.toml")
    # Comment lines in the requirements file are ignored.
    assert dist.install_requires == ["six"]
def test_dynamic_optional_dependencies(tmp_path):
    """Dynamic ``optional-dependencies`` groups should be read from files."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["optional-dependencies"]

        [tool.setuptools.dynamic.optional-dependencies.docs]
        file = ["requirements-docs.txt"]

        [build-system]
        requires = ["setuptools", "wheel"]
        build-backend = "setuptools.build_meta"
        """
    )
    path.build(
        {
            "requirements-docs.txt": "sphinx\n # comment\n",
            "pyproject.toml": pyproject_toml,
        },
        prefix=tmp_path,
    )
    dist = apply_configuration(Distribution(), tmp_path / "pyproject.toml")
    assert dist.extras_require == {"docs": ["sphinx"]}
def test_mixed_dynamic_optional_dependencies(tmp_path):
    """
    Test that if PEP 621 was loosened to allow mixing of dynamic and static
    configurations in the case of fields containing sub-fields (groups),
    things would work out.
    """
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        dynamic = ["optional-dependencies"]

        [project.optional-dependencies]
        docs = ["sphinx"]

        [tool.setuptools.dynamic.optional-dependencies.images]
        file = ["requirements-images.txt"]
        """
    )
    path.build(
        {
            "requirements-images.txt": "pillow~=42.0\n # comment\n",
            "pyproject.toml": pyproject_toml,
        },
        prefix=tmp_path,
    )
    # Currently the mix is rejected: a field cannot be both static and dynamic.
    with pytest.raises(ValueError, match="project.optional-dependencies"):
        apply_configuration(Distribution(), tmp_path / "pyproject.toml")
def test_mixed_extras_require_optional_dependencies(tmp_path):
    """Static ``optional-dependencies`` overwrites ``extras_require`` from setup(),
    with a warning."""
    pyproject_toml = cleandoc(
        """
        [project]
        name = "myproj"
        version = "1.0"
        optional-dependencies.docs = ["sphinx"]
        """
    )
    path.build({"pyproject.toml": pyproject_toml}, prefix=tmp_path)
    initial_attrs = {"extras_require": {"hello": ["world"]}}
    with pytest.warns(SetuptoolsWarning, match=".extras_require. overwritten"):
        dist = apply_configuration(Distribution(initial_attrs), tmp_path / "pyproject.toml")
    assert dist.extras_require == {"docs": ["sphinx"]}

View File

@ -0,0 +1,967 @@
import configparser
import contextlib
import inspect
from pathlib import Path
from unittest.mock import Mock, patch
import pytest
from packaging.requirements import InvalidRequirement
from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
from setuptools.dist import Distribution, _Distribution
from setuptools.warnings import SetuptoolsDeprecationWarning
from ..textwrap import DALS
from distutils.errors import DistutilsFileError, DistutilsOptionError
class ErrConfigHandler(ConfigHandler[Target]):
    """Erroneous handler. Fails to implement required methods."""

    # The prefix value itself is irrelevant here; accessing ``parsers`` on an
    # instance is expected to raise NotImplementedError (see
    # test_parsers_implemented below).
    section_prefix = "**err**"
def make_package_dir(name, base_dir, ns=False):
    """Create nested package directories under *base_dir*.

    *name* may contain ``/`` separators for nesting. Returns a tuple of
    (package dir, ``__init__.py`` file); the init file is ``None`` when
    *ns* is true (namespace package — no ``__init__.py`` is created).
    """
    pkg_dir = base_dir
    for part in name.split('/'):
        pkg_dir = pkg_dir.mkdir(part)
    if ns:
        return pkg_dir, None
    init_file = pkg_dir.join('__init__.py')
    init_file.write('')
    return pkg_dir, init_file
def fake_env(
    tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
):
    """Materialize a minimal project in *tmpdir*: ``setup.py``, ``setup.cfg``
    (written as bytes in *encoding*) and one package with version attributes.

    Returns (package dir, setup.cfg file).
    """
    if setup_py is None:
        setup_py = 'from setuptools import setup\nsetup()\n'
    tmpdir.join('setup.py').write(setup_py)
    config = tmpdir.join('setup.cfg')
    # Write raw bytes so tests can exercise non-UTF-8 encodings.
    config.write(setup_cfg.encode(encoding), mode='wb')
    package_dir, init_file = make_package_dir(package_path, tmpdir)
    # Attributes consumed by the ``attr:`` directive tests.
    init_file.write(
        'VERSION = (1, 2, 3)\n'
        '\n'
        'VERSION_MAJOR = 1'
        '\n'
        'def get_version():\n'
        '    return [3, 4, 5, "dev"]\n'
        '\n'
    )
    return package_dir, config
@contextlib.contextmanager
def get_dist(tmpdir, kwargs_initial=None, parse=True):
    """Yield a ``Distribution`` with *tmpdir* as the working directory.

    :param tmpdir: py.path directory to chdir into for the duration.
    :param kwargs_initial: initial attrs passed to ``Distribution``.
    :param parse: when True, parse config files before yielding.
    """
    kwargs_initial = kwargs_initial or {}
    with tmpdir.as_cwd():
        dist = Distribution(kwargs_initial)
        dist.script_name = 'setup.py'
        # Explicit ``if`` instead of the original ``parse and ...``
        # expression-for-side-effect idiom (flagged by linters, harder to read).
        if parse:
            dist.parse_config_files()
        yield dist
def test_parsers_implemented():
    """A handler that does not define ``parsers`` must raise on access."""
    with pytest.raises(NotImplementedError):
        faulty = ErrConfigHandler(None, {}, False, Mock())
        faulty.parsers  # attribute access itself is expected to raise
class TestConfigurationReader:
    """Behavior of ``setupcfg.read_configuration``."""

    def test_basic(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'keywords = one, two\n'
            '\n'
            '[options]\n'
            'scripts = bin/a.py, bin/b.py\n',
        )
        result = read_configuration('%s' % config)
        assert result['metadata']['version'] == '10.1.1'
        # Comma-separated values are parsed into lists.
        assert result['metadata']['keywords'] == ['one', 'two']
        assert result['options']['scripts'] == ['bin/a.py', 'bin/b.py']

    def test_no_config(self, tmpdir):
        missing = tmpdir.join('setup.cfg')
        with pytest.raises(DistutilsFileError):
            read_configuration('%s' % missing)

    def test_ignore_errors(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
        )
        # The broken ``attr:`` target raises by default...
        with pytest.raises(ImportError):
            read_configuration('%s' % config)
        # ...but with ignore_option_errors the bad option is simply dropped.
        result = read_configuration('%s' % config, ignore_option_errors=True)
        assert result['metadata']['keywords'] == ['one', 'two']
        assert 'version' not in result['metadata']
        config.remove()
class TestMetadata:
    """Tests for the ``[metadata]`` section of ``setup.cfg``."""

    def test_basic(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'description = Some description\n'
            'long_description_content_type = text/something\n'
            'long_description = file: README\n'
            'name = fake_name\n'
            'keywords = one, two\n'
            'provides = package, package.sub\n'
            'license = otherlic\n'
            'download_url = http://test.test.com/test/\n'
            'maintainer_email = test@test.com\n',
        )
        tmpdir.join('README').write('readme contents\nline2')
        meta_initial = {
            # This will be used so `otherlic` won't replace it.
            'license': 'BSD 3-Clause License',
        }
        with get_dist(tmpdir, meta_initial) as dist:
            metadata = dist.metadata
            assert metadata.version == '10.1.1'
            assert metadata.description == 'Some description'
            assert metadata.long_description_content_type == 'text/something'
            # ``file:`` directive pulled the README contents in.
            assert metadata.long_description == 'readme contents\nline2'
            assert metadata.provides == ['package', 'package.sub']
            assert metadata.license == 'BSD 3-Clause License'
            assert metadata.name == 'fake_name'
            assert metadata.keywords == ['one', 'two']
            assert metadata.download_url == 'http://test.test.com/test/'
            assert metadata.maintainer_email == 'test@test.com'

    def test_license_cfg(self, tmpdir):
        fake_env(
            tmpdir,
            DALS(
                """
                [metadata]
                name=foo
                version=0.0.1
                license=Apache 2.0
                """
            ),
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.name == "foo"
            assert metadata.version == "0.0.1"
            assert metadata.license == "Apache 2.0"

    def test_file_mixed(self, tmpdir):
        # Multiple files given to ``file:`` are concatenated with newlines.
        fake_env(
            tmpdir,
            '[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n',
        )
        tmpdir.join('README.rst').write('readme contents\nline2')
        tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.long_description == (
                'readme contents\nline2\nchangelog contents\nand stuff'
            )

    def test_file_sandboxed(self, tmpdir):
        # ``file:`` must not be able to read paths outside the project root.
        tmpdir.ensure("README")
        project = tmpdir.join('depth1', 'depth2')
        project.ensure(dir=True)
        fake_env(project, '[metadata]\nlong_description = file: ../../README\n')
        with get_dist(project, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()  # file: out of sandbox

    def test_aliases(self, tmpdir):
        # Aliased option names map onto the canonical metadata attributes.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'author_email = test@test.com\n'
            'home_page = http://test.test.com/test/\n'
            'summary = Short summary\n'
            'platform = a, b\n'
            'classifier =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.author_email == 'test@test.com'
            assert metadata.url == 'http://test.test.com/test/'
            assert metadata.description == 'Short summary'
            assert metadata.platforms == ['a', 'b']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_multiline(self, tmpdir):
        # Multi-line (dangling) values are parsed into lists as well.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'name = fake_name\n'
            'keywords =\n'
            '  one\n'
            '  two\n'
            'classifiers =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.keywords == ['one', 'two']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_dict(self, tmpdir):
        # ``key = value`` lines under project_urls become a dict.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'project_urls =\n'
            '  Link One = https://example.com/one/\n'
            '  Link Two = https://example.com/two/\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.project_urls == {
                'Link One': 'https://example.com/one/',
                'Link Two': 'https://example.com/two/',
            }

    def test_version(self, tmpdir):
        # ``attr:`` resolution: plain tuple, callable, scalar, and dotted
        # sub-module paths (with and without unimportable dependencies).
        package_dir, config = fake_env(
            tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n'
        )
        sub_a = package_dir.mkdir('subpkg_a')
        sub_a.join('__init__.py').write('')
        sub_a.join('mod.py').write('VERSION = (2016, 11, 26)')
        sub_b = package_dir.mkdir('subpkg_b')
        sub_b.join('__init__.py').write('')
        sub_b.join('mod.py').write(
            'import third_party_module\nVERSION = (2016, 11, 26)'
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'
        config.write('[metadata]\nversion = attr: fake_package.get_version\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '3.4.5.dev'
        config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1'
        config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'
        # subpkg_b imports an unavailable third-party module; the attr must
        # still be resolvable (without executing the import).
        config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'

    def test_version_file(self, tmpdir):
        _, config = fake_env(
            tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n'
        )
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'
        # A multi-line version file is ambiguous and must be rejected.
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
        with pytest.raises(DistutilsOptionError):
            with get_dist(tmpdir) as dist:
                dist.metadata.version

    def test_version_with_package_dir_simple(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_simple.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    = src\n',
            package_path='src/fake_package_simple',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_rename(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_rename.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_rename = fake_dir\n',
            package_path='fake_dir',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_complex(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_complex.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_complex = src/fake_dir\n',
            package_path='src/fake_dir',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_unknown_meta_item(self, tmpdir):
        fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n')
        with get_dist(tmpdir, parse=False) as dist:
            dist.parse_config_files()  # Skip unknown.

    def test_usupported_section(self, tmpdir):
        # (method name typo "usupported" kept for test-id stability)
        fake_env(tmpdir, '[metadata.some]\nkey = val\n')
        with get_dist(tmpdir, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()

    def test_classifiers(self, tmpdir):
        expected = set([
            'Framework :: Django',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.5',
        ])

        # From file.
        _, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n')
        tmpdir.join('classifiers').write(
            'Framework :: Django\n'
            'Programming Language :: Python :: 3\n'
            'Programming Language :: Python :: 3.5\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

        # From list notation
        config.write(
            '[metadata]\n'
            'classifiers =\n'
            '    Framework :: Django\n'
            '    Programming Language :: Python :: 3\n'
            '    Programming Language :: Python :: 3.5\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

    def test_interpolation(self, tmpdir):
        # Raw '%' values trigger configparser interpolation errors.
        fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n')
        with pytest.raises(configparser.InterpolationMissingOptionError):
            with get_dist(tmpdir):
                pass

    def test_non_ascii_1(self, tmpdir):
        fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_3(self, tmpdir):
        # An invalid coding cookie in a comment must be ignored.
        fake_env(tmpdir, '\n# -*- coding: invalid\n')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_4(self, tmpdir):
        fake_env(
            tmpdir,
            '# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n',
            encoding='utf-8',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.description == 'éàïôñ'

    def test_not_utf8(self, tmpdir):
        """
        Config files encoded not in UTF-8 will fail
        """
        fake_env(
            tmpdir,
            '# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n',
            encoding='iso-8859-15',
        )
        with pytest.raises(UnicodeDecodeError):
            with get_dist(tmpdir):
                pass

    def test_warn_dash_deprecation(self, tmpdir):
        # warn_dash_deprecation() is a method in setuptools.dist
        # remove this test and the method when no longer needed
        fake_env(
            tmpdir,
            '[metadata]\n'
            'author-email = test@test.com\n'
            'maintainer_email = foo@foo.com\n',
        )
        msg = "Usage of dash-separated 'author-email' will not be supported"
        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            with get_dist(tmpdir) as dist:
                metadata = dist.metadata

        assert metadata.author_email == 'test@test.com'
        assert metadata.maintainer_email == 'foo@foo.com'

    def test_make_option_lowercase(self, tmpdir):
        # remove this test and the method make_option_lowercase() in setuptools.dist
        # when no longer needed
        fake_env(tmpdir, '[metadata]\nName = foo\ndescription = Some description\n')
        msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            with get_dist(tmpdir) as dist:
                metadata = dist.metadata

        assert metadata.name == 'foo'
        assert metadata.description == 'Some description'
class TestOptions:
def test_basic(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'zip_safe = True\n'
'include_package_data = yes\n'
'package_dir = b=c, =src\n'
'packages = pack_a, pack_b.subpack\n'
'namespace_packages = pack1, pack2\n'
'scripts = bin/one.py, bin/two.py\n'
'eager_resources = bin/one.py, bin/two.py\n'
'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
'dependency_links = http://some.com/here/1, '
'http://some.com/there/2\n'
'python_requires = >=1.0, !=2.8\n'
'py_modules = module1, module2\n',
)
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
with deprec, get_dist(tmpdir) as dist:
assert dist.zip_safe
assert dist.include_package_data
assert dist.package_dir == {'': 'src', 'b': 'c'}
assert dist.packages == ['pack_a', 'pack_b.subpack']
assert dist.namespace_packages == ['pack1', 'pack2']
assert dist.scripts == ['bin/one.py', 'bin/two.py']
assert dist.dependency_links == ([
'http://some.com/here/1',
'http://some.com/there/2',
])
assert dist.install_requires == ([
'docutils>=0.3',
'pack==1.1,==1.3',
'hey',
])
assert dist.setup_requires == ([
'docutils>=0.3',
'spack ==1.1, ==1.3',
'there',
])
assert dist.python_requires == '>=1.0, !=2.8'
assert dist.py_modules == ['module1', 'module2']
def test_multiline(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'package_dir = \n'
' b=c\n'
' =src\n'
'packages = \n'
' pack_a\n'
' pack_b.subpack\n'
'namespace_packages = \n'
' pack1\n'
' pack2\n'
'scripts = \n'
' bin/one.py\n'
' bin/two.py\n'
'eager_resources = \n'
' bin/one.py\n'
' bin/two.py\n'
'install_requires = \n'
' docutils>=0.3\n'
' pack ==1.1, ==1.3\n'
' hey\n'
'setup_requires = \n'
' docutils>=0.3\n'
' spack ==1.1, ==1.3\n'
' there\n'
'dependency_links = \n'
' http://some.com/here/1\n'
' http://some.com/there/2\n',
)
deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
with deprec, get_dist(tmpdir) as dist:
assert dist.package_dir == {'': 'src', 'b': 'c'}
assert dist.packages == ['pack_a', 'pack_b.subpack']
assert dist.namespace_packages == ['pack1', 'pack2']
assert dist.scripts == ['bin/one.py', 'bin/two.py']
assert dist.dependency_links == ([
'http://some.com/here/1',
'http://some.com/there/2',
])
assert dist.install_requires == ([
'docutils>=0.3',
'pack==1.1,==1.3',
'hey',
])
assert dist.setup_requires == ([
'docutils>=0.3',
'spack ==1.1, ==1.3',
'there',
])
def test_package_dir_fail(self, tmpdir):
fake_env(tmpdir, '[options]\npackage_dir = a b\n')
with get_dist(tmpdir, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files()
def test_package_data(self, tmpdir):
fake_env(
tmpdir,
'[options.package_data]\n'
'* = *.txt, *.rst\n'
'hello = *.msg\n'
'\n'
'[options.exclude_package_data]\n'
'* = fake1.txt, fake2.txt\n'
'hello = *.dat\n',
)
with get_dist(tmpdir) as dist:
assert dist.package_data == {
'': ['*.txt', '*.rst'],
'hello': ['*.msg'],
}
assert dist.exclude_package_data == {
'': ['fake1.txt', 'fake2.txt'],
'hello': ['*.dat'],
}
def test_packages(self, tmpdir):
fake_env(tmpdir, '[options]\npackages = find:\n')
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package']
def test_find_directive(self, tmpdir):
dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
dir_sub_one, _ = make_package_dir('sub_one', dir_package)
dir_sub_two, _ = make_package_dir('sub_two', dir_package)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == set([
'fake_package',
'fake_package.sub_two',
'fake_package.sub_one',
])
config.write(
'[options]\n'
'packages = find:\n'
'\n'
'[options.packages.find]\n'
'where = .\n'
'include =\n'
' fake_package.sub_one\n'
' two\n'
)
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package.sub_one']
config.write(
'[options]\n'
'packages = find:\n'
'\n'
'[options.packages.find]\n'
'exclude =\n'
' fake_package.sub_one\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two'])
def test_find_namespace_directive(self, tmpdir):
dir_package, config = fake_env(
tmpdir, '[options]\npackages = find_namespace:\n'
)
dir_sub_one, _ = make_package_dir('sub_one', dir_package)
dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == {
'fake_package',
'fake_package.sub_two',
'fake_package.sub_one',
}
config.write(
'[options]\n'
'packages = find_namespace:\n'
'\n'
'[options.packages.find]\n'
'where = .\n'
'include =\n'
' fake_package.sub_one\n'
' two\n'
)
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package.sub_one']
config.write(
'[options]\n'
'packages = find_namespace:\n'
'\n'
'[options.packages.find]\n'
'exclude =\n'
' fake_package.sub_one\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'}
def test_extras_require(self, tmpdir):
fake_env(
tmpdir,
'[options.extras_require]\n'
'pdf = ReportLab>=1.2; RXP\n'
'rest = \n'
' docutils>=0.3\n'
' pack ==1.1, ==1.3\n',
)
with get_dist(tmpdir) as dist:
assert dist.extras_require == {
'pdf': ['ReportLab>=1.2', 'RXP'],
'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
}
assert set(dist.metadata.provides_extras) == {'pdf', 'rest'}
@pytest.mark.parametrize(
"config",
[
"[options.extras_require]\nfoo = bar;python_version<'3'",
"[options.extras_require]\nfoo = bar;os_name=='linux'",
"[options.extras_require]\nfoo = bar;python_version<'3'\n",
"[options.extras_require]\nfoo = bar;os_name=='linux'\n",
"[options]\ninstall_requires = bar;python_version<'3'",
"[options]\ninstall_requires = bar;os_name=='linux'",
"[options]\ninstall_requires = bar;python_version<'3'\n",
"[options]\ninstall_requires = bar;os_name=='linux'\n",
],
)
def test_raises_accidental_env_marker_misconfig(self, config, tmpdir):
fake_env(tmpdir, config)
match = (
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
"looks like a valid environment marker.*"
)
with pytest.raises(InvalidRequirement, match=match):
with get_dist(tmpdir) as _:
pass
@pytest.mark.parametrize(
"config",
[
"[options.extras_require]\nfoo = bar;python_version<3",
"[options.extras_require]\nfoo = bar;python_version<3\n",
"[options]\ninstall_requires = bar;python_version<3",
"[options]\ninstall_requires = bar;python_version<3\n",
],
)
def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
fake_env(tmpdir, config)
match = (
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
"looks like a valid environment marker.*"
)
with pytest.warns(SetuptoolsDeprecationWarning, match=match):
with get_dist(tmpdir) as _:
pass
@pytest.mark.parametrize(
"config",
[
"[options.extras_require]\nfoo =\n bar;python_version<'3'",
"[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
"[options.extras_require]\nfoo =\n bar;python_version<'3'\n",
"[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
"[options.extras_require]\nfoo =\n bar\n python_version<3\n",
"[options]\ninstall_requires =\n bar;python_version<'3'",
"[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
"[options]\ninstall_requires =\n bar;python_version<'3'\n",
"[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
"[options]\ninstall_requires =\n bar\n python_version<3\n",
],
)
@pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning")
def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
fake_env(tmpdir, config)
num_warnings = len(recwarn)
with get_dist(tmpdir) as _:
pass
# The examples are valid, no warnings shown
assert len(recwarn) == num_warnings
def test_dash_preserved_extras_require(self, tmpdir):
fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n')
with get_dist(tmpdir) as dist:
assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
def test_entry_points(self, tmpdir):
_, config = fake_env(
tmpdir,
'[options.entry_points]\n'
'group1 = point1 = pack.module:func, '
'.point2 = pack.module2:func_rest [rest]\n'
'group2 = point3 = pack.module:func2\n',
)
with get_dist(tmpdir) as dist:
assert dist.entry_points == {
'group1': [
'point1 = pack.module:func',
'.point2 = pack.module2:func_rest [rest]',
],
'group2': ['point3 = pack.module:func2'],
}
expected = (
'[blogtool.parsers]\n'
'.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
)
tmpdir.join('entry_points').write(expected)
# From file.
config.write('[options]\nentry_points = file: entry_points\n')
with get_dist(tmpdir) as dist:
assert dist.entry_points == expected
def test_case_sensitive_entry_points(self, tmpdir):
_, config = fake_env(
tmpdir,
'[options.entry_points]\n'
'GROUP1 = point1 = pack.module:func, '
'.point2 = pack.module2:func_rest [rest]\n'
'group2 = point3 = pack.module:func2\n',
)
with get_dist(tmpdir) as dist:
assert dist.entry_points == {
'GROUP1': [
'point1 = pack.module:func',
'.point2 = pack.module2:func_rest [rest]',
],
'group2': ['point3 = pack.module:func2'],
}
def test_data_files(self, tmpdir):
fake_env(
tmpdir,
'[options.data_files]\n'
'cfg =\n'
' a/b.conf\n'
' c/d.conf\n'
'data = e/f.dat, g/h.dat\n',
)
with get_dist(tmpdir) as dist:
expected = [
('cfg', ['a/b.conf', 'c/d.conf']),
('data', ['e/f.dat', 'g/h.dat']),
]
assert sorted(dist.data_files) == sorted(expected)
def test_data_files_globby(self, tmpdir):
fake_env(
tmpdir,
'[options.data_files]\n'
'cfg =\n'
' a/b.conf\n'
' c/d.conf\n'
'data = *.dat\n'
'icons = \n'
' *.ico\n'
'audio = \n'
' *.wav\n'
' sounds.db\n',
)
# Create dummy files for glob()'s sake:
tmpdir.join('a.dat').write('')
tmpdir.join('b.dat').write('')
tmpdir.join('c.dat').write('')
tmpdir.join('a.ico').write('')
tmpdir.join('b.ico').write('')
tmpdir.join('c.ico').write('')
tmpdir.join('beep.wav').write('')
tmpdir.join('boop.wav').write('')
tmpdir.join('sounds.db').write('')
with get_dist(tmpdir) as dist:
expected = [
('cfg', ['a/b.conf', 'c/d.conf']),
('data', ['a.dat', 'b.dat', 'c.dat']),
('icons', ['a.ico', 'b.ico', 'c.ico']),
('audio', ['beep.wav', 'boop.wav', 'sounds.db']),
]
assert sorted(dist.data_files) == sorted(expected)
def test_python_requires_simple(self, tmpdir):
fake_env(
tmpdir,
DALS(
"""
[options]
python_requires=>=2.7
"""
),
)
with get_dist(tmpdir) as dist:
dist.parse_config_files()
def test_python_requires_compound(self, tmpdir):
fake_env(
tmpdir,
DALS(
"""
[options]
python_requires=>=2.7,!=3.0.*
"""
),
)
with get_dist(tmpdir) as dist:
dist.parse_config_files()
def test_python_requires_invalid(self, tmpdir):
    """An unparsable python_requires value must raise while reading the config."""
    fake_env(
        tmpdir,
        DALS(
            """
            [options]
            python_requires=invalid
            """
        ),
    )
    with pytest.raises(Exception):
        with get_dist(tmpdir) as dist:
            dist.parse_config_files()
def test_cmdclass(self, tmpdir):
    """cmdclass entries in setup.cfg resolve to classes found in the project sources."""
    module_path = Path(tmpdir, "src/custom_build.py")  # auto discovery for src
    module_path.parent.mkdir(parents=True, exist_ok=True)
    module_path.write_text(
        "from distutils.core import Command\nclass CustomCmd(Command): pass\n",
        encoding="utf-8",
    )

    setup_cfg = """
        [options]
        cmdclass =
            customcmd = custom_build.CustomCmd
    """
    fake_env(tmpdir, inspect.cleandoc(setup_cfg))

    with get_dist(tmpdir) as dist:
        cmdclass = dist.cmdclass['customcmd']
        assert cmdclass.__name__ == "CustomCmd"
        assert cmdclass.__module__ == "custom_build"
        # the class must come from the file we just wrote, not a stale import
        assert module_path.samefile(inspect.getfile(cmdclass))
def test_requirements_file(self, tmpdir):
    """install_requires / extras_require can be loaded via ``file:`` directives."""
    fake_env(
        tmpdir,
        DALS(
            """
            [options]
            install_requires = file:requirements.txt
            [options.extras_require]
            colors = file:requirements-extra.txt
            """
        ),
    )

    tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
    tmpdir.join('requirements-extra.txt').write('colorama')

    with get_dist(tmpdir) as dist:
        # surrounding blank lines in the requirements file are ignored
        assert dist.install_requires == ['docutils>=0.3']
        assert dist.extras_require == {'colors': ['colorama']}
# Original distutils Distribution.__init__, saved so TestExternalSetters can wrap
# it (via mock side_effect) while still invoking the real initializer.
saved_dist_init = _Distribution.__init__
class TestExternalSetters:
    # During creation of the setuptools Distribution() object, we call
    # the init of the parent distutils Distribution object via
    # _Distribution.__init__ ().
    #
    # It's possible distutils calls out to various keyword
    # implementations (i.e. distutils.setup_keywords entry points)
    # that may set a range of variables.
    #
    # This wraps distutil's Distribution.__init__ and simulates
    # pbr or something else setting these values.
    def _fake_distribution_init(self, dist, attrs):
        """Stand-in for _Distribution.__init__ that mimics a plugin mutating metadata."""
        saved_dist_init(dist, attrs)
        # see self._DISTUTILS_UNSUPPORTED_METADATA
        dist.metadata.long_description_content_type = 'text/something'

        # Test overwrite setup() args
        dist.metadata.project_urls = {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

    @patch.object(_Distribution, '__init__', autospec=True)
    def test_external_setters(self, mock_parent_init, tmpdir):
        """Metadata set during parent __init__ survives; setup() attrs get overwritten."""
        mock_parent_init.side_effect = self._fake_distribution_init

        dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}})

        assert dist.metadata.long_description_content_type == 'text/something'
        assert dist.metadata.project_urls == {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

View File

@ -0,0 +1,145 @@
import contextlib
import io
import os
import shutil
import site
import sys
import tempfile
from filelock import FileLock
@contextlib.contextmanager
def tempdir(cd=lambda dir: None, **kwargs):
    """Yield a fresh temporary directory, removing it on exit.

    ``cd`` (if given) is invoked with the new directory on entry and with
    the original working directory on exit; extra ``kwargs`` are passed
    through to ``tempfile.mkdtemp``.
    """
    created = tempfile.mkdtemp(**kwargs)
    previous = os.getcwd()
    try:
        cd(created)
        yield created
    finally:
        cd(previous)
        shutil.rmtree(created)
@contextlib.contextmanager
def environment(**replacements):
    """
    In a context, patch the environment with replacements. Pass None values
    to clear the values.
    """
    backup = {name: os.environ[name] for name in replacements if name in os.environ}

    # keys mapped to None mean "unset this variable"
    for name in [key for key, value in replacements.items() if value is None]:
        os.environ.pop(name, None)
        replacements.pop(name)

    os.environ.update(replacements)

    try:
        yield backup
    finally:
        for name in replacements:
            os.environ.pop(name, None)
        os.environ.update(backup)
@contextlib.contextmanager
def quiet():
    """
    Redirect stdout/stderr to StringIO objects to prevent console output from
    distutils commands.
    """
    saved_streams = sys.stdout, sys.stderr
    captured_out = io.StringIO()
    captured_err = io.StringIO()
    sys.stdout, sys.stderr = captured_out, captured_err
    try:
        yield captured_out, captured_err
    finally:
        # rewind so callers can read() the captured text directly
        captured_out.seek(0)
        captured_err.seek(0)
        sys.stdout, sys.stderr = saved_streams
@contextlib.contextmanager
def save_user_site_setting():
    """Yield the current ``site.ENABLE_USER_SITE`` and restore it on exit."""
    original = site.ENABLE_USER_SITE
    try:
        yield original
    finally:
        site.ENABLE_USER_SITE = original
@contextlib.contextmanager
def save_pkg_resources_state():
    """Snapshot pkg_resources' global state and ``sys.path``; restore both on exit."""
    import pkg_resources

    state = pkg_resources.__getstate__()
    # also save sys.path
    path_snapshot = sys.path[:]
    try:
        yield state, path_snapshot
    finally:
        sys.path[:] = path_snapshot
        pkg_resources.__setstate__(state)
@contextlib.contextmanager
def suppress_exceptions(*excs):
    """Silently swallow any of the given exception types raised in the block."""
    with contextlib.suppress(*excs):
        yield
def multiproc(request):
    """
    Return True if running under xdist and multiple
    workers are used.
    """
    with contextlib.suppress(Exception):
        # xdist provides 'worker_id'; 'master' means no parallel workers
        return request.getfixturevalue('worker_id') != 'master'
    return False
@contextlib.contextmanager
def session_locked_tmp_dir(request, tmp_path_factory, name):
    """Uses a file lock to guarantee only one worker can access a temp dir"""
    # Under xdist each worker has its own basetemp; the parent dir is shared.
    base = tmp_path_factory.getbasetemp()
    shared_dir = base.parent if multiproc(request) else base

    target = shared_dir / name
    with FileLock(target.with_suffix(".lock")):
        # ^-- prevent multiple workers to access the directory at once
        target.mkdir(exist_ok=True, parents=True)
        yield target
@contextlib.contextmanager
def save_paths():
    """Make sure ``sys.path``, ``sys.meta_path`` and ``sys.path_hooks`` are preserved"""
    snapshot = (sys.path[:], sys.meta_path[:], sys.path_hooks[:])
    try:
        yield
    finally:
        sys.path, sys.meta_path, sys.path_hooks = snapshot
@contextlib.contextmanager
def save_sys_modules():
    """Make sure initial ``sys.modules`` is preserved"""
    original = sys.modules
    try:
        # work on a shallow copy so new imports can be discarded wholesale
        sys.modules = dict(original)
        yield
    finally:
        sys.modules = original

View File

@ -0,0 +1,95 @@
import os
import subprocess
import sys
import unicodedata
from subprocess import PIPE as _PIPE, Popen as _Popen
import jaraco.envs
class VirtualEnv(jaraco.envs.VirtualEnv):
    """Test-suite virtualenv wrapper (extends ``jaraco.envs.VirtualEnv``)."""

    name = '.env'
    # Some version of PyPy will import distutils on startup, implicitly
    # importing setuptools, and thus leading to BackendInvalid errors
    # when upgrading Setuptools. Bypass this behavior by avoiding the
    # early availability and need to upgrade.
    create_opts = ['--no-setuptools']

    def run(self, cmd, *args, **kwargs):
        """Run ``cmd`` (a list) inside the venv and return its captured stdout.

        The first element of ``cmd`` is resolved to the venv's own executable
        via ``self.exe`` (provided by the jaraco.envs base class — confirm).
        """
        cmd = [self.exe(cmd[0])] + cmd[1:]
        kwargs = {"cwd": self.root, "encoding": "utf-8", **kwargs}  # Allow overriding
        # In some environments (eg. downstream distro packaging), where:
        # - tox isn't used to run tests and
        # - PYTHONPATH is set to point to a specific setuptools codebase and
        # - no custom env is explicitly set by a test
        # PYTHONPATH will leak into the spawned processes.
        # In that case tests look for module in the wrong place (on PYTHONPATH).
        # Unless the test sets its own special env, pass a copy of the existing
        # environment with removed PYTHONPATH to the subprocesses.
        if "env" not in kwargs:
            env = dict(os.environ)
            if "PYTHONPATH" in env:
                del env["PYTHONPATH"]
            kwargs["env"] = env
        return subprocess.check_output(cmd, *args, **kwargs)
def _which_dirs(cmd):
result = set()
for path in os.environ.get('PATH', '').split(os.pathsep):
filename = os.path.join(path, cmd)
if os.access(filename, os.X_OK):
result.add(path)
return result
def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None):
    """
    Execution command for tests, separate from those used by the
    code directly to prevent accidental behavior issues.

    ``data_stream`` selects the returned output: 0 -> stdout, 1 -> stderr,
    or a tuple used as ``slice(*data_stream)`` over ``(stdout, stderr)``.
    Returns ``(returncode, data)``; failure to spawn yields ``(1, '')``.
    """
    if env is None:
        env = dict()
        for envname in os.environ:
            env[envname] = os.environ[envname]

    # override the python path if needed
    if pypath is not None:
        env["PYTHONPATH"] = pypath

    # override the execution path if needed
    if path is not None:
        env["PATH"] = path
    if not env.get("PATH", ""):
        # fall back to the directories providing the archive tools setup.py may spawn
        env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
        env["PATH"] = os.pathsep.join(env["PATH"])

    cmd = [sys.executable, "setup.py"] + list(cmd)

    # https://bugs.python.org/issue8557
    shell = sys.platform == 'win32'

    try:
        proc = _Popen(
            cmd,
            stdout=_PIPE,
            stderr=_PIPE,
            shell=shell,
            env=env,
            encoding="utf-8",
        )

        if isinstance(data_stream, tuple):
            data_stream = slice(*data_stream)
        data = proc.communicate()[data_stream]
    except OSError:
        return 1, ''

    # decode the console string if needed
    # NOTE(review): with encoding="utf-8" above, communicate() returns str,
    # so this branch appears unreachable — confirm before removing.
    if hasattr(data, "decode"):
        # use the default encoding
        data = data.decode()
        data = unicodedata.normalize('NFC', data)

    # communicate calls wait()
    return proc.returncode, data

View File

@ -0,0 +1,157 @@
import contextlib
import os
import subprocess
import sys
from pathlib import Path
import path
import pytest
from . import contexts, environment
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    # Nesting order matters: both tempdirs must outlive the user-site toggle.
    with contexts.tempdir() as user_base:
        monkeypatch.setattr('site.USER_BASE', user_base)
        with contexts.tempdir() as user_site:
            monkeypatch.setattr('site.USER_SITE', user_site)
            with contexts.save_user_site_setting():
                yield
@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Run the test with ``tmpdir`` as the current working directory."""
    with tmpdir.as_cwd() as orig:
        yield orig
@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376
    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if not request.config.pluginmanager.has_plugin('xdist'):
        return

    # '' may legitimately be absent already; ignore that case
    with contextlib.suppress(ValueError):
        sys.path.remove('')
@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(cmd, cwd=str(tmp_path))
    except Exception:
        # no git and/or no network available: the test cannot proceed
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
# sdist and wheel artifacts should be stable across a round of tests
# so we can build them once per session and use the files as "readonly"
# In the case of setuptools, building the wheel without sdist may cause
# it to contain the `build` directory, and therefore create situations with
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
def _build_distributions(tmp_path_factory, request):
    """Build setuptools' sdist and wheel once per session; return their paths."""
    with contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    ) as tmp:  # pragma: no cover
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            # another worker (or a previous fixture call) already built them
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        subprocess.check_output([
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ])

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Path to the setuptools sdist under test (pre-built via env var, or built now)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    sdist, _ = _build_distributions(tmp_path_factory, request)
    return sdist
@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Path to the setuptools wheel under test (pre-built via env var, or built now)."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    _, wheel = _build_distributions(tmp_path_factory, request)
    return wheel
@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv')
    env.create_opts = ['--no-setuptools', '--wheel=bundle']
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()
@pytest.fixture
def venv_without_setuptools(tmp_path):
    """Virtual env without any version of setuptools installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv_without_setuptools')
    env.create_opts = ['--no-setuptools', '--no-wheel']
    env.ensure_env()  # create lazily, without installing any requirement
    return env
@pytest.fixture
def bare_venv(tmp_path):
    """Virtual env without any common packages installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'bare_venv')
    env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
    env.ensure_env()
    return env

View File

@ -0,0 +1,3 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
</body></html>

View File

@ -0,0 +1,4 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
<a href="../../external.html" rel="homepage">external homepage</a><br/>
</body></html>

View File

@ -0,0 +1,77 @@
"""Reusable functions and classes for different types of integration tests.
For example ``Archive`` can be used to check the contents of distribution built
with setuptools, and ``run`` will always try to be as verbose as possible to
facilitate debugging.
"""
import os
import subprocess
import tarfile
from pathlib import Path
from zipfile import ZipFile
def run(cmd, env=None):
    """Run ``cmd`` verbosely; return its combined output, raising on failure."""
    proc = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        encoding="utf-8",
        env={**os.environ, **(env or {})},
        # ^-- allow overwriting instead of discarding the current env
    )

    combined = proc.stdout + "\n" + proc.stderr

    # pytest omits stdout/err by default, if the test fails they help debugging
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print(f"Command: {cmd}\nreturn code: {proc.returncode}\n\n{combined}")

    if proc.returncode != 0:
        raise subprocess.CalledProcessError(
            proc.returncode, cmd, proc.stdout, proc.stderr
        )
    return combined
class Archive:
    """Compatibility layer for ZipFile/Info and TarFile/Info"""

    def __init__(self, filename):
        """Open ``filename``, dispatching on its extension.

        Raises:
            ValueError: if the file is neither a ``.tar.gz`` nor a ``.zip``.
        """
        self._filename = filename
        if filename.endswith("tar.gz"):
            self._obj = tarfile.open(filename, "r:gz")
        elif filename.endswith("zip"):
            self._obj = ZipFile(filename)
        else:
            # Bug fix: the f-string previously contained no placeholder, so the
            # offending file name never appeared in the error message.
            raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz")

    def __iter__(self):
        """Iterate over the member info objects (ZipInfo or TarInfo)."""
        if hasattr(self._obj, "infolist"):
            return iter(self._obj.infolist())
        return iter(self._obj)

    def get_name(self, zip_or_tar_info):
        """Return the member's path inside the archive."""
        if hasattr(zip_or_tar_info, "filename"):
            return zip_or_tar_info.filename
        return zip_or_tar_info.name

    def get_content(self, zip_or_tar_info):
        """Return the member's content decoded as UTF-8 text.

        Raises:
            ValueError: if a tar member cannot be extracted (e.g. not a regular file).
        """
        if hasattr(self._obj, "extractfile"):
            content = self._obj.extractfile(zip_or_tar_info)
            if content is None:
                msg = f"Invalid {zip_or_tar_info.name} in {self._filename}"
                raise ValueError(msg)
            return str(content.read(), "utf-8")
        return str(self._obj.read(zip_or_tar_info), "utf-8")
def get_sdist_members(sdist_path):
    """Return the set of member paths in the sdist, with the root folder stripped."""
    with tarfile.open(sdist_path, "r:gz") as tar:
        names = tar.getnames()
    # drop the leading "<pkg>-<version>/" component; skip the bare root entry
    stripped = ("/".join(Path(name).parts[1:]) for name in names)
    return {entry for entry in stripped if entry}
def get_wheel_members(wheel_path):
    """Return the set of file names contained in the wheel (a zip archive)."""
    with ZipFile(wheel_path) as archive:
        return set(archive.namelist())

View File

@ -0,0 +1,224 @@
# https://github.com/python/mypy/issues/16936
# mypy: disable-error-code="has-type"
"""Integration tests for setuptools that focus on building packages via pip.
The idea behind these tests is not to exhaustively check all the possible
combinations of packages, operating systems, supporting libraries, etc, but
rather check a limited number of popular packages and how they interact with
the exposed public API. This way if any change in API is introduced, we hope to
identify backward compatibility problems before publishing a release.
The number of tested packages is purposefully kept small, to minimise duration
and the associated maintenance cost (changes in the way these packages define
their build process may require changes in the tests).
"""
import json
import os
import shutil
import sys
from enum import Enum
from glob import glob
from hashlib import md5
from urllib.request import urlopen
import pytest
from packaging.requirements import Requirement
from .helpers import Archive, run
pytestmark = pytest.mark.integration
(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936
"""Default version to be checked"""
# There are positive and negative aspects of checking the latest version of the
# packages.
# The main positive aspect is that the latest version might have already
# removed the use of APIs deprecated in previous releases of setuptools.
# Packages to be tested:
# (Please notice the test environment cannot support EVERY library required for
# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume
# that `build-essential`, `gfortran` and `libopenblas-dev` are installed,
# due to their relevance to the numerical/scientific programming ecosystem)
EXAMPLES = [
("pip", LATEST), # just in case...
("pytest", LATEST), # uses setuptools_scm
("mypy", LATEST), # custom build_py + ext_modules
# --- Popular packages: https://hugovk.github.io/top-pypi-packages/ ---
("botocore", LATEST),
("kiwisolver", LATEST), # build_ext
("brotli", LATEST), # not in the list but used by urllib3
("pyyaml", LATEST), # cython + custom build_ext + custom distclass
("charset-normalizer", LATEST), # uses mypyc, used by aiohttp
("protobuf", LATEST),
("requests", LATEST),
("celery", LATEST),
# When adding packages to this list, make sure they expose a `__version__`
# attribute, or modify the tests below
]
# Some packages have "optional" dependencies that modify their build behaviour
# and are not listed in pyproject.toml, others still use `setup_requires`
EXTRA_BUILD_DEPS = {
"pyyaml": ("Cython<3.0",), # constraint to avoid errors
"charset-normalizer": ("mypy>=1.4.1",), # no pyproject.toml available
}
EXTRA_ENV_VARS = {
"pyyaml": {"PYYAML_FORCE_CYTHON": "1"},
"charset-normalizer": {"CHARSET_NORMALIZER_USE_MYPYC": "1"},
}
IMPORT_NAME = {
"pyyaml": "yaml",
"protobuf": "google.protobuf",
}
VIRTUALENV = (sys.executable, "-m", "virtualenv")
# By default, pip will try to build packages in isolation (PEP 517), which
# means it will download the previous stable version of setuptools.
# `pip` flags can avoid that (the version of setuptools under test
# should be the one to be used)
INSTALL_OPTIONS = (
"--ignore-installed",
"--no-build-isolation",
# Omit "--no-binary :all:" the sdist is supplied directly.
# Allows dependencies as wheels.
)
# The downside of `--no-build-isolation` is that pip will not download build
# dependencies. The test script will have to also handle that.
@pytest.fixture
def venv_python(tmp_path):
    """Create a bare virtualenv under ``tmp_path`` and return its python executable."""
    run([*VIRTUALENV, str(tmp_path / ".venv")])
    # the bin dir name varies by platform ("bin" vs "Scripts"), so glob for it
    possible_path = (str(p.parent) for p in tmp_path.glob(".venv/*/python*"))
    return shutil.which("python", path=os.pathsep.join(possible_path))
@pytest.fixture(autouse=True)
def _prepare(tmp_path, venv_python, monkeypatch, request):
    """Ensure the download cache dir and build env vars exist; dump debug info on teardown."""
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    os.makedirs(download_path, exist_ok=True)

    # Environment vars used for building some of the packages
    monkeypatch.setenv("USE_MYPYC", "1")

    def _debug_info():
        # Let's provide the maximum amount of information possible in the case
        # it is necessary to debug the tests directly from the CI logs.
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Temporary directory:")
        # Bug fix: ``map(print, ...)`` is lazy and was never consumed, so the
        # listing never actually printed. Iterate explicitly instead.
        for entry in tmp_path.glob("*"):
            print(entry)
        print("Virtual environment:")
        run([venv_python, "-m", "pip", "freeze"])

    request.addfinalizer(_debug_info)
@pytest.mark.parametrize('package, version', EXAMPLES)
@pytest.mark.uses_network
def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
    """End-to-end: pip-install each sample package's sdist using the setuptools under test."""
    venv_pip = (venv_python, "-m", "pip")
    sdist = retrieve_sdist(package, version, tmp_path)
    deps = build_deps(package, sdist)
    if deps:
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Dependencies:", deps)
        run([*venv_pip, "install", *deps])

    # Use a virtualenv to simulate PEP 517 isolation
    # but install fresh setuptools wheel to ensure the version under development
    env = EXTRA_ENV_VARS.get(package, {})
    run([*venv_pip, "install", "--force-reinstall", setuptools_wheel])
    run([*venv_pip, "install", *INSTALL_OPTIONS, sdist], env)

    # Execute a simple script to make sure the package was installed correctly
    pkg = IMPORT_NAME.get(package, package).replace("-", "_")
    script = f"import {pkg}; print(getattr({pkg}, '__version__', 0))"
    run([venv_python, "-c", script])
# ---- Helper Functions ----
def retrieve_sdist(package, version, tmp_path):
    """Either use cached sdist file or download it from PyPI"""
    # `pip download` cannot be used due to
    # https://github.com/pypa/pip/issues/1884
    # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686
    # We have to find the correct distribution file and download it
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    dist = retrieve_pypi_sdist_metadata(package, version)

    # Remove old files to prevent cache to grow indefinitely
    for file in glob(os.path.join(download_path, f"{package}*")):
        # Bug fix: ``file`` is a full path while ``dist["filename"]`` is a bare
        # file name; the previous direct comparison was always unequal and
        # deleted the cached sdist we are about to reuse. Compare base names.
        if dist["filename"] != os.path.basename(file):
            os.unlink(file)

    dist_file = os.path.join(download_path, dist["filename"])
    if not os.path.exists(dist_file):
        download(dist["url"], dist_file, dist["md5_digest"])
    return dist_file
def retrieve_pypi_sdist_metadata(package, version):
    """Return the PyPI JSON metadata entry describing the package's sdist file."""
    # https://warehouse.pypa.io/api-reference/json.html
    id_ = package if version is LATEST else f"{package}/{version}"
    with urlopen(f"https://pypi.org/pypi/{id_}/json") as f:
        metadata = json.load(f)

    if metadata["info"]["yanked"]:
        raise ValueError(f"Release for {package} {version} was yanked")

    version = metadata["info"]["version"]
    # NOTE(review): ``version`` was just rebound to a string, so the
    # ``version is LATEST`` check below is always False and ``metadata["urls"]``
    # (the files of the resolved release) is always used — confirm intended.
    release = metadata["releases"][version] if version is LATEST else metadata["urls"]
    (sdist,) = filter(lambda d: d["packagetype"] == "sdist", release)
    return sdist
def download(url, dest, md5_digest):
    """Download ``url`` into ``dest``, verifying the payload's MD5 checksum."""
    with urlopen(url) as response:
        payload = response.read()

    assert md5(payload).hexdigest() == md5_digest
    with open(dest, "wb") as out:
        out.write(payload)

    assert os.path.exists(dest)
def build_deps(package, sdist_file):
    """Find out what are the build dependencies for a package.

    "Manually" install them, since pip will not install build
    deps with `--no-build-isolation`.
    """
    # delay importing, since pytest discovery phase may hit this file from a
    # testenv without tomli
    from setuptools.compat.py310 import tomllib

    pyproject = tomllib.loads(_read_pyproject(Archive(sdist_file)))
    declared = pyproject.get("build-system", {}).get("requires", [])
    declared += EXTRA_BUILD_DEPS.get(package, [])
    # Deduplicate by project name and drop setuptools itself
    by_name = {Requirement(spec).name: spec for spec in declared}
    return [spec for name, spec in by_name.items() if name != "setuptools"]
def _read_pyproject(archive):
contents = (
archive.get_content(member)
for member in archive
if os.path.basename(archive.get_name(member)) == "pyproject.toml"
)
return next(contents, "")

View File

@ -0,0 +1 @@
value = 'three, sir!'

View File

@ -0,0 +1,90 @@
import ast
import json
import textwrap
from pathlib import Path
def iter_namespace_pkgs(namespace):
    """Yield every ancestor package of a dotted namespace, shortest first.

    e.g. "a.b.c" -> "a", "a.b", "a.b.c"
    """
    accumulated = []
    for part in namespace.split("."):
        accumulated.append(part)
        yield ".".join(accumulated)
def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"):
    """Create a source tree for a namespace package under ``tmpdir``.

    ``name`` is a dotted module name (e.g. ``"myns.pkg"``); ``impl`` selects
    the namespace mechanism ("pkg_resources" or "pkgutil").  Returns the
    project root directory.
    """
    project_dir = tmpdir / name
    project_dir.mkdir()

    namespace, _, leaf = name.rpartition('.')
    ns_packages = list(iter_namespace_pkgs(namespace))
    setup_args = {
        "name": name,
        "version": version,
        "packages": ns_packages,
    }

    if impl == "pkg_resources":
        init_body = '__import__("pkg_resources").declare_namespace(__name__)'
        setup_args["namespace_packages"] = ns_packages
    elif impl == "pkgutil":
        init_body = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)'
    else:
        raise ValueError(f"Cannot recognise {impl=} when creating namespaces")

    args = json.dumps(setup_args, indent=4)
    assert ast.literal_eval(args)  # ensure it is valid Python
    script = f'import setuptools\nargs = {args}\nsetuptools.setup(**args)\n'
    (project_dir / 'setup.py').write_text(script, encoding='utf-8')

    pkg_dir = Path(project_dir, namespace.replace(".", "/"))
    pkg_dir.mkdir(parents=True)
    for ns in ns_packages:
        init_file = project_dir / ns.replace(".", "/") / '__init__.py'
        init_file.write_text(init_body, encoding='utf-8')

    module_file = pkg_dir / (leaf + '.py')
    module_file.write_text(f'name = {leaf!r}', encoding='utf-8')
    return project_dir
def build_pep420_namespace_package(tmpdir, name):
    """Create a PEP 420 (implicit) namespace-package project under ``tmpdir``.

    No ``__init__.py`` files are written; the namespace works purely through
    the import system.  Returns the project root directory.
    """
    project_dir = tmpdir / name
    project_dir.mkdir()

    namespace, _, leaf = name.rpartition(".")
    script = f"""\
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = "{name}"
version = "3.14159"
"""
    (project_dir / "pyproject.toml").write_text(
        textwrap.dedent(script), encoding='utf-8'
    )

    pkg_dir = Path(project_dir, namespace.replace(".", "/"))
    pkg_dir.mkdir(parents=True)
    (pkg_dir / (leaf + ".py")).write_text(f"name = {leaf!r}", encoding='utf-8')
    return project_dir
def make_site_dir(target):
    """
    Add a sitecustomize.py module in target to cause
    target to be added to site dirs such that .pth files
    are processed there.
    """
    target_str = str(target)
    script = f'__import__("site").addsitedir({target_str!r})'
    (target / 'sitecustomize.py').write_text(script, encoding='utf-8')

View File

@ -0,0 +1 @@
result = 'passed'

View File

@ -0,0 +1,86 @@
"""Basic http server for tests to simulate PyPI or custom indexes"""
import http.server
import os
import threading
import time
import urllib.parse
import urllib.request
class IndexServer(http.server.HTTPServer):
    """Basic single-threaded http server simulating a package index

    You can use this server in unittest like this::

        s = IndexServer()
        s.start()
        index_url = s.base_url() + 'mytestindex'
        # do some test requests to the index
        # The index files should be located in setuptools/tests/indexes
        s.stop()
    """

    def __init__(
        self,
        server_address=('', 0),
        RequestHandlerClass=http.server.SimpleHTTPRequestHandler,
    ):
        # port 0 lets the OS pick a free port; see base_url()/server_port
        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
        self._run = True  # NOTE(review): never read elsewhere in this class — confirm vestigial

    def start(self):
        """Serve requests on a background thread until stop() is called."""
        self.thread = threading.Thread(target=self.serve_forever)
        self.thread.start()

    def stop(self):
        "Stop the server"

        # Let the server finish the last request and wait for a new one.
        time.sleep(0.1)

        self.shutdown()
        self.thread.join()
        self.socket.close()

    def base_url(self):
        """Return the root URL for the repository-relative index directory."""
        port = self.server_port
        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
class RequestRecorder(http.server.BaseHTTPRequestHandler):
    """Handler that records each GET request object on the owning server."""

    def do_GET(self):
        # stash this handler instance on the server so tests can inspect requests
        requests = vars(self.server).setdefault('requests', [])
        requests.append(self)
        self.send_response(200, 'OK')
class MockServer(http.server.HTTPServer, threading.Thread):
    """
    A simple HTTP Server that records the requests made to it.
    """

    def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder):
        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
        threading.Thread.__init__(self)
        self.daemon = True  # don't block interpreter exit
        self.requests = []

    def run(self):
        # threading.Thread entry point: serve until shutdown() is called
        self.serve_forever()

    @property
    def netloc(self):
        """host:port of the running server."""
        return 'localhost:%s' % self.server_port

    @property
    def url(self):
        """Base URL of the running server."""
        return 'http://%s/' % self.netloc
def path_to_url(path, authority=None):
    """Convert a path to a file: URL."""
    absolute = os.path.normpath(os.path.abspath(path))
    prefix = 'file:' if authority is None else 'file:' + '//' + authority
    return urllib.parse.urljoin(prefix, urllib.request.pathname2url(absolute))

View File

@ -0,0 +1,36 @@
import io
import tarfile
import pytest
from setuptools import archive_util
@pytest.fixture
def tarfile_with_unicode(tmpdir):
    """
    Create a tar.gz archive containing a single zero-byte file with the
    non-ASCII name ``testimäge.png``; return the archive's path as str.
    """
    tarobj = io.BytesIO()

    with tarfile.open(fileobj=tarobj, mode="w:gz") as tgz:
        data = b""

        # non-ASCII member name exercises the unicode handling under test
        filename = "testimäge.png"

        t = tarfile.TarInfo(filename)
        t.size = len(data)
        tgz.addfile(t, io.BytesIO(data))

    target = tmpdir / 'unicode-pkg-1.0.tar.gz'
    with open(str(target), mode='wb') as tf:
        tf.write(tarobj.getvalue())

    return str(target)
@pytest.mark.xfail(reason="#710 and #712")
def test_unicode_files(tarfile_with_unicode, tmpdir):
    """Unpacking an archive with non-ASCII member names should succeed (known failure)."""
    target = tmpdir / 'out'
    archive_util.unpack_archive(tarfile_with_unicode, str(target))

View File

@ -0,0 +1,28 @@
"""develop tests"""
import sys
from unittest import mock
import pytest
from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution
@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
@pytest.mark.xfail(reason="bdist_rpm is long deprecated, should we remove it? #1988")
@mock.patch('distutils.command.bdist_rpm.bdist_rpm')
def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd):
    """bdist_rpm should emit SetuptoolsDeprecationWarning and delegate to distutils."""
    dist = Distribution(
        dict(
            script_name='setup.py',
            script_args=['bdist_rpm'],
            name='foo',
            py_modules=['hi'],
        )
    )
    dist.parse_command_line()
    with pytest.warns(SetuptoolsDeprecationWarning):
        dist.run_commands()

    distutils_cmd.run.assert_called_once()

View File

@ -0,0 +1,69 @@
"""develop tests"""
import os
import re
import zipfile
import pytest
from setuptools.dist import Distribution
from . import contexts
SETUP_PY = """\
from setuptools import setup
setup(py_modules=['hi'])
"""
@pytest.fixture(scope='function')
def setup_context(tmpdir):
    """Write a minimal setup.py plus one module, and chdir into ``tmpdir``."""
    with (tmpdir / 'setup.py').open('w') as f:
        f.write(SETUP_PY)
    with (tmpdir / 'hi.py').open('w') as f:
        f.write('1\n')
    with tmpdir.as_cwd():
        yield tmpdir
class Test:
    def test_bdist_egg(self, setup_context, user_override):
        """bdist_egg should produce exactly one versioned .egg in dist/."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg'],
                name='foo',
                py_modules=['hi'],
            )
        )
        os.makedirs(os.path.join('build', 'src'))
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()

        # let's see if we got our egg link at the right place
        [content] = os.listdir('dist')
        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)

    @pytest.mark.xfail(
        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
        reason="Byte code disabled",
    )
    def test_exclude_source_files(self, setup_context, user_override):
        """--exclude-source-files should ship only .pyc files, not the .py sources."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg', '--exclude-source-files'],
                py_modules=['hi'],
            )
        )
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()
        [dist_name] = os.listdir('dist')
        dist_filename = os.path.join('dist', dist_name)
        zip = zipfile.ZipFile(dist_filename)
        names = list(zi.filename for zi in zip.filelist)
        assert 'hi.pyc' in names
        assert 'hi.py' not in names

View File

@ -0,0 +1,652 @@
from __future__ import annotations
import builtins
import importlib
import os.path
import platform
import shutil
import stat
import struct
import sys
import sysconfig
from contextlib import suppress
from inspect import cleandoc
from unittest.mock import Mock
from zipfile import ZipFile
import jaraco.path
import pytest
from packaging import tags
import setuptools
from setuptools.command.bdist_wheel import (
bdist_wheel,
get_abi_tag,
remove_readonly,
remove_readonly_exc,
)
from setuptools.dist import Distribution
from setuptools.warnings import SetuptoolsDeprecationWarning
from distutils.core import run_setup
# Metadata skeleton expected in every wheel built from the "dummy-dist" example.
DEFAULT_FILES = {
    "dummy_dist-1.0.dist-info/top_level.txt",
    "dummy_dist-1.0.dist-info/METADATA",
    "dummy_dist-1.0.dist-info/WHEEL",
    "dummy_dist-1.0.dist-info/RECORD",
}
# Conventional license-like file names that bdist_wheel bundles by default.
DEFAULT_LICENSE_FILES = {
    "LICENSE",
    "LICENSE.txt",
    "LICENCE",
    "LICENCE.txt",
    "COPYING",
    "COPYING.md",
    "NOTICE",
    "NOTICE.rst",
    "AUTHORS",
    "AUTHORS.txt",
}
# Editor-backup variants that must never end up inside the wheel.
OTHER_IGNORED_FILES = {
    "LICENSE~",
    "AUTHORS~",
}
# Minimal setup script used by the "dummy-dist" example project.
SETUPPY_EXAMPLE = """\
from setuptools import setup
setup(
name='dummy_dist',
version='1.0',
)
"""
# Example project trees (jaraco.path.build layout), one wheel is built for
# each by the session-scoped ``wheel_paths`` fixture below.
EXAMPLES = {
    "dummy-dist": {
        "setup.py": SETUPPY_EXAMPLE,
        "licenses": {"DUMMYFILE": ""},
        **dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""),
    },
    "simple-dist": {
        "setup.py": cleandoc(
            """
from setuptools import setup
setup(
name="simple.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
extras_require={"voting": ["beaglevote"]},
)
"""
        ),
        "simpledist": "",
    },
    "complex-dist": {
        "setup.py": cleandoc(
            """
from setuptools import setup
setup(
name="complex-dist",
version="0.1",
description="Another testing distribution \N{SNOWMAN}",
long_description="Another testing distribution \N{SNOWMAN}",
author="Illustrious Author",
author_email="illustrious@example.org",
url="http://example.org/exemplary",
packages=["complexdist"],
setup_requires=["setuptools"],
install_requires=["quux", "splort"],
extras_require={"simple": ["simple.dist"]},
entry_points={
"console_scripts": [
"complex-dist=complexdist:main",
"complex-dist2=complexdist:main",
],
},
)
"""
        ),
        "complexdist": {"__init__.py": "def main(): return"},
    },
    "headers-dist": {
        "setup.py": cleandoc(
            """
from setuptools import setup
setup(
name="headers.dist",
version="0.1",
description="A distribution with headers",
headers=["header.h"],
)
"""
        ),
        "headersdist.py": "",
        "header.h": "",
    },
    "commasinfilenames-dist": {
        "setup.py": cleandoc(
            """
from setuptools import setup
setup(
name="testrepo",
version="0.1",
packages=["mypackage"],
description="A test package with commas in file names",
include_package_data=True,
package_data={"mypackage.data": ["*"]},
)
"""
        ),
        "mypackage": {
            "__init__.py": "",
            "data": {"__init__.py": "", "1,2,3.txt": ""},
        },
        "testrepo-0.1.0": {
            "mypackage": {"__init__.py": ""},
        },
    },
    "unicode-dist": {
        "setup.py": cleandoc(
            """
from setuptools import setup
setup(
name="unicode.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
packages=["unicodedist"],
zip_safe=True,
)
"""
        ),
        # non-ASCII module name exercises UTF-8 handling in RECORD
        "unicodedist": {"__init__.py": "", "åäö_日本語.py": ""},
    },
    "utf8-metadata-dist": {
        "setup.cfg": cleandoc(
            """
[metadata]
name = utf8-metadata-dist
version = 42
author_email = "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
long_description = file: README.rst
"""
        ),
        "README.rst": "UTF-8 描述 説明",
    },
}
if sys.platform != "win32":
    # ABI3 extensions don't really work on Windows
    EXAMPLES["abi3extension-dist"] = {
        "setup.py": cleandoc(
            """
from setuptools import Extension, setup
setup(
name="extension.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
ext_modules=[
Extension(
name="extension", sources=["extension.c"], py_limited_api=True
)
],
)
"""
        ),
        "setup.cfg": "[bdist_wheel]\npy_limited_api=cp32",
        "extension.c": "#define Py_LIMITED_API 0x03020000\n#include <Python.h>",
    }
def bdist_wheel_cmd(**kwargs):
    """Run command in the same process so that it is easier to collect coverage"""
    # Prefer a real setup.py when present; otherwise build a bare Distribution
    # so declarative (setup.cfg / pyproject) projects still work.
    if os.path.exists("setup.py"):
        distribution = run_setup("setup.py", stop_after="init")
    else:
        distribution = Distribution({"script_name": "%%build_meta%%"})
    distribution.parse_config_files()
    command = bdist_wheel(distribution)
    for option, value in kwargs.items():
        setattr(command, option, value)
    command.finalize_options()
    return command
def mkexample(tmp_path_factory, name):
    """Materialize ``EXAMPLES[name]`` in a fresh temp dir and return that dir."""
    project_dir = tmp_path_factory.mktemp(name)
    jaraco.path.build(EXAMPLES[name], prefix=str(project_dir))
    return project_dir
@pytest.fixture(scope="session")
def wheel_paths(tmp_path_factory):
    """Build one wheel per example project; yield the sorted ``.whl`` paths."""
    build_root = tmp_path_factory.mktemp("build")
    dist_dir = tmp_path_factory.mktemp("dist")
    for example_name in EXAMPLES:
        project_dir = mkexample(tmp_path_factory, example_name)
        # run bdist_wheel from inside the example project
        with jaraco.path.DirectoryStack().context(project_dir):
            cmd = bdist_wheel_cmd(
                bdist_dir=str(build_root / example_name), dist_dir=str(dist_dir)
            )
            cmd.run()
    return sorted(str(wheel) for wheel in dist_dir.glob("*.whl"))
@pytest.fixture
def dummy_dist(tmp_path_factory):
    """Directory containing the minimal "dummy-dist" example project."""
    project_dir = mkexample(tmp_path_factory, "dummy-dist")
    return project_dir
def test_no_scripts(wheel_paths):
    """Make sure entry point scripts are not generated."""
    path = next(path for path in wheel_paths if "complex_dist" in path)
    # BUG FIX: use a context manager so the archive handle is closed
    # (the original leaked the open ZipFile).
    with ZipFile(path) as zf:
        for entry in zf.infolist():
            assert ".data/scripts/" not in entry.filename
def test_unicode_record(wheel_paths):
    """RECORD must contain non-ASCII file names, UTF-8 encoded."""
    wheel = next(p for p in wheel_paths if "unicode.dist" in p)
    with ZipFile(wheel) as archive:
        record = archive.read("unicode.dist-0.1.dist-info/RECORD")
    assert "åäö_日本語.py".encode() in record
# PKG-INFO payload with non-ASCII metadata, used to verify that ``egg2dist``
# copies metadata through to METADATA without mangling the encoding.
UTF8_PKG_INFO = """\
Metadata-Version: 2.1
Name: helloworld
Version: 42
Author-email: "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
UTF-8 描述 説明
"""
def test_preserve_unicode_metadata(monkeypatch, tmp_path):
    """``egg2dist`` must copy PKG-INFO -> METADATA without corrupting UTF-8."""
    monkeypatch.chdir(tmp_path)
    egginfo = tmp_path / "dummy_dist.egg-info"
    distinfo = tmp_path / "dummy_dist.dist-info"
    egginfo.mkdir()
    (egginfo / "PKG-INFO").write_text(UTF8_PKG_INFO, encoding="utf-8")
    (egginfo / "dependency_links.txt").touch()

    class simpler_bdist_wheel(bdist_wheel):
        """Avoid messing with setuptools/distutils internals"""

        def __init__(self):
            # deliberately skip Command.__init__ (no Distribution required)
            pass

        @property
        def license_paths(self):
            # no license files needed for this test
            return []

    cmd_obj = simpler_bdist_wheel()
    cmd_obj.egg2dist(egginfo, distinfo)
    metadata = (distinfo / "METADATA").read_text(encoding="utf-8")
    assert 'Author-email: "John X. Ãørçeč"' in metadata
    assert "Γαμα קּ 東 " in metadata
    assert "UTF-8 描述 説明" in metadata
def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
    """All conventionally-named license files are bundled by default."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    expected_licenses = {
        f"dummy_dist-1.0.dist-info/{fname}" for fname in DEFAULT_LICENSE_FILES
    }
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES | expected_licenses
def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
    """The singular ``license_file`` option is still honoured."""
    config = "[metadata]\nlicense_file=licenses/DUMMYFILE"
    dummy_dist.joinpath("setup.cfg").write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        archived = set(wf.namelist())
    assert archived == DEFAULT_FILES | {"dummy_dist-1.0.dist-info/DUMMYFILE"}
@pytest.mark.parametrize(
    "config_file, config",
    [
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n LICENSE"),
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
        (
            "setup.py",
            SETUPPY_EXAMPLE.replace(
                ")", " license_files=['licenses/DUMMYFILE', 'LICENSE'])"
            ),
        ),
    ],
)
def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
    """An explicit ``license_files`` setting replaces the default license set."""
    dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    expected = {
        f"dummy_dist-1.0.dist-info/{fname}" for fname in ("DUMMYFILE", "LICENSE")
    }
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES | expected
def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
    """An empty ``license_files`` option disables license bundling entirely."""
    dummy_dist.joinpath("setup.cfg").write_text(
        "[metadata]\nlicense_files=\n", encoding="utf-8"
    )
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        archived = set(wf.namelist())
    assert archived == DEFAULT_FILES
def test_build_number(dummy_dist, monkeypatch, tmp_path):
    """A build number becomes an extra tag component in the wheel file name."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2").run()
    with ZipFile("dist/dummy_dist-1.0-2-py3-none-any.whl") as wf:
        contents = set(wf.namelist())
    assert "dummy_dist-1.0.dist-info/RECORD" in contents
    assert "dummy_dist-1.0.dist-info/METADATA" in contents
def test_universal_deprecated(dummy_dist, monkeypatch, tmp_path):
    """``universal`` warns about deprecation yet still builds a py2.py3 wheel."""
    monkeypatch.chdir(dummy_dist)
    with pytest.warns(SetuptoolsDeprecationWarning, match=".*universal is deprecated"):
        bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
    # For now we still respect the option
    assert os.path.exists("dist/dummy_dist-1.0-py2.py3-none-any.whl")
# Minimal C extension module (no methods) used by ``test_limited_abi``.
EXTENSION_EXAMPLE = """\
#include <Python.h>
static PyMethodDef methods[] = {
{ NULL, NULL, 0, NULL }
};
static struct PyModuleDef module_def = {
PyModuleDef_HEAD_INIT,
"extension",
"Dummy extension module",
-1,
methods
};
PyMODINIT_FUNC PyInit_extension(void) {
return PyModule_Create(&module_def);
}
"""
# setup.py driving the compilation of EXTENSION_EXAMPLE above.
EXTENSION_SETUPPY = """\
from __future__ import annotations
from setuptools import Extension, setup
setup(
name="extension.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
ext_modules=[Extension(name="extension", sources=["extension.c"])],
)
"""
@pytest.mark.filterwarnings(
    "once:Config variable '.*' is unset.*, Python ABI tag may be incorrect"
)
def test_limited_abi(monkeypatch, tmp_path, tmp_path_factory):
    """Test that building a binary wheel with the limited ABI works."""
    project_dir = tmp_path_factory.mktemp("extension_dist")
    (project_dir / "setup.py").write_text(EXTENSION_SETUPPY, encoding="utf-8")
    (project_dir / "extension.c").write_text(EXTENSION_EXAMPLE, encoding="utf-8")
    monkeypatch.chdir(project_dir)
    bdist_wheel_cmd(
        bdist_dir=str(tmp_path.joinpath("build")),
        dist_dir=str(tmp_path.joinpath("dist")),
    ).run()
def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
    """Building must succeed even when every source file is read-only."""
    workdir = str(tmp_path.joinpath("dummy"))
    shutil.copytree(str(dummy_dist), workdir)
    monkeypatch.chdir(workdir)
    # Make the tree read-only
    for root, _dirs, files in os.walk(workdir):
        for name in files:
            os.chmod(os.path.join(root, name), stat.S_IREAD)
    bdist_wheel_cmd().run()
@pytest.mark.parametrize(
    "option, compress_type",
    list(bdist_wheel.supported_compressions.items()),
    ids=list(bdist_wheel.supported_compressions),
)
def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
    """Every supported compression option is applied to all archive members."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), compression=option).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        members = set(wf.namelist())
        assert "dummy_dist-1.0.dist-info/RECORD" in members
        assert "dummy_dist-1.0.dist-info/METADATA" in members
        assert all(info.compress_type == compress_type for info in wf.filelist)
def test_wheelfile_line_endings(wheel_paths):
    """WHEEL metadata must use Unix line endings in every generated wheel."""
    for path in wheel_paths:
        with ZipFile(path) as wf:
            wheel_entry = next(
                info for info in wf.filelist if info.filename.endswith("WHEEL")
            )
            assert b"\r" not in wf.read(wheel_entry)
def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
    """SOURCE_DATE_EPOCH=0 predates the zip format; timestamps clamp to 1980."""
    monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a").run()
    with ZipFile("dist/dummy_dist-1.0-2a-py3-none-any.whl") as wf:
        for info in wf.filelist:
            assert info.date_time >= (1980, 1, 1, 0, 0, 0)  # min epoch is used
def test_get_abi_tag_windows(monkeypatch):
    """ABI tag on Windows: debug builds add ``d``, free-threading adds ``t``."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313-win_amd64")
    assert get_abi_tag() == "cp313"
    # presence of sys.gettotalrefcount marks a debug build -> "d" suffix
    monkeypatch.setattr(sys, "gettotalrefcount", lambda: 1, False)
    assert get_abi_tag() == "cp313d"
    # free-threaded ("t") SOABI combined with the debug marker
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313t-win_amd64")
    assert get_abi_tag() == "cp313td"
    # removing the debug marker leaves the plain free-threaded tag
    monkeypatch.delattr(sys, "gettotalrefcount")
    assert get_abi_tag() == "cp313t"
def test_get_abi_tag_pypy_old(monkeypatch):
    """Old-style PyPy SOABI values are converted wholesale into the ABI tag."""
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy36-pp73")
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    assert get_abi_tag() == "pypy36_pp73"
def test_get_abi_tag_pypy_new(monkeypatch):
    """New-style PyPy SOABI values drop the trailing platform component."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy37-pp73-darwin")
    assert get_abi_tag() == "pypy37_pp73"
def test_get_abi_tag_graalpy(monkeypatch):
    """GraalPy keeps the first three SOABI components in the ABI tag."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "graalpy")
    monkeypatch.setattr(
        sysconfig, "get_config_var", lambda x: "graalpy231-310-native-x86_64-linux"
    )
    assert get_abi_tag() == "graalpy231_310_native"
def test_get_abi_tag_fallback(monkeypatch):
    """Unknown interpreters fall back to the sanitized SOABI string."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "unknown-python")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "unknown-python-310")
    assert get_abi_tag() == "unknown_python_310"
def test_platform_with_space(dummy_dist, monkeypatch):
    """Ensure building on platforms with a space in the name succeed."""
    monkeypatch.chdir(dummy_dist)
    command = bdist_wheel_cmd(plat_name="isilon onefs")
    command.run()
def test_rmtree_readonly(monkeypatch, tmp_path):
    """Verify the ``onerror``/``onexc`` callbacks handle read-only files."""
    bdist_dir = tmp_path / "with_readonly"
    bdist_dir.mkdir()
    readonly_file = bdist_dir.joinpath("file.txt")
    readonly_file.touch()
    readonly_file.chmod(stat.S_IREAD)
    # only Windows refuses to unlink read-only files
    expected_count = 1 if sys.platform.startswith("win") else 0
    if sys.version_info < (3, 12):
        handler = Mock(side_effect=remove_readonly)
        shutil.rmtree(bdist_dir, onerror=handler)
    else:
        # Python 3.12 replaced the ``onerror`` parameter with ``onexc``
        handler = Mock(side_effect=remove_readonly_exc)
        shutil.rmtree(bdist_dir, onexc=handler)
    assert handler.call_count == expected_count
    assert not bdist_dir.is_dir()
def test_data_dir_with_tag_build(monkeypatch, tmp_path):
    """
    Setuptools allow authors to set PEP 440's local version segments
    using ``egg_info.tag_build``. This should be reflected not only in the
    ``.whl`` file name, but also in the ``.dist-info`` and ``.data`` dirs.
    See pypa/setuptools#3997.
    """
    monkeypatch.chdir(tmp_path)
    files = {
        "setup.py": """
from setuptools import setup
setup(headers=["hello.h"])
""",
        "setup.cfg": """
[metadata]
name = test
version = 1.0
[options.data_files]
hello/world = file.txt
[egg_info]
tag_build = +what
tag_date = 0
""",
        "file.txt": "",
        "hello.h": "",
    }
    for file, content in files.items():
        with open(file, "w", encoding="utf-8") as fh:
            fh.write(cleandoc(content))
    bdist_wheel_cmd().run()
    # Ensure .whl, .dist-info and .data contain the local segment
    wheel_path = "dist/test-1.0+what-py3-none-any.whl"
    assert os.path.exists(wheel_path)
    # BUG FIX: close the archive handle (the original leaked the open ZipFile).
    with ZipFile(wheel_path) as wf:
        entries = set(wf.namelist())
    for expected in (
        "test-1.0+what.data/headers/hello.h",
        "test-1.0+what.data/data/hello/world/file.txt",
        "test-1.0+what.dist-info/METADATA",
        "test-1.0+what.dist-info/WHEEL",
    ):
        assert expected in entries
    for not_expected in (
        "test.data/headers/hello.h",
        "test-1.0.data/data/hello/world/file.txt",
        "test.dist-info/METADATA",
        "test-1.0.dist-info/WHEEL",
    ):
        assert not_expected not in entries
@pytest.mark.parametrize(
    "reported,expected",
    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
)
@pytest.mark.skipif(
    platform.system() != "Linux", reason="Only makes sense to test on Linux"
)
def test_platform_linux32(reported, expected, monkeypatch):
    """A 32-bit interpreter on a 64-bit kernel reports the 32-bit platform."""
    # pretend pointers are 4 bytes wide, i.e. a 32-bit interpreter
    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
    cmd = bdist_wheel(setuptools.Distribution())
    cmd.plat_name = reported
    cmd.root_is_pure = False
    assert cmd.get_tag()[2] == expected
def test_no_ctypes(monkeypatch) -> None:
    """The bdist_wheel command must remain importable when ctypes is missing."""

    def _fake_import(name: str, *args, **kwargs):
        # refuse to import ctypes; delegate every other import
        if name == "ctypes":
            raise ModuleNotFoundError(f"No module named {name}")
        return importlib.__import__(name, *args, **kwargs)

    with suppress(KeyError):
        monkeypatch.delitem(sys.modules, "wheel.macosx_libfile")
    # Install an importer shim that refuses to load ctypes
    monkeypatch.setattr(builtins, "__import__", _fake_import)
    # sanity check: the shim really blocks modules that require ctypes
    with pytest.raises(ModuleNotFoundError, match="No module named ctypes"):
        import wheel.macosx_libfile  # noqa: F401
    # Unload and reimport the bdist_wheel command module to make sure it won't try to
    # import ctypes
    monkeypatch.delitem(sys.modules, "setuptools.command.bdist_wheel")
    import setuptools.command.bdist_wheel  # noqa: F401
def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
    """A pre-built ``.dist-info`` dir is reused, preserving extra files (PEP 517)."""
    monkeypatch.chdir(dummy_dist)
    distinfo = tmp_path / "dummy_dist.dist-info"
    distinfo.mkdir()
    (distinfo / "METADATA").write_text("name: helloworld", encoding="utf-8")
    # We don't control the metadata. According to PEP-517, "The hook MAY also
    # create other files inside this directory, and a build frontend MUST
    # preserve".
    (distinfo / "FOO").write_text("bar", encoding="utf-8")
    bdist_wheel_cmd(bdist_dir=str(tmp_path), dist_info_dir=str(distinfo)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        files_found = set(wf.namelist())
    # Check that all expected files are there.
    expected = {
        "dummy_dist-1.0.dist-info/FOO",
        "dummy_dist-1.0.dist-info/RECORD",
    }
    assert expected <= files_found
    # Make sure there is no accidental egg-info bleeding into the wheel.
    assert not any('egg-info' in str(path) for path in files_found)

View File

@ -0,0 +1,33 @@
from setuptools import Command
from setuptools.command.build import build
from setuptools.dist import Distribution
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
    """
    Check that the setuptools Distribution uses the
    setuptools specific build object.
    """
    dist = Distribution({
        'script_name': 'setup.py',
        'script_args': ['build'],
        'packages': [],
        'package_data': {'': ['path/*']},
    })
    build_cmd = dist.get_command_obj("build")
    assert isinstance(build_cmd, build)
class Subcommand(Command):
    """Dummy command to be used in tests"""

    def initialize_options(self):
        # no options to set up
        pass

    def finalize_options(self):
        # nothing to validate
        pass

    def run(self):
        # running this command is the observable event tests check for
        raise NotImplementedError("just to check if the command runs")

View File

@ -0,0 +1,84 @@
import random
from unittest import mock
import pytest
from setuptools.command.build_clib import build_clib
from setuptools.dist import Distribution
from distutils.errors import DistutilsSetupError
class TestBuildCLib:
    """Exercise build_clib's input validation and crude dependency tracking."""

    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries(self, mock_newer):
        dist = Distribution()
        cmd = build_clib(dist)
        # this will be a long section, just making sure all
        # exceptions are properly raised
        # 'sources' must be a list, not a bare string
        libs = [('example', {'sources': 'broken.c'})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)
        # 'obj_deps' must be a dictionary
        obj_deps = 'some_string'
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)
        # global dependencies (the '' key) must be a list/tuple
        obj_deps = {'': ''}
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)
        # per-source dependencies must be a list/tuple as well
        obj_deps = {'source.c': ''}
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)
        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        # ([], []) means "nothing out of date" -> no compile expected
        mock_newer.return_value = ([], [])
        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]
        cmd.build_libraries(libs)
        assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
        assert not cmd.compiler.compile.called
        assert cmd.compiler.create_static_lib.call_count == 1
        # reset the call numbers so we can test again
        cmd.compiler.reset_mock()
        mock_newer.return_value = ''  # anything as long as it's not ([],[])
        cmd.build_libraries(libs)
        assert cmd.compiler.compile.call_count == 1
        assert cmd.compiler.create_static_lib.call_count == 1

    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries_reproducible(self, mock_newer):
        """Dependency expansion must not depend on the order of ``sources``."""
        dist = Distribution()
        cmd = build_clib(dist)
        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        mock_newer.return_value = ([], [])
        original_sources = ['a-example.c', 'example.c']
        sources = original_sources
        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
        cmd.build_libraries(libs)
        computed_call_args = mock_newer.call_args[0]
        # rebuild with shuffled sources; the computed arguments must match
        while sources == original_sources:
            sources = random.sample(original_sources, len(original_sources))
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]
        cmd.build_libraries(libs)
        assert computed_call_args == mock_newer.call_args[0]

View File

@ -0,0 +1,291 @@
import os
import sys
from importlib.util import cache_from_source as _compiled_file_name
import pytest
from jaraco import path
from setuptools.command.build_ext import build_ext, get_abi3_suffix
from setuptools.dist import Distribution
from setuptools.errors import CompileError
from setuptools.extension import Extension
from . import environment
from .textwrap import DALS
import distutils.command.build_ext as orig
from distutils.sysconfig import get_config_var
IS_PYPY = '__pypy__' in sys.builtin_module_names
class TestBuildExt:
    """Tests for setuptools' build_ext filename logic and output mapping."""

    def test_get_ext_filename(self):
        """
        Setuptools needs to give back the same
        result as distutils, even if the fullname
        is not in ext_map.
        """
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.ext_map['foo/bar'] = ''
        res = cmd.get_ext_filename('foo')
        wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
        assert res == wanted

    def test_abi3_filename(self):
        """
        Filename needs to be loadable by several versions
        of Python 3 if 'is_abi3' is truthy on Extension()
        """
        print(get_abi3_suffix())
        extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True)
        dist = Distribution(dict(ext_modules=[extension]))
        cmd = build_ext(dist)
        cmd.finalize_options()
        assert 'spam.eggs' in cmd.ext_map
        res = cmd.get_ext_filename('spam.eggs')
        if not get_abi3_suffix():
            # platform provides no abi3 suffix -> default EXT_SUFFIX is used
            assert res.endswith(get_config_var('EXT_SUFFIX'))
        elif sys.platform == 'win32':
            assert res.endswith('eggs.pyd')
        else:
            assert 'abi3' in res

    def test_ext_suffix_override(self):
        """
        SETUPTOOLS_EXT_SUFFIX variable always overrides
        default extension options.
        """
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.ext_map['for_abi3'] = ext = Extension(
            'for_abi3',
            ['s.c'],
            # Override shouldn't affect abi3 modules
            py_limited_api=True,
        )
        # Mock value needed to pass tests
        ext._links_to_dynamic = False
        if not IS_PYPY:
            expect = cmd.get_ext_filename('for_abi3')
        else:
            # PyPy builds do not use ABI3 tag, so they will
            # also get the overridden suffix.
            expect = 'for_abi3.test-suffix'
        try:
            os.environ['SETUPTOOLS_EXT_SUFFIX'] = '.test-suffix'
            res = cmd.get_ext_filename('normal')
            assert 'normal.test-suffix' == res
            res = cmd.get_ext_filename('for_abi3')
            assert expect == res
        finally:
            # never leak the override into other tests
            del os.environ['SETUPTOOLS_EXT_SUFFIX']

    def dist_with_example(self):
        # Sample project: two packaged extensions plus one top-level extension.
        files = {
            "src": {"mypkg": {"subpkg": {"ext2.c": ""}}},
            "c-extensions": {"ext1": {"main.c": ""}},
        }
        ext1 = Extension("mypkg.ext1", ["c-extensions/ext1/main.c"])
        ext2 = Extension("mypkg.subpkg.ext2", ["src/mypkg/subpkg/ext2.c"])
        ext3 = Extension("ext3", ["c-extension/ext3.c"])
        path.build(files)
        return Distribution({
            "script_name": "%test%",
            "ext_modules": [ext1, ext2, ext3],
            "package_dir": {"": "src"},
        })

    def test_get_outputs(self, tmpdir_cwd, monkeypatch):
        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', False)
        dist = self.dist_with_example()
        # Regular build: get_outputs not empty, but get_output_mappings is empty
        build_ext = dist.get_command_obj("build_ext")
        build_ext.editable_mode = False
        build_ext.ensure_finalized()
        build_lib = build_ext.build_lib.replace(os.sep, "/")
        outputs = [x.replace(os.sep, "/") for x in build_ext.get_outputs()]
        assert outputs == [
            f"{build_lib}/ext3.mp3",
            f"{build_lib}/mypkg/ext1.mp3",
            f"{build_lib}/mypkg/subpkg/ext2.mp3",
        ]
        assert build_ext.get_output_mapping() == {}
        # Editable build: get_output_mappings should contain everything in get_outputs
        dist.reinitialize_command("build_ext")
        build_ext.editable_mode = True
        build_ext.ensure_finalized()
        mapping = {
            k.replace(os.sep, "/"): v.replace(os.sep, "/")
            for k, v in build_ext.get_output_mapping().items()
        }
        assert mapping == {
            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
        }

    def test_get_output_mapping_with_stub(self, tmpdir_cwd, monkeypatch):
        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', True)
        dist = self.dist_with_example()
        # Editable build should create compiled stubs (.pyc files only, no .py)
        build_ext = dist.get_command_obj("build_ext")
        build_ext.editable_mode = True
        build_ext.ensure_finalized()
        for ext in build_ext.extensions:
            monkeypatch.setattr(ext, "_needs_stub", True)
        build_lib = build_ext.build_lib.replace(os.sep, "/")
        mapping = {
            k.replace(os.sep, "/"): v.replace(os.sep, "/")
            for k, v in build_ext.get_output_mapping().items()
        }

        def C(file):
            """Make it possible to do comparisons and tests in a OS-independent way"""
            return _compiled_file_name(file).replace(os.sep, "/")

        assert mapping == {
            C(f"{build_lib}/ext3.py"): C("src/ext3.py"),
            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
            C(f"{build_lib}/mypkg/ext1.py"): C("src/mypkg/ext1.py"),
            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
            C(f"{build_lib}/mypkg/subpkg/ext2.py"): C("src/mypkg/subpkg/ext2.py"),
            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
        }
        # Ensure only the compiled stubs are present not the raw .py stub
        assert f"{build_lib}/mypkg/ext1.py" not in mapping
        assert f"{build_lib}/mypkg/subpkg/ext2.py" not in mapping
        # Visualize what the cached stub files look like
        example_stub = C(f"{build_lib}/mypkg/ext1.py")
        assert example_stub in mapping
        assert example_stub.startswith(f"{build_lib}/mypkg/__pycache__/ext1")
        assert example_stub.endswith(".pyc")
class TestBuildExtInplace:
    """Tests for in-place builds of (deliberately broken) extensions."""

    def get_build_ext_cmd(self, optional: bool, **opts):
        """Return a finalized build_ext command for an uncompilable extension."""
        files = {
            "eggs.c": "#include missingheader.h\n",
            ".build": {"lib": {}, "tmp": {}},
        }
        path.build(files)  # jaraco/path#232
        extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
        dist = Distribution(dict(ext_modules=[extension]))
        dist.script_name = 'setup.py'
        cmd = build_ext(dist)
        vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
        cmd.ensure_finalized()
        return cmd

    def get_log_messages(self, caplog, capsys):
        """
        Historically, distutils "logged" by printing to sys.std*.
        Later versions adopted the logging framework. Grab
        messages regardless of how they were captured.
        """
        std = capsys.readouterr()
        return std.out.splitlines() + std.err.splitlines() + caplog.messages

    def test_optional(self, tmpdir_cwd, caplog, capsys):
        """
        If optional extensions fail to build, setuptools should show the error
        in the logs but not fail to build
        """
        cmd = self.get_build_ext_cmd(optional=True, inplace=True)
        cmd.run()
        # BUG FIX: the original generator yielded the constant string itself,
        # so ``any(...)`` passed whenever *any* log message existed.  Check the
        # substring against each captured message instead.
        assert any(
            'build_ext: building extension "spam.eggs" failed' in msg
            for msg in self.get_log_messages(caplog, capsys)
        )
        # No compile error exception should be raised

    def test_non_optional(self, tmpdir_cwd):
        # Non-optional extensions should raise an exception
        cmd = self.get_build_ext_cmd(optional=False, inplace=True)
        with pytest.raises(CompileError):
            cmd.run()
def test_build_ext_config_handling(tmpdir_cwd):
    """End-to-end: ``setup.py build`` honours ``[build] build_base`` from setup.cfg."""
    files = {
        'setup.py': DALS(
            """
from setuptools import Extension, setup
setup(
name='foo',
version='0.0.0',
ext_modules=[Extension('foo', ['foo.c'])],
)
"""
        ),
        # minimal C extension compatible with both Python 2 and 3 init styles
        'foo.c': DALS(
            """
#include "Python.h"
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"foo",
NULL,
0,
NULL,
NULL,
NULL,
NULL,
NULL
};
#define INITERROR return NULL
PyMODINIT_FUNC PyInit_foo(void)
#else
#define INITERROR return
void initfoo(void)
#endif
{
#if PY_MAJOR_VERSION >= 3
PyObject *module = PyModule_Create(&moduledef);
#else
PyObject *module = Py_InitModule("extension", NULL);
#endif
if (module == NULL)
INITERROR;
#if PY_MAJOR_VERSION >= 3
return module;
#endif
}
"""
        ),
        # redirect the build tree; the build must succeed with this in effect
        'setup.cfg': DALS(
            """
[build]
build_base = foo_build
"""
        ),
    }
    path.build(files)
    code, output = environment.run_setup_py(
        cmd=['build'],
        data_stream=(0, 2),
    )
    assert code == 0, '\nSTDOUT:\n%s\nSTDERR:\n%s' % output

View File

@ -0,0 +1,970 @@
import contextlib
import importlib
import os
import re
import shutil
import signal
import sys
import tarfile
from concurrent import futures
from pathlib import Path
from typing import Any, Callable
from zipfile import ZipFile
import pytest
from jaraco import path
from packaging.requirements import Requirement
from .textwrap import DALS
# Stub setup.py that delegates entirely to declarative metadata.
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
# Per-hook-call timeout for the backend worker process (env-overridable).
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180"))  # in seconds
IS_PYPY = '__pypy__' in sys.builtin_module_names
pytestmark = pytest.mark.skipif(
    sys.platform == "win32" and IS_PYPY,
    reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
    "is flaky and problematic",
)
class BuildBackendBase:
    """Common state shared by the backend wrapper and the worker-side caller."""

    def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
        self.cwd = cwd
        # fall back to a fresh dict (avoids a mutable default argument)
        self.env = env or {}
        self.backend_name = backend_name
class BuildBackend(BuildBackendBase):
    """PEP 517 Build Backend"""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # single worker: hook calls run sequentially in one subprocess
        self.pool = futures.ProcessPoolExecutor(max_workers=1)

    def __getattr__(self, name: str) -> Callable[..., Any]:
        """Handles arbitrary function invocations on the build backend."""

        def method(*args, **kw):
            root = os.path.abspath(self.cwd)
            caller = BuildBackendCaller(root, self.env, self.backend_name)
            pid = None
            try:
                # learn the worker's pid first so a hung hook can be killed
                pid = self.pool.submit(os.getpid).result(TIMEOUT)
                return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
            except futures.TimeoutError:
                self.pool.shutdown(wait=False)  # doesn't stop already running processes
                self._kill(pid)
                pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
            except (futures.process.BrokenProcessPool, MemoryError, OSError):
                if IS_PYPY:
                    pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
                raise

        return method

    def _kill(self, pid):
        # Terminate the (possibly hung) worker, ignoring races where it
        # already exited.
        if pid is None:
            return
        with contextlib.suppress(ProcessLookupError, OSError):
            os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
class BuildBackendCaller(BuildBackendBase):
    """Executes backend hooks inside the worker process."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # "module:object" syntax selects an object inside the backend module
        self.backend_name, _, self.backend_obj = self.backend_name.partition(':')

    def __call__(self, name, *args, **kw):
        """Handles arbitrary function invocations on the build backend."""
        os.chdir(self.cwd)
        os.environ.update(self.env)
        module = importlib.import_module(self.backend_name)
        backend = getattr(module, self.backend_obj) if self.backend_obj else module
        hook = getattr(backend, name)
        return hook(*args, **kw)
# Example project layouts; the ``build_backend`` fixture is parametrized over
# these, so every test runs once per project style.
defns = [
    {  # simple setup.py script
        'setup.py': DALS(
            """
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
        ),
        'hello.py': DALS(
            """
def run():
print('hello')
"""
        ),
    },
    {  # setup.py that relies on __name__
        'setup.py': DALS(
            """
assert __name__ == '__main__'
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
        ),
        'hello.py': DALS(
            """
def run():
print('hello')
"""
        ),
    },
    {  # setup.py script that runs arbitrary code
        'setup.py': DALS(
            """
variable = True
def function():
return variable
assert variable
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
        ),
        'hello.py': DALS(
            """
def run():
print('hello')
"""
        ),
    },
    {  # setup.py script that constructs temp files to be included in the distribution
        'setup.py': DALS(
            """
# Some packages construct files on the fly, include them in the package,
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
# to obtain a distribution object first, and then run the distutils
# commands later, because these files will be removed in the meantime.
with open('world.py', 'w', encoding="utf-8") as f:
f.write('x = 42')
try:
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['world'],
setup_requires=['six'],
)
finally:
# Some packages will clean temporary files
__import__('os').unlink('world.py')
"""
        ),
    },
    {  # setup.cfg only
        'setup.cfg': DALS(
            """
[metadata]
name = foo
version = 0.0.0
[options]
py_modules=hello
setup_requires=six
"""
        ),
        'hello.py': DALS(
            """
def run():
print('hello')
"""
        ),
    },
    {  # setup.cfg and setup.py
        'setup.cfg': DALS(
            """
[metadata]
name = foo
version = 0.0.0
[options]
py_modules=hello
setup_requires=six
"""
        ),
        'setup.py': "__import__('setuptools').setup()",
        'hello.py': DALS(
            """
def run():
print('hello')
"""
        ),
    },
]
class TestBuildMetaBackend:
# dotted path of the backend module under test (subclasses may override)
backend_name = 'setuptools.build_meta'
def get_build_backend(self):
    """Create a fresh out-of-process wrapper for the backend under test."""
    backend = BuildBackend(backend_name=self.backend_name)
    return backend
@pytest.fixture(params=defns)
def build_backend(self, tmpdir, request):
    """Materialize one example project layout and yield a backend rooted there."""
    project_files = request.param
    path.build(project_files, prefix=str(tmpdir))
    with tmpdir.as_cwd():
        yield self.get_build_backend()
def test_get_requires_for_build_wheel(self, build_backend):
actual = build_backend.get_requires_for_build_wheel()
expected = ['six']
assert sorted(actual) == sorted(expected)
def test_get_requires_for_build_sdist(self, build_backend):
actual = build_backend.get_requires_for_build_sdist()
expected = ['six']
assert sorted(actual) == sorted(expected)
def test_build_wheel(self, build_backend):
dist_dir = os.path.abspath('pip-wheel')
os.makedirs(dist_dir)
wheel_name = build_backend.build_wheel(dist_dir)
wheel_file = os.path.join(dist_dir, wheel_name)
assert os.path.isfile(wheel_file)
# Temporary files should be removed
assert not os.path.isfile('world.py')
with ZipFile(wheel_file) as zipfile:
wheel_contents = set(zipfile.namelist())
# Each one of the examples have a single module
# that should be included in the distribution
python_scripts = (f for f in wheel_contents if f.endswith('.py'))
modules = [f for f in python_scripts if not f.endswith('setup.py')]
assert len(modules) == 1
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
# Building a sdist/wheel should still succeed if there's
# already a sdist/wheel in the destination directory.
files = {
'setup.py': "from setuptools import setup\nsetup()",
'VERSION': "0.0.1",
'setup.cfg': DALS(
"""
[metadata]
name = foo
version = file: VERSION
"""
),
'pyproject.toml': DALS(
"""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
"""
),
}
path.build(files)
dist_dir = os.path.abspath('preexisting-' + build_type)
build_backend = self.get_build_backend()
build_method = getattr(build_backend, 'build_' + build_type)
# Build a first sdist/wheel.
# Note: this also check the destination directory is
# successfully created if it does not exist already.
first_result = build_method(dist_dir)
# Change version.
with open("VERSION", "wt", encoding="utf-8") as version_file:
version_file.write("0.0.2")
# Build a *second* sdist/wheel.
second_result = build_method(dist_dir)
assert os.path.isfile(os.path.join(dist_dir, first_result))
assert first_result != second_result
# And if rebuilding the exact same sdist/wheel?
open(os.path.join(dist_dir, second_result), 'wb').close()
third_result = build_method(dist_dir)
assert third_result == second_result
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
@pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
def test_build_with_pyproject_config(self, tmpdir, setup_script):
files = {
'pyproject.toml': DALS(
"""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "foo"
license = {text = "MIT"}
description = "This is a Python package"
dynamic = ["version", "readme"]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers"
]
urls = {Homepage = "http://github.com"}
dependencies = [
"appdirs",
]
[project.optional-dependencies]
all = [
"tomli>=1",
"pyscaffold>=4,<5",
'importlib; python_version == "2.6"',
]
[project.scripts]
foo = "foo.cli:main"
[tool.setuptools]
zip-safe = false
package-dir = {"" = "src"}
packages = {find = {where = ["src"]}}
license-files = ["LICENSE*"]
[tool.setuptools.dynamic]
version = {attr = "foo.__version__"}
readme = {file = "README.rst"}
[tool.distutils.sdist]
formats = "gztar"
"""
),
"MANIFEST.in": DALS(
"""
global-include *.py *.txt
global-exclude *.py[cod]
"""
),
"README.rst": "This is a ``README``",
"LICENSE.txt": "---- placeholder MIT license ----",
"src": {
"foo": {
"__init__.py": "__version__ = '0.1'",
"__init__.pyi": "__version__: str",
"cli.py": "def main(): print('hello world')",
"data.txt": "def main(): print('hello world')",
"py.typed": "",
}
},
}
if setup_script:
files["setup.py"] = setup_script
build_backend = self.get_build_backend()
with tmpdir.as_cwd():
path.build(files)
sdist_path = build_backend.build_sdist("temp")
wheel_file = build_backend.build_wheel("temp")
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
sdist_contents = set(tar.getnames())
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
wheel_contents = set(zipfile.namelist())
metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
assert sdist_contents - {"foo-0.1/setup.py"} == {
'foo-0.1',
'foo-0.1/LICENSE.txt',
'foo-0.1/MANIFEST.in',
'foo-0.1/PKG-INFO',
'foo-0.1/README.rst',
'foo-0.1/pyproject.toml',
'foo-0.1/setup.cfg',
'foo-0.1/src',
'foo-0.1/src/foo',
'foo-0.1/src/foo/__init__.py',
'foo-0.1/src/foo/__init__.pyi',
'foo-0.1/src/foo/cli.py',
'foo-0.1/src/foo/data.txt',
'foo-0.1/src/foo/py.typed',
'foo-0.1/src/foo.egg-info',
'foo-0.1/src/foo.egg-info/PKG-INFO',
'foo-0.1/src/foo.egg-info/SOURCES.txt',
'foo-0.1/src/foo.egg-info/dependency_links.txt',
'foo-0.1/src/foo.egg-info/entry_points.txt',
'foo-0.1/src/foo.egg-info/requires.txt',
'foo-0.1/src/foo.egg-info/top_level.txt',
'foo-0.1/src/foo.egg-info/not-zip-safe',
}
assert wheel_contents == {
"foo/__init__.py",
"foo/__init__.pyi", # include type information by default
"foo/cli.py",
"foo/data.txt", # include_package_data defaults to True
"foo/py.typed", # include type information by default
"foo-0.1.dist-info/LICENSE.txt",
"foo-0.1.dist-info/METADATA",
"foo-0.1.dist-info/WHEEL",
"foo-0.1.dist-info/entry_points.txt",
"foo-0.1.dist-info/top_level.txt",
"foo-0.1.dist-info/RECORD",
}
assert license == "---- placeholder MIT license ----"
for line in (
"Summary: This is a Python package",
"License: MIT",
"Classifier: Intended Audience :: Developers",
"Requires-Dist: appdirs",
"Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
"Requires-Dist: "
+ str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
):
assert line in metadata, (line, metadata)
assert metadata.strip().endswith("This is a ``README``")
assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
def test_static_metadata_in_pyproject_config(self, tmpdir):
# Make sure static metadata in pyproject.toml is not overwritten by setup.py
# as required by PEP 621
files = {
'pyproject.toml': DALS(
"""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "foo"
description = "This is a Python package"
version = "42"
dependencies = ["six"]
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='bar',
version='13',
)
"""
),
}
build_backend = self.get_build_backend()
with tmpdir.as_cwd():
path.build(files)
sdist_path = build_backend.build_sdist("temp")
wheel_file = build_backend.build_wheel("temp")
assert (tmpdir / "temp/foo-42.tar.gz").exists()
assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
assert not (tmpdir / "temp/bar-13.tar.gz").exists()
assert not (tmpdir / "temp/bar-42.tar.gz").exists()
assert not (tmpdir / "temp/foo-13.tar.gz").exists()
assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
members = tar.getnames()
assert "bar-13/PKG-INFO" not in members
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
members = zipfile.namelist()
assert "bar-13.dist-info/METADATA" not in members
for file in pkg_info, metadata:
for line in ("Name: foo", "Version: 42"):
assert line in file
for line in ("Name: bar", "Version: 13"):
assert line not in file
def test_build_sdist(self, build_backend):
dist_dir = os.path.abspath('pip-sdist')
os.makedirs(dist_dir)
sdist_name = build_backend.build_sdist(dist_dir)
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
def test_prepare_metadata_for_build_wheel(self, build_backend):
dist_dir = os.path.abspath('pip-dist-info')
os.makedirs(dist_dir)
dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
def test_prepare_metadata_inplace(self, build_backend):
"""
Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
See issue #3523.
"""
for pre_existing in [
".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
".venv/python3.10/site-packages/click-8.1.3.dist-info",
"venv/python3.10/site-packages/distlib-0.3.5.dist-info",
"env/python3.10/site-packages/docutils-0.19.dist-info",
]:
os.makedirs(pre_existing, exist_ok=True)
dist_info = build_backend.prepare_metadata_for_build_wheel(".")
assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
def test_build_sdist_explicit_dist(self, build_backend):
# explicitly specifying the dist folder should work
# the folder sdist_directory and the ``--dist-dir`` can be the same
dist_dir = os.path.abspath('dist')
sdist_name = build_backend.build_sdist(dist_dir)
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
def test_build_sdist_version_change(self, build_backend):
sdist_into_directory = os.path.abspath("out_sdist")
os.makedirs(sdist_into_directory)
sdist_name = build_backend.build_sdist(sdist_into_directory)
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
# if the setup.py changes subsequent call of the build meta
# should still succeed, given the
# sdist_directory the frontend specifies is empty
setup_loc = os.path.abspath("setup.py")
if not os.path.exists(setup_loc):
setup_loc = os.path.abspath("setup.cfg")
with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
content = file_handler.read()
with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
shutil.rmtree(sdist_into_directory)
os.makedirs(sdist_into_directory)
sdist_name = build_backend.build_sdist("out_sdist")
assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
files = {
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello']
)"""
),
'hello.py': '',
'pyproject.toml': DALS(
"""
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
"""
),
}
path.build(files)
build_backend = self.get_build_backend()
targz_path = build_backend.build_sdist("temp")
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert any('pyproject.toml' in name for name in tar.getnames())
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
# If build_sdist is called from a script other than setup.py,
# ensure setup.py is included
path.build(defns[0])
build_backend = self.get_build_backend()
targz_path = build_backend.build_sdist("temp")
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert any('setup.py' in name for name in tar.getnames())
def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
# Ensure that MANIFEST.in can exclude setup.py
files = {
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello']
)"""
),
'hello.py': '',
'MANIFEST.in': DALS(
"""
exclude setup.py
"""
),
}
path.build(files)
build_backend = self.get_build_backend()
targz_path = build_backend.build_sdist("temp")
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert not any('setup.py' in name for name in tar.getnames())
def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
files = {
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello']
)"""
),
'hello.py': '',
'setup.cfg': DALS(
"""
[sdist]
formats=zip
"""
),
}
path.build(files)
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
_relative_path_import_files = {
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='foo',
version=__import__('hello').__version__,
py_modules=['hello']
)"""
),
'hello.py': '__version__ = "0.0.0"',
'setup.cfg': DALS(
"""
[sdist]
formats=zip
"""
),
}
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
path.build(self._relative_path_import_files)
build_backend = self.get_build_backend()
with pytest.raises(ImportError, match="^No module named 'hello'$"):
build_backend.build_sdist("temp")
_simple_pyproject_example = {
"pyproject.toml": DALS(
"""
[project]
name = "proj"
version = "42"
"""
),
"src": {"proj": {"__init__.py": ""}},
}
def _assert_link_tree(self, parent_dir):
"""All files in the directory should be either links or hard links"""
files = list(Path(parent_dir).glob("**/*"))
assert files # Should not be empty
for file in files:
assert file.is_symlink() or os.stat(file).st_nlink > 0
def test_editable_without_config_settings(self, tmpdir_cwd):
"""
Sanity check to ensure tests with --mode=strict are different from the ones
without --mode.
--mode=strict should create a local directory with a package tree.
The directory should not get created otherwise.
"""
path.build(self._simple_pyproject_example)
build_backend = self.get_build_backend()
assert not Path("build").exists()
build_backend.build_editable("temp")
assert not Path("build").exists()
def test_build_wheel_inplace(self, tmpdir_cwd):
config_settings = {"--build-option": ["build_ext", "--inplace"]}
path.build(self._simple_pyproject_example)
build_backend = self.get_build_backend()
assert not Path("build").exists()
Path("build").mkdir()
build_backend.prepare_metadata_for_build_wheel("build", config_settings)
build_backend.build_wheel("build", config_settings)
assert Path("build/proj-42-py3-none-any.whl").exists()
@pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
path.build({**self._simple_pyproject_example, '_meta': {}})
assert not Path("build").exists()
build_backend = self.get_build_backend()
build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
build_backend.build_editable("temp", config_settings, "_meta")
self._assert_link_tree(next(Path("build").glob("__editable__.*")))
@pytest.mark.parametrize(
'setup_literal, requirements',
[
("'foo'", ['foo']),
("['foo']", ['foo']),
(r"'foo\n'", ['foo']),
(r"'foo\n\n'", ['foo']),
("['foo', 'bar']", ['foo', 'bar']),
(r"'# Has a comment line\nfoo'", ['foo']),
(r"'foo # Has an inline comment'", ['foo']),
(r"'foo \\\n >=3.0'", ['foo>=3.0']),
(r"'foo\nbar'", ['foo', 'bar']),
(r"'foo\nbar\n'", ['foo', 'bar']),
(r"['foo\n', 'bar\n']", ['foo', 'bar']),
],
)
@pytest.mark.parametrize('use_wheel', [True, False])
def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
files = {
'setup.py': DALS(
"""
from setuptools import setup
setup(
name="qux",
version="0.0.0",
py_modules=["hello"],
setup_requires={setup_literal},
)
"""
).format(setup_literal=setup_literal),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
}
path.build(files)
build_backend = self.get_build_backend()
if use_wheel:
get_requires = build_backend.get_requires_for_build_wheel
else:
get_requires = build_backend.get_requires_for_build_sdist
# Ensure that the build requirements are properly parsed
expected = sorted(requirements)
actual = get_requires()
assert expected == sorted(actual)
def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
# Make sure patches introduced to retrieve setup_requires don't accidentally
# activate auto-discovery and cause problems due to the incomplete set of
# attributes passed to MinimalDistribution
files = {
'pyproject.toml': DALS(
"""
[project]
name = "proj"
version = "42"
"""
),
"setup.py": DALS(
"""
__import__('setuptools').setup(
setup_requires=["foo"],
py_modules = ["hello", "world"]
)
"""
),
'hello.py': "'hello'",
'world.py': "'world'",
}
path.build(files)
build_backend = self.get_build_backend()
setup_requires = build_backend.get_requires_for_build_wheel()
assert setup_requires == ["foo"]
def test_dont_install_setup_requires(self, tmpdir_cwd):
files = {
'setup.py': DALS(
"""
from setuptools import setup
setup(
name="qux",
version="0.0.0",
py_modules=["hello"],
setup_requires=["does-not-exist >99"],
)
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
}
path.build(files)
build_backend = self.get_build_backend()
dist_dir = os.path.abspath('pip-dist-info')
os.makedirs(dist_dir)
# does-not-exist can't be satisfied, so if it attempts to install
# setup_requires, it will fail.
build_backend.prepare_metadata_for_build_wheel(dist_dir)
_sys_argv_0_passthrough = {
'setup.py': DALS(
"""
import os
import sys
__import__('setuptools').setup(
name='foo',
version='0.0.0',
)
sys_argv = os.path.abspath(sys.argv[0])
file_path = os.path.abspath('setup.py')
assert sys_argv == file_path
"""
)
}
def test_sys_argv_passthrough(self, tmpdir_cwd):
path.build(self._sys_argv_0_passthrough)
build_backend = self.get_build_backend()
with pytest.raises(AssertionError):
build_backend.build_sdist("temp")
_setup_py_file_abspath = {
'setup.py': DALS(
"""
import os
assert os.path.isabs(__file__)
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
)
}
def test_setup_py_file_abspath(self, tmpdir_cwd):
path.build(self._setup_py_file_abspath)
build_backend = self.get_build_backend()
build_backend.build_sdist("temp")
@pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
def test_build_with_empty_setuppy(self, build_backend, build_hook):
files = {'setup.py': ''}
path.build(files)
msg = re.escape('No distribution was found.')
with pytest.raises(ValueError, match=msg):
getattr(build_backend, build_hook)("temp")
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
    # Re-run the whole suite against the ``__legacy__`` backend, which keeps
    # setup.py-era behaviors (project dir on sys.path, argv passthrough).
    backend_name = 'setuptools.build_meta:__legacy__'

    # build_meta_legacy-specific tests
    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
        # This must fail in build_meta, but must pass in build_meta_legacy
        path.build(self._relative_path_import_files)
        build_backend = self.get_build_backend()
        build_backend.build_sdist("temp")

    def test_sys_argv_passthrough(self, tmpdir_cwd):
        # Override: the stub's assertion about sys.argv[0] holds here, so
        # the build succeeds instead of raising AssertionError.
        path.build(self._sys_argv_0_passthrough)
        build_backend = self.get_build_backend()
        build_backend.build_sdist("temp")
def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
    """``SETUPTOOLS_ENABLE_FEATURES=legacy-editable`` should make ``pip
    install -e`` fall back to the old ``setup.py develop`` code path instead
    of building an editable wheel.
    """
    pyproject = """
    [build-system]
    requires = ["setuptools"]
    build-backend = "setuptools.build_meta"

    [project]
    name = "myproj"
    version = "42"
    """
    path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})

    # First: sanity check
    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
    output = venv.run(cmd, cwd=tmpdir).lower()
    assert "running setup.py develop for myproj" not in output
    assert "created wheel for myproj" in output

    # Then: real test
    env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
    output = venv.run(cmd, cwd=tmpdir, env=env).lower()
    assert "running setup.py develop for myproj" in output
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
    """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
    monkeypatch.chdir(tmp_path)
    script = """
    import sys, setuptools
    setuptools.setup(name='foo', version='0.0.0')
    sys.exit(0)
    """
    setup_py = tmp_path / "setup.py"
    setup_py.write_text(DALS(script), encoding="utf-8")
    backend = BuildBackend(backend_name="setuptools.build_meta")
    assert backend.get_requires_for_build_wheel() == []
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
    """A ``SystemExit`` raised inside setup.py must propagate to the caller."""
    monkeypatch.chdir(tmp_path)
    script_path = tmp_path / "setup.py"
    script_path.write_text("import sys; sys.exit('some error')", encoding="utf-8")
    with pytest.raises(SystemExit, match="some error"):
        backend = BuildBackend(backend_name="setuptools.build_meta")
        backend.get_requires_for_build_wheel()

View File

@ -0,0 +1,480 @@
import os
import shutil
import stat
import warnings
from pathlib import Path
from unittest.mock import Mock
import jaraco.path
import pytest
from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution
from .textwrap import DALS
def test_directories_in_package_data_glob(tmpdir_cwd):
    """
    Directories matching the glob in package_data should
    not be included in the package data.

    Regression test for #261.
    """
    attrs = {
        'script_name': 'setup.py',
        'script_args': ['build_py'],
        'packages': [''],
        'package_data': {'': ['path/*']},
    }
    dist = Distribution(attrs)
    # Only a directory matches the glob; build_py must not choke on it.
    os.makedirs('path/subpath')
    dist.parse_command_line()
    dist.run_commands()
def test_recursive_in_package_data_glob(tmpdir_cwd):
    """
    Files matching recursive globs (**) in package_data should
    be included in the package data.

    #1806
    """
    attrs = {
        'script_name': 'setup.py',
        'script_args': ['build_py'],
        'packages': [''],
        'package_data': {'': ['path/**/data']},
    }
    dist = Distribution(attrs)
    os.makedirs('path/subpath/subsubpath')
    open('path/subpath/subsubpath/data', 'wb').close()

    dist.parse_command_line()
    dist.run_commands()

    built = os.stat('build/lib/path/subpath/subsubpath/data')
    assert stat.S_ISREG(built.st_mode), "File is not included"
def test_read_only(tmpdir_cwd):
    """
    Ensure read-only flag is not preserved in copy
    for package modules and package data, as that
    causes problems with deleting read-only files on
    Windows.

    #1451
    """
    attrs = {
        'script_name': 'setup.py',
        'script_args': ['build_py'],
        'packages': ['pkg'],
        'package_data': {'pkg': ['data.dat']},
    }
    dist = Distribution(attrs)
    os.makedirs('pkg')
    # Create the module and the data file, both read-only.
    for name in ('pkg/__init__.py', 'pkg/data.dat'):
        open(name, 'wb').close()
        os.chmod(name, stat.S_IREAD)
    dist.parse_command_line()
    dist.run_commands()
    # If the read-only bit leaked into the copies, this rmtree would fail
    # on Windows.
    shutil.rmtree('build')
@pytest.mark.xfail(
    'platform.system() == "Windows"',
    reason="On Windows, files do not have executable bits",
    raises=AssertionError,
    strict=True,
)
def test_executable_data(tmpdir_cwd):
    """
    Ensure executable bit is preserved in copy for
    package data, as users rely on it for scripts.

    #2041
    """
    dist = Distribution(
        dict(
            script_name='setup.py',
            script_args=['build_py'],
            packages=['pkg'],
            package_data={'pkg': ['run-me']},
        )
    )
    os.makedirs('pkg')
    open('pkg/__init__.py', 'wb').close()
    open('pkg/run-me', 'wb').close()
    # Mark the data file as executable for the owner (rwx------).
    os.chmod('pkg/run-me', 0o700)

    dist.parse_command_line()
    dist.run_commands()

    assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
        "Script is not executable"
    )
EXAMPLE_WITH_MANIFEST = {
"setup.cfg": DALS(
"""
[metadata]
name = mypkg
version = 42
[options]
include_package_data = True
packages = find:
[options.packages.find]
exclude = *.tests*
"""
),
"mypkg": {
"__init__.py": "",
"resource_file.txt": "",
"tests": {
"__init__.py": "",
"test_mypkg.py": "",
"test_file.txt": "",
},
},
"MANIFEST.in": DALS(
"""
global-include *.py *.txt
global-exclude *.py[cod]
prune dist
prune build
prune *.egg-info
"""
),
}
def test_excluded_subpackages(tmpdir_cwd):
    """``build_py`` currently only *warns* when ``include_package_data``
    pulls in files from packages excluded in configuration (``mypkg.tests``);
    the stricter assertions at the end are expected to fail until #3260 is
    fixed, hence the ``pytest.raises`` + ``xfail`` dance.
    """
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")
    msg = r"Python recognizes 'mypkg\.tests' as an importable package"
    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
        # TODO: To fix #3260 we need some transition period to deprecate the
        # existing behavior of `include_package_data`. After the transition, we
        # should remove the warning and fix the behaviour.

        if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
            # pytest.warns reset the warning filter temporarily
            # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
            warnings.filterwarnings(
                "ignore",
                "'encoding' argument not specified",
                module="distutils.text_file",
                # This warning is already fixed in pypa/distutils but not in stdlib
            )

        build_py.finalize_options()
        build_py.run()

    build_dir = Path(dist.get_command_obj("build_py").build_lib)
    assert (build_dir / "mypkg/__init__.py").exists()
    assert (build_dir / "mypkg/resource_file.txt").exists()

    # Setuptools is configured to ignore `mypkg.tests`, therefore the following
    # files/dirs should not be included in the distribution.
    for f in [
        "mypkg/tests/__init__.py",
        "mypkg/tests/test_mypkg.py",
        "mypkg/tests/test_file.txt",
        "mypkg/tests",
    ]:
        with pytest.raises(AssertionError):
            # TODO: Enforce the following assertion once #3260 is fixed
            # (remove context manager and the following xfail).
            assert not (build_dir / f).exists()

        pytest.xfail("#3260")
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
    attempt to run egg_info again.
    """
    # == Pre-condition ==
    # Generate an egg-info dir
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()
    assert dist.include_package_data

    egg_info = dist.get_command_obj("egg_info")
    dist.run_command("egg_info")
    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
    assert egg_info_dir.is_dir()

    # == Setup ==
    build_py = dist.get_command_obj("build_py")
    build_py.finalize_options()
    egg_info = dist.get_command_obj("egg_info")
    egg_info_run = Mock(side_effect=egg_info.run)
    monkeypatch.setattr(egg_info, "run", egg_info_run)

    # == Remove caches ==
    # egg_info is called when build_py looks for data_files, which gets cached.
    # We need to ensure it is not cached yet, otherwise it may impact on the tests
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Sanity check ==
    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
    build_py.existing_egg_info_dir = None
    build_py.run()
    egg_info_run.assert_called()

    # == Remove caches ==
    egg_info_run.reset_mock()
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Actual test ==
    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
    build_py.existing_egg_info_dir = egg_info_dir
    build_py.run()
    egg_info_run.assert_not_called()
    assert build_py.data_files

    # Make sure the list of outputs is actually OK
    # BUGFIX: previously this used ``map(...)``; a map object is always
    # truthy, so ``assert outputs`` could never fail, and a lazy iterator
    # is consumed by membership tests. Materialize a real list instead.
    outputs = [x.replace(os.sep, "/") for x in build_py.get_outputs()]
    assert outputs
    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
    assert example in outputs
EXAMPLE_ARBITRARY_MAPPING = {
"pyproject.toml": DALS(
"""
[project]
name = "mypkg"
version = "42"
[tool.setuptools]
packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
[tool.setuptools.package-dir]
"" = "src"
"mypkg.sub2" = "src/mypkg/_sub2"
"mypkg.sub2.nested" = "other"
"""
),
"src": {
"mypkg": {
"__init__.py": "",
"resource_file.txt": "",
"sub1": {
"__init__.py": "",
"mod1.py": "",
},
"_sub2": {
"mod2.py": "",
},
},
},
"other": {
"__init__.py": "",
"mod3.py": "",
},
"MANIFEST.in": DALS(
"""
global-include *.py *.txt
global-exclude *.py[cod]
"""
),
}
def test_get_outputs(tmpdir_cwd):
    """``get_outputs``/``get_output_mapping`` must resolve the arbitrary
    ``package-dir`` mapping of ``EXAMPLE_ARBITRARY_MAPPING`` (note that
    ``mypkg._sub2`` and ``other`` are remapped under ``mypkg.sub2``).
    """
    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
    dist = Distribution({"script_name": "%test%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")
    build_py.editable_mode = True
    build_py.ensure_finalized()
    # Normalize to "/" separators so expectations are platform-independent.
    build_lib = build_py.build_lib.replace(os.sep, "/")
    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
    assert outputs == {
        f"{build_lib}/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py",
    }
    mapping = {
        k.replace(os.sep, "/"): v.replace(os.sep, "/")
        for k, v in build_py.get_output_mapping().items()
    }
    assert mapping == {
        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
    }
class TestTypeInfoFiles:
    """Type-information files (``py.typed``, ``*.pyi``) should be included
    in builds by default (PEP 561), and be excludable via
    ``exclude-package-data``.
    """

    # pyproject.toml variants controlling package-data behavior.
    PYPROJECTS = {
        "default_pyproject": DALS(
            """
            [project]
            name = "foo"
            version = "1"
            """
        ),
        "dont_include_package_data": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false
            """
        ),
        "exclude_type_info": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false

            [tool.setuptools.exclude-package-data]
            "*" = ["py.typed", "*.pyi"]
            """
        ),
    }

    # Directory layouts paired with the type files each should produce.
    EXAMPLES = {
        "simple_namespace": {
            "directory_structure": {
                "foo": {
                    "bar.pyi": "",
                    "py.typed": "",
                    "__init__.py": "",
                }
            },
            "expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
        },
        "nested_inside_namespace": {
            "directory_structure": {
                "foo": {
                    "bar": {
                        "py.typed": "",
                        "mod.pyi": "",
                    }
                }
            },
            "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
        },
        "namespace_nested_inside_regular": {
            "directory_structure": {
                "foo": {
                    "namespace": {
                        "foo.pyi": "",
                    },
                    "__init__.pyi": "",
                    "py.typed": "",
                }
            },
            "expected_type_files": {
                "foo/namespace/foo.pyi",
                "foo/__init__.pyi",
                "foo/py.typed",
            },
        },
    }

    @pytest.mark.parametrize(
        "pyproject",
        [
            "default_pyproject",
            pytest.param(
                "dont_include_package_data",
                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
            ),
        ],
    )
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        # Subset check: all expected type files appear among the outputs.
        assert expected_type_files <= outputs

    @pytest.mark.parametrize("pyproject", ["exclude_type_info"])
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        # With explicit exclusion, no type file should appear.
        assert expected_type_files.isdisjoint(outputs)

    def test_stub_only_package(self, tmpdir_cwd):
        # PEP 561 stub-only packages (``*-stubs``) still get their .pyi
        # files shipped.
        structure = {
            "pyproject.toml": DALS(
                """
                [project]
                name = "foo-stubs"
                version = "1"
                """
            ),
            "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
        }
        expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs
def get_finalized_build_py(script_name="%build_py-test%"):
    """Parse on-disk config and return the finalized ``build_py`` command."""
    dist = Distribution({"script_name": script_name})
    dist.parse_config_files()
    cmd = dist.get_command_obj("build_py")
    cmd.finalize_options()
    return cmd
def get_outputs(build_py):
    """Return ``build_py`` outputs as POSIX-style paths relative to build_lib."""
    base = Path(build_py.build_lib)
    relative = (
        os.path.relpath(out, base).replace(os.sep, "/")
        for out in build_py.get_outputs()
    )
    return set(relative)

View File

@ -0,0 +1,647 @@
import os
import sys
from configparser import ConfigParser
from itertools import product
from typing import cast
import jaraco.path
import pytest
from path import Path
import setuptools # noqa: F401 # force distutils.core to be patched
from setuptools.command.sdist import sdist
from setuptools.discovery import find_package_path, find_parent_package
from setuptools.dist import Distribution
from setuptools.errors import PackageDiscoveryError
from .contexts import quiet
from .integration.helpers import get_sdist_members, get_wheel_members, run
from .textwrap import DALS
import distutils.core
class TestFindParentPackage:
    """Tests for ``setuptools.discovery.find_parent_package``."""

    def test_single_package(self, tmp_path):
        # find_parent_package should find a non-namespace parent package
        nested_dir = tmp_path / "src/namespace/pkg/nested"
        nested_dir.mkdir(exist_ok=True, parents=True)
        (nested_dir / "__init__.py").touch()
        (tmp_path / "src/namespace/pkg/__init__.py").touch()
        packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
        assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"

    def test_multiple_toplevel(self, tmp_path):
        # find_parent_package should return null if the given list of packages does not
        # have a single parent package
        multiple = ["pkg", "pkg1", "pkg2"]
        for name in multiple:
            pkg_dir = tmp_path / f"src/{name}"
            pkg_dir.mkdir(exist_ok=True, parents=True)
            (pkg_dir / "__init__.py").touch()
        assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
class TestDiscoverPackagesAndPyModules:
"""Make sure discovered values for ``packages`` and ``py_modules`` work
similarly to explicit configuration for the simple scenarios.
"""
OPTIONS = {
# Different options according to the circumstance being tested
"explicit-src": {"package_dir": {"": "src"}, "packages": ["pkg"]},
"variation-lib": {
"package_dir": {"": "lib"}, # variation of the source-layout
},
"explicit-flat": {"packages": ["pkg"]},
"explicit-single_module": {"py_modules": ["pkg"]},
"explicit-namespace": {"packages": ["ns", "ns.pkg"]},
"automatic-src": {},
"automatic-flat": {},
"automatic-single_module": {},
"automatic-namespace": {},
}
FILES = {
"src": ["src/pkg/__init__.py", "src/pkg/main.py"],
"lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
"flat": ["pkg/__init__.py", "pkg/main.py"],
"single_module": ["pkg.py"],
"namespace": ["ns/pkg/__init__.py"],
}
def _get_info(self, circumstance):
_, _, layout = circumstance.partition("-")
files = self.FILES[layout]
options = self.OPTIONS[circumstance]
return files, options
    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
    def test_sdist_filelist(self, tmp_path, circumstance):
        """Every file of the (discovered or explicit) layout must end up in
        the sdist file list.
        """
        files, options = self._get_info(circumstance)
        _populate_project_dir(tmp_path, files, options)
        _, cmd = _run_sdist_programatically(tmp_path, options)
        # normalize to POSIX separators before comparing path suffixes
        manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
        for file in files:
            assert any(f.endswith(file) for f in manifest)
@pytest.mark.parametrize("circumstance", OPTIONS.keys())
def test_project(self, tmp_path, circumstance):
files, options = self._get_info(circumstance)
_populate_project_dir(tmp_path, files, options)
# Simulate a pre-existing `build` directory
(tmp_path / "build").mkdir()
(tmp_path / "build/lib").mkdir()
(tmp_path / "build/bdist.linux-x86_64").mkdir()
(tmp_path / "build/bdist.linux-x86_64/file.py").touch()
(tmp_path / "build/lib/__init__.py").touch()
(tmp_path / "build/lib/file.py").touch()
(tmp_path / "dist").mkdir()
(tmp_path / "dist/file.py").touch()
_run_build(tmp_path)
sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
print("~~~~~ sdist_members ~~~~~")
print('\n'.join(sdist_files))
assert sdist_files >= set(files)
wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
print("~~~~~ wheel_members ~~~~~")
print('\n'.join(wheel_files))
orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
assert wheel_files >= orig_files
# Make sure build files are not included by mistake
for file in wheel_files:
assert "build" not in files
assert "dist" not in files
PURPOSEFULLY_EMPY = {
"setup.cfg": DALS(
"""
[metadata]
name = myproj
version = 0.0.0
[options]
{param} =
"""
),
"setup.py": DALS(
"""
__import__('setuptools').setup(
name="myproj",
version="0.0.0",
{param}=[]
)
"""
),
"pyproject.toml": DALS(
"""
[build-system]
requires = []
build-backend = 'setuptools.build_meta'
[project]
name = "myproj"
version = "0.0.0"
[tool.setuptools]
{param} = []
"""
),
"template-pyproject.toml": DALS(
"""
[build-system]
requires = []
build-backend = 'setuptools.build_meta'
"""
),
}
@pytest.mark.parametrize(
"config_file, param, circumstance",
product(
["setup.cfg", "setup.py", "pyproject.toml"],
["packages", "py_modules"],
FILES.keys(),
),
)
def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
_populate_project_dir(tmp_path, files, {})
if config_file == "pyproject.toml":
template_param = param.replace("_", "-")
else:
# Make sure build works with or without setup.cfg
pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
(tmp_path / "pyproject.toml").write_text(pyproject, encoding="utf-8")
template_param = param
config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
(tmp_path / config_file).write_text(config, encoding="utf-8")
dist = _get_dist(tmp_path, {})
# When either parameter package or py_modules is an empty list,
# then there should be no discovery
assert getattr(dist, param) == []
other = {"py_modules": "packages", "packages": "py_modules"}[param]
assert getattr(dist, other) is None
@pytest.mark.parametrize(
"extra_files, pkgs",
[
(["venv/bin/simulate_venv"], {"pkg"}),
(["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
(["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
(
# Type stubs can also be namespaced
["namespace-stubs/pkg/__init__.pyi"],
{"pkg", "namespace-stubs", "namespace-stubs.pkg"},
),
(
# Just the top-level package can have `-stubs`, ignore nested ones
["namespace-stubs/pkg-stubs/__init__.pyi"],
{"pkg", "namespace-stubs"},
),
(["_hidden/file.py"], {"pkg"}),
(["news/finalize.py"], {"pkg"}),
],
)
def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
files = self.FILES["flat"] + extra_files
_populate_project_dir(tmp_path, files, {})
dist = _get_dist(tmp_path, {})
assert set(dist.packages) == pkgs
@pytest.mark.parametrize(
"extra_files",
[
["other/__init__.py"],
["other/finalize.py"],
],
)
def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
files = self.FILES["flat"] + extra_files
_populate_project_dir(tmp_path, files, {})
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
_get_dist(tmp_path, {})
def test_flat_layout_with_single_module(self, tmp_path):
files = self.FILES["single_module"] + ["invalid-module-name.py"]
_populate_project_dir(tmp_path, files, {})
dist = _get_dist(tmp_path, {})
assert set(dist.py_modules) == {"pkg"}
def test_flat_layout_with_multiple_modules(self, tmp_path):
files = self.FILES["single_module"] + ["valid_module_name.py"]
_populate_project_dir(tmp_path, files, {})
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
_get_dist(tmp_path, {})
    def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
        """Regression for issue 3692"""
        from setuptools import build_meta

        pyproject = '[project]\nname = "test"\nversion = "1"'
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
        (tmp_path / "foo.py").touch()
        # build with the project dir as cwd and the wheel written right into
        # it -- the combination that triggered the regression
        with jaraco.path.DirectoryStack().context(tmp_path):
            build_meta.build_wheel(".")
        # Ensure py_modules are found
        wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
        assert "foo.py" in wheel_files
class TestNoConfig:
DEFAULT_VERSION = "0.0.0" # Default version given by setuptools
EXAMPLES = {
"pkg1": ["src/pkg1.py"],
"pkg2": ["src/pkg2/__init__.py"],
"pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
"pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
"ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
"ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
}
@pytest.mark.parametrize("example", EXAMPLES.keys())
def test_discover_name(self, tmp_path, example):
_populate_project_dir(tmp_path, self.EXAMPLES[example], {})
dist = _get_dist(tmp_path, {})
assert dist.get_name() == example
def test_build_with_discovered_name(self, tmp_path):
files = ["src/ns/nested/pkg/__init__.py"]
_populate_project_dir(tmp_path, files, {})
_run_build(tmp_path, "--sdist")
# Expected distribution file
dist_file = tmp_path / f"dist/ns_nested_pkg-{self.DEFAULT_VERSION}.tar.gz"
assert dist_file.is_file()
class TestWithAttrDirective:
@pytest.mark.parametrize(
"folder, opts",
[
("src", {}),
("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
],
)
def test_setupcfg_metadata(self, tmp_path, folder, opts):
files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
_populate_project_dir(tmp_path, files, opts)
config = (tmp_path / "setup.cfg").read_text(encoding="utf-8")
overwrite = {
folder: {"pkg": {"__init__.py": "version = 42"}},
"setup.cfg": "[metadata]\nversion = attr: pkg.version\n" + config,
}
jaraco.path.build(overwrite, prefix=tmp_path)
dist = _get_dist(tmp_path, {})
assert dist.get_name() == "pkg"
assert dist.get_version() == "42"
assert dist.package_dir
package_path = find_package_path("pkg", dist.package_dir, tmp_path)
assert os.path.exists(package_path)
assert folder in Path(package_path).parts()
_run_build(tmp_path, "--sdist")
dist_file = tmp_path / "dist/pkg-42.tar.gz"
assert dist_file.is_file()
def test_pyproject_metadata(self, tmp_path):
_populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
overwrite = {
"src": {"pkg": {"__init__.py": "version = 42"}},
"pyproject.toml": (
"[project]\nname = 'pkg'\ndynamic = ['version']\n"
"[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
),
}
jaraco.path.build(overwrite, prefix=tmp_path)
dist = _get_dist(tmp_path, {})
assert dist.get_version() == "42"
assert dist.package_dir == {"": "src"}
class TestWithCExtension:
def _simulate_package_with_extension(self, tmp_path):
# This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
files = [
"benchmarks/file.py",
"docs/Makefile",
"docs/requirements.txt",
"docs/source/conf.py",
"proj/header.h",
"proj/file.py",
"py/proj.cpp",
"py/other.cpp",
"py/file.py",
"py/py.typed",
"py/tests/test_proj.py",
"README.rst",
]
_populate_project_dir(tmp_path, files, {})
setup_script = """
from setuptools import Extension, setup
ext_modules = [
Extension(
"proj",
["py/proj.cpp", "py/other.cpp"],
include_dirs=["."],
language="c++",
),
]
setup(ext_modules=ext_modules)
"""
(tmp_path / "setup.py").write_text(DALS(setup_script), encoding="utf-8")
def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
"""Ensure that auto-discovery is not triggered when the project is based on
C-extensions only, for backward compatibility.
"""
self._simulate_package_with_extension(tmp_path)
pyproject = """
[build-system]
requires = []
build-backend = 'setuptools.build_meta'
"""
(tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
setupcfg = """
[metadata]
name = proj
version = 42
"""
(tmp_path / "setup.cfg").write_text(DALS(setupcfg), encoding="utf-8")
dist = _get_dist(tmp_path, {})
assert dist.get_name() == "proj"
assert dist.get_version() == "42"
assert dist.py_modules is None
assert dist.packages is None
assert len(dist.ext_modules) == 1
assert dist.ext_modules[0].name == "proj"
def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
"""When opting-in to pyproject.toml metadata, auto-discovery will be active if
the package lists C-extensions, but does not configure py-modules or packages.
This way we ensure users with complex package layouts that would lead to the
discovery of multiple top-level modules/packages see errors and are forced to
explicitly set ``packages`` or ``py-modules``.
"""
self._simulate_package_with_extension(tmp_path)
pyproject = """
[project]
name = 'proj'
version = '42'
"""
(tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
_get_dist(tmp_path, {})
class TestWithPackageData:
def _simulate_package_with_data_files(self, tmp_path, src_root):
files = [
f"{src_root}/proj/__init__.py",
f"{src_root}/proj/file1.txt",
f"{src_root}/proj/nested/file2.txt",
]
_populate_project_dir(tmp_path, files, {})
manifest = """
global-include *.py *.txt
"""
(tmp_path / "MANIFEST.in").write_text(DALS(manifest), encoding="utf-8")
EXAMPLE_SETUPCFG = """
[metadata]
name = proj
version = 42
[options]
include_package_data = True
"""
EXAMPLE_PYPROJECT = """
[project]
name = "proj"
version = "42"
"""
PYPROJECT_PACKAGE_DIR = """
[tool.setuptools]
package-dir = {"" = "src"}
"""
@pytest.mark.parametrize(
"src_root, files",
[
(".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
(".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
(
"src",
{
"setup.cfg": DALS(EXAMPLE_SETUPCFG)
+ DALS(
"""
packages = find:
package_dir =
=src
[options.packages.find]
where = src
"""
)
},
),
(
"src",
{
"pyproject.toml": DALS(EXAMPLE_PYPROJECT)
+ DALS(
"""
[tool.setuptools]
package-dir = {"" = "src"}
"""
)
},
),
],
)
def test_include_package_data(self, tmp_path, src_root, files):
"""
Make sure auto-discovery does not affect package include_package_data.
See issue #3196.
"""
jaraco.path.build(files, prefix=str(tmp_path))
self._simulate_package_with_data_files(tmp_path, src_root)
expected = {
os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
}
_run_build(tmp_path)
sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
print("~~~~~ sdist_members ~~~~~")
print('\n'.join(sdist_files))
assert sdist_files >= expected
wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
print("~~~~~ wheel_members ~~~~~")
print('\n'.join(wheel_files))
orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
assert wheel_files >= orig_files
def test_compatible_with_numpy_configuration(tmp_path):
    """A (numpy-distutils style) ``configuration`` attribute on the
    distribution must disable auto-discovery entirely.
    """
    layout = [
        "dir1/__init__.py",
        "dir2/__init__.py",
        "file.py",
    ]
    _populate_project_dir(tmp_path, layout, {})
    dist = Distribution({})
    dist.configuration = object()
    dist.set_defaults()
    assert dist.packages is None
    assert dist.py_modules is None
def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
    """Metadata-only CLI invocations such as ``--name`` must not crash even
    though they trigger name auto-discovery.
    """
    jaraco.path.build({"pkg.py": ""})
    dist = Distribution({})
    dist.script_args = ["--name"]
    dist.set_defaults()
    dist.parse_command_line()  # <-- no exception should be raised here.
    assert dist.get_name() == "pkg"
def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
"""According to #3545 it seems that ``name`` discovery is running,
even when the project already explicitly sets it.
This seems to be related to parsing of dynamic versions (via ``attr`` directive),
which requires the auto-discovery of ``package_dir``.
"""
files = {
"src": {
"pkg": {"__init__.py": "__version__ = 42\n"},
},
"pyproject.toml": DALS(
"""
[project]
name = "myproj" # purposefully different from package name
dynamic = ["version"]
[tool.setuptools.dynamic]
version = {"attr" = "pkg.__version__"}
"""
),
}
jaraco.path.build(files)
dist = Distribution({})
orig_analyse_name = dist.set_defaults.analyse_name
def spy_analyse_name():
# We can check if name discovery was triggered by ensuring the original
# name remains instead of the package name.
orig_analyse_name()
assert dist.get_name() == "myproj"
monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
dist.parse_config_files()
assert dist.get_version() == "42"
assert set(dist.packages) == {"pkg"}
def _populate_project_dir(root, files, options):
    """Create a minimal buildable project under ``root``: scaffold files,
    a ``setup.cfg`` derived from ``options``, and empty ``files``.
    """
    # NOTE: Currently pypa/build will refuse to build the project if no
    # `pyproject.toml` or `setup.py` is found. So it is impossible to do
    # completely "config-less" projects.
    scaffold = {
        "setup.py": "import setuptools\nsetuptools.setup()",
        "README.md": "# Example Package",
        "LICENSE": "Copyright (c) 2018",
    }
    jaraco.path.build(scaffold, prefix=root)
    _write_setupcfg(root, options)
    for file in files:
        target = root / file
        target.parent.mkdir(exist_ok=True, parents=True)
        target.touch()
def _write_setupcfg(root, options):
    """Serialize ``options`` into ``root/setup.cfg`` and echo the result.

    When ``options`` is empty no file is written at all.
    """
    if not options:
        print("~~~~~ **NO** setup.cfg ~~~~~")
        return
    parser = ConfigParser()
    parser.add_section("options")
    for key, value in options.items():
        if key == "packages.find":
            # ``packages.find`` gets its own dedicated section
            section = f"options.{key}"
            parser.add_section(section)
            parser[section].update(value)
        elif isinstance(value, list):
            parser["options"][key] = ", ".join(value)
        elif isinstance(value, dict):
            entries = (f"\t{k} = {v}" for k, v in value.items())
            parser["options"][key] = "\n" + "\n".join(entries)
        else:
            parser["options"][key] = str(value)
    with open(root / "setup.cfg", "w", encoding="utf-8") as f:
        parser.write(f)
    print("~~~~~ setup.cfg ~~~~~")
    print((root / "setup.cfg").read_text(encoding="utf-8"))
def _run_build(path, *flags):
    # Build the project at ``path`` via pypa/build, reusing the current
    # environment (--no-isolation) so the in-tree setuptools is exercised.
    cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
    return run(cmd, env={'DISTUTILS_DEBUG': ''})
def _get_dist(dist_path, attrs):
    """Return a ``Distribution`` for the project at ``dist_path``, with
    config files parsed and auto-discovery defaults applied.

    If a ``setup.py`` exists it is executed (stopping right after ``setup()``
    is initialized); otherwise the Distribution is built from ``attrs``.
    """
    root = "/".join(os.path.split(dist_path))  # POSIX-style
    script = dist_path / 'setup.py'
    if script.exists():
        # ``path.Path`` as a context manager temporarily chdirs into the
        # project so relative references in setup.py resolve correctly
        with Path(dist_path):
            dist = cast(
                Distribution,
                distutils.core.run_setup("setup.py", {}, stop_after="init"),
            )
    else:
        dist = Distribution(attrs)
    dist.src_root = root
    dist.script_name = "setup.py"
    # parse config and run discovery from within the project directory
    with Path(dist_path):
        dist.parse_config_files()
        dist.set_defaults()
    return dist
def _run_sdist_programatically(dist_path, attrs):
    """Run the ``sdist`` command in-process for the project at ``dist_path``
    and return ``(dist, cmd)`` so callers can inspect the file list.
    """
    # NOTE: name keeps the historical "programatically" misspelling; callers
    # elsewhere in this file reference it.
    dist = _get_dist(dist_path, attrs)
    cmd = sdist(dist)
    cmd.ensure_finalized()
    # discovery must have filled in at least one of packages/py_modules
    assert cmd.distribution.packages or cmd.distribution.py_modules
    with quiet(), Path(dist_path):
        cmd.run()
    return dist, cmd

View File

@ -0,0 +1,388 @@
import functools
import importlib
import io
from email import message_from_string
import pytest
from packaging.metadata import Metadata
from setuptools import _reqs, sic
from setuptools._core_metadata import rfc822_escape, rfc822_unescape
from setuptools.command.egg_info import egg_info, write_requirements
from setuptools.dist import Distribution
EXAMPLE_BASE_INFO = dict(
name="package",
version="0.0.1",
author="Foo Bar",
author_email="foo@bar.net",
long_description="Long\ndescription",
description="Short description",
keywords=["one", "two"],
)
@pytest.mark.parametrize(
'content, result',
(
pytest.param(
"Just a single line",
None,
id="single_line",
),
pytest.param(
"Multiline\nText\nwithout\nextra indents\n",
None,
id="multiline",
),
pytest.param(
"Multiline\n With\n\nadditional\n indentation",
None,
id="multiline_with_indentation",
),
pytest.param(
" Leading whitespace",
"Leading whitespace",
id="remove_leading_whitespace",
),
pytest.param(
" Leading whitespace\nIn\n Multiline comment",
"Leading whitespace\nIn\n Multiline comment",
id="remove_leading_whitespace_multiline",
),
),
)
def test_rfc822_unescape(content, result):
assert (result or content) == rfc822_unescape(rfc822_escape(content))
def __read_test_cases():
base = EXAMPLE_BASE_INFO
params = functools.partial(dict, base)
return [
('Metadata version 1.0', params()),
(
'Metadata Version 1.0: Short long description',
params(
long_description='Short long description',
),
),
(
'Metadata version 1.1: Classifiers',
params(
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: MIT License',
],
),
),
(
'Metadata version 1.1: Download URL',
params(
download_url='https://example.com',
),
),
(
'Metadata Version 1.2: Requires-Python',
params(
python_requires='>=3.7',
),
),
pytest.param(
'Metadata Version 1.2: Project-Url',
params(project_urls=dict(Foo='https://example.bar')),
marks=pytest.mark.xfail(
reason="Issue #1578: project_urls not read",
),
),
(
'Metadata Version 2.1: Long Description Content Type',
params(
long_description_content_type='text/x-rst; charset=UTF-8',
),
),
(
'License',
params(
license='MIT',
),
),
(
'License multiline',
params(
license='This is a long license \nover multiple lines',
),
),
pytest.param(
'Metadata Version 2.1: Provides Extra',
params(provides_extras=['foo', 'bar']),
marks=pytest.mark.xfail(reason="provides_extras not read"),
),
(
'Missing author',
dict(
name='foo',
version='1.0.0',
author_email='snorri@sturluson.name',
),
),
(
'Missing author e-mail',
dict(
name='foo',
version='1.0.0',
author='Snorri Sturluson',
),
),
(
'Missing author and e-mail',
dict(
name='foo',
version='1.0.0',
),
),
(
'Bypass normalized version',
dict(
name='foo',
version=sic('1.0.0a'),
),
),
]
@pytest.mark.parametrize('name,attrs', __read_test_cases())
def test_read_metadata(name, attrs):
dist = Distribution(attrs)
metadata_out = dist.metadata
dist_class = metadata_out.__class__
# Write to PKG_INFO and then load into a new metadata object
PKG_INFO = io.StringIO()
metadata_out.write_pkg_file(PKG_INFO)
PKG_INFO.seek(0)
pkg_info = PKG_INFO.read()
assert _valid_metadata(pkg_info)
PKG_INFO.seek(0)
metadata_in = dist_class()
metadata_in.read_pkg_file(PKG_INFO)
tested_attrs = [
('name', dist_class.get_name),
('version', dist_class.get_version),
('author', dist_class.get_contact),
('author_email', dist_class.get_contact_email),
('metadata_version', dist_class.get_metadata_version),
('provides', dist_class.get_provides),
('description', dist_class.get_description),
('long_description', dist_class.get_long_description),
('download_url', dist_class.get_download_url),
('keywords', dist_class.get_keywords),
('platforms', dist_class.get_platforms),
('obsoletes', dist_class.get_obsoletes),
('requires', dist_class.get_requires),
('classifiers', dist_class.get_classifiers),
('project_urls', lambda s: getattr(s, 'project_urls', {})),
('provides_extras', lambda s: getattr(s, 'provides_extras', {})),
]
for attr, getter in tested_attrs:
assert getter(metadata_in) == getter(metadata_out)
def __maintainer_test_cases():
attrs = {"name": "package", "version": "1.0", "description": "xxx"}
def merge_dicts(d1, d2):
d1 = d1.copy()
d1.update(d2)
return d1
return [
('No author, no maintainer', attrs.copy()),
(
'Author (no e-mail), no maintainer',
merge_dicts(attrs, {'author': 'Author Name'}),
),
(
'Author (e-mail), no maintainer',
merge_dicts(
attrs, {'author': 'Author Name', 'author_email': 'author@name.com'}
),
),
(
'No author, maintainer (no e-mail)',
merge_dicts(attrs, {'maintainer': 'Maintainer Name'}),
),
(
'No author, maintainer (e-mail)',
merge_dicts(
attrs,
{
'maintainer': 'Maintainer Name',
'maintainer_email': 'maintainer@name.com',
},
),
),
(
'Author (no e-mail), Maintainer (no-email)',
merge_dicts(
attrs, {'author': 'Author Name', 'maintainer': 'Maintainer Name'}
),
),
(
'Author (e-mail), Maintainer (e-mail)',
merge_dicts(
attrs,
{
'author': 'Author Name',
'author_email': 'author@name.com',
'maintainer': 'Maintainer Name',
'maintainer_email': 'maintainer@name.com',
},
),
),
(
'No author (e-mail), no maintainer (e-mail)',
merge_dicts(
attrs,
{
'author_email': 'author@name.com',
'maintainer_email': 'maintainer@name.com',
},
),
),
('Author unicode', merge_dicts(attrs, {'author': '鉄沢寛'})),
('Maintainer unicode', merge_dicts(attrs, {'maintainer': 'Jan Łukasiewicz'})),
]
@pytest.mark.parametrize('name,attrs', __maintainer_test_cases())
def test_maintainer_author(name, attrs, tmpdir):
    """PKG-INFO must contain exactly the author/maintainer headers implied by
    ``attrs``: present attributes appear once, absent ones not at all.
    """
    # maps Distribution attribute name -> RFC 822 header name in PKG-INFO
    tested_keys = {
        'author': 'Author',
        'author_email': 'Author-email',
        'maintainer': 'Maintainer',
        'maintainer_email': 'Maintainer-email',
    }

    # Generate a PKG-INFO file
    dist = Distribution(attrs)
    fn = tmpdir.mkdir('pkg_info')
    fn_s = str(fn)
    dist.metadata.write_pkg_info(fn_s)
    with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
        pkg_info = f.read()
    assert _valid_metadata(pkg_info)

    # Drop blank lines and strip lines from default description
    raw_pkg_lines = pkg_info.splitlines()
    pkg_lines = list(filter(None, raw_pkg_lines[:-2]))
    pkg_lines_set = set(pkg_lines)

    # Duplicate lines should not be generated
    assert len(pkg_lines) == len(pkg_lines_set)

    for attr_key, header in tested_keys.items():
        # BUG FIX: the two names were swapped -- the header name was looked
        # up in ``attrs`` (always None, attrs uses lowercase keys) and the
        # lowercase attribute name was compared against RFC 822 headers, so
        # every assertion in this loop was vacuous.
        val = attrs.get(attr_key, None)
        if val is None:
            for line in pkg_lines:
                assert not line.startswith(header + ':')
        else:
            line = '%s: %s' % (header, val)
            assert line in pkg_lines_set
def test_parity_with_metadata_from_pypa_wheel(tmp_path):
attrs = dict(
**EXAMPLE_BASE_INFO,
# Example with complex requirement definition
python_requires=">=3.8",
install_requires="""
packaging==23.2
more-itertools==8.8.0; extra == "other"
jaraco.text==3.7.0
importlib-resources==5.10.2; python_version<"3.8"
importlib-metadata==6.0.0 ; python_version<"3.8"
colorama>=0.4.4; sys_platform == "win32"
""",
extras_require={
"testing": """
pytest >= 6
pytest-checkdocs >= 2.4
tomli ; \\
# Using stdlib when possible
python_version < "3.11"
ini2toml[lite]>=0.9
""",
"other": [],
},
)
# Generate a PKG-INFO file using setuptools
dist = Distribution(attrs)
with io.StringIO() as fp:
dist.metadata.write_pkg_file(fp)
pkg_info = fp.getvalue()
assert _valid_metadata(pkg_info)
# Ensure Requires-Dist is present
expected = [
'Metadata-Version:',
'Requires-Python: >=3.8',
'Provides-Extra: other',
'Provides-Extra: testing',
'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
'Requires-Dist: more-itertools==8.8.0; extra == "other"',
'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
]
for line in expected:
assert line in pkg_info
# Generate a METADATA file using pypa/wheel for comparison
wheel_metadata = importlib.import_module("wheel.metadata")
pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
if pkginfo_to_metadata is None:
pytest.xfail(
"wheel.metadata.pkginfo_to_metadata is undefined, "
"(this is likely to be caused by API changes in pypa/wheel"
)
# Generate an simplified "egg-info" dir for pypa/wheel to convert
egg_info_dir = tmp_path / "pkg.egg-info"
egg_info_dir.mkdir(parents=True)
(egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")
# Get pypa/wheel generated METADATA but normalize requirements formatting
metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
metadata_deps = set(_reqs.parse(metadata_msg.get_all("Requires-Dist")))
metadata_extras = set(metadata_msg.get_all("Provides-Extra"))
del metadata_msg["Requires-Dist"]
del metadata_msg["Provides-Extra"]
pkg_info_msg = message_from_string(pkg_info)
pkg_info_deps = set(_reqs.parse(pkg_info_msg.get_all("Requires-Dist")))
pkg_info_extras = set(pkg_info_msg.get_all("Provides-Extra"))
del pkg_info_msg["Requires-Dist"]
del pkg_info_msg["Provides-Extra"]
# Compare setuptools PKG-INFO x pypa/wheel METADATA
assert metadata_msg.as_string() == pkg_info_msg.as_string()
assert metadata_deps == pkg_info_deps
assert metadata_extras == pkg_info_extras
def _valid_metadata(text: str) -> bool:
    """Return ``True`` when ``text`` parses as valid core metadata.

    Invalid metadata raises from ``Metadata.from_email`` instead of
    returning ``False``.
    """
    return Metadata.from_email(text, validate=True) is not None

View File

@ -0,0 +1,15 @@
import sys
from setuptools import depends
class TestGetModuleConstant:
    def test_basic(self):
        """
        Invoke get_module_constant on a module in
        the test package.
        """
        mod_name = 'setuptools.tests.mod_with_constant'
        value = depends.get_module_constant(mod_name, 'value')
        assert value == 'three, sir!'
        # extracting the constant must not leave the module imported
        assert mod_name not in sys.modules

View File

@ -0,0 +1,175 @@
"""develop tests"""
import os
import pathlib
import platform
import subprocess
import sys
import pytest
from setuptools._path import paths_on_pythonpath
from setuptools.command.develop import develop
from setuptools.dist import Distribution
from . import contexts, namespaces
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['foo'],
)
"""
INIT_PY = """print "foo"
"""
@pytest.fixture
def temp_user(monkeypatch):
    # Point ``site.USER_BASE``/``site.USER_SITE`` at throw-away directories
    # so user-site installs performed by the tests never touch the real
    # user directories.
    with contexts.tempdir() as user_base:
        with contexts.tempdir() as user_site:
            monkeypatch.setattr('site.USER_BASE', user_base)
            monkeypatch.setattr('site.USER_SITE', user_site)
            yield
@pytest.fixture
def test_env(tmpdir, temp_user):
    """Create a minimal ``foo`` project in a temp dir and chdir into it."""
    target = tmpdir
    foo = target.mkdir('foo')
    setup = target / 'setup.py'
    # sanity check: the temp directory must start out empty
    if setup.isfile():
        raise ValueError(dir(target))
    with setup.open('w') as f:
        f.write(SETUP_PY)
    init = foo / '__init__.py'
    with init.open('w') as f:
        f.write(INIT_PY)
    with target.as_cwd():
        yield target
class TestDevelop:
in_virtualenv = hasattr(sys, 'real_prefix')
in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
def test_console_scripts(self, tmpdir):
"""
Test that console scripts are installed and that they reference
only the project by name and not the current version.
"""
pytest.skip(
"TODO: needs a fixture to cause 'develop' "
"to be invoked without mutating environment."
)
settings = dict(
name='foo',
packages=['foo'],
version='0.0',
entry_points={
'console_scripts': [
'foocmd = foo:foo',
],
},
)
dist = Distribution(settings)
dist.script_name = 'setup.py'
cmd = develop(dist)
cmd.ensure_finalized()
cmd.install_dir = tmpdir
cmd.run()
# assert '0.0' not in foocmd_text
@pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
def test_egg_link_filename(self):
settings = dict(
name='Foo $$$ Bar_baz-bing',
)
dist = Distribution(settings)
cmd = develop(dist)
cmd.ensure_finalized()
link = pathlib.Path(cmd.egg_link)
assert link.suffix == '.egg-link'
assert link.stem == 'Foo_Bar_baz_bing'
class TestResolver:
    """
    TODO: These tests were written with a minimal understanding
    of what _resolve_setup_path is intending to do. Come up with
    more meaningful cases that look like real-world scenarios.
    """

    def test_resolve_setup_path_cwd(self):
        # setup.py in the current working directory resolves to '.'
        assert develop._resolve_setup_path('.', '.', '.') == '.'

    def test_resolve_setup_path_one_dir(self):
        assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'

    def test_resolve_setup_path_one_dir_trailing_slash(self):
        # a trailing slash on the source dir must not change the result
        assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
class TestNamespaces:
@staticmethod
def install_develop(src_dir, target):
develop_cmd = [
sys.executable,
'setup.py',
'develop',
'--install-dir',
str(target),
]
with src_dir.as_cwd():
with paths_on_pythonpath([str(target)]):
subprocess.check_call(develop_cmd)
@pytest.mark.skipif(
bool(os.environ.get("APPVEYOR")),
reason="https://github.com/pypa/setuptools/issues/851",
)
@pytest.mark.skipif(
platform.python_implementation() == 'PyPy',
reason="https://github.com/pypa/setuptools/issues/1202",
)
def test_namespace_package_importable(self, tmpdir):
"""
Installing two packages sharing the same namespace, one installed
naturally using pip or `--single-version-externally-managed`
and the other installed using `develop` should leave the namespace
in tact and both packages reachable by import.
"""
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
target = tmpdir / 'packages'
# use pip to install to the target directory
install_cmd = [
sys.executable,
'-m',
'pip',
'install',
str(pkg_A),
'-t',
str(target),
]
subprocess.check_call(install_cmd)
self.install_develop(pkg_B, target)
namespaces.make_site_dir(target)
try_import = [
sys.executable,
'-c',
'import myns.pkgA; import myns.pkgB',
]
with paths_on_pythonpath([str(target)]):
subprocess.check_call(try_import)
# additionally ensure that pkg_resources import works
pkg_resources_imp = [
sys.executable,
'-c',
'import pkg_resources',
]
with paths_on_pythonpath([str(target)]):
subprocess.check_call(pkg_resources_imp)

View File

@ -0,0 +1,274 @@
import os
import re
import urllib.parse
import urllib.request
import pytest
from setuptools import Distribution
from setuptools.dist import check_package_data, check_specifier
from .test_easy_install import make_nspkg_sdist
from .test_find_packages import ensure_files
from .textwrap import DALS
from distutils.errors import DistutilsSetupError
def test_dist_fetch_build_egg(tmpdir):
"""
Check multiple calls to `Distribution.fetch_build_egg` work as expected.
"""
index = tmpdir.mkdir('index')
index_url = urllib.parse.urljoin('file://', urllib.request.pathname2url(str(index)))
def sdist_with_index(distname, version):
dist_dir = index.mkdir(distname)
dist_sdist = '%s-%s.tar.gz' % (distname, version)
make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
with dist_dir.join('index.html').open('w') as fp:
fp.write(
DALS(
"""
<!DOCTYPE html><html><body>
<a href="{dist_sdist}" rel="internal">{dist_sdist}</a><br/>
</body></html>
"""
).format(dist_sdist=dist_sdist)
)
sdist_with_index('barbazquux', '3.2.0')
sdist_with_index('barbazquux-runner', '2.11.1')
with tmpdir.join('setup.cfg').open('w') as fp:
fp.write(
DALS(
"""
[easy_install]
index_url = {index_url}
"""
).format(index_url=index_url)
)
reqs = """
barbazquux-runner
barbazquux
""".split()
with tmpdir.as_cwd():
dist = Distribution()
dist.parse_config_files()
resolved_dists = [dist.fetch_build_egg(r) for r in reqs]
assert [dist.key for dist in resolved_dists if dist] == reqs
EXAMPLE_BASE_INFO = dict(
name="package",
version="0.0.1",
author="Foo Bar",
author_email="foo@bar.net",
long_description="Long\ndescription",
description="Short description",
keywords=["one", "two"],
)
def test_provides_extras_deterministic_order():
    """``provides_extras`` must preserve the declaration order of extras."""
    extras = dict(a=['foo'], b=['bar'])
    dist = Distribution(dict(extras_require=extras))
    assert list(dist.metadata.provides_extras) == ['a', 'b']
    # Declaring the same extras in the opposite order must flip the output.
    flipped = dict(reversed(list(extras.items())))
    dist = Distribution(dict(extras_require=flipped))
    assert list(dist.metadata.provides_extras) == ['b', 'a']
# Cases for test_check_package_data below: pairs of
# (package_data candidate, expected error-message fragment or None when valid).
CHECK_PACKAGE_DATA_TESTS = (
    # Valid.
    (
        {
            '': ['*.txt', '*.rst'],
            'hello': ['*.msg'],
        },
        None,
    ),
    # Not a dictionary.
    (
        (
            ('', ['*.txt', '*.rst']),
            ('hello', ['*.msg']),
        ),
        (
            "'package_data' must be a dictionary mapping package"
            " names to lists of string wildcard patterns"
        ),
    ),
    # Invalid key type.
    (
        {
            400: ['*.txt', '*.rst'],
        },
        ("keys of 'package_data' dict must be strings (got 400)"),
    ),
    # Invalid value type.
    (
        {
            'hello': '*.msg',
        },
        (
            "\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
            " (got '*.msg')"
        ),
    ),
    # Invalid value type (generators are single use)
    (
        {
            'hello': (x for x in "generator"),
        },
        (
            "\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
            " (got <generator object"
        ),
    ),
)
@pytest.mark.parametrize('package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
def test_check_package_data(package_data, expected_message):
    """check_package_data accepts valid mappings and rejects malformed ones."""
    if expected_message is None:
        # Valid input: the validator returns without raising.
        assert check_package_data(None, 'package_data', package_data) is None
        return
    # Invalid input: the exact message is part of the contract.
    with pytest.raises(DistutilsSetupError, match=re.escape(expected_message)):
        check_package_data(None, 'package_data', package_data)
def test_check_specifier():
    """A valid ``python_requires`` specifier passes; a non-string is rejected
    already at ``Distribution`` construction time."""
    # valid specifier value
    valid_attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'}
    check_specifier(Distribution(valid_attrs), valid_attrs, valid_attrs['python_requires'])
    # invalid specifier value (a list is not a specifier string)
    invalid_attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']}
    with pytest.raises(DistutilsSetupError):
        Distribution(invalid_attrs)
def test_metadata_name():
    """Metadata validation must fail loudly when no project name was given."""
    nameless = Distribution()
    with pytest.raises(DistutilsSetupError, match='missing.*name'):
        nameless._validate_metadata()
@pytest.mark.parametrize(
    "dist_name, py_module",
    [
        ("my.pkg", "my_pkg"),
        ("my-pkg", "my_pkg"),
        ("my_pkg", "my_pkg"),
        ("pkg", "pkg"),
    ],
)
def test_dist_default_py_modules(tmp_path, dist_name, py_module):
    """Auto-discovery derives ``py_modules`` from the module matching the
    project name, and stays out of the way when ``py_modules`` or ``packages``
    is given explicitly.
    """
    (tmp_path / f"{py_module}.py").touch()
    (tmp_path / "setup.py").touch()
    (tmp_path / "noxfile.py").touch()
    # ^-- make sure common tool files are ignored
    attrs = {**EXAMPLE_BASE_INFO, "name": dist_name, "src_root": str(tmp_path)}
    # Find `py_modules` corresponding to dist_name if not given
    dist = Distribution(attrs)
    dist.set_defaults()
    assert dist.py_modules == [py_module]
    # When `py_modules` is given, don't do anything
    dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
    dist.set_defaults()
    assert dist.py_modules == ["explicity_py_module"]
    # When `packages` is given, don't do anything
    dist = Distribution({**attrs, "packages": ["explicity_package"]})
    dist.set_defaults()
    assert not dist.py_modules
@pytest.mark.parametrize(
    "dist_name, package_dir, package_files, packages",
    [
        ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
        (
            "my_pkg",
            None,
            ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
            ["my_pkg", "my_pkg2"],
        ),
        (
            "my_pkg",
            {"pkg": "lib", "pkg2": "lib2"},
            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
            ["pkg", "pkg.nested", "pkg2"],
        ),
    ],
)
def test_dist_default_packages(
    tmp_path, dist_name, package_dir, package_files, packages
):
    """Auto-discovery derives ``packages`` from the files on disk (honoring
    ``package_dir``), and stays out of the way when ``py_modules`` or
    ``packages`` is given explicitly.
    """
    ensure_files(tmp_path, package_files)
    (tmp_path / "setup.py").touch()
    (tmp_path / "noxfile.py").touch()
    # ^-- should not be included by default
    attrs = {
        **EXAMPLE_BASE_INFO,
        "name": dist_name,
        "src_root": str(tmp_path),
        "package_dir": package_dir,
    }
    # Find `packages` either corresponding to dist_name or inside src
    dist = Distribution(attrs)
    dist.set_defaults()
    assert not dist.py_modules
    assert set(dist.packages) == set(packages)
    # When `py_modules` is given, don't do anything
    dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
    dist.set_defaults()
    assert not dist.packages
    assert set(dist.py_modules) == {"explicit_py_module"}
    # When `packages` is given, don't do anything
    dist = Distribution({**attrs, "packages": ["explicit_package"]})
    dist.set_defaults()
    assert not dist.py_modules
    assert set(dist.packages) == {"explicit_package"}
@pytest.mark.parametrize(
    "dist_name, package_dir, package_files",
    [
        ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
        ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
        ("my_pkg", None, ["my_pkg.py"]),
        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
        ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
        (
            "my_pkg",
            {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
        ),
        # Should not try to guess a name from multiple py_modules/packages
        ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
        ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
    ],
)
def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
    """Make sure dist.name is discovered from packages/py_modules"""
    ensure_files(tmp_path, package_files)
    attrs = {
        **EXAMPLE_BASE_INFO,
        "src_root": "/".join(os.path.split(tmp_path)),  # POSIX-style
        "package_dir": package_dir,
    }
    # No explicit name: discovery has to fill it in (or leave "UNKNOWN").
    del attrs["name"]
    dist = Distribution(attrs)
    dist.set_defaults()
    # Discovery must have found something before a name can be derived.
    assert dist.py_modules or dist.packages
    assert dist.get_name() == dist_name

View File

@ -0,0 +1,210 @@
"""Test .dist-info style distributions."""
import pathlib
import re
import shutil
import subprocess
import sys
from functools import partial
import pytest
import pkg_resources
from setuptools.archive_util import unpack_archive
from .textwrap import DALS
def read(path):
    """Read *path* as UTF-8 text (used to compare generated metadata files)."""
    return pathlib.Path(path).read_text(encoding="utf-8")
class TestDistInfo:
    """Tests for discovery of ``.dist-info`` directories and for the
    ``dist_info`` command's output.
    """
    # Common METADATA body; per-test fields are appended by build_metadata.
    metadata_base = DALS(
        """
        Metadata-Version: 1.2
        Requires-Dist: splort (==4)
        Provides-Extra: baz
        Requires-Dist: quux (>=1.1); extra == 'baz'
        """
    )
    @classmethod
    def build_metadata(cls, **kwargs):
        """Return ``metadata_base`` with one ``Key: value`` line per kwarg."""
        lines = ('{key}: {value}\n'.format(**locals()) for key, value in kwargs.items())
        return cls.metadata_base + ''.join(lines)
    @pytest.fixture
    def metadata(self, tmpdir):
        """Create two ``.dist-info`` dirs under *tmpdir* and return its path:
        one carrying the version in the directory name, one only in METADATA.
        """
        dist_info_name = 'VersionedDistribution-2.718.dist-info'
        versioned = tmpdir / dist_info_name
        versioned.mkdir()
        filename = versioned / 'METADATA'
        content = self.build_metadata(
            Name='VersionedDistribution',
        )
        filename.write_text(content, encoding='utf-8')
        dist_info_name = 'UnversionedDistribution.dist-info'
        unversioned = tmpdir / dist_info_name
        unversioned.mkdir()
        filename = unversioned / 'METADATA'
        content = self.build_metadata(
            Name='UnversionedDistribution',
            Version='0.3',
        )
        filename.write_text(content, encoding='utf-8')
        return str(tmpdir)
    def test_distinfo(self, metadata):
        """Both dist-info dirs are discovered and versions resolved."""
        dists = dict(
            (d.project_name, d) for d in pkg_resources.find_distributions(metadata)
        )
        assert len(dists) == 2, dists
        unversioned = dists['UnversionedDistribution']
        versioned = dists['VersionedDistribution']
        assert versioned.version == '2.718'  # from filename
        assert unversioned.version == '0.3'  # from METADATA
    def test_conditional_dependencies(self, metadata):
        """Extra-gated requirements only appear when the extra is requested."""
        specs = 'splort==4', 'quux>=1.1'
        requires = list(map(pkg_resources.Requirement.parse, specs))
        for d in pkg_resources.find_distributions(metadata):
            assert d.requires() == requires[:1]
            assert d.requires(extras=('baz',)) == [
                requires[0],
                pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
            ]
            assert d.extras == ['baz']
    def test_invalid_version(self, tmp_path):
        """
        Supplying an invalid version crashes dist_info.
        """
        config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
        msg = re.compile("invalid version", re.M | re.I)
        # check=False: we expect the subprocess to fail.
        proc = run_command_inner("dist_info", cwd=tmp_path, check=False)
        assert proc.returncode
        assert msg.search(proc.stdout)
        # No half-written dist-info must be left behind.
        assert not list(tmp_path.glob("*.dist-info"))
    def test_tag_arguments(self, tmp_path):
        """Command-line tag options override the setup.cfg egg_info tags."""
        config = """
        [metadata]
        name=proj
        version=42
        [egg_info]
        tag_date=1
        tag_build=.post
        """
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
        print(run_command("dist_info", "--no-date", cwd=tmp_path))
        dist_info = next(tmp_path.glob("*.dist-info"))
        assert dist_info.name.startswith("proj-42")
        shutil.rmtree(dist_info)
        print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
        dist_info = next(tmp_path.glob("*.dist-info"))
        assert dist_info.name.startswith("proj-42a")
    @pytest.mark.parametrize("keep_egg_info", (False, True))
    def test_output_dir(self, tmp_path, keep_egg_info):
        """--output-dir routes all artifacts away from the project dir;
        egg-info is kept only with --keep-egg-info, and no backup dirs leak.
        """
        config = "[metadata]\nname=proj\nversion=42\n"
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
        out = tmp_path / "__out"
        out.mkdir()
        opts = ["--keep-egg-info"] if keep_egg_info else []
        run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
        assert len(list(out.glob("*.dist-info"))) == 1
        assert len(list(tmp_path.glob("*.dist-info"))) == 0
        expected_egg_info = int(keep_egg_info)
        assert len(list(out.glob("*.egg-info"))) == expected_egg_info
        assert len(list(tmp_path.glob("*.egg-info"))) == 0
        assert len(list(out.glob("*.__bkp__"))) == 0
        assert len(list(tmp_path.glob("*.__bkp__"))) == 0
class TestWheelCompatibility:
    """Make sure the .dist-info directory produced with the ``dist_info`` command
    is the same as the one produced by ``bdist_wheel``.
    """
    # setup.cfg template; {name} / {version} are filled per test case.
    SETUPCFG = DALS(
        """
        [metadata]
        name = {name}
        version = {version}
        [options]
        install_requires =
            foo>=12; sys_platform != "linux"
        [options.extras_require]
        test = pytest
        [options.entry_points]
        console_scripts =
            executable-name = my_package.module:function
        discover =
            myproj = my_package.other_module:function
        """
    )
    # (expected version suffix, extra setup.cfg snippet) pairs.
    EGG_INFO_OPTS = [
        # Related: #3088 #2872
        ("", ""),
        (".post", "[egg_info]\ntag_build = post\n"),
        (".post", "[egg_info]\ntag_build = .post\n"),
        (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
        (".dev", "[egg_info]\ntag_build = .dev\n"),
        (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
        ("a1", "[egg_info]\ntag_build = .a1\n"),
        ("+local", "[egg_info]\ntag_build = +local\n"),
    ]
    @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
    @pytest.mark.parametrize("version", ["0.42.13"])
    @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS)
    def test_dist_info_is_the_same_as_in_wheel(
        self, name, version, tmp_path, suffix, cfg
    ):
        """Build the same project via ``bdist_wheel`` and via ``dist_info``
        in two sibling dirs, then compare directory name and key files.
        """
        config = self.SETUPCFG.format(name=name, version=version) + cfg
        for i in "dir_wheel", "dir_dist":
            (tmp_path / i).mkdir()
            (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")
        run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
        wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
        unpack_archive(wheel, tmp_path / "unpack")
        wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))
        run_command("dist_info", cwd=tmp_path / "dir_dist")
        dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
        assert dist_info.name == wheel_dist_info.name
        assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
        for file in "METADATA", "entry_points.txt":
            assert read(dist_info / file) == read(wheel_dist_info / file)
def run_command_inner(*cmd, **kwargs):
    """Run a ``setup.py``-style command via ``setuptools.setup()`` in a
    subprocess, capturing combined stdout/stderr as UTF-8 text.

    Keyword arguments override the capture/check defaults (e.g. ``check=False``,
    ``cwd=...``) and are passed to :func:`subprocess.run`.  Returns the
    resulting ``CompletedProcess``.
    """
    run_opts = dict(
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        text=True,
        encoding="utf-8",
        check=True,
    )
    run_opts.update(kwargs)
    argv = [sys.executable, "-c", "__import__('setuptools').setup()"]
    argv.extend(str(arg) for arg in cmd)
    return subprocess.run(argv, **run_opts)
def run_command(*args, **kwargs):
    """Like :func:`run_command_inner`, but return only the captured stdout."""
    proc = run_command_inner(*args, **kwargs)
    return proc.stdout

View File

@ -0,0 +1,198 @@
import os
import platform
import sys
import textwrap
import pytest
# True when running under PyPy (see the xfail on test_distutils_local below).
IS_PYPY = '__pypy__' in sys.builtin_module_names
# Shared subprocess.run options: capture as UTF-8 text.
_TEXT_KWARGS = {"text": True, "encoding": "utf-8"}  # For subprocess.run
def win_sr(env):
    """Return *env* with SYSTEMROOT propagated from os.environ on Windows.

    On Windows, SYSTEMROOT must be present to avoid
    > Fatal Python error: _Py_HashRandomization_Init: failed to
    > get random numbers to initialize Python

    Falsy values (e.g. ``None``) are returned unchanged.
    """
    if not env:
        return env
    if platform.system() == 'Windows':
        env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
    return env
def find_distutils(venv, imports='distutils', env=None, **kwargs):
    """Run *venv*'s python, import *imports*, and return the output of
    printing ``distutils.__file__`` (i.e. where distutils was loaded from).
    """
    probe = f'import {imports}; print(distutils.__file__)'
    return venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS, **kwargs)
def count_meta_path(venv, env=None):
    """Return how many ``DistutilsMetaFinder`` objects are installed on
    ``sys.meta_path`` inside the interpreter of *venv*.
    """
    probe = textwrap.dedent(
        """
        import sys
        is_distutils = lambda finder: finder.__class__.__name__ == "DistutilsMetaFinder"
        print(len(list(filter(is_distutils, sys.meta_path))))
        """
    )
    output = venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS)
    return int(output)
# Marker for tests that require the stdlib distutils (absent from 3.12+).
skip_without_stdlib_distutils = pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason='stdlib distutils is removed from Python 3.12+',
)
@skip_without_stdlib_distutils
def test_distutils_stdlib(venv):
    """
    Ensure stdlib distutils is used when appropriate.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib')
    # distutils must come from outside the venv (the stdlib copy) ...
    assert venv.name not in find_distutils(venv, env=env).split(os.sep)
    # ... and no DistutilsMetaFinder is installed.
    assert count_meta_path(venv, env=env) == 0
def test_distutils_local_with_setuptools(venv):
    """
    Ensure local distutils is used when appropriate.

    Imports setuptools before distutils inside the venv.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='local')
    loc = find_distutils(venv, imports='setuptools, distutils', env=env)
    # distutils must resolve inside the venv (setuptools' local copy).
    assert venv.name in loc.split(os.sep)
    assert count_meta_path(venv, env=env) <= 1
@pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup')
def test_distutils_local(venv):
    """
    Even without importing, the setuptools-local copy of distutils is
    preferred.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='local')
    # Only `import distutils` here (no prior setuptools import).
    assert venv.name in find_distutils(venv, env=env).split(os.sep)
    assert count_meta_path(venv, env=env) <= 1
def test_pip_import(venv):
    """Importing pip inside the venv must not fail.

    Regression test for #3002.
    """
    probe = 'import pip'
    venv.run(['python', '-c', probe], **_TEXT_KWARGS)
def test_distutils_has_origin():
    """The distutils module spec should have an origin. #2990."""
    distutils = __import__('distutils')
    assert distutils.__spec__.origin
# Script template run inside a venv by test_modules_are_not_duplicated_on_import.
# ``{imported_module}`` is substituted via str.format; ``{{``/``}}`` escape
# literal braces for the generated f-string.
ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r"""
# Depending on the importlib machinery and _distutils_hack, some imports are
# duplicated resulting in different module objects being loaded, which prevents
# patches as shown in #3042.
# This script provides a way of verifying if this duplication is happening.
from distutils import cmd
import distutils.command.sdist as sdist
# import last to prevent caching
from distutils import {imported_module}
for mod in (cmd, sdist):
    assert mod.{imported_module} == {imported_module}, (
        f"\n{{mod.{imported_module}}}\n!=\n{{{imported_module}}}"
    )
print("success")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    "distutils_version, imported_module",
    [
        pytest.param("stdlib", "dir_util", marks=skip_without_stdlib_distutils),
        pytest.param("stdlib", "file_util", marks=skip_without_stdlib_distutils),
        pytest.param("stdlib", "archive_util", marks=skip_without_stdlib_distutils),
        ("local", "dir_util"),
        ("local", "file_util"),
        ("local", "archive_util"),
    ],
)
def test_modules_are_not_duplicated_on_import(distutils_version, imported_module, venv):
    """Run the ENSURE_IMPORTS_ARE_NOT_DUPLICATED probe in *venv* for the
    selected distutils flavor and submodule; it prints "success" only when
    the submodule was not duplicated across import paths.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
    script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module)
    cmd = ['python', '-c', script]
    output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip()
    assert output == "success"
ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r"""
import types
import distutils.dist as dist
from distutils import log
if isinstance(dist.log, types.ModuleType):
assert dist.log == log, f"\n{dist.log}\n!=\n{log}"
print("success")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    "distutils_version",
    [
        "local",
        pytest.param("stdlib", marks=skip_without_stdlib_distutils),
    ],
)
def test_log_module_is_not_duplicated_on_import(distutils_version, venv):
    """Run the ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED probe in *venv*; it prints
    "success" only when ``distutils.log`` is a single module object.
    """
    env = win_sr(dict(SETUPTOOLS_USE_DISTUTILS=distutils_version))
    cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED]
    output = venv.run(cmd, env=env, **_TEXT_KWARGS)
    assert output.strip() == "success"
ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY = r"""
from setuptools.modified import newer
from {imported_module}.errors import DistutilsError
# Can't use pytest.raises in this context
try:
newer("", "")
except DistutilsError:
print("success")
else:
raise AssertionError("Expected to raise")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    "distutils_version, imported_module",
    [
        ("local", "distutils"),
        # Unfortunately we still get ._distutils.errors.DistutilsError with SETUPTOOLS_USE_DISTUTILS=stdlib
        # But that's a deprecated use-case we don't mind not fully supporting in newer code
        pytest.param(
            "stdlib", "setuptools._distutils", marks=skip_without_stdlib_distutils
        ),
    ],
)
def test_consistent_error_from_modified_py(distutils_version, imported_module, venv):
    """Run the ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY probe in *venv*; it
    prints "success" only when ``newer`` raises the expected DistutilsError.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
    cmd = [
        'python',
        '-c',
        ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY.format(
            imported_module=imported_module
        ),
    ]
    output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip()
    assert output == "success"

Some files were not shown because too many files have changed in this diff Show More