From bb289c49f19408c9afa633537cac6df284d11da7 Mon Sep 17 00:00:00 2001
From: AP Ljungquist
Date: Sun, 18 Sep 2022 13:17:47 +0200
Subject: [PATCH 1/5] Add option to include build-system dependencies

Also fix some docstrings that were not valid rst.

Include build-system hook in via comments
Use meaningful names for fake build deps
Install static deps before requesting dynamic deps
Address review feedback
Remove python 3.7 from tox.ini

This should probably have been done in 51d9e1ce16 (Drop support for Python 3.7).

Use default labels for links
Add test coverage for Dependency
Introduce piptools.build module for building project metadata
Fix naming
Fixes for checkqa
Address feedback

Also
* remove unused `src_file` argument
* use filename as comes_from to avoid getting absolute paths in the output
* remove unused small-fake-c from fake_dists_with_build_deps
* mark tests as backtracking_resolver_only since the result is independent of resolver

Use Any type because mypy gives inconsistent results
Address feedback (II)
Assert exit code separately from stdout
Use intermediate variable
Move 'may be used more than once' to end of help text
Rename build-deps-only > only-build-deps
Use ALL_BUILD_DISTRIBUTIONS from already imported options
Use relative file path in comes from
Remove xfail

The test is failing locally since "Speed up tests execution (#1963)" but the xfail should not be part of the PR.

Make pip-compile silent by default
Improve type hints in build
Remove excessive comments
Give build distribution literal a meaningful name
Test that transient build deps are excluded
Address feedback and adapt for build>1
Fix checkqa
Use build target terminology consistently
Use pathlib.Path consistently in build.py and new tests
More specific type hinting in build.py
Don't convert to absolute path unnecessarily
Update help texts

* Use "extract" instead of "install" in help texts
* Align texts that said different things but probably should be the same

Import Monkeypatch from public API
---
 README.md                                     |  66 +++++
 examples/readme/constraints.txt               |  40 +++
 examples/readme/pyproject.toml                |  11 +
 piptools/build.py                             | 174 ++++++++++++
 piptools/scripts/compile.py                   |  72 +++--
 piptools/scripts/options.py                   |  35 ++-
 piptools/utils.py                             |  41 +--
 pyproject.toml                                |   2 +-
 tests/conftest.py                             |  48 +++-
 tests/test_build.py                           |  37 +++
 tests/test_cli_compile.py                     | 255 ++++++++++++++++--
 .../backend/backend.py                        |  32 +++
 .../small_fake_with_build_deps/pyproject.toml |   8 +
 .../small_fake_with_build_deps/setup.py       |  14 +
 14 files changed, 751 insertions(+), 84 deletions(-)
 create mode 100644 examples/readme/constraints.txt
 create mode 100644 examples/readme/pyproject.toml
 create mode 100644 piptools/build.py
 create mode 100644 tests/test_build.py
 create mode 100644 tests/test_data/packages/small_fake_with_build_deps/backend/backend.py
 create mode 100644 tests/test_data/packages/small_fake_with_build_deps/pyproject.toml
 create mode 100644 tests/test_data/packages/small_fake_with_build_deps/setup.py

diff --git a/README.md b/README.md
index d806c5b8f..b126d34b7 100644
--- a/README.md
+++ b/README.md
@@ -548,6 +548,70 @@ dependencies, making any newly generated `requirements.txt` environment-dependen
 As a general rule, it's advised that users should still always execute `pip-compile`
 on each targeted Python environment to avoid issues.
 
+### Maximizing reproducibility
+
+`pip-tools` is a great tool to improve the reproducibility of builds.
+But there are a few things to keep in mind.
+
+- `pip-compile` will produce different results in different environments as described in the previous section.
+- `pip` must be used with the `PIP_CONSTRAINT` environment variable to lock dependencies in build environments as documented in [#8439](https://github.com/pypa/pip/issues/8439).
+- Dependencies come from many sources.
+
+Continuing the `pyproject.toml` example from earlier, creating a single lock file could be done like:
+
+```console
+$ pip-compile --all-build-deps --all-extras --output-file=constraints.txt --strip-extras pyproject.toml
+#
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
+#
+#    pip-compile --all-build-deps --all-extras --output-file=constraints.txt --strip-extras pyproject.toml
+#
+asgiref==3.5.2
+    # via django
+attrs==22.1.0
+    # via pytest
+backports-zoneinfo==0.2.1
+    # via django
+django==4.1
+    # via my-cool-django-app (pyproject.toml)
+editables==0.3
+    # via hatchling
+hatchling==1.11.1
+    # via my-cool-django-app (pyproject.toml::build-system.requires)
+iniconfig==1.1.1
+    # via pytest
+packaging==21.3
+    # via
+    #   hatchling
+    #   pytest
+pathspec==0.10.2
+    # via hatchling
+pluggy==1.0.0
+    # via
+    #   hatchling
+    #   pytest
+py==1.11.0
+    # via pytest
+pyparsing==3.0.9
+    # via packaging
+pytest==7.1.2
+    # via my-cool-django-app (pyproject.toml)
+sqlparse==0.4.2
+    # via django
+tomli==2.0.1
+    # via
+    #   hatchling
+    #   pytest
+```
+
+Some build backends may also request build dependencies dynamically using the `get_requires_for_build_` hooks described in [PEP 517] and [PEP 660].
+This will be indicated in the output with one of the following suffixes:
+
+- `(pyproject.toml::build-system.backend::editable)`
+- `(pyproject.toml::build-system.backend::sdist)`
+- `(pyproject.toml::build-system.backend::wheel)`
+
 ### Other useful tools
 
 - [pip-compile-multi](https://pip-compile-multi.readthedocs.io/en/latest/) - pip-compile command wrapper for multiple cross-referencing requirements files.
@@ -600,5 +664,7 @@ note that it is deprecated and will be removed in a future release.
 [Matrix Space]: https://matrix.to/#/%23jazzband:matrix.org
 [pip-tools-overview]: https://github.com/jazzband/pip-tools/raw/main/img/pip-tools-overview.svg
 [environment-markers]: https://peps.python.org/pep-0508/#environment-markers
+[PEP 517]: https://peps.python.org/pep-0517/
+[PEP 660]: https://peps.python.org/pep-0660/
 [discord-chat]: https://discord.gg/pypa
 [discord-chat-image]: https://img.shields.io/discord/803025117553754132?label=Discord%20chat%20%23pip-tools&style=flat-square
diff --git a/examples/readme/constraints.txt b/examples/readme/constraints.txt
new file mode 100644
index 000000000..be6a6567e
--- /dev/null
+++ b/examples/readme/constraints.txt
@@ -0,0 +1,40 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    pip-compile --all-build-deps --all-extras --output-file=constraints.txt --strip-extras pyproject.toml
+#
+asgiref==3.5.2
+    # via django
+attrs==22.1.0
+    # via pytest
+django==4.1
+    # via my-cool-django-app (pyproject.toml)
+editables==0.3
+    # via hatchling
+hatchling==1.11.1
+    # via my-cool-django-app (pyproject.toml::build-system.requires)
+iniconfig==1.1.1
+    # via pytest
+packaging==21.3
+    # via
+    #   hatchling
+    #   pytest
+pathspec==0.10.2
+    # via hatchling
+pluggy==1.0.0
+    # via
+    #   hatchling
+    #   pytest
+py==1.11.0
+    # via pytest
+pyparsing==3.0.9
+    # via packaging
+pytest==7.1.2
+    # via my-cool-django-app (pyproject.toml)
+sqlparse==0.4.2
+    # via django
+tomli==2.0.1
+    # via
+    #   hatchling
+    #   pytest
diff --git a/examples/readme/pyproject.toml b/examples/readme/pyproject.toml
new file mode 100644
index 000000000..e230a33cc
--- /dev/null
+++ b/examples/readme/pyproject.toml
@@ -0,0 +1,11 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "my-cool-django-app"
+version = "42"
+dependencies = ["django"]
+
+[project.optional-dependencies]
+dev = ["pytest"]
diff --git a/piptools/build.py b/piptools/build.py
new file mode 100644
index 000000000..e7d0c9199
--- /dev/null
+++ b/piptools/build.py
@@ -0,0 +1,174 @@
+from __future__ import annotations
+
+import collections
+import contextlib
+import pathlib
+import sys
+import tempfile
+from dataclasses import dataclass
+from importlib import metadata as importlib_metadata
+from typing import Any, Iterator, Protocol, TypeVar, overload
+
+import build
+import build.env
+import pyproject_hooks
+from pip._internal.req import InstallRequirement
+from pip._internal.req.constructors import install_req_from_line, parse_req_from_line
+
+PYPROJECT_TOML = "pyproject.toml"
+
+_T = TypeVar("_T")
+
+
+if sys.version_info >= (3, 10):
+    from importlib.metadata import PackageMetadata
+else:
+
+    class PackageMetadata(Protocol):
+        @overload
+        def get_all(self, name: str, failobj: None = None) -> list[Any] | None:
+            ...
+
+        @overload
+        def get_all(self, name: str, failobj: _T) -> list[Any] | _T:
+            ...
+
+
+@dataclass
+class ProjectMetadata:
+    extras: tuple[str, ...]
+    requirements: tuple[InstallRequirement, ...]
+    build_requirements: tuple[InstallRequirement, ...]
+
+
+def build_project_metadata(
+    src_file: pathlib.Path,
+    build_targets: tuple[str, ...],
+    *,
+    isolated: bool,
+    quiet: bool,
+) -> ProjectMetadata:
+    """
+    Return the metadata for a project.
+
+    Uses the ``prepare_metadata_for_build_wheel`` hook for the wheel metadata
+    if available, otherwise ``build_wheel``.
+
+    Uses the ``prepare_metadata_for_build_{target}`` hook for each ``build_targets``
+    if available.
+ + :param src_file: Project source file + :param build_targets: A tuple of build targets to get the dependencies + of (``sdist`` or ``wheel`` or ``editable``). + :param isolated: Whether to run invoke the backend in the current + environment or to create an isolated one and invoke it + there. + :param quiet: Whether to suppress the output of subprocesses. + """ + + src_dir = src_file.parent + with _create_project_builder(src_dir, isolated=isolated, quiet=quiet) as builder: + metadata = _build_project_wheel_metadata(builder) + extras = tuple(metadata.get_all("Provides-Extra") or ()) + requirements = tuple( + _prepare_requirements(metadata=metadata, src_file=src_file) + ) + build_requirements = tuple( + _prepare_build_requirements( + builder=builder, + src_file=src_file, + build_targets=build_targets, + package_name=_get_name(metadata), + ) + ) + return ProjectMetadata( + extras=extras, + requirements=requirements, + build_requirements=build_requirements, + ) + + +@contextlib.contextmanager +def _create_project_builder( + src_dir: pathlib.Path, *, isolated: bool, quiet: bool +) -> Iterator[build.ProjectBuilder]: + if quiet: + runner = pyproject_hooks.quiet_subprocess_runner + else: + runner = pyproject_hooks.default_subprocess_runner + + if not isolated: + yield build.ProjectBuilder(src_dir, runner=runner) + return + + with build.env.DefaultIsolatedEnv() as env: + builder = build.ProjectBuilder.from_isolated_env(env, src_dir, runner) + env.install(builder.build_system_requires) + env.install(builder.get_requires_for_build("wheel")) + yield builder + + +def _build_project_wheel_metadata( + builder: build.ProjectBuilder, +) -> PackageMetadata: + with tempfile.TemporaryDirectory() as tmpdir: + path = pathlib.Path(builder.metadata_path(tmpdir)) + return importlib_metadata.PathDistribution(path).metadata + + +def _get_name(metadata: PackageMetadata) -> str: + retval = metadata.get_all("Name")[0] # type: ignore[index] + assert isinstance(retval, str) + return retval + + +def _prepare_requirements( + metadata: PackageMetadata, src_file: pathlib.Path +) -> Iterator[InstallRequirement]: + package_name = _get_name(metadata) + comes_from = f"{package_name} ({src_file})" + package_dir = src_file.parent + + for req in metadata.get_all("Requires-Dist") or []: + parts = parse_req_from_line(req, comes_from) + if parts.requirement.name == package_name: + # Replace package name with package directory in the requirement + # string so that pip can find the package as self-referential. + # Note the string can contain extras, so we need to replace only + # the package name, not the whole string. + replaced_package_name = req.replace(package_name, package_dir, 1) + parts = parse_req_from_line(replaced_package_name, comes_from) + + yield InstallRequirement( + parts.requirement, + comes_from, + link=parts.link, + markers=parts.markers, + extras=parts.extras, + ) + + +def _prepare_build_requirements( + builder: build.ProjectBuilder, + src_file: pathlib.Path, + build_targets: tuple[str, ...], + package_name: str, +) -> Iterator[InstallRequirement]: + result = collections.defaultdict(set) + + # Build requirements will only be present if a pyproject.toml file exists, + # but if there is also a setup.py file then only that will be explicitly + # processed due to the order of `DEFAULT_REQUIREMENTS_FILES`. 
+ src_file = src_file.parent / PYPROJECT_TOML + + for req in builder.build_system_requires: + result[req].add(f"{package_name} ({src_file}::build-system.requires)") + for build_target in build_targets: + for req in builder.get_requires_for_build(build_target): + result[req].add( + f"{package_name} ({src_file}::build-system.backend::{build_target})" + ) + + for req, comes_from_sources in result.items(): + for comes_from in comes_from_sources: + yield install_req_from_line(req, comes_from=comes_from) diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py index f957cfe00..0d16475e2 100755 --- a/piptools/scripts/compile.py +++ b/piptools/scripts/compile.py @@ -10,7 +10,6 @@ import click from build import BuildBackendException -from build.util import project_wheel_metadata from click.utils import LazyFile, safecall from pip._internal.req import InstallRequirement from pip._internal.req.constructors import install_req_from_line @@ -18,21 +17,17 @@ from pyproject_hooks import default_subprocess_runner, quiet_subprocess_runner from .._compat import parse_requirements +from ..build import build_project_metadata from ..cache import DependencyCache from ..exceptions import NoCandidateFound, PipToolsError from ..logging import log from ..repositories import LocalRequirementsRepository, PyPIRepository from ..repositories.base import BaseRepository from ..resolver import BacktrackingResolver, LegacyResolver -from ..utils import ( - dedup, - drop_extras, - is_pinned_requirement, - key_from_ireq, - parse_requirements_from_wheel_metadata, -) +from ..utils import dedup, drop_extras, is_pinned_requirement, key_from_ireq from ..writer import OutputWriter from . import options +from .options import BuildTargetT DEFAULT_REQUIREMENTS_FILES = ( "requirements.in", @@ -40,6 +35,7 @@ "pyproject.toml", "setup.cfg", ) +DEFAULT_REQUIREMENTS_FILE = "requirements.in" DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt" METADATA_FILENAMES = frozenset({"setup.py", "setup.cfg", "pyproject.toml"}) @@ -115,6 +111,9 @@ def _determine_linesep( @options.config @options.no_config @options.constraint +@options.build_deps_for +@options.all_build_deps +@options.only_build_deps def cli( ctx: click.Context, verbose: int, @@ -156,6 +155,9 @@ def cli( config: Path | None, no_config: bool, constraint: tuple[str, ...], + build_deps_targets: tuple[BuildTargetT, ...], + all_build_deps: bool, + only_build_deps: bool, ) -> None: """ Compiles requirements.txt from requirements.in, pyproject.toml, setup.cfg, @@ -163,6 +165,22 @@ def cli( """ log.verbosity = verbose - quiet + if all_build_deps and build_deps_targets: + raise click.BadParameter( + "--build-deps-for has no effect when used with --all-build-deps" + ) + elif all_build_deps: + build_deps_targets = options.ALL_BUILD_TARGETS + + if only_build_deps and not build_deps_targets: + raise click.BadParameter( + "--only-build-deps requires either --build-deps-for or --all-build-deps" + ) + if only_build_deps and (extras or all_extras): + raise click.BadParameter( + "--only-build-deps cannot be used with any of --extra, --all-extras" + ) + if len(src_files) == 0: for file_path in DEFAULT_REQUIREMENTS_FILES: if os.path.exists(file_path): @@ -298,6 +316,13 @@ def cli( setup_file_found = False for src_file in src_files: is_setup_file = os.path.basename(src_file) in METADATA_FILENAMES + if not is_setup_file and build_deps_targets: + msg = ( + "--build-deps-for and --all-build-deps can be used only with the " + "setup.py, setup.cfg and pyproject.toml specs." 
+ ) + raise click.BadParameter(msg) + if src_file == "-": # pip requires filenames and not files. Since we want to support # piping from stdin, we need to briefly save the input from stdin @@ -321,31 +346,26 @@ def cli( elif is_setup_file: setup_file_found = True try: - metadata = project_wheel_metadata( - os.path.dirname(os.path.abspath(src_file)), + metadata = build_project_metadata( + src_file=Path(src_file), + build_targets=build_deps_targets, isolated=build_isolation, - runner=( - default_subprocess_runner - if verbose - else quiet_subprocess_runner - ), + quiet=log.verbosity <= 0, ) except BuildBackendException as e: log.error(str(e)) log.error(f"Failed to parse {os.path.abspath(src_file)}") sys.exit(2) - constraints.extend( - parse_requirements_from_wheel_metadata( - metadata=metadata, src_file=src_file - ) - ) - - if all_extras: - if extras: - msg = "--extra has no effect when used with --all-extras" - raise click.BadParameter(msg) - extras = tuple(metadata.get_all("Provides-Extra")) + if not only_build_deps: + constraints.extend(metadata.requirements) + if all_extras: + if extras: + msg = "--extra has no effect when used with --all-extras" + raise click.BadParameter(msg) + extras = metadata.extras + if build_deps_targets: + constraints.extend(metadata.build_requirements) else: constraints.extend( parse_requirements( diff --git a/piptools/scripts/options.py b/piptools/scripts/options.py index d5100cd05..d3e82533b 100644 --- a/piptools/scripts/options.py +++ b/piptools/scripts/options.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any +from typing import Any, Literal import click from pip._internal.commands import create_command @@ -9,6 +9,13 @@ from piptools.locations import CACHE_DIR, DEFAULT_CONFIG_FILE_NAMES from piptools.utils import UNSAFE_PACKAGES, override_defaults_from_config_file +BuildTargetT = Literal["sdist", "wheel", "editable"] +ALL_BUILD_TARGETS: tuple[BuildTargetT, ...] = ( + "editable", + "sdist", + "wheel", +) + def _get_default_option(option_name: str) -> Any: """ @@ -364,3 +371,29 @@ def _get_default_option(option_name: str) -> Any: is_flag=True, help="Restrict attention to user directory", ) + +build_deps_for = click.option( + "--build-deps-for", + "build_deps_targets", + multiple=True, + type=click.Choice(ALL_BUILD_TARGETS), + help="Name of a build target to extract dependencies for. " + "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " + "well; may be used more than once.", +) + +all_build_deps = click.option( + "--all-build-deps", + is_flag=True, + default=False, + help="Extract dependencies for all build targets. 
" + "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " + "well.", +) + +only_build_deps = click.option( + "--only-build-deps", + is_flag=True, + default=False, + help="Extract a package only if it is a build dependency.", +) diff --git a/piptools/utils.py b/piptools/utils.py index eb8e7f249..0bce25e68 100644 --- a/piptools/utils.py +++ b/piptools/utils.py @@ -10,7 +10,7 @@ import shlex import sys from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, TypeVar, cast +from typing import Any, Callable, Iterable, Iterator, TypeVar, cast from click.core import ParameterSource @@ -22,8 +22,8 @@ import click from click.utils import LazyFile from pip._internal.req import InstallRequirement -from pip._internal.req.constructors import install_req_from_line, parse_req_from_line from pip._internal.resolution.resolvelib.base import Requirement as PipRequirement +from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.misc import redact_auth_from_url from pip._internal.vcs import is_url from pip._vendor.packaging.markers import Marker @@ -37,12 +37,6 @@ from piptools.locations import DEFAULT_CONFIG_FILE_NAMES from piptools.subprocess_utils import run_python_snippet -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - - _KT = TypeVar("_KT") _VT = TypeVar("_VT") _T = TypeVar("_T") @@ -527,37 +521,6 @@ def copy_install_requirement( return ireq -class PackageMetadata(Protocol): - def get_all(self, name: str, failobj: _T = ...) -> list[str] | _T: - ... - - -def parse_requirements_from_wheel_metadata( - metadata: PackageMetadata, src_file: str -) -> Iterator[InstallRequirement]: - package_name = metadata.get_all("Name")[0] - comes_from = f"{package_name} ({src_file})" - - for req in metadata.get_all("Requires-Dist") or []: - parts = parse_req_from_line(req, comes_from) - if parts.requirement.name == package_name: - package_dir = os.path.dirname(os.path.abspath(src_file)) - # Replace package name with package directory in the requirement - # string so that pip can find the package as self-referential. - # Note the string can contain extras, so we need to replace only - # the package name, not the whole string. - replaced_package_name = req.replace(package_name, package_dir, 1) - parts = parse_req_from_line(replaced_package_name, comes_from) - - yield InstallRequirement( - parts.requirement, - comes_from, - link=parts.link, - markers=parts.markers, - extras=parts.extras, - ) - - def override_defaults_from_config_file( ctx: click.Context, param: click.Parameter, value: str | None ) -> Path | None: diff --git a/pyproject.toml b/pyproject.toml index 159b769a2..ac3e04ee1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,9 +37,9 @@ keywords = ["pip", "requirements", "packaging"] dependencies = [ # direct dependencies "build >= 1.0.0", - "pyproject_hooks", "click >= 8", "pip >= 22.2", + "pyproject_hooks", "tomli; python_version < '3.11'", # indirect dependencies "setuptools", # typically needed when pip-tools invokes setup.py diff --git a/tests/conftest.py b/tests/conftest.py index 0f68570ee..1f380383b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -292,7 +292,13 @@ def make_package(tmp_path_factory): Make a package from a given name, version and list of required packages. 
""" - def _make_package(name, version="0.1", install_requires=None, extras_require=None): + def _make_package( + name, + version="0.1", + install_requires=None, + extras_require=None, + build_system_requires=None, + ): if install_requires is None: install_requires = [] @@ -319,6 +325,7 @@ def _make_package(name, version="0.1", install_requires=None, extras_require=Non url="https://github.com/jazzband/pip-tools", install_requires={install_requires_str}, extras_require={extras_require}, + py_modules=[{name!r}], ) """ ) @@ -327,6 +334,20 @@ def _make_package(name, version="0.1", install_requires=None, extras_require=Non # Create a README to avoid setuptools warnings. (package_dir / "README").touch() + # Create a module to make the package importable. + (package_dir / name).with_suffix(".py").touch() + + if build_system_requires: + with (package_dir / "pyproject.toml").open("w") as fp: + fp.write( + dedent( + f"""\ + [build-system] + requires = {json.dumps(build_system_requires)} + """ + ) + ) + return package_dir return _make_package @@ -424,6 +445,31 @@ def fake_dists(tmp_path_factory, make_package, make_wheel): return dists_path +@pytest.fixture(scope="session") +def fake_dists_with_build_deps(tmp_path_factory, make_package, make_wheel): + """Generate distribution packages with names that make sense for testing build deps.""" + dists_path = tmp_path_factory.mktemp("dists") + pkgs = [ + make_package( + "fake_static_build_dep", + version="0.1", + install_requires=["fake_transient_run_dep"], + build_system_requires=["fake_transient_build_dep"], + ), + make_package("fake_dynamic_build_dep_for_all", version="0.2"), + make_package("fake_dynamic_build_dep_for_sdist", version="0.3"), + make_package("fake_dynamic_build_dep_for_wheel", version="0.4"), + make_package("fake_dynamic_build_dep_for_editable", version="0.5"), + make_package("fake_direct_runtime_dep", version="0.1"), + make_package("fake_direct_extra_runtime_dep", version="0.2"), + make_package("fake_transient_build_dep", version="0.3"), + make_package("fake_transient_run_dep", version="0.3"), + ] + for pkg in pkgs: + make_wheel(pkg, dists_path) + return dists_path + + @pytest.fixture def venv(tmp_path): """Create a temporary venv and get the path of its directory of executables.""" diff --git a/tests/test_build.py b/tests/test_build.py new file mode 100644 index 000000000..6d39ea012 --- /dev/null +++ b/tests/test_build.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +import pathlib +import shutil + +import pytest + +from piptools.build import build_project_metadata +from tests.constants import PACKAGES_PATH + + +@pytest.mark.network +def test_build_project_metadata_resolved_correct_build_dependencies( + fake_dists_with_build_deps, tmp_path, monkeypatch +): + """Test that the resolved build dependencies are correct. + + Because this is a slow process we test it only for one build target and rely + on ``test_all_extras_and_all_build_deps`` to test that it works with multiple build + targets. 
+ """ + # When used as argument to the runner it is not passed to pip + monkeypatch.setenv("PIP_FIND_LINKS", fake_dists_with_build_deps) + src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" + shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) + src_file = tmp_path / "setup.py" + metadata = build_project_metadata( + src_file, ("editable",), isolated=True, quiet=False + ) + build_requirements = sorted(r.name for r in metadata.build_requirements) + assert build_requirements == [ + "fake_dynamic_build_dep_for_all", + "fake_dynamic_build_dep_for_editable", + "fake_static_build_dep", + "setuptools", + "wheel", + ] diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index 0275cc0d8..e8326c35e 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -2,17 +2,22 @@ import hashlib import os +import pathlib import shutil import subprocess import sys from textwrap import dedent from unittest import mock +from unittest.mock import MagicMock import pytest +from pytest import MonkeyPatch +from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.hashes import FAVORITE_HASH from pip._internal.utils.urls import path_to_url from pip._vendor.packaging.version import Version +from piptools.build import ProjectMetadata from piptools.scripts.compile import cli from piptools.utils import ( COMPILE_EXCLUDE_OPTIONS, @@ -1691,13 +1696,13 @@ def test_parse_requirements_build_isolation_option( ("option", "expected"), (("--build-isolation", True), ("--no-build-isolation", False)), ) -@mock.patch("piptools.scripts.compile.project_wheel_metadata") -def test_project_wheel_metadata_isolation_option( - project_wheel_metadata, runner, option, expected +@mock.patch("piptools.scripts.compile.build_project_metadata") +def test_build_project_metadata_isolation_option( + build_project_metadata, runner, option, expected ): """ A value of the --build-isolation/--no-build-isolation flag - must be passed to project_wheel_metadata(). + must be passed to build_project_metadata(). """ with open("setup.py", "w") as package: @@ -1712,8 +1717,8 @@ def test_project_wheel_metadata_isolation_option( runner.invoke(cli, [option]) - # Ensure the options in project_wheel_metadata has the isolated kwarg - _, kwargs = project_wheel_metadata.call_args + # Ensure the options in build_project_metadata has the isolated kwarg + _, kwargs = build_project_metadata.call_args assert kwargs["isolated"] is expected @@ -2624,7 +2629,7 @@ def test_error_in_pyproject_toml( @pytest.mark.parametrize(("fname", "content"), METADATA_TEST_CASES) def test_one_extra(fake_dists, runner, make_module, fname, content): """ - Test one `--extra` (dev) passed, other extras (test) must be ignored. + Test one ``--extra`` (dev) passed, other extras (test) must be ignored. """ meta_path = make_module(fname=fname, content=content) out = runner.invoke( @@ -2656,7 +2661,7 @@ def test_one_extra(fake_dists, runner, make_module, fname, content): @pytest.mark.parametrize(("fname", "content"), METADATA_TEST_CASES) def test_multiple_extras(fake_dists, runner, make_module, fname, content, extra_opts): """ - Test passing multiple `--extra` params. + Test passing multiple ``--extra`` params. 
""" meta_path = make_module(fname=fname, content=content) out = runner.invoke( @@ -2680,7 +2685,7 @@ def test_multiple_extras(fake_dists, runner, make_module, fname, content, extra_ @pytest.mark.parametrize(("fname", "content"), METADATA_TEST_CASES) def test_all_extras(fake_dists, runner, make_module, fname, content): """ - Test passing `--all-extras` includes all applicable extras. + Test passing ``--all-extras`` includes all applicable extras. """ meta_path = make_module(fname=fname, content=content) out = runner.invoke( @@ -2716,7 +2721,7 @@ def test_all_extras(fake_dists, runner, make_module, fname, content): @pytest.mark.parametrize(("fname", "content"), METADATA_TEST_CASES[:1]) def test_all_extras_fail_with_extra(fake_dists, runner, make_module, fname, content): """ - Test that passing `--all-extras` and `--extra` fails. + Test that passing ``--all-extras`` and ``--extra`` fails. """ meta_path = make_module(fname=fname, content=content) out = runner.invoke( @@ -2740,14 +2745,232 @@ def test_all_extras_fail_with_extra(fake_dists, runner, make_module, fname, cont assert exp in out.stderr +def _mock_resolver_cls(monkeypatch: MonkeyPatch) -> MagicMock: + obj = MagicMock() + obj.resolve = MagicMock(return_value=set()) + obj.resolve_hashes = MagicMock(return_value=dict()) + cls = MagicMock(return_value=obj) + + monkeypatch.setattr("piptools.scripts.compile.BacktrackingResolver", cls) + monkeypatch.setattr("piptools.scripts.compile.LegacyResolver", cls) + + return cls + + +def _mock_build_project_metadata(monkeypatch: MonkeyPatch) -> MagicMock: + func = MagicMock( + return_value=ProjectMetadata( + extras=("e",), + requirements=( + install_req_from_line("rdep0"), + install_req_from_line("rdep1; extra=='e'"), + ), + build_requirements=(install_req_from_line("bdep0"),), + ) + ) + + monkeypatch.setattr("piptools.scripts.compile.build_project_metadata", func) + + return func + + +@backtracking_resolver_only +@pytest.mark.network +def test_all_extras_and_all_build_deps( + fake_dists_with_build_deps, + runner, + tmp_path, + monkeypatch, + current_resolver, +): + """ + Test that trying to lock all dependencies gives the expected output. + """ + src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" + # When used as argument to the runner it is not passed to pip + monkeypatch.setenv("PIP_FIND_LINKS", fake_dists_with_build_deps) + + with runner.isolated_filesystem(tmp_path) as tmp_pkg_path: + shutil.copytree(src_pkg_path, tmp_pkg_path, dirs_exist_ok=True) + out = runner.invoke( + cli, + [ + "--allow-unsafe", + "--output-file", + "-", + "--quiet", + "--no-emit-options", + "--no-header", + "--all-extras", + "--all-build-deps", + ], + ) + + assert out.exit_code == 0 + # Note that the build dependencies of our build dependencies are not resolved. + # This means that if our build dependencies are not available as wheels then we will not get + # reproducible results. 
+ assert "fake_transient_build_dep" not in out.stdout + assert out.stdout == dedent( + """\ + fake-direct-extra-runtime-dep==0.2 + # via small-fake-with-build-deps (setup.py) + fake-direct-runtime-dep==0.1 + # via small-fake-with-build-deps (setup.py) + fake-dynamic-build-dep-for-all==0.2 + # via + # small-fake-with-build-deps (pyproject.toml::build-system.backend::editable) + # small-fake-with-build-deps (pyproject.toml::build-system.backend::sdist) + # small-fake-with-build-deps (pyproject.toml::build-system.backend::wheel) + fake-dynamic-build-dep-for-editable==0.5 + # via small-fake-with-build-deps (pyproject.toml::build-system.backend::editable) + fake-dynamic-build-dep-for-sdist==0.3 + # via small-fake-with-build-deps (pyproject.toml::build-system.backend::sdist) + fake-dynamic-build-dep-for-wheel==0.4 + # via small-fake-with-build-deps (pyproject.toml::build-system.backend::wheel) + fake-static-build-dep==0.1 + # via small-fake-with-build-deps (pyproject.toml::build-system.requires) + fake-transient-run-dep==0.3 + # via fake-static-build-dep + wheel==0.41.1 + # via + # small-fake-with-build-deps (pyproject.toml::build-system.backend::wheel) + # small-fake-with-build-deps (pyproject.toml::build-system.requires) + + # The following packages are considered to be unsafe in a requirements file: + setuptools==68.1.2 + # via small-fake-with-build-deps (pyproject.toml::build-system.requires) + """ + ) + + +@backtracking_resolver_only +def test_all_build_deps(runner, tmp_path, monkeypatch): + """ + Test that ``--all-build-deps`` is equivalent to specifying every + ``--build-deps-for``. + """ + func = _mock_build_project_metadata(monkeypatch) + _mock_resolver_cls(monkeypatch) + + src_file = tmp_path / "pyproject.toml" + src_file.touch() + + out = runner.invoke( + cli, + [ + "--all-build-deps", + os.fspath(src_file), + ], + ) + assert out.exit_code == 0 + assert func.call_args.kwargs["build_targets"] == ( + "editable", + "sdist", + "wheel", + ) + + +@backtracking_resolver_only +def test_only_build_deps(runner, tmp_path, monkeypatch): + """ + Test that ``--only-build-deps`` excludes dependencies other than build dependencies. + """ + _mock_build_project_metadata(monkeypatch) + cls = _mock_resolver_cls(monkeypatch) + + src_file = tmp_path / "pyproject.toml" + src_file.touch() + + out = runner.invoke( + cli, + [ + "--all-build-deps", + "--only-build-deps", + os.fspath(src_file), + ], + ) + assert out.exit_code == 0 + assert [c.name for c in cls.call_args.kwargs["constraints"]] == ["bdep0"] + + +@backtracking_resolver_only +def test_all_build_deps_fail_with_build_target(runner): + """ + Test that passing ``--all-build-deps`` and ``--build-deps-for`` fails. + """ + out = runner.invoke( + cli, + [ + "--all-build-deps", + "--build-deps-for", + "sdist", + ], + ) + exp = "--build-deps-for has no effect when used with --all-build-deps" + assert out.exit_code == 2 + assert exp in out.stderr + + +@backtracking_resolver_only +def test_only_build_deps_fails_without_any_build_deps(runner): + """ + Test that passing ``--only-build-deps`` fails when it is not specified how build deps should + be gathered. 
+ """ + out = runner.invoke( + cli, + ["--only-build-deps"], + ) + exp = "--only-build-deps requires either --build-deps-for or --all-build-deps" + assert out.exit_code == 2 + assert exp in out.stderr + + +@backtracking_resolver_only +@pytest.mark.parametrize("option", ("--all-extras", "--extra=foo")) +def test_only_build_deps_fails_with_conflicting_options(runner, option): + """ + Test that passing ``--all-build-deps`` and conflicting option fails. + """ + out = runner.invoke( + cli, + [ + "--all-build-deps", + "--only-build-deps", + option, + ], + ) + exp = "--only-build-deps cannot be used with any of --extra, --all-extras" + assert out.exit_code == 2 + assert exp in out.stderr + + +@backtracking_resolver_only +@pytest.mark.parametrize("option", ("--all-build-deps", "--build-deps-for=wheel")) +def test_build_deps_fail_without_setup_file(runner, tmpdir, option): + """ + Test that passing ``--build-deps-for`` or ``--all-build-deps`` fails when used with a + requirements file as opposed to a setup file. + """ + path = pathlib.Path(tmpdir) / "requirements.in" + path.write_text("\n") + out = runner.invoke(cli, ["-n", option, os.fspath(path)]) + exp = ( + "--build-deps-for and --all-build-deps can be used only with the " + "setup.py, setup.cfg and pyproject.toml specs." + ) + assert out.exit_code == 2 + assert exp in out.stderr + + def test_extras_fail_with_requirements_in(runner, tmpdir): """ - Test that passing `--extra` with `requirements.in` input file fails. + Test that passing ``--extra`` with ``requirements.in`` input file fails. """ - path = os.path.join(tmpdir, "requirements.in") - with open(path, "w") as stream: - stream.write("\n") - out = runner.invoke(cli, ["-n", "--extra", "something", path]) + path = pathlib.Path(tmpdir) / "requirements.in" + path.write_text("\n") + out = runner.invoke(cli, ["-n", "--extra", "something", os.fspath(path)]) assert out.exit_code == 2 exp = "--extra has effect only with setup.py and PEP-517 input formats" assert exp in out.stderr @@ -2755,7 +2978,7 @@ def test_extras_fail_with_requirements_in(runner, tmpdir): def test_cli_compile_strip_extras(runner, make_package, make_sdist, tmpdir): """ - Assures that --strip-extras removes mention of extras from output. + Assures that ``--strip-extras`` removes mention of extras from output. """ test_package_1 = make_package( "test_package_1", version="0.1", extras_require={"more": "test_package_2"} diff --git a/tests/test_data/packages/small_fake_with_build_deps/backend/backend.py b/tests/test_data/packages/small_fake_with_build_deps/backend/backend.py new file mode 100644 index 000000000..ad9cc9c8f --- /dev/null +++ b/tests/test_data/packages/small_fake_with_build_deps/backend/backend.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +# A dependency of the build backend that is not installed is equivalent to a build +# backend that is not installed so we don't have to test both cases. 
+import fake_static_build_dep # noqa: F401 +import setuptools.build_meta + +# Re-export all names in case more hooks are added in the future +from setuptools.build_meta import * # noqa: F401, F403 + +build_wheel = setuptools.build_meta.build_wheel +build_sdist = setuptools.build_meta.build_sdist + + +def get_requires_for_build_sdist(config_settings=None): + result = setuptools.build_meta.get_requires_for_build_sdist(config_settings) + assert result == [] + result.append("fake_dynamic_build_dep_for_all") + result.append("fake_dynamic_build_dep_for_sdist") + return result + + +def get_requires_for_build_wheel(config_settings=None): + result = setuptools.build_meta.get_requires_for_build_wheel(config_settings) + assert result == ["wheel"] + result.append("fake_dynamic_build_dep_for_all") + result.append("fake_dynamic_build_dep_for_wheel") + return result + + +def get_requires_for_build_editable(config_settings=None): + return ["fake_dynamic_build_dep_for_all", "fake_dynamic_build_dep_for_editable"] diff --git a/tests/test_data/packages/small_fake_with_build_deps/pyproject.toml b/tests/test_data/packages/small_fake_with_build_deps/pyproject.toml new file mode 100644 index 000000000..c765cd18d --- /dev/null +++ b/tests/test_data/packages/small_fake_with_build_deps/pyproject.toml @@ -0,0 +1,8 @@ +[build-system] +requires = [ + "setuptools==68.1.2", + "wheel==0.41.1", + "fake_static_build_dep" +] +build-backend = "backend" +backend-path = ["backend"] diff --git a/tests/test_data/packages/small_fake_with_build_deps/setup.py b/tests/test_data/packages/small_fake_with_build_deps/setup.py new file mode 100644 index 000000000..a1f84cd51 --- /dev/null +++ b/tests/test_data/packages/small_fake_with_build_deps/setup.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from setuptools import setup + +setup( + name="small_fake_with_build_deps", + version=0.1, + install_requires=[ + "fake_direct_runtime_dep", + ], + extras_require={ + "x": ["fake_direct_extra_runtime_dep"], + }, +) From 14009d1a7a79bbdfb139f5951bda80ccaf22f3c7 Mon Sep 17 00:00:00 2001 From: AP Ljungquist Date: Sun, 15 Oct 2023 11:48:12 +0200 Subject: [PATCH 2/5] Minor changes to pass CI --- piptools/build.py | 2 +- piptools/scripts/compile.py | 1 - piptools/utils.py | 2 +- tests/test_cli_compile.py | 5 ++--- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/piptools/build.py b/piptools/build.py index e7d0c9199..6f87c32e1 100644 --- a/piptools/build.py +++ b/piptools/build.py @@ -136,7 +136,7 @@ def _prepare_requirements( # string so that pip can find the package as self-referential. # Note the string can contain extras, so we need to replace only # the package name, not the whole string. 
- replaced_package_name = req.replace(package_name, package_dir, 1) + replaced_package_name = req.replace(package_name, str(package_dir), 1) parts = parse_req_from_line(replaced_package_name, comes_from) yield InstallRequirement( diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py index 0d16475e2..462215f4d 100755 --- a/piptools/scripts/compile.py +++ b/piptools/scripts/compile.py @@ -14,7 +14,6 @@ from pip._internal.req import InstallRequirement from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.misc import redact_auth_from_url -from pyproject_hooks import default_subprocess_runner, quiet_subprocess_runner from .._compat import parse_requirements from ..build import build_project_metadata diff --git a/piptools/utils.py b/piptools/utils.py index 0bce25e68..62cb26a0b 100644 --- a/piptools/utils.py +++ b/piptools/utils.py @@ -22,8 +22,8 @@ import click from click.utils import LazyFile from pip._internal.req import InstallRequirement -from pip._internal.resolution.resolvelib.base import Requirement as PipRequirement from pip._internal.req.constructors import install_req_from_line +from pip._internal.resolution.resolvelib.base import Requirement as PipRequirement from pip._internal.utils.misc import redact_auth_from_url from pip._internal.vcs import is_url from pip._vendor.packaging.markers import Marker diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index e8326c35e..bcc54d3e8 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -11,7 +11,6 @@ from unittest.mock import MagicMock import pytest -from pytest import MonkeyPatch from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.hashes import FAVORITE_HASH from pip._internal.utils.urls import path_to_url @@ -2745,7 +2744,7 @@ def test_all_extras_fail_with_extra(fake_dists, runner, make_module, fname, cont assert exp in out.stderr -def _mock_resolver_cls(monkeypatch: MonkeyPatch) -> MagicMock: +def _mock_resolver_cls(monkeypatch: pytest.MonkeyPatch) -> MagicMock: obj = MagicMock() obj.resolve = MagicMock(return_value=set()) obj.resolve_hashes = MagicMock(return_value=dict()) @@ -2757,7 +2756,7 @@ def _mock_resolver_cls(monkeypatch: MonkeyPatch) -> MagicMock: return cls -def _mock_build_project_metadata(monkeypatch: MonkeyPatch) -> MagicMock: +def _mock_build_project_metadata(monkeypatch: pytest.MonkeyPatch) -> MagicMock: func = MagicMock( return_value=ProjectMetadata( extras=("e",), From 973b00e5b6e323f8d84cda5aa0413881e9f6243a Mon Sep 17 00:00:00 2001 From: AP Ljungquist Date: Fri, 20 Oct 2023 11:40:01 +0200 Subject: [PATCH 3/5] Control python version used by pre-commit for mypy Under python versions >= 3.10 mypy will complain about ``` piptools/build.py:120: error: Unused "type: ignore" comment [unused-ignore] ``` This causes pre-commit.ci to fail, presumably because it uses a recent version of python. 
--- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ef1c34f7b..c67e5fe09 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,6 +35,7 @@ repos: - build==1.0.0 - pyproject_hooks==1.0.0 - pytest==7.4.2 + language_version: python3.8 - repo: https://github.com/PyCQA/bandit rev: 1.7.5 hooks: From 9dcd453aaefeb6bbbac0b7333c8af8c6e783668d Mon Sep 17 00:00:00 2001 From: chrysle Date: Thu, 26 Oct 2023 20:52:15 +0200 Subject: [PATCH 4/5] Remove origin ireqs for extras when writing `pip-compile` annotations Co-authored-by: Sander Van Balen <7672159+sanderr@users.noreply.github.com> --- piptools/writer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piptools/writer.py b/piptools/writer.py index 1ae341d60..d4dde7f5b 100644 --- a/piptools/writer.py +++ b/piptools/writer.py @@ -295,7 +295,8 @@ def _format_requirement( if src_ireq.comes_from } - if ireq.comes_from: + # Filter out the origin install requirements for extras. See https://github.com/jazzband/pip-tools/issues/2003 + if ireq.comes_from and (isinstance(ireq.comes_from, str) or ireq.comes_from.name != ireq.name): required_by.add(_comes_from_as_string(ireq.comes_from)) required_by |= set(getattr(ireq, "_required_by", set())) From f6ff30bfa705955ecaec1f0b3496e29a78ad4083 Mon Sep 17 00:00:00 2001 From: chrysle Date: Fri, 27 Oct 2023 18:39:44 +0200 Subject: [PATCH 5/5] Add functional test --- piptools/writer.py | 7 ++++-- tests/test_cli_compile.py | 50 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 2 deletions(-) diff --git a/piptools/writer.py b/piptools/writer.py index d4dde7f5b..bcb70f02f 100644 --- a/piptools/writer.py +++ b/piptools/writer.py @@ -295,8 +295,11 @@ def _format_requirement( if src_ireq.comes_from } - # Filter out the origin install requirements for extras. See https://github.com/jazzband/pip-tools/issues/2003 - if ireq.comes_from and (isinstance(ireq.comes_from, str) or ireq.comes_from.name != ireq.name): + # Filter out the origin install requirements for extras. 
+ # See https://github.com/jazzband/pip-tools/issues/2003 + if ireq.comes_from and ( + isinstance(ireq.comes_from, str) or ireq.comes_from.name != ireq.name + ): required_by.add(_comes_from_as_string(ireq.comes_from)) required_by |= set(getattr(ireq, "_required_by", set())) diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index 0275cc0d8..61ca317cc 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -3290,3 +3290,53 @@ def test_do_not_show_warning_on_explicit_strip_extras_option( assert out.exit_code == 0 assert strip_extras_warning not in out.stderr + + +def test_origin_of_extra_requirement_not_written_to_annotations( + pip_conf, runner, make_package, make_wheel, tmp_path, tmpdir +): + req_in = tmp_path / "requirements.in" + package_with_extras = make_package( + "package_with_extras", + version="0.1", + extras_require={ + "extra1": ["small-fake-a==0.1"], + "extra2": ["small-fake-b==0.1"], + }, + ) + + dists_dir = tmpdir / "dists" + make_wheel(package_with_extras, dists_dir) + + with open(req_in, "w") as req_out: + req_out.write("package-with-extras[extra1,extra2]") + + out = runner.invoke( + cli, + [ + "--output-file", + "-", + "--quiet", + "--no-header", + "--find-links", + str(dists_dir), + "--no-emit-options", + "--no-build-isolation", + req_in.as_posix(), + ], + ) + + assert out.exit_code == 0, out + assert ( + dedent( + f"""\ + package-with-extras[extra1,extra2]==0.1 + # via -r {req_in.as_posix()} + small-fake-a==0.1 + # via package-with-extras + small-fake-b==0.1 + # via package-with-extras + """ + ) + == out.stdout + )