diff --git a/.gitignore b/.gitignore index 98559d4f0..d61e97835 100644 --- a/.gitignore +++ b/.gitignore @@ -84,9 +84,7 @@ celerybeat-schedule # virtualenv .venv -venv/ -venv3/ -venv2/ +venv*/ ENV/ env/ env2/ @@ -112,8 +110,5 @@ all_known_setup.yaml # mkdocs site/ -# Virtual environments -venv* - # PyCharm .idea/ diff --git a/cibuildwheel/__main__.py b/cibuildwheel/__main__.py index 691d88ed1..d14f9f89f 100644 --- a/cibuildwheel/__main__.py +++ b/cibuildwheel/__main__.py @@ -8,12 +8,13 @@ import sys import tarfile import textwrap +import time import traceback import typing -from collections.abc import Iterable, Sequence, Set +from collections.abc import Generator, Iterable, Sequence, Set from pathlib import Path from tempfile import mkdtemp -from typing import Protocol, assert_never +from typing import Any, Protocol, TextIO, assert_never import cibuildwheel import cibuildwheel.linux @@ -23,19 +24,13 @@ import cibuildwheel.windows from cibuildwheel import errors from cibuildwheel.architecture import Architecture, allowed_architectures_check +from cibuildwheel.ci import CIProvider, detect_ci_provider, fix_ansi_codes_for_github_actions from cibuildwheel.logger import log from cibuildwheel.options import CommandLineArguments, Options, compute_options +from cibuildwheel.selector import BuildSelector, EnableGroup from cibuildwheel.typing import PLATFORMS, GenericPythonConfiguration, PlatformName -from cibuildwheel.util import ( - CIBW_CACHE_PATH, - BuildSelector, - CIProvider, - EnableGroup, - Unbuffered, - detect_ci_provider, - fix_ansi_codes_for_github_actions, - strtobool, -) +from cibuildwheel.util.file import CIBW_CACHE_PATH +from cibuildwheel.util.helpers import strtobool @dataclasses.dataclass @@ -43,6 +38,29 @@ class GlobalOptions: print_traceback_on_error: bool = True # decides what happens when errors are hit. 
+@dataclasses.dataclass(frozen=True) +class FileReport: + name: str + size: str + + +# Taken from https://stackoverflow.com/a/107717 +class Unbuffered: + def __init__(self, stream: TextIO) -> None: + self.stream = stream + + def write(self, data: str) -> None: + self.stream.write(data) + self.stream.flush() + + def writelines(self, data: Iterable[str]) -> None: + self.stream.writelines(data) + self.stream.flush() + + def __getattr__(self, attr: str) -> Any: + return getattr(self.stream, attr) + + def main() -> None: global_options = GlobalOptions() try: @@ -288,6 +306,42 @@ def get_platform_module(platform: PlatformName) -> PlatformModule: assert_never(platform) +@contextlib.contextmanager +def print_new_wheels(msg: str, output_dir: Path) -> Generator[None, None, None]: + """ + Prints the new items in a directory upon exiting. The message to display + can include {n} for number of wheels, {s} for total number of seconds, + and/or {m} for total number of minutes. Does not print anything if this + exits via exception. 
+ """ + + start_time = time.time() + existing_contents = set(output_dir.iterdir()) + yield + final_contents = set(output_dir.iterdir()) + + new_contents = [ + FileReport(wheel.name, f"{(wheel.stat().st_size + 1023) // 1024:,d}") + for wheel in final_contents - existing_contents + ] + + if not new_contents: + return + + max_name_len = max(len(f.name) for f in new_contents) + max_size_len = max(len(f.size) for f in new_contents) + n = len(new_contents) + s = time.time() - start_time + m = s / 60 + print( + msg.format(n=n, s=s, m=m), + *sorted( + f" {f.name:<{max_name_len}s} {f.size:>{max_size_len}s} kB" for f in new_contents + ), + sep="\n", + ) + + def build_in_directory(args: CommandLineArguments) -> None: platform: PlatformName = _compute_platform(args) if platform == "pyodide" and sys.platform == "win32": @@ -350,9 +404,7 @@ def build_in_directory(args: CommandLineArguments) -> None: tmp_path = Path(mkdtemp(prefix="cibw-run-")).resolve(strict=True) try: - with cibuildwheel.util.print_new_wheels( - "\n{n} wheels produced in {m:.0f} minutes:", output_dir - ): + with print_new_wheels("\n{n} wheels produced in {m:.0f} minutes:", output_dir): platform_module.build(options, tmp_path) finally: # avoid https://github.com/python/cpython/issues/86962 by performing diff --git a/cibuildwheel/ci.py b/cibuildwheel/ci.py new file mode 100644 index 000000000..d3af6e93e --- /dev/null +++ b/cibuildwheel/ci.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +import os +import re +from enum import Enum + +from .util.helpers import strtobool + + +class CIProvider(Enum): + travis_ci = "travis" + appveyor = "appveyor" + circle_ci = "circle_ci" + azure_pipelines = "azure_pipelines" + github_actions = "github_actions" + gitlab = "gitlab" + cirrus_ci = "cirrus_ci" + other = "other" + + +def detect_ci_provider() -> CIProvider | None: + if "TRAVIS" in os.environ: + return CIProvider.travis_ci + elif "APPVEYOR" in os.environ: + return CIProvider.appveyor + elif "CIRCLECI" in 
os.environ: + return CIProvider.circle_ci + elif "AZURE_HTTP_USER_AGENT" in os.environ: + return CIProvider.azure_pipelines + elif "GITHUB_ACTIONS" in os.environ: + return CIProvider.github_actions + elif "GITLAB_CI" in os.environ: + return CIProvider.gitlab + elif "CIRRUS_CI" in os.environ: + return CIProvider.cirrus_ci + elif strtobool(os.environ.get("CI", "false")): + return CIProvider.other + else: + return None + + +def fix_ansi_codes_for_github_actions(text: str) -> str: + """ + Github Actions forgets the current ANSI style on every new line. This + function repeats the current ANSI style on every new line. + """ + ansi_code_regex = re.compile(r"(\033\[[0-9;]*m)") + ansi_codes: list[str] = [] + output = "" + + for line in text.splitlines(keepends=True): + # add the current ANSI codes to the beginning of the line + output += "".join(ansi_codes) + line + + # split the line at each ANSI code + parts = ansi_code_regex.split(line) + # if there are any ANSI codes, save them + if len(parts) > 1: + # iterate over the ANSI codes in this line + for code in parts[1::2]: + if code == "\033[0m": + # reset the list of ANSI codes when the clear code is found + ansi_codes = [] + else: + ansi_codes.append(code) + + return output diff --git a/cibuildwheel/frontend.py b/cibuildwheel/frontend.py new file mode 100644 index 000000000..fc863e45c --- /dev/null +++ b/cibuildwheel/frontend.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import shlex +import typing +from collections.abc import Sequence +from dataclasses import dataclass +from typing import Literal + +from .logger import log +from .util.helpers import parse_key_value_string + +BuildFrontendName = Literal["pip", "build", "build[uv]"] + + +@dataclass(frozen=True) +class BuildFrontendConfig: + name: BuildFrontendName + args: Sequence[str] = () + + @staticmethod + def from_config_string(config_string: str) -> BuildFrontendConfig: + config_dict = parse_key_value_string(config_string, ["name"], ["args"]) + name = " 
".join(config_dict["name"]) + if name not in {"pip", "build", "build[uv]"}: + msg = f"Unrecognised build frontend {name!r}, only 'pip', 'build', and 'build[uv]' are supported" + raise ValueError(msg) + + name = typing.cast(BuildFrontendName, name) + + args = config_dict.get("args") or [] + return BuildFrontendConfig(name=name, args=args) + + def options_summary(self) -> str | dict[str, str]: + if not self.args: + return self.name + else: + return {"name": self.name, "args": repr(self.args)} + + +def _get_verbosity_flags(level: int, frontend: BuildFrontendName) -> list[str]: + if frontend == "pip": + if level > 0: + return ["-" + level * "v"] + if level < 0: + return ["-" + -level * "q"] + elif not 0 <= level < 2: + msg = f"build_verbosity {level} is not supported for build frontend. Ignoring." + log.warning(msg) + return [] + + +def _split_config_settings(config_settings: str, frontend: BuildFrontendName) -> list[str]: + config_settings_list = shlex.split(config_settings) + s = "s" if frontend == "pip" else "" + return [f"--config-setting{s}={setting}" for setting in config_settings_list] + + +def get_build_frontend_extra_flags( + build_frontend: BuildFrontendConfig, verbosity_level: int, config_settings: str +) -> list[str]: + return [ + *_split_config_settings(config_settings, build_frontend.name), + *build_frontend.args, + *_get_verbosity_flags(verbosity_level, build_frontend.name), + ] diff --git a/cibuildwheel/linux.py b/cibuildwheel/linux.py index 83b6f8738..c9a3fc2a9 100644 --- a/cibuildwheel/linux.py +++ b/cibuildwheel/linux.py @@ -14,21 +14,16 @@ from . 
import errors from .architecture import Architecture +from .frontend import BuildFrontendConfig, get_build_frontend_extra_flags from .logger import log from .oci_container import OCIContainer, OCIContainerEngineConfig, OCIPlatform from .options import BuildOptions, Options +from .selector import BuildSelector from .typing import PathOrStr -from .util import ( - BuildFrontendConfig, - BuildSelector, - copy_test_sources, - find_compatible_wheel, - get_build_verbosity_extra_flags, - prepare_command, - read_python_configs, - split_config_settings, - unwrap, -) +from .util import resources +from .util.file import copy_test_sources +from .util.helpers import prepare_command, unwrap +from .util.packaging import find_compatible_wheel ARCHITECTURE_OCI_PLATFORM_MAP = { Architecture.x86_64: OCIPlatform.AMD64, @@ -63,7 +58,7 @@ def get_python_configurations( build_selector: BuildSelector, architectures: Set[Architecture], ) -> list[PythonConfiguration]: - full_python_configs = read_python_configs("linux") + full_python_configs = resources.read_python_configs("linux") python_configurations = [PythonConfiguration(**item) for item in full_python_configs] @@ -275,11 +270,11 @@ def build_in_container( container.call(["rm", "-rf", built_wheel_dir]) container.call(["mkdir", "-p", built_wheel_dir]) - extra_flags = split_config_settings(build_options.config_settings, build_frontend.name) - extra_flags += build_frontend.args + extra_flags = get_build_frontend_extra_flags( + build_frontend, build_options.build_verbosity, build_options.config_settings + ) if build_frontend.name == "pip": - extra_flags += get_build_verbosity_extra_flags(build_options.build_verbosity) container.call( [ "python", @@ -294,9 +289,6 @@ def build_in_container( env=env, ) elif build_frontend.name == "build" or build_frontend.name == "build[uv]": - if not 0 <= build_options.build_verbosity < 2: - msg = f"build_verbosity {build_options.build_verbosity} is not supported for build frontend. Ignoring." 
- log.warning(msg) if use_uv and "--no-isolation" not in extra_flags and "-n" not in extra_flags: extra_flags += ["--installer=uv"] container.call( diff --git a/cibuildwheel/logger.py b/cibuildwheel/logger.py index b88093814..4b7df5c60 100644 --- a/cibuildwheel/logger.py +++ b/cibuildwheel/logger.py @@ -7,7 +7,7 @@ import time from typing import IO, AnyStr, Final -from .util import CIProvider, detect_ci_provider +from .ci import CIProvider, detect_ci_provider FoldPattern = tuple[str, str] DEFAULT_FOLD_PATTERN: Final[FoldPattern] = ("{name}", "") diff --git a/cibuildwheel/macos.py b/cibuildwheel/macos.py index 2fae4df33..e81e2fbc4 100644 --- a/cibuildwheel/macos.py +++ b/cibuildwheel/macos.py @@ -19,34 +19,24 @@ from . import errors from .architecture import Architecture +from .ci import detect_ci_provider from .environment import ParsedEnvironment +from .frontend import BuildFrontendConfig, BuildFrontendName, get_build_frontend_extra_flags from .logger import log from .options import Options +from .selector import BuildSelector from .typing import PathOrStr -from .util import ( +from .util import resources +from .util.cmd import call, shell +from .util.file import ( CIBW_CACHE_PATH, - BuildFrontendConfig, - BuildFrontendName, - BuildSelector, - call, - combine_constraints, copy_test_sources, - detect_ci_provider, download, - find_compatible_wheel, - find_uv, - free_thread_enable_313, - get_build_verbosity_extra_flags, - get_pip_version, - install_certifi_script, move_file, - prepare_command, - read_python_configs, - shell, - split_config_settings, - unwrap, - virtualenv, ) +from .util.helpers import prepare_command, unwrap +from .util.packaging import combine_constraints, find_compatible_wheel, get_pip_version +from .venv import find_uv, virtualenv @functools.cache @@ -99,7 +89,7 @@ class PythonConfiguration: def get_python_configurations( build_selector: BuildSelector, architectures: Set[Architecture] ) -> list[PythonConfiguration]: - full_python_configs = 
read_python_configs("macos") + full_python_configs = resources.read_python_configs("macos") python_configurations = [PythonConfiguration(**item) for item in full_python_configs] @@ -167,7 +157,7 @@ def install_cpython(_tmp: Path, version: str, url: str, free_threading: bool) -> args = [] if version.startswith("3.13"): # Python 3.13 is the first version to have a free-threading option - args += ["-applyChoiceChangesXML", str(free_thread_enable_313.resolve())] + args += ["-applyChoiceChangesXML", str(resources.FREE_THREAD_ENABLE_313.resolve())] call("sudo", "installer", "-pkg", pkg_path, *args, "-target", "/") pkg_path.unlink() env = os.environ.copy() @@ -175,9 +165,13 @@ def install_cpython(_tmp: Path, version: str, url: str, free_threading: bool) -> if free_threading: call(installation_path / f"bin/python{version}t", "-m", "ensurepip", env=env) - call(installation_path / f"bin/python{version}t", install_certifi_script, env=env) + call( + installation_path / f"bin/python{version}t", + resources.INSTALL_CERTIFI_SCRIPT, + env=env, + ) else: - call(installation_path / "bin/python3", install_certifi_script, env=env) + call(installation_path / "bin/python3", resources.INSTALL_CERTIFI_SCRIPT, env=env) return installation_path / "bin" / (f"python{version}t" if free_threading else "python3") @@ -473,10 +467,9 @@ def build(options: Options, tmp_path: Path) -> None: log.step("Building wheel...") built_wheel_dir.mkdir() - extra_flags = split_config_settings( - build_options.config_settings, build_frontend.name + extra_flags = get_build_frontend_extra_flags( + build_frontend, build_options.build_verbosity, build_options.config_settings ) - extra_flags += build_frontend.args build_env = env.copy() if not use_uv: @@ -490,7 +483,6 @@ def build(options: Options, tmp_path: Path) -> None: ) if build_frontend.name == "pip": - extra_flags += get_build_verbosity_extra_flags(build_options.build_verbosity) # Path.resolve() is needed. 
Without it pip wheel may try to fetch package from pypi.org # see https://github.com/pypa/cibuildwheel/pull/369 call( @@ -505,9 +497,6 @@ def build(options: Options, tmp_path: Path) -> None: env=build_env, ) elif build_frontend.name == "build" or build_frontend.name == "build[uv]": - if not 0 <= build_options.build_verbosity < 2: - msg = f"build_verbosity {build_options.build_verbosity} is not supported for build frontend. Ignoring." - log.warning(msg) if use_uv and "--no-isolation" not in extra_flags and "-n" not in extra_flags: extra_flags.append("--installer=uv") call( diff --git a/cibuildwheel/oci_container.py b/cibuildwheel/oci_container.py index 3e4e6ada5..bd1373ef4 100644 --- a/cibuildwheel/oci_container.py +++ b/cibuildwheel/oci_container.py @@ -18,17 +18,12 @@ from types import TracebackType from typing import IO, Literal, Self, assert_never +from .ci import CIProvider, detect_ci_provider from .errors import OCIEngineTooOldError from .logger import log from .typing import PathOrStr, PopenBytes -from .util import ( - CIProvider, - FlexibleVersion, - call, - detect_ci_provider, - parse_key_value_string, - strtobool, -) +from .util.cmd import call +from .util.helpers import FlexibleVersion, parse_key_value_string, strtobool ContainerEngineName = Literal["docker", "podman"] diff --git a/cibuildwheel/options.py b/cibuildwheel/options.py index 091afbb51..6c9766803 100644 --- a/cibuildwheel/options.py +++ b/cibuildwheel/options.py @@ -12,30 +12,42 @@ import tomllib from collections.abc import Callable, Generator, Iterable, Mapping, Sequence, Set from pathlib import Path -from typing import Any, Literal, assert_never +from typing import Any, Final, Literal, assert_never from packaging.specifiers import SpecifierSet from . 
import errors from .architecture import Architecture from .environment import EnvironmentParseError, ParsedEnvironment, parse_environment +from .frontend import BuildFrontendConfig from .logger import log from .oci_container import OCIContainerEngineConfig from .projectfiles import get_requires_python_str, resolve_dependency_groups +from .selector import BuildSelector, EnableGroup, TestSelector, selector_matches from .typing import PLATFORMS, PlatformName -from .util import ( - MANYLINUX_ARCHS, - MUSLLINUX_ARCHS, - BuildFrontendConfig, - BuildSelector, - DependencyConstraints, - EnableGroup, - TestSelector, - format_safe, - resources_dir, - selector_matches, - strtobool, - unwrap, +from .util import resources +from .util.helpers import format_safe, strtobool, unwrap +from .util.packaging import DependencyConstraints + +MANYLINUX_ARCHS: Final[tuple[str, ...]] = ( + "x86_64", + "i686", + "pypy_x86_64", + "aarch64", + "ppc64le", + "s390x", + "armv7l", + "pypy_aarch64", + "pypy_i686", +) + +MUSLLINUX_ARCHS: Final[tuple[str, ...]] = ( + "x86_64", + "i686", + "aarch64", + "ppc64le", + "s390x", + "armv7l", ) @@ -395,8 +407,7 @@ def __init__( self.disallow = disallow or {} # Open defaults.toml, loading both global and platform sections - defaults_path = resources_dir / "defaults.toml" - self.default_options, self.default_platform_options = self._load_file(defaults_path) + self.default_options, self.default_platform_options = self._load_file(resources.DEFAULTS) # Load the project config file config_options: dict[str, Any] = {} @@ -962,10 +973,8 @@ def _get_pinned_container_images() -> Mapping[str, Mapping[str, str]]: 'pypy_x86_64': {'manylinux2010': '...' } ... 
} """ - - pinned_images_file = resources_dir / "pinned_docker_images.cfg" all_pinned_images = configparser.ConfigParser() - all_pinned_images.read(pinned_images_file) + all_pinned_images.read(resources.PINNED_DOCKER_IMAGES) return all_pinned_images diff --git a/cibuildwheel/pyodide.py b/cibuildwheel/pyodide.py index 540a27fd0..00faf58d7 100644 --- a/cibuildwheel/pyodide.py +++ b/cibuildwheel/pyodide.py @@ -1,39 +1,41 @@ from __future__ import annotations +import functools import os import shutil import sys +import tomllib from collections.abc import Sequence, Set from dataclasses import dataclass from pathlib import Path +from tempfile import TemporaryDirectory +from typing import Final from filelock import FileLock from . import errors from .architecture import Architecture from .environment import ParsedEnvironment +from .frontend import BuildFrontendConfig, get_build_frontend_extra_flags from .logger import log from .options import Options +from .selector import BuildSelector from .typing import PathOrStr -from .util import ( +from .util import resources +from .util.cmd import call, shell +from .util.file import ( CIBW_CACHE_PATH, - BuildFrontendConfig, - BuildSelector, - call, - combine_constraints, copy_test_sources, download, - ensure_node, + extract_tar, extract_zip, - find_compatible_wheel, - get_pip_version, move_file, - prepare_command, - read_python_configs, - shell, - split_config_settings, - virtualenv, ) +from .util.helpers import prepare_command +from .util.packaging import combine_constraints, find_compatible_wheel, get_pip_version +from .venv import virtualenv + +IS_WIN: Final[bool] = sys.platform.startswith("win") @dataclass(frozen=True) @@ -46,6 +48,37 @@ class PythonConfiguration: node_version: str +@functools.cache +def ensure_node(major_version: str) -> Path: + with resources.NODEJS.open("rb") as f: + loaded_file = tomllib.load(f) + version = str(loaded_file[major_version]) + base_url = str(loaded_file["url"]) + ext = "zip" if IS_WIN else 
"tar.xz" + platform = "win" if IS_WIN else ("darwin" if sys.platform.startswith("darwin") else "linux") + linux_arch = Architecture.native_arch("linux") + assert linux_arch is not None + arch = {"x86_64": "x64", "i686": "x86", "aarch64": "arm64"}.get( + linux_arch.value, linux_arch.value + ) + name = f"node-{version}-{platform}-{arch}" + path = CIBW_CACHE_PATH / name + with FileLock(str(path) + ".lock"): + if not path.exists(): + url = f"{base_url}{version}/{name}.{ext}" + with TemporaryDirectory() as tmp_path: + archive = Path(tmp_path) / f"{name}.{ext}" + download(url, archive) + if ext == "zip": + extract_zip(archive, path.parent) + else: + extract_tar(archive, path.parent) + assert path.exists() + if not IS_WIN: + return path / "bin" + return path + + def install_emscripten(tmp: Path, version: str) -> Path: # We don't need to match the emsdk version to the version we install, but # we do for stability @@ -188,7 +221,7 @@ def get_python_configurations( build_selector: BuildSelector, architectures: Set[Architecture], # noqa: ARG001 ) -> list[PythonConfiguration]: - full_python_configs = read_python_configs("pyodide") + full_python_configs = resources.read_python_configs("pyodide") python_configurations = [PythonConfiguration(**item) for item in full_python_configs] python_configurations = [c for c in python_configurations if build_selector(c.identifier)] @@ -283,12 +316,9 @@ def build(options: Options, tmp_path: Path) -> None: log.step("Building wheel...") - extra_flags = split_config_settings(build_options.config_settings, "build") - extra_flags += build_frontend.args - - if not 0 <= build_options.build_verbosity < 2: - msg = f"build_verbosity {build_options.build_verbosity} is not supported for build frontend. Ignoring." 
- log.warning(msg) + extra_flags = get_build_frontend_extra_flags( + build_frontend, build_options.build_verbosity, build_options.config_settings + ) build_env = env.copy() if build_options.dependency_constraints: diff --git a/cibuildwheel/schema.py b/cibuildwheel/schema.py index 91ca26594..63b936a10 100644 --- a/cibuildwheel/schema.py +++ b/cibuildwheel/schema.py @@ -1,15 +1,14 @@ from __future__ import annotations import json -from pathlib import Path from typing import Any -DIR = Path(__file__).parent.resolve() +from .util import resources def get_schema(tool_name: str = "cibuildwheel") -> dict[str, Any]: "Get the stored complete schema for cibuildwheel settings." assert tool_name == "cibuildwheel", "Only cibuildwheel is supported." - with DIR.joinpath("resources/cibuildwheel.schema.json").open(encoding="utf-8") as f: + with resources.CIBUILDWHEEL_SCHEMA.open(encoding="utf-8") as f: return json.load(f) # type: ignore[no-any-return] diff --git a/cibuildwheel/selector.py b/cibuildwheel/selector.py new file mode 100644 index 000000000..60eb2f711 --- /dev/null +++ b/cibuildwheel/selector.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +import fnmatch +import itertools +from dataclasses import dataclass +from enum import Enum +from typing import Any + +import bracex +from packaging.specifiers import SpecifierSet +from packaging.version import Version + + +def selector_matches(patterns: str, string: str) -> bool: + """ + Returns True if `string` is matched by any of the wildcard patterns in + `patterns`. + + Matching is according to fnmatch, but with shell-like curly brace + expansion. For example, 'cp{36,37}-*' would match either of 'cp36-*' or + 'cp37-*'. + """ + patterns_list = patterns.split() + expanded_patterns = itertools.chain.from_iterable(bracex.expand(p) for p in patterns_list) + return any(fnmatch.fnmatch(string, pat) for pat in expanded_patterns) + + +class EnableGroup(Enum): + """ + Groups of build selectors that are not enabled by default. 
+ """ + + CPythonFreeThreading = "cpython-freethreading" + CPythonPrerelease = "cpython-prerelease" + PyPy = "pypy" + + +@dataclass(frozen=True, kw_only=True) +class BuildSelector: + """ + This class holds a set of build/skip patterns. You call an instance with a + build identifier, and it returns True if that identifier should be + included. Only call this on valid identifiers, ones that have at least 2 + numeric digits before the first dash. + """ + + build_config: str + skip_config: str + requires_python: SpecifierSet | None = None + enable: frozenset[EnableGroup] = frozenset() + + def __call__(self, build_id: str) -> bool: + # Filter build selectors by python_requires if set + if self.requires_python is not None: + py_ver_str = build_id.split("-")[0] + if py_ver_str.endswith("t"): + py_ver_str = py_ver_str[:-1] + major = int(py_ver_str[2]) + minor = int(py_ver_str[3:]) + version = Version(f"{major}.{minor}.99") + if not self.requires_python.contains(version): + return False + + # filter out groups that are not enabled + if EnableGroup.CPythonFreeThreading not in self.enable and selector_matches( + "cp3??t-*", build_id + ): + return False + if EnableGroup.CPythonPrerelease not in self.enable and selector_matches( + "cp314*", build_id + ): + return False + if EnableGroup.PyPy not in self.enable and selector_matches("pp*", build_id): + return False + + should_build = selector_matches(self.build_config, build_id) + should_skip = selector_matches(self.skip_config, build_id) + + return should_build and not should_skip + + def options_summary(self) -> Any: + return { + "build_config": self.build_config, + "skip_config": self.skip_config, + "requires_python": str(self.requires_python), + "enable": sorted(group.value for group in self.enable), + } + + +@dataclass(frozen=True) +class TestSelector: + """ + A build selector that can only skip tests according to a skip pattern. 
+ """ + + skip_config: str + + def __call__(self, build_id: str) -> bool: + should_skip = selector_matches(self.skip_config, build_id) + return not should_skip + + def options_summary(self) -> Any: + return {"skip_config": self.skip_config} diff --git a/cibuildwheel/util.py b/cibuildwheel/util.py deleted file mode 100644 index 057736f6a..000000000 --- a/cibuildwheel/util.py +++ /dev/null @@ -1,1002 +0,0 @@ -from __future__ import annotations - -import contextlib -import enum -import fnmatch -import itertools -import os -import re -import shlex -import shutil -import ssl -import subprocess -import sys -import tarfile -import textwrap -import time -import tomllib -import typing -import urllib.request -from collections import defaultdict -from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence -from dataclasses import dataclass -from enum import Enum -from functools import cache, total_ordering -from pathlib import Path, PurePath -from tempfile import TemporaryDirectory -from time import sleep -from typing import Any, Final, Literal, TextIO, TypeVar -from zipfile import ZipFile - -import bracex -import certifi -from filelock import FileLock -from packaging.requirements import InvalidRequirement, Requirement -from packaging.specifiers import SpecifierSet -from packaging.utils import parse_wheel_filename -from packaging.version import Version -from platformdirs import user_cache_path - -from . 
import errors -from .architecture import Architecture -from .errors import FatalError -from .typing import PathOrStr, PlatformName - -__all__ = [ - "MANYLINUX_ARCHS", - "EnableGroup", - "call", - "combine_constraints", - "find_compatible_wheel", - "find_uv", - "format_safe", - "get_build_verbosity_extra_flags", - "prepare_command", - "read_python_configs", - "resources_dir", - "selector_matches", - "shell", - "split_config_settings", - "strtobool", -] - -resources_dir: Final[Path] = Path(__file__).parent / "resources" - -install_certifi_script: Final[Path] = resources_dir / "install_certifi.py" - -free_thread_enable_313: Final[Path] = resources_dir / "free-threaded-enable-313.xml" - - -class EnableGroup(enum.Enum): - """ - Groups of build selectors that are not enabled by default. - """ - - CPythonFreeThreading = "cpython-freethreading" - CPythonPrerelease = "cpython-prerelease" - PyPy = "pypy" - - -MANYLINUX_ARCHS: Final[tuple[str, ...]] = ( - "x86_64", - "i686", - "pypy_x86_64", - "aarch64", - "ppc64le", - "s390x", - "armv7l", - "pypy_aarch64", - "pypy_i686", -) - -MUSLLINUX_ARCHS: Final[tuple[str, ...]] = ( - "x86_64", - "i686", - "aarch64", - "ppc64le", - "s390x", - "armv7l", -) - -DEFAULT_CIBW_CACHE_PATH: Final[Path] = user_cache_path(appname="cibuildwheel", appauthor="pypa") -CIBW_CACHE_PATH: Final[Path] = Path( - os.environ.get("CIBW_CACHE_PATH", DEFAULT_CIBW_CACHE_PATH) -).resolve() - -IS_WIN: Final[bool] = sys.platform.startswith("win") - - -@typing.overload -def call( - *args: PathOrStr, - env: Mapping[str, str] | None = None, - cwd: PathOrStr | None = None, - capture_stdout: Literal[False] = ..., -) -> None: ... - - -@typing.overload -def call( - *args: PathOrStr, - env: Mapping[str, str] | None = None, - cwd: PathOrStr | None = None, - capture_stdout: Literal[True], -) -> str: ... 
- - -def call( - *args: PathOrStr, - env: Mapping[str, str] | None = None, - cwd: PathOrStr | None = None, - capture_stdout: bool = False, -) -> str | None: - """ - Run subprocess.run, but print the commands first. Takes the commands as - *args. Uses shell=True on Windows due to a bug. Also converts to - Paths to strings, due to Windows behavior at least on older Pythons. - https://bugs.python.org/issue8557 - """ - args_ = [str(arg) for arg in args] - # print the command executing for the logs - print("+ " + " ".join(shlex.quote(a) for a in args_)) - # workaround platform behaviour differences outlined - # in https://github.com/python/cpython/issues/52803 - path_env = env if env is not None else os.environ - path = path_env.get("PATH", None) - executable = shutil.which(args_[0], path=path) - if executable is None: - msg = f"Couldn't find {args_[0]!r} in PATH {path!r}" - raise FatalError(msg) - args_[0] = executable - try: - result = subprocess.run( - args_, - check=True, - shell=IS_WIN, - env=env, - cwd=cwd, - capture_output=capture_stdout, - text=capture_stdout, - ) - except subprocess.CalledProcessError as e: - if capture_stdout: - sys.stderr.write(e.stderr) - raise - if not capture_stdout: - return None - sys.stderr.write(result.stderr) - return typing.cast(str, result.stdout) - - -def shell( - *commands: str, env: Mapping[str, str] | None = None, cwd: PathOrStr | None = None -) -> None: - command = " ".join(commands) - print(f"+ {command}") - subprocess.run(command, env=env, cwd=cwd, shell=True, check=True) - - -def format_safe(template: str, **kwargs: str | os.PathLike[str]) -> str: - """ - Works similarly to `template.format(**kwargs)`, except that unmatched - fields in `template` are passed through untouched. - - >>> format_safe('{a} {b}', a='123') - '123 {b}' - >>> format_safe('{a} {b[4]:3f}', a='123') - '123 {b[4]:3f}' - - To avoid variable expansion, precede with a single backslash e.g. 
- >>> format_safe('\\{a} {b}', a='123') - '{a} {b}' - """ - - result = template - - for key, value in kwargs.items(): - find_pattern = re.compile( - rf""" - (? str: - """ - Preprocesses a command by expanding variables like {project}. - - For example, used in the test_command option to specify the path to the - project's root. Unmatched syntax will mostly be allowed through. - """ - return format_safe(command, **kwargs) - - -def get_build_verbosity_extra_flags(level: int) -> list[str]: - if level > 0: - return ["-" + level * "v"] - elif level < 0: - return ["-" + -level * "q"] - else: - return [] - - -def split_config_settings( - config_settings: str, frontend: Literal["pip", "build", "build[uv]"] -) -> list[str]: - config_settings_list = shlex.split(config_settings) - s = "s" if frontend == "pip" else "" - return [f"--config-setting{s}={setting}" for setting in config_settings_list] - - -def read_python_configs(config: PlatformName) -> list[dict[str, str]]: - input_file = resources_dir / "build-platforms.toml" - with input_file.open("rb") as f: - loaded_file = tomllib.load(f) - results: list[dict[str, str]] = list(loaded_file[config]["python_configurations"]) - return results - - -def selector_matches(patterns: str, string: str) -> bool: - """ - Returns True if `string` is matched by any of the wildcard patterns in - `patterns`. - - Matching is according to fnmatch, but with shell-like curly brace - expansion. For example, 'cp{36,37}-*' would match either of 'cp36-*' or - 'cp37-*'. - """ - patterns_list = patterns.split() - expanded_patterns = itertools.chain.from_iterable(bracex.expand(p) for p in patterns_list) - return any(fnmatch.fnmatch(string, pat) for pat in expanded_patterns) - - -# Once we require Python 3.10+, we can add kw_only=True -@dataclass(frozen=True) -class BuildSelector: - """ - This class holds a set of build/skip patterns. You call an instance with a - build identifier, and it returns True if that identifier should be - included. 
Only call this on valid identifiers, ones that have at least 2 - numeric digits before the first dash. - """ - - build_config: str - skip_config: str - requires_python: SpecifierSet | None = None - enable: frozenset[EnableGroup] = frozenset() - - def __call__(self, build_id: str) -> bool: - # Filter build selectors by python_requires if set - if self.requires_python is not None: - py_ver_str = build_id.split("-")[0] - if py_ver_str.endswith("t"): - py_ver_str = py_ver_str[:-1] - major = int(py_ver_str[2]) - minor = int(py_ver_str[3:]) - version = Version(f"{major}.{minor}.99") - if not self.requires_python.contains(version): - return False - - # filter out groups that are not enabled - if EnableGroup.CPythonFreeThreading not in self.enable and selector_matches( - "cp3??t-*", build_id - ): - return False - if EnableGroup.CPythonPrerelease not in self.enable and selector_matches( - "cp314*", build_id - ): - return False - if EnableGroup.PyPy not in self.enable and selector_matches("pp*", build_id): - return False - - should_build = selector_matches(self.build_config, build_id) - should_skip = selector_matches(self.skip_config, build_id) - - return should_build and not should_skip - - def options_summary(self) -> Any: - return { - "build_config": self.build_config, - "skip_config": self.skip_config, - "requires_python": str(self.requires_python), - "enable": sorted(group.value for group in self.enable), - } - - -@dataclass(frozen=True) -class TestSelector: - """ - A build selector that can only skip tests according to a skip pattern. 
- """ - - skip_config: str - - def __call__(self, build_id: str) -> bool: - should_skip = selector_matches(self.skip_config, build_id) - return not should_skip - - def options_summary(self) -> Any: - return {"skip_config": self.skip_config} - - -# Taken from https://stackoverflow.com/a/107717 -class Unbuffered: - def __init__(self, stream: TextIO) -> None: - self.stream = stream - - def write(self, data: str) -> None: - self.stream.write(data) - self.stream.flush() - - def writelines(self, data: Iterable[str]) -> None: - self.stream.writelines(data) - self.stream.flush() - - def __getattr__(self, attr: str) -> Any: - return getattr(self.stream, attr) - - -def download(url: str, dest: Path) -> None: - print(f"+ Download {url} to {dest}") - dest_dir = dest.parent - if not dest_dir.exists(): - dest_dir.mkdir(parents=True) - - # we've had issues when relying on the host OS' CA certificates on Windows, - # so we use certifi (this sounds odd but requests also does this by default) - cafile = os.environ.get("SSL_CERT_FILE", certifi.where()) - context = ssl.create_default_context(cafile=cafile) - repeat_num = 3 - for i in range(repeat_num): - try: - with urllib.request.urlopen(url, context=context) as response: - dest.write_bytes(response.read()) - return - - except OSError: - if i == repeat_num - 1: - raise - sleep(3) - - -def extract_zip(zip_src: Path, dest: Path) -> None: - with ZipFile(zip_src) as zip_: - for zinfo in zip_.filelist: - zip_.extract(zinfo, dest) - - # Set permissions to the same values as they were set in the archive - # We have to do this manually due to - # https://github.com/python/cpython/issues/59999 - # But some files in the zipfile seem to have external_attr with 0 - # permissions. In that case just use the default value??? 
- permissions = (zinfo.external_attr >> 16) & 0o777 - if permissions != 0: - dest.joinpath(zinfo.filename).chmod(permissions) - - -def extract_tar(tar_src: Path, dest: Path) -> None: - with tarfile.open(tar_src) as tar_: - tar_.extraction_filter = getattr(tarfile, "tar_filter", (lambda member, _: member)) - tar_.extractall(dest) - - -def move_file(src_file: Path, dst_file: Path) -> Path: - """Moves a file safely while avoiding potential semantic confusion: - - 1. `dst_file` must point to the target filename, not a directory - 2. `dst_file` will be overwritten if it already exists - 3. any missing parent directories will be created - - Returns the fully resolved Path of the resulting file. - - Raises: - NotADirectoryError: If any part of the intermediate path to `dst_file` is an existing file - IsADirectoryError: If `dst_file` points directly to an existing directory - """ - src_file = src_file.resolve(strict=True) - dst_file = dst_file.resolve() - - if dst_file.is_dir(): - msg = "dst_file must be a valid target filename, not an existing directory." 
- raise IsADirectoryError(msg) - dst_file.unlink(missing_ok=True) - dst_file.parent.mkdir(parents=True, exist_ok=True) - - # using shutil.move() as Path.rename() is not guaranteed to work across filesystem boundaries - # explicit str() needed for Python 3.8 - resulting_file = shutil.move(str(src_file), str(dst_file)) - return Path(resulting_file).resolve(strict=True) - - -def copy_into_local(src: Path, dst: PurePath) -> None: - """Copy a path from src to dst, regardless of whether it's a file or a directory.""" - # Ensure the target folder location exists - Path(dst.parent).mkdir(exist_ok=True, parents=True) - - if src.is_dir(): - shutil.copytree(src, dst) - else: - shutil.copy(src, dst) - - -def copy_test_sources( - test_sources: list[str], - package_dir: Path, - test_dir: PurePath, - copy_into: Callable[[Path, PurePath], None] = copy_into_local, -) -> None: - """Copy the list of test sources from the package to the test directory. - - :param test_sources: A list of test paths, relative to the package_dir. - :param package_dir: The root of the package directory. - :param test_dir: The folder where test sources should be placed. - :param copy_info: The copy function to use. By default, does a local - filesystem copy; but an OCIContainer.copy_info method (or equivalent) - can be provided. - """ - for test_path in test_sources: - source = package_dir.resolve() / test_path - - if not source.exists(): - msg = f"Test source {test_path} does not exist." 
- raise errors.FatalError(msg) - - copy_into(source, test_dir / test_path) - - -class DependencyConstraints: - def __init__(self, base_file_path: Path): - assert base_file_path.exists() - self.base_file_path = base_file_path.resolve() - - @staticmethod - def with_defaults() -> DependencyConstraints: - return DependencyConstraints(base_file_path=resources_dir / "constraints.txt") - - def get_for_python_version( - self, version: str, *, variant: Literal["python", "pyodide"] = "python" - ) -> Path: - version_parts = version.split(".") - - # try to find a version-specific dependency file e.g. if - # ./constraints.txt is the base, look for ./constraints-python36.txt - specific_stem = self.base_file_path.stem + f"-{variant}{version_parts[0]}{version_parts[1]}" - specific_name = specific_stem + self.base_file_path.suffix - specific_file_path = self.base_file_path.with_name(specific_name) - - if specific_file_path.exists(): - return specific_file_path - else: - return self.base_file_path - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.base_file_path!r})" - - def __eq__(self, o: object) -> bool: - if not isinstance(o, DependencyConstraints): - return False - - return self.base_file_path == o.base_file_path - - def options_summary(self) -> Any: - if self == DependencyConstraints.with_defaults(): - return "pinned" - else: - return self.base_file_path.name - - -BuildFrontendName = Literal["pip", "build", "build[uv]"] - - -@dataclass(frozen=True) -class BuildFrontendConfig: - name: BuildFrontendName - args: Sequence[str] = () - - @staticmethod - def from_config_string(config_string: str) -> BuildFrontendConfig: - config_dict = parse_key_value_string(config_string, ["name"], ["args"]) - name = " ".join(config_dict["name"]) - if name not in {"pip", "build", "build[uv]"}: - msg = f"Unrecognised build frontend {name!r}, only 'pip', 'build', and 'build[uv]' are supported" - raise ValueError(msg) - - name = typing.cast(BuildFrontendName, name) - - args = 
config_dict.get("args") or [] - return BuildFrontendConfig(name=name, args=args) - - def options_summary(self) -> str | dict[str, str]: - if not self.args: - return self.name - else: - return {"name": self.name, "args": repr(self.args)} - - -def strtobool(val: str) -> bool: - return val.lower() in {"y", "yes", "t", "true", "on", "1"} - - -class CIProvider(Enum): - travis_ci = "travis" - appveyor = "appveyor" - circle_ci = "circle_ci" - azure_pipelines = "azure_pipelines" - github_actions = "github_actions" - gitlab = "gitlab" - cirrus_ci = "cirrus_ci" - other = "other" - - -def detect_ci_provider() -> CIProvider | None: - if "TRAVIS" in os.environ: - return CIProvider.travis_ci - elif "APPVEYOR" in os.environ: - return CIProvider.appveyor - elif "CIRCLECI" in os.environ: - return CIProvider.circle_ci - elif "AZURE_HTTP_USER_AGENT" in os.environ: - return CIProvider.azure_pipelines - elif "GITHUB_ACTIONS" in os.environ: - return CIProvider.github_actions - elif "GITLAB_CI" in os.environ: - return CIProvider.gitlab - elif "CIRRUS_CI" in os.environ: - return CIProvider.cirrus_ci - elif strtobool(os.environ.get("CI", "false")): - return CIProvider.other - else: - return None - - -def unwrap(text: str) -> str: - """ - Unwraps multi-line text to a single line - """ - # remove initial line indent - text = textwrap.dedent(text) - # remove leading/trailing whitespace - text = text.strip() - # remove consecutive whitespace - return re.sub(r"\s+", " ", text) - - -@dataclass(frozen=True) -class FileReport: - name: str - size: str - - -@contextlib.contextmanager -def print_new_wheels(msg: str, output_dir: Path) -> Generator[None, None, None]: - """ - Prints the new items in a directory upon exiting. The message to display - can include {n} for number of wheels, {s} for total number of seconds, - and/or {m} for total number of minutes. Does not print anything if this - exits via exception. 
- """ - - start_time = time.time() - existing_contents = set(output_dir.iterdir()) - yield - final_contents = set(output_dir.iterdir()) - - new_contents = [ - FileReport(wheel.name, f"{(wheel.stat().st_size + 1023) // 1024:,d}") - for wheel in final_contents - existing_contents - ] - - if not new_contents: - return - - max_name_len = max(len(f.name) for f in new_contents) - max_size_len = max(len(f.size) for f in new_contents) - n = len(new_contents) - s = time.time() - start_time - m = s / 60 - print( - msg.format(n=n, s=s, m=m), - *sorted( - f" {f.name:<{max_name_len}s} {f.size:>{max_size_len}s} kB" for f in new_contents - ), - sep="\n", - ) - - -def get_pip_version(env: Mapping[str, str]) -> str: - versions_output_text = call( - "python", "-m", "pip", "freeze", "--all", capture_stdout=True, env=env - ) - (pip_version,) = ( - version[5:] - for version in versions_output_text.strip().splitlines() - if version.startswith("pip==") - ) - return pip_version - - -@cache -def ensure_node(major_version: str) -> Path: - input_file = resources_dir / "nodejs.toml" - with input_file.open("rb") as f: - loaded_file = tomllib.load(f) - version = str(loaded_file[major_version]) - base_url = str(loaded_file["url"]) - ext = "zip" if IS_WIN else "tar.xz" - platform = "win" if IS_WIN else ("darwin" if sys.platform.startswith("darwin") else "linux") - linux_arch = Architecture.native_arch("linux") - assert linux_arch is not None - arch = {"x86_64": "x64", "i686": "x86", "aarch64": "arm64"}.get( - linux_arch.value, linux_arch.value - ) - name = f"node-{version}-{platform}-{arch}" - path = CIBW_CACHE_PATH / name - with FileLock(str(path) + ".lock"): - if not path.exists(): - url = f"{base_url}{version}/{name}.{ext}" - with TemporaryDirectory() as tmp_path: - archive = Path(tmp_path) / f"{name}.{ext}" - download(url, archive) - if ext == "zip": - extract_zip(archive, path.parent) - else: - extract_tar(archive, path.parent) - assert path.exists() - if not IS_WIN: - return path / "bin" - 
return path - - -@cache -def _ensure_virtualenv(version: str) -> Path: - version_parts = version.split(".") - key = f"py{version_parts[0]}{version_parts[1]}" - input_file = resources_dir / "virtualenv.toml" - with input_file.open("rb") as f: - loaded_file = tomllib.load(f) - configuration = loaded_file.get(key, loaded_file["default"]) - version = str(configuration["version"]) - url = str(configuration["url"]) - path = CIBW_CACHE_PATH / f"virtualenv-{version}.pyz" - with FileLock(str(path) + ".lock"): - if not path.exists(): - download(url, path) - return path - - -def _parse_constraints_for_virtualenv( - seed_packages: list[str], - dependency_constraint_flags: Sequence[PathOrStr], -) -> dict[str, str]: - """ - Parses the constraints file referenced by `dependency_constraint_flags` and returns a dict where - the key is the package name, and the value is the constraint version. - If a package version cannot be found, its value is "embed" meaning that virtualenv will install - its bundled version, already available locally. - The function does not try to be too smart and just handles basic constraints. - If it can't get an exact version, the real constraint will be handled by the - {macos|windows}.setup_python function. 
- """ - assert len(dependency_constraint_flags) in {0, 2} - # only seed pip if other seed packages do not appear in a constraint file - constraints_dict = {"pip": "embed"} - if len(dependency_constraint_flags) == 2: - assert dependency_constraint_flags[0] == "-c" - constraint_path = Path(dependency_constraint_flags[1]) - assert constraint_path.exists() - with constraint_path.open(encoding="utf-8") as constraint_file: - for line_ in constraint_file: - line = line_.strip() - if not line: - continue - if line.startswith("#"): - continue - try: - requirement = Requirement(line) - package = requirement.name - if ( - package not in seed_packages - or requirement.url is not None - or requirement.marker is not None - or len(requirement.extras) != 0 - or len(requirement.specifier) != 1 - ): - continue - specifier = next(iter(requirement.specifier)) - if specifier.operator != "==": - continue - constraints_dict[package] = specifier.version - except InvalidRequirement: - continue - return constraints_dict - - -def virtualenv( - version: str, - python: Path, - venv_path: Path, - dependency_constraint_flags: Sequence[PathOrStr], - *, - use_uv: bool, -) -> dict[str, str]: - """ - Create a virtual environment. If `use_uv` is True, - dependency_constraint_flags are ignored since nothing is installed in the - venv. Otherwise, pip is installed, and setuptools + wheel if Python < 3.12. 
- """ - assert python.exists() - - if use_uv: - call("uv", "venv", venv_path, "--python", python) - else: - virtualenv_app = _ensure_virtualenv(version) - allowed_seed_packages = ["pip", "setuptools", "wheel"] - constraints = _parse_constraints_for_virtualenv( - allowed_seed_packages, dependency_constraint_flags - ) - additional_flags: list[str] = [] - for package in allowed_seed_packages: - if package in constraints: - additional_flags.append(f"--{package}={constraints[package]}") - else: - additional_flags.append(f"--no-{package}") - - # Using symlinks to pre-installed seed packages is really the fastest way to get a virtual - # environment. The initial cost is a bit higher but reusing is much faster. - # Windows does not always allow symlinks so just disabling for now. - # Requires pip>=19.3 so disabling for "embed" because this means we don't know what's the - # version of pip that will end-up installed. - # c.f. https://virtualenv.pypa.io/en/latest/cli_interface.html#section-seeder - if ( - not IS_WIN - and constraints["pip"] != "embed" - and Version(constraints["pip"]) >= Version("19.3") - ): - additional_flags.append("--symlink-app-data") - - call( - sys.executable, - "-sS", # just the stdlib, https://github.com/pypa/virtualenv/issues/2133#issuecomment-1003710125 - virtualenv_app, - "--activators=", - "--no-periodic-update", - *additional_flags, - "--python", - python, - venv_path, - ) - - paths = [str(venv_path), str(venv_path / "Scripts")] if IS_WIN else [str(venv_path / "bin")] - env = os.environ.copy() - env["PATH"] = os.pathsep.join([*paths, env["PATH"]]) - env["VIRTUAL_ENV"] = str(venv_path) - return env - - -T = TypeVar("T", bound=PurePath) - - -def find_compatible_wheel(wheels: Sequence[T], identifier: str) -> T | None: - """ - Finds a wheel with an abi3 or a none ABI tag in `wheels` compatible with the Python interpreter - specified by `identifier` that is previously built. 
- """ - - interpreter, platform = identifier.split("-") - free_threaded = interpreter.endswith("t") - if free_threaded: - interpreter = interpreter[:-1] - for wheel in wheels: - _, _, _, tags = parse_wheel_filename(wheel.name) - for tag in tags: - if tag.abi == "abi3" and not free_threaded: - # ABI3 wheels must start with cp3 for impl and tag - if not (interpreter.startswith("cp3") and tag.interpreter.startswith("cp3")): - continue - elif tag.abi == "none": - # CPythonless wheels must include py3 tag - if tag.interpreter[:3] != "py3": - continue - else: - # Other types of wheels are not detected, this is looking for previously built wheels. - continue - - if tag.interpreter != "py3" and int(tag.interpreter[3:]) > int(interpreter[3:]): - # If a minor version number is given, it has to be lower than the current one. - continue - - if platform.startswith(("manylinux", "musllinux", "macosx")): - # Linux, macOS require the beginning and ending match (macos/manylinux version doesn't need to) - os_, arch = platform.split("_", 1) - if not tag.platform.startswith(os_): - continue - if not tag.platform.endswith(f"_{arch}"): - continue - else: - # Windows should exactly match - if tag.platform != platform: - continue - - # If all the filters above pass, then the wheel is a previously built compatible wheel. - return wheel - - return None - - -def fix_ansi_codes_for_github_actions(text: str) -> str: - """ - Github Actions forgets the current ANSI style on every new line. This - function repeats the current ANSI style on every new line. 
- """ - ansi_code_regex = re.compile(r"(\033\[[0-9;]*m)") - ansi_codes: list[str] = [] - output = "" - - for line in text.splitlines(keepends=True): - # add the current ANSI codes to the beginning of the line - output += "".join(ansi_codes) + line - - # split the line at each ANSI code - parts = ansi_code_regex.split(line) - # if there are any ANSI codes, save them - if len(parts) > 1: - # iterate over the ANSI codes in this line - for code in parts[1::2]: - if code == "\033[0m": - # reset the list of ANSI codes when the clear code is found - ansi_codes = [] - else: - ansi_codes.append(code) - - return output - - -def parse_key_value_string( - key_value_string: str, - positional_arg_names: Sequence[str] | None = None, - kw_arg_names: Sequence[str] | None = None, -) -> dict[str, list[str]]: - """ - Parses a string like "docker; create_args: --some-option=value another-option" - """ - if positional_arg_names is None: - positional_arg_names = [] - if kw_arg_names is None: - kw_arg_names = [] - - all_field_names = [*positional_arg_names, *kw_arg_names] - - shlexer = shlex.shlex(key_value_string, posix=True, punctuation_chars=";") - shlexer.commenters = "" - shlexer.whitespace_split = True - parts = list(shlexer) - # parts now looks like - # ['docker', ';', 'create_args:', '--some-option=value', 'another-option'] - - # split by semicolon - fields = [list(group) for k, group in itertools.groupby(parts, lambda x: x == ";") if not k] - - result: defaultdict[str, list[str]] = defaultdict(list) - for field_i, field in enumerate(fields): - # check to see if the option name is specified - field_name, sep, first_value = field[0].partition(":") - if sep: - if field_name not in all_field_names: - msg = f"Failed to parse {key_value_string!r}. 
Unknown field name {field_name!r}" - raise ValueError(msg) - - values = ([first_value] if first_value else []) + field[1:] - else: - try: - field_name = positional_arg_names[field_i] - except IndexError: - msg = f"Failed to parse {key_value_string!r}. Too many positional arguments - expected a maximum of {len(positional_arg_names)}" - raise ValueError(msg) from None - - values = field - - result[field_name] += values - - return dict(result) - - -def find_uv() -> Path | None: - # Prefer uv in our environment - with contextlib.suppress(ImportError, FileNotFoundError): - # pylint: disable-next=import-outside-toplevel - from uv import find_uv_bin - - return Path(find_uv_bin()) - - uv_on_path = shutil.which("uv") - return Path(uv_on_path) if uv_on_path else None - - -def combine_constraints( - env: MutableMapping[str, str], /, constraints_path: Path, tmp_dir: Path | None -) -> None: - """ - This will workaround a bug in pip<=21.1.1 or uv<=0.2.0 if a tmp_dir is given. - If set to None, this will use the modern URI method. - """ - - if tmp_dir: - if " " in str(constraints_path): - assert " " not in str(tmp_dir) - tmp_file = tmp_dir / "constraints.txt" - tmp_file.write_bytes(constraints_path.read_bytes()) - constraints_path = tmp_file - our_constraints = str(constraints_path) - else: - our_constraints = ( - constraints_path.as_uri() if " " in str(constraints_path) else str(constraints_path) - ) - - user_constraints = env.get("PIP_CONSTRAINT") - - env["UV_CONSTRAINT"] = env["PIP_CONSTRAINT"] = " ".join( - c for c in [our_constraints, user_constraints] if c - ) - - -@total_ordering -class FlexibleVersion: - version_str: str - version_parts: tuple[int, ...] 
- suffix: str - - def __init__(self, version_str: str) -> None: - self.version_str = version_str - - # Split into numeric parts and the optional suffix - match = re.match(r"^[v]?(\d+(\.\d+)*)(.*)$", version_str) - if not match: - msg = f"Invalid version string: {version_str}" - raise ValueError(msg) - - version_part, _, suffix = match.groups() - - # Convert numeric version part into a tuple of integers - self.version_parts = tuple(map(int, version_part.split("."))) - self.suffix = suffix.strip() if suffix else "" - - # Normalize by removing trailing zeros - self.version_parts = self._remove_trailing_zeros(self.version_parts) - - def _remove_trailing_zeros(self, parts: tuple[int, ...]) -> tuple[int, ...]: - # Remove trailing zeros for accurate comparisons - # without this, "3.0" would be considered greater than "3" - while parts and parts[-1] == 0: - parts = parts[:-1] - return parts - - def __eq__(self, other: object) -> bool: - if not isinstance(other, FlexibleVersion): - raise NotImplementedError() - return (self.version_parts, self.suffix) == (other.version_parts, other.suffix) - - def __lt__(self, other: object) -> bool: - if not isinstance(other, FlexibleVersion): - raise NotImplementedError() - return (self.version_parts, self.suffix) < (other.version_parts, other.suffix) - - def __repr__(self) -> str: - return f"FlexibleVersion('{self.version_str}')" - - def __str__(self) -> str: - return self.version_str diff --git a/cibuildwheel/util/__init__.py b/cibuildwheel/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/cibuildwheel/util/cmd.py b/cibuildwheel/util/cmd.py new file mode 100644 index 000000000..e02288e2a --- /dev/null +++ b/cibuildwheel/util/cmd.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import os +import shlex +import shutil +import subprocess +import sys +import typing +from collections.abc import Mapping +from typing import Final, Literal + +from ..errors import FatalError +from ..typing import PathOrStr + 
+_IS_WIN: Final[bool] = sys.platform.startswith("win") + + +@typing.overload +def call( + *args: PathOrStr, + env: Mapping[str, str] | None = None, + cwd: PathOrStr | None = None, + capture_stdout: Literal[False] = ..., +) -> None: ... + + +@typing.overload +def call( + *args: PathOrStr, + env: Mapping[str, str] | None = None, + cwd: PathOrStr | None = None, + capture_stdout: Literal[True], +) -> str: ... + + +def call( + *args: PathOrStr, + env: Mapping[str, str] | None = None, + cwd: PathOrStr | None = None, + capture_stdout: bool = False, +) -> str | None: + """ + Run subprocess.run, but print the commands first. Takes the commands as + *args. Uses shell=True on Windows due to a bug. Also converts to + Paths to strings, due to Windows behavior at least on older Pythons. + https://bugs.python.org/issue8557 + """ + args_ = [str(arg) for arg in args] + # print the command executing for the logs + print("+ " + " ".join(shlex.quote(a) for a in args_)) + # workaround platform behaviour differences outlined + # in https://github.com/python/cpython/issues/52803 + path_env = env if env is not None else os.environ + path = path_env.get("PATH", None) + executable = shutil.which(args_[0], path=path) + if executable is None: + msg = f"Couldn't find {args_[0]!r} in PATH {path!r}" + raise FatalError(msg) + args_[0] = executable + try: + result = subprocess.run( + args_, + check=True, + shell=_IS_WIN, + env=env, + cwd=cwd, + capture_output=capture_stdout, + text=capture_stdout, + ) + except subprocess.CalledProcessError as e: + if capture_stdout: + sys.stderr.write(e.stderr) + raise + if not capture_stdout: + return None + sys.stderr.write(result.stderr) + return typing.cast(str, result.stdout) + + +def shell( + *commands: str, env: Mapping[str, str] | None = None, cwd: PathOrStr | None = None +) -> None: + command = " ".join(commands) + print(f"+ {command}") + subprocess.run(command, env=env, cwd=cwd, shell=True, check=True) diff --git a/cibuildwheel/util/file.py 
b/cibuildwheel/util/file.py new file mode 100644 index 000000000..c961651e9 --- /dev/null +++ b/cibuildwheel/util/file.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import os +import shutil +import ssl +import tarfile +import time +import urllib.request +from collections.abc import Callable +from pathlib import Path, PurePath +from typing import Final +from zipfile import ZipFile + +import certifi +from platformdirs import user_cache_path + +from ..errors import FatalError + +DEFAULT_CIBW_CACHE_PATH: Final[Path] = user_cache_path(appname="cibuildwheel", appauthor="pypa") +CIBW_CACHE_PATH: Final[Path] = Path( + os.environ.get("CIBW_CACHE_PATH", DEFAULT_CIBW_CACHE_PATH) +).resolve() + + +def download(url: str, dest: Path) -> None: + print(f"+ Download {url} to {dest}") + dest_dir = dest.parent + if not dest_dir.exists(): + dest_dir.mkdir(parents=True) + + # we've had issues when relying on the host OS' CA certificates on Windows, + # so we use certifi (this sounds odd but requests also does this by default) + cafile = os.environ.get("SSL_CERT_FILE", certifi.where()) + context = ssl.create_default_context(cafile=cafile) + repeat_num = 3 + for i in range(repeat_num): + try: + with urllib.request.urlopen(url, context=context) as response: + dest.write_bytes(response.read()) + return + + except OSError: + if i == repeat_num - 1: + raise + time.sleep(3) + + +def extract_zip(zip_src: Path, dest: Path) -> None: + """Extracts a zip and correctly sets permissions on extracted files. 
+ + Notes: + - sets permissions to the same values as they were set in the archive + - files with no clear permissions in `external_attr` will be extracted with default values + """ + with ZipFile(zip_src) as zip_: + for zinfo in zip_.filelist: + zip_.extract(zinfo, dest) + + # Set permissions to the same values as they were set in the archive + # We have to do this manually due to https://github.com/python/cpython/issues/59999 + permissions = (zinfo.external_attr >> 16) & 0o777 + if permissions != 0: + dest.joinpath(zinfo.filename).chmod(permissions) + + +def extract_tar(tar_src: Path, dest: Path) -> None: + """Extracts a tar file using the stdlib 'tar' filter. + + See: https://docs.python.org/3/library/tarfile.html#tarfile.tar_filter for filter details + """ + with tarfile.open(tar_src) as tar_: + tar_.extraction_filter = getattr(tarfile, "tar_filter", (lambda member, _: member)) + tar_.extractall(dest) + + +def move_file(src_file: Path, dst_file: Path) -> Path: + """Moves a file safely while avoiding potential semantic confusion: + + 1. `dst_file` must point to the target filename, not a directory + 2. `dst_file` will be overwritten if it already exists + 3. any missing parent directories will be created + + Returns the fully resolved Path of the resulting file. + + Raises: + NotADirectoryError: If any part of the intermediate path to `dst_file` is an existing file + IsADirectoryError: If `dst_file` points directly to an existing directory + """ + src_file = src_file.resolve(strict=True) + dst_file = dst_file.resolve() + + if dst_file.is_dir(): + msg = "dst_file must be a valid target filename, not an existing directory." 
+ raise IsADirectoryError(msg) + dst_file.unlink(missing_ok=True) + dst_file.parent.mkdir(parents=True, exist_ok=True) + + # using shutil.move() as Path.rename() is not guaranteed to work across filesystem boundaries + # explicit str() needed for Python 3.8 + resulting_file = shutil.move(str(src_file), str(dst_file)) + return Path(resulting_file).resolve(strict=True) + + +def copy_into_local(src: Path, dst: PurePath) -> None: + """Copy a path from src to dst, regardless of whether it's a file or a directory.""" + # Ensure the target folder location exists + Path(dst.parent).mkdir(exist_ok=True, parents=True) + + if src.is_dir(): + shutil.copytree(src, dst) + else: + shutil.copy(src, dst) + + +def copy_test_sources( + test_sources: list[str], + package_dir: Path, + test_dir: PurePath, + copy_into: Callable[[Path, PurePath], None] = copy_into_local, +) -> None: + """Copy the list of test sources from the package to the test directory. + + :param test_sources: A list of test paths, relative to the package_dir. + :param package_dir: The root of the package directory. + :param test_dir: The folder where test sources should be placed. + :param copy_into: The copy function to use. By default, does a local + filesystem copy; but an OCIContainer.copy_into method (or equivalent) + can be provided. + """ + for test_path in test_sources: + source = package_dir.resolve() / test_path + + if not source.exists(): + msg = f"Test source {test_path} does not exist."
+ raise FatalError(msg) + + copy_into(source, test_dir / test_path) diff --git a/cibuildwheel/util/helpers.py b/cibuildwheel/util/helpers.py new file mode 100644 index 000000000..0a65a0deb --- /dev/null +++ b/cibuildwheel/util/helpers.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +import itertools +import os +import re +import shlex +import textwrap +from collections import defaultdict +from collections.abc import Sequence +from functools import total_ordering + +from ..typing import PathOrStr + + +def format_safe(template: str, **kwargs: str | os.PathLike[str]) -> str: + """ + Works similarly to `template.format(**kwargs)`, except that unmatched + fields in `template` are passed through untouched. + + >>> format_safe('{a} {b}', a='123') + '123 {b}' + >>> format_safe('{a} {b[4]:3f}', a='123') + '123 {b[4]:3f}' + + To avoid variable expansion, precede with a single backslash e.g. + >>> format_safe('\\{a} {b}', a='123') + '{a} {b}' + """ + + result = template + + for key, value in kwargs.items(): + find_pattern = re.compile( + rf""" + (? str: + """ + Preprocesses a command by expanding variables like {project}. + + For example, used in the test_command option to specify the path to the + project's root. Unmatched syntax will mostly be allowed through. 
+ """ + return format_safe(command, **kwargs) + + +def strtobool(val: str) -> bool: + return val.lower() in {"y", "yes", "t", "true", "on", "1"} + + +def unwrap(text: str) -> str: + """ + Unwraps multi-line text to a single line + """ + # remove initial line indent + text = textwrap.dedent(text) + # remove leading/trailing whitespace + text = text.strip() + # remove consecutive whitespace + return re.sub(r"\s+", " ", text) + + +def parse_key_value_string( + key_value_string: str, + positional_arg_names: Sequence[str] | None = None, + kw_arg_names: Sequence[str] | None = None, +) -> dict[str, list[str]]: + """ + Parses a string like "docker; create_args: --some-option=value another-option" + """ + if positional_arg_names is None: + positional_arg_names = [] + if kw_arg_names is None: + kw_arg_names = [] + + all_field_names = [*positional_arg_names, *kw_arg_names] + + shlexer = shlex.shlex(key_value_string, posix=True, punctuation_chars=";") + shlexer.commenters = "" + shlexer.whitespace_split = True + parts = list(shlexer) + # parts now looks like + # ['docker', ';', 'create_args:', '--some-option=value', 'another-option'] + + # split by semicolon + fields = [list(group) for k, group in itertools.groupby(parts, lambda x: x == ";") if not k] + + result: defaultdict[str, list[str]] = defaultdict(list) + for field_i, field in enumerate(fields): + # check to see if the option name is specified + field_name, sep, first_value = field[0].partition(":") + if sep: + if field_name not in all_field_names: + msg = f"Failed to parse {key_value_string!r}. Unknown field name {field_name!r}" + raise ValueError(msg) + + values = ([first_value] if first_value else []) + field[1:] + else: + try: + field_name = positional_arg_names[field_i] + except IndexError: + msg = f"Failed to parse {key_value_string!r}. 
Too many positional arguments - expected a maximum of {len(positional_arg_names)}" + raise ValueError(msg) from None + + values = field + + result[field_name] += values + + return dict(result) + + +@total_ordering +class FlexibleVersion: + version_str: str + version_parts: tuple[int, ...] + suffix: str + + def __init__(self, version_str: str) -> None: + self.version_str = version_str + + # Split into numeric parts and the optional suffix + match = re.match(r"^[v]?(\d+(\.\d+)*)(.*)$", version_str) + if not match: + msg = f"Invalid version string: {version_str}" + raise ValueError(msg) + + version_part, _, suffix = match.groups() + + # Convert numeric version part into a tuple of integers + self.version_parts = tuple(map(int, version_part.split("."))) + self.suffix = suffix.strip() if suffix else "" + + # Normalize by removing trailing zeros + self.version_parts = self._remove_trailing_zeros(self.version_parts) + + def _remove_trailing_zeros(self, parts: tuple[int, ...]) -> tuple[int, ...]: + # Remove trailing zeros for accurate comparisons + # without this, "3.0" would be considered greater than "3" + while parts and parts[-1] == 0: + parts = parts[:-1] + return parts + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FlexibleVersion): + raise NotImplementedError() + return (self.version_parts, self.suffix) == (other.version_parts, other.suffix) + + def __lt__(self, other: object) -> bool: + if not isinstance(other, FlexibleVersion): + raise NotImplementedError() + return (self.version_parts, self.suffix) < (other.version_parts, other.suffix) + + def __repr__(self) -> str: + return f"FlexibleVersion('{self.version_str}')" + + def __str__(self) -> str: + return self.version_str diff --git a/cibuildwheel/util/packaging.py b/cibuildwheel/util/packaging.py new file mode 100644 index 000000000..c4e14f211 --- /dev/null +++ b/cibuildwheel/util/packaging.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +from collections.abc import Mapping, 
MutableMapping, Sequence +from pathlib import Path, PurePath +from typing import Any, Literal, TypeVar + +from packaging.utils import parse_wheel_filename + +from . import resources +from .cmd import call + + +class DependencyConstraints: + def __init__(self, base_file_path: Path): + assert base_file_path.exists() + self.base_file_path = base_file_path.resolve() + + @staticmethod + def with_defaults() -> DependencyConstraints: + return DependencyConstraints(base_file_path=resources.CONSTRAINTS) + + def get_for_python_version( + self, version: str, *, variant: Literal["python", "pyodide"] = "python" + ) -> Path: + version_parts = version.split(".") + + # try to find a version-specific dependency file e.g. if + # ./constraints.txt is the base, look for ./constraints-python36.txt + specific_stem = self.base_file_path.stem + f"-{variant}{version_parts[0]}{version_parts[1]}" + specific_name = specific_stem + self.base_file_path.suffix + specific_file_path = self.base_file_path.with_name(specific_name) + + if specific_file_path.exists(): + return specific_file_path + else: + return self.base_file_path + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.base_file_path!r})" + + def __eq__(self, o: object) -> bool: + if not isinstance(o, DependencyConstraints): + return False + + return self.base_file_path == o.base_file_path + + def options_summary(self) -> Any: + if self == DependencyConstraints.with_defaults(): + return "pinned" + else: + return self.base_file_path.name + + +def get_pip_version(env: Mapping[str, str]) -> str: + versions_output_text = call( + "python", "-m", "pip", "freeze", "--all", capture_stdout=True, env=env + ) + (pip_version,) = ( + version[5:] + for version in versions_output_text.strip().splitlines() + if version.startswith("pip==") + ) + return pip_version + + +T = TypeVar("T", bound=PurePath) + + +def find_compatible_wheel(wheels: Sequence[T], identifier: str) -> T | None: + """ + Finds a wheel with an abi3 or a none ABI 
tag in `wheels` compatible with the Python interpreter + specified by `identifier` that is previously built. + """ + + interpreter, platform = identifier.split("-") + free_threaded = interpreter.endswith("t") + if free_threaded: + interpreter = interpreter[:-1] + for wheel in wheels: + _, _, _, tags = parse_wheel_filename(wheel.name) + for tag in tags: + if tag.abi == "abi3" and not free_threaded: + # ABI3 wheels must start with cp3 for impl and tag + if not (interpreter.startswith("cp3") and tag.interpreter.startswith("cp3")): + continue + elif tag.abi == "none": + # CPythonless wheels must include py3 tag + if tag.interpreter[:3] != "py3": + continue + else: + # Other types of wheels are not detected, this is looking for previously built wheels. + continue + + if tag.interpreter != "py3" and int(tag.interpreter[3:]) > int(interpreter[3:]): + # If a minor version number is given, it has to be lower than the current one. + continue + + if platform.startswith(("manylinux", "musllinux", "macosx")): + # Linux, macOS require the beginning and ending match (macos/manylinux version doesn't need to) + os_, arch = platform.split("_", 1) + if not tag.platform.startswith(os_): + continue + if not tag.platform.endswith(f"_{arch}"): + continue + else: + # Windows should exactly match + if tag.platform != platform: + continue + + # If all the filters above pass, then the wheel is a previously built compatible wheel. + return wheel + + return None + + +def combine_constraints( + env: MutableMapping[str, str], /, constraints_path: Path, tmp_dir: Path | None +) -> None: + """ + This will workaround a bug in pip<=21.1.1 or uv<=0.2.0 if a tmp_dir is given. + If set to None, this will use the modern URI method. 
+ """ + + if tmp_dir: + if " " in str(constraints_path): + assert " " not in str(tmp_dir) + tmp_file = tmp_dir / "constraints.txt" + tmp_file.write_bytes(constraints_path.read_bytes()) + constraints_path = tmp_file + our_constraints = str(constraints_path) + else: + our_constraints = ( + constraints_path.as_uri() if " " in str(constraints_path) else str(constraints_path) + ) + + user_constraints = env.get("PIP_CONSTRAINT") + + env["UV_CONSTRAINT"] = env["PIP_CONSTRAINT"] = " ".join( + c for c in [our_constraints, user_constraints] if c + ) diff --git a/cibuildwheel/util/resources.py b/cibuildwheel/util/resources.py new file mode 100644 index 000000000..781f92db9 --- /dev/null +++ b/cibuildwheel/util/resources.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +import tomllib +from pathlib import Path +from typing import Final + +from ..typing import PlatformName + +PATH: Final[Path] = Path(__file__).parent.parent / "resources" +INSTALL_CERTIFI_SCRIPT: Final[Path] = PATH / "install_certifi.py" +FREE_THREAD_ENABLE_313: Final[Path] = PATH / "free-threaded-enable-313.xml" +NODEJS: Final[Path] = PATH / "nodejs.toml" +DEFAULTS: Final[Path] = PATH / "defaults.toml" +PINNED_DOCKER_IMAGES: Final[Path] = PATH / "pinned_docker_images.cfg" +BUILD_PLATFORMS: Final[Path] = PATH / "build-platforms.toml" +CONSTRAINTS: Final[Path] = PATH / "constraints.txt" +VIRTUALENV: Final[Path] = PATH / "virtualenv.toml" +CIBUILDWHEEL_SCHEMA: Final[Path] = PATH / "cibuildwheel.schema.json" + + +def read_python_configs(config: PlatformName) -> list[dict[str, str]]: + with BUILD_PLATFORMS.open("rb") as f: + loaded_file = tomllib.load(f) + results: list[dict[str, str]] = list(loaded_file[config]["python_configurations"]) + return results diff --git a/cibuildwheel/venv.py b/cibuildwheel/venv.py new file mode 100644 index 000000000..71b29fee6 --- /dev/null +++ b/cibuildwheel/venv.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import contextlib +import functools +import os +import 
shutil +import sys +import tomllib +from collections.abc import Sequence +from pathlib import Path +from typing import Final + +from filelock import FileLock +from packaging.requirements import InvalidRequirement, Requirement +from packaging.version import Version + +from .typing import PathOrStr +from .util import resources +from .util.cmd import call +from .util.file import CIBW_CACHE_PATH, download + +_IS_WIN: Final[bool] = sys.platform.startswith("win") + + +@functools.cache +def _ensure_virtualenv(version: str) -> Path: + version_parts = version.split(".") + key = f"py{version_parts[0]}{version_parts[1]}" + with resources.VIRTUALENV.open("rb") as f: + loaded_file = tomllib.load(f) + configuration = loaded_file.get(key, loaded_file["default"]) + version = str(configuration["version"]) + url = str(configuration["url"]) + path = CIBW_CACHE_PATH / f"virtualenv-{version}.pyz" + with FileLock(str(path) + ".lock"): + if not path.exists(): + download(url, path) + return path + + +def _parse_constraints_for_virtualenv( + seed_packages: list[str], + dependency_constraint_flags: Sequence[PathOrStr], +) -> dict[str, str]: + """ + Parses the constraints file referenced by `dependency_constraint_flags` and returns a dict where + the key is the package name, and the value is the constraint version. + If a package version cannot be found, its value is "embed" meaning that virtualenv will install + its bundled version, already available locally. + The function does not try to be too smart and just handles basic constraints. + If it can't get an exact version, the real constraint will be handled by the + {macos|windows}.setup_python function. 
+ """ + assert len(dependency_constraint_flags) in {0, 2} + # only seed pip if other seed packages do not appear in a constraint file + constraints_dict = {"pip": "embed"} + if len(dependency_constraint_flags) == 2: + assert dependency_constraint_flags[0] == "-c" + constraint_path = Path(dependency_constraint_flags[1]) + assert constraint_path.exists() + with constraint_path.open(encoding="utf-8") as constraint_file: + for line_ in constraint_file: + line = line_.strip() + if not line: + continue + if line.startswith("#"): + continue + try: + requirement = Requirement(line) + package = requirement.name + if ( + package not in seed_packages + or requirement.url is not None + or requirement.marker is not None + or len(requirement.extras) != 0 + or len(requirement.specifier) != 1 + ): + continue + specifier = next(iter(requirement.specifier)) + if specifier.operator != "==": + continue + constraints_dict[package] = specifier.version + except InvalidRequirement: + continue + return constraints_dict + + +def virtualenv( + version: str, + python: Path, + venv_path: Path, + dependency_constraint_flags: Sequence[PathOrStr], + *, + use_uv: bool, +) -> dict[str, str]: + """ + Create a virtual environment. If `use_uv` is True, + dependency_constraint_flags are ignored since nothing is installed in the + venv. Otherwise, pip is installed, and setuptools + wheel if Python < 3.12. 
+ """ + assert python.exists() + + if use_uv: + call("uv", "venv", venv_path, "--python", python) + else: + virtualenv_app = _ensure_virtualenv(version) + allowed_seed_packages = ["pip", "setuptools", "wheel"] + constraints = _parse_constraints_for_virtualenv( + allowed_seed_packages, dependency_constraint_flags + ) + additional_flags: list[str] = [] + for package in allowed_seed_packages: + if package in constraints: + additional_flags.append(f"--{package}={constraints[package]}") + else: + additional_flags.append(f"--no-{package}") + + # Using symlinks to pre-installed seed packages is really the fastest way to get a virtual + # environment. The initial cost is a bit higher but reusing is much faster. + # Windows does not always allow symlinks so just disabling for now. + # Requires pip>=19.3 so disabling for "embed" because this means we don't know what's the + # version of pip that will end-up installed. + # c.f. https://virtualenv.pypa.io/en/latest/cli_interface.html#section-seeder + if ( + not _IS_WIN + and constraints["pip"] != "embed" + and Version(constraints["pip"]) >= Version("19.3") + ): + additional_flags.append("--symlink-app-data") + + call( + sys.executable, + "-sS", # just the stdlib, https://github.com/pypa/virtualenv/issues/2133#issuecomment-1003710125 + virtualenv_app, + "--activators=", + "--no-periodic-update", + *additional_flags, + "--python", + python, + venv_path, + ) + + paths = [str(venv_path), str(venv_path / "Scripts")] if _IS_WIN else [str(venv_path / "bin")] + env = os.environ.copy() + env["PATH"] = os.pathsep.join([*paths, env["PATH"]]) + env["VIRTUAL_ENV"] = str(venv_path) + return env + + +def find_uv() -> Path | None: + # Prefer uv in our environment + with contextlib.suppress(ImportError, FileNotFoundError): + # pylint: disable-next=import-outside-toplevel + from uv import find_uv_bin + + return Path(find_uv_bin()) + + uv_on_path = shutil.which("uv") + return Path(uv_on_path) if uv_on_path else None diff --git 
a/cibuildwheel/windows.py b/cibuildwheel/windows.py index a81a38a69..233ad2ff8 100644 --- a/cibuildwheel/windows.py +++ b/cibuildwheel/windows.py @@ -17,31 +17,17 @@ from . import errors from .architecture import Architecture from .environment import ParsedEnvironment +from .frontend import BuildFrontendConfig, BuildFrontendName, get_build_frontend_extra_flags from .logger import log from .options import Options +from .selector import BuildSelector from .typing import PathOrStr -from .util import ( - CIBW_CACHE_PATH, - BuildFrontendConfig, - BuildFrontendName, - BuildSelector, - call, - combine_constraints, - copy_test_sources, - download, - extract_zip, - find_compatible_wheel, - find_uv, - get_build_verbosity_extra_flags, - get_pip_version, - move_file, - prepare_command, - read_python_configs, - shell, - split_config_settings, - unwrap, - virtualenv, -) +from .util import resources +from .util.cmd import call, shell +from .util.file import CIBW_CACHE_PATH, copy_test_sources, download, extract_zip, move_file +from .util.helpers import prepare_command, unwrap +from .util.packaging import combine_constraints, find_compatible_wheel, get_pip_version +from .venv import find_uv, virtualenv def get_nuget_args( @@ -80,7 +66,7 @@ def get_python_configurations( build_selector: BuildSelector, architectures: Set[Architecture], ) -> list[PythonConfiguration]: - full_python_configs = read_python_configs("windows") + full_python_configs = resources.read_python_configs("windows") python_configurations = [PythonConfiguration(**item) for item in full_python_configs] @@ -422,10 +408,9 @@ def build(options: Options, tmp_path: Path) -> None: log.step("Building wheel...") built_wheel_dir.mkdir() - extra_flags = split_config_settings( - build_options.config_settings, build_frontend.name + extra_flags = get_build_frontend_extra_flags( + build_frontend, build_options.build_verbosity, build_options.config_settings ) - extra_flags += build_frontend.args build_env = env.copy() if not 
use_uv: @@ -438,7 +423,6 @@ def build(options: Options, tmp_path: Path) -> None: combine_constraints(build_env, constraints_path, identifier_tmp_dir) if build_frontend.name == "pip": - extra_flags += get_build_verbosity_extra_flags(build_options.build_verbosity) # Path.resolve() is needed. Without it pip wheel may try to fetch package from pypi.org # see https://github.com/pypa/cibuildwheel/pull/369 call( @@ -453,9 +437,6 @@ def build(options: Options, tmp_path: Path) -> None: env=build_env, ) elif build_frontend.name == "build" or build_frontend.name == "build[uv]": - if not 0 <= build_options.build_verbosity < 2: - msg = f"build_verbosity {build_options.build_verbosity} is not supported for build frontend. Ignoring." - log.warning(msg) if use_uv and "--no-isolation" not in extra_flags and "-n" not in extra_flags: extra_flags.append("--installer=uv") call( diff --git a/test/conftest.py b/test/conftest.py index 65363e59c..13b0882a5 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -8,8 +8,9 @@ from filelock import FileLock from cibuildwheel.architecture import Architecture +from cibuildwheel.ci import detect_ci_provider from cibuildwheel.options import CommandLineArguments, Options -from cibuildwheel.util import detect_ci_provider, find_uv +from cibuildwheel.venv import find_uv from .utils import EMULATED_ARCHS, platform diff --git a/test/test_dependency_versions.py b/test/test_dependency_versions.py index 3b1dead7a..bd449f23f 100644 --- a/test/test_dependency_versions.py +++ b/test/test_dependency_versions.py @@ -7,7 +7,7 @@ import pytest -import cibuildwheel.util +from cibuildwheel.util import resources from . 
import test_projects, utils @@ -69,7 +69,7 @@ def test_pinned_versions(tmp_path, python_version, build_frontend_env_nouv): build_environment = {} build_pattern = f"[cp]p{version_no_dot}-*" constraint_filename = f"constraints-python{version_no_dot}.txt" - constraint_file = cibuildwheel.util.resources_dir / constraint_filename + constraint_file = resources.PATH / constraint_filename constraint_versions = get_versions_from_constraint_file(constraint_file) build_environment["EXPECTED_PIP_VERSION"] = constraint_versions["pip"] diff --git a/test/test_emscripten.py b/test/test_emscripten.py index 7a8fd9953..e7d08398a 100644 --- a/test/test_emscripten.py +++ b/test/test_emscripten.py @@ -6,7 +6,8 @@ import pytest -from cibuildwheel.util import CIBW_CACHE_PATH, CIProvider, detect_ci_provider +from cibuildwheel.ci import CIProvider, detect_ci_provider +from cibuildwheel.util.file import CIBW_CACHE_PATH from . import test_projects, utils diff --git a/test/utils.py b/test/utils.py index bb7b203b0..f956b5876 100644 --- a/test/utils.py +++ b/test/utils.py @@ -18,7 +18,7 @@ import pytest from cibuildwheel.architecture import Architecture -from cibuildwheel.util import CIBW_CACHE_PATH +from cibuildwheel.util.file import CIBW_CACHE_PATH EMULATED_ARCHS: Final[list[str]] = sorted( arch.value for arch in (Architecture.all_archs("linux") - Architecture.auto_archs("linux")) diff --git a/unit_test/build_ids_test.py b/unit_test/build_ids_test.py index 0a444e85c..d389fcf8b 100644 --- a/unit_test/build_ids_test.py +++ b/unit_test/build_ids_test.py @@ -5,14 +5,13 @@ from packaging.version import Version from cibuildwheel.extra import Printable, dump_python_configurations -from cibuildwheel.util import resources_dir +from cibuildwheel.util import resources def test_compare_configs(): - with open(resources_dir / "build-platforms.toml") as f1: - txt = f1.read() + txt = resources.BUILD_PLATFORMS.read_text() - with open(resources_dir / "build-platforms.toml", "rb") as f2: + with 
resources.BUILD_PLATFORMS.open("rb") as f2: dict_txt = tomllib.load(f2) new_txt = dump_python_configurations(dict_txt) diff --git a/unit_test/build_selector_test.py b/unit_test/build_selector_test.py index 84359abba..33fa936f4 100644 --- a/unit_test/build_selector_test.py +++ b/unit_test/build_selector_test.py @@ -2,7 +2,7 @@ from packaging.specifiers import SpecifierSet -from cibuildwheel.util import BuildSelector, EnableGroup +from cibuildwheel.selector import BuildSelector, EnableGroup def test_build(): @@ -162,7 +162,7 @@ def test_build_free_threaded_python(): def test_testing_selector(): # local import to avoid pytest trying to collect this as a test class! - from cibuildwheel.util import TestSelector + from cibuildwheel.selector import TestSelector test_selector = TestSelector(skip_config="cp36-*") diff --git a/unit_test/dependency_constraints_test.py b/unit_test/dependency_constraints_test.py index ce907d5e3..44eb7c743 100644 --- a/unit_test/dependency_constraints_test.py +++ b/unit_test/dependency_constraints_test.py @@ -2,7 +2,7 @@ from pathlib import Path -from cibuildwheel.util import DependencyConstraints +from cibuildwheel.util.packaging import DependencyConstraints def test_defaults(): diff --git a/unit_test/download_test.py b/unit_test/download_test.py index efc5c4e5b..41c264d06 100644 --- a/unit_test/download_test.py +++ b/unit_test/download_test.py @@ -5,7 +5,7 @@ import certifi import pytest -from cibuildwheel.util import download +from cibuildwheel.util.file import download DOWNLOAD_URL = "https://raw.githubusercontent.com/pypa/cibuildwheel/v1.6.3/requirements-dev.txt" diff --git a/unit_test/get_platform_test.py b/unit_test/get_platform_test.py index c9c312e50..6f691bf6f 100644 --- a/unit_test/get_platform_test.py +++ b/unit_test/get_platform_test.py @@ -5,8 +5,8 @@ import pytest import setuptools._distutils.util +from cibuildwheel.ci import CIProvider, detect_ci_provider from cibuildwheel.errors import FatalError -from cibuildwheel.util import 
CIProvider, detect_ci_provider from cibuildwheel.windows import PythonConfiguration, setup_setuptools_cross_compile # monkeypatching os.name is too flaky. E.g. It works on my machine, but fails in pipeline diff --git a/unit_test/main_tests/conftest.py b/unit_test/main_tests/conftest.py index e39c09446..2235973e4 100644 --- a/unit_test/main_tests/conftest.py +++ b/unit_test/main_tests/conftest.py @@ -8,7 +8,8 @@ import pytest -from cibuildwheel import linux, macos, pyodide, util, windows +from cibuildwheel import __main__, linux, macos, pyodide, windows +from cibuildwheel.util import file class ArgsInterceptor: @@ -38,7 +39,7 @@ def ignore_call(*args, **kwargs): pass monkeypatch.setattr(subprocess, "Popen", fail_on_call) - monkeypatch.setattr(util, "download", fail_on_call) + monkeypatch.setattr(file, "download", fail_on_call) monkeypatch.setattr(windows, "build", fail_on_call) monkeypatch.setattr(linux, "build", fail_on_call) monkeypatch.setattr(macos, "build", fail_on_call) @@ -58,7 +59,7 @@ def disable_print_wheels(monkeypatch): def empty_cm(*args, **kwargs): yield - monkeypatch.setattr(util, "print_new_wheels", empty_cm) + monkeypatch.setattr(__main__, "print_new_wheels", empty_cm) @pytest.fixture diff --git a/unit_test/main_tests/main_options_test.py b/unit_test/main_tests/main_options_test.py index 572076864..9f1a380eb 100644 --- a/unit_test/main_tests/main_options_test.py +++ b/unit_test/main_tests/main_options_test.py @@ -9,8 +9,10 @@ from cibuildwheel.__main__ import main from cibuildwheel.environment import ParsedEnvironment +from cibuildwheel.frontend import _split_config_settings from cibuildwheel.options import BuildOptions, _get_pinned_container_images -from cibuildwheel.util import BuildSelector, EnableGroup, resources_dir, split_config_settings +from cibuildwheel.selector import BuildSelector, EnableGroup +from cibuildwheel.util import resources # CIBW_PLATFORM is tested in main_platform_test.py @@ -281,13 +283,13 @@ def 
test_config_settings(platform_specific, platform, intercepted_build_args, mo assert build_options.config_settings == config_settings - assert split_config_settings(config_settings, "build") == [ + assert _split_config_settings(config_settings, "build") == [ "--config-setting=setting=value", "--config-setting=setting=value2", "--config-setting=other=something else", ] - assert split_config_settings(config_settings, "pip") == [ + assert _split_config_settings(config_settings, "pip") == [ "--config-settings=setting=value", "--config-settings=setting=value2", "--config-settings=other=something else", @@ -411,8 +413,7 @@ def test_defaults(platform, intercepted_build_args): main() build_options: BuildOptions = intercepted_build_args.args[0].build_options(identifier=None) - defaults_config_path = resources_dir / "defaults.toml" - with defaults_config_path.open("rb") as f: + with resources.DEFAULTS.open("rb") as f: defaults_toml = tomllib.load(f) root_defaults = defaults_toml["tool"]["cibuildwheel"] diff --git a/unit_test/main_tests/main_platform_test.py b/unit_test/main_tests/main_platform_test.py index f98e5805f..cd9add856 100644 --- a/unit_test/main_tests/main_platform_test.py +++ b/unit_test/main_tests/main_platform_test.py @@ -6,7 +6,7 @@ from cibuildwheel.__main__ import main from cibuildwheel.architecture import Architecture -from cibuildwheel.util import EnableGroup +from cibuildwheel.selector import EnableGroup from ..conftest import MOCK_PACKAGE_DIR diff --git a/unit_test/oci_container_test.py b/unit_test/oci_container_test.py index 7c8e8456e..bc3985177 100644 --- a/unit_test/oci_container_test.py +++ b/unit_test/oci_container_test.py @@ -16,6 +16,7 @@ import tomli_w import cibuildwheel.oci_container +from cibuildwheel.ci import CIProvider, detect_ci_provider from cibuildwheel.environment import EnvironmentAssignmentBash from cibuildwheel.errors import OCIEngineTooOldError from cibuildwheel.oci_container import ( @@ -24,7 +25,6 @@ OCIPlatform, 
_check_engine_version, ) -from cibuildwheel.util import CIProvider, detect_ci_provider # Test utilities diff --git a/unit_test/option_prepare_test.py b/unit_test/option_prepare_test.py index deb0c6fba..9ca80e704 100644 --- a/unit_test/option_prepare_test.py +++ b/unit_test/option_prepare_test.py @@ -10,9 +10,10 @@ import pytest -from cibuildwheel import linux, util +from cibuildwheel import linux from cibuildwheel.__main__ import main from cibuildwheel.oci_container import OCIPlatform +from cibuildwheel.util import file DEFAULT_IDS = {"cp36", "cp37", "cp38", "cp39", "cp310", "cp311", "cp312", "cp313"} ALL_IDS = DEFAULT_IDS | {"cp313t", "pp37", "pp38", "pp39", "pp310"} @@ -39,13 +40,13 @@ def ignore_context_call(*args, **kwargs): monkeypatch.setattr(subprocess, "Popen", fail_on_call) monkeypatch.setattr(subprocess, "run", ignore_call) - monkeypatch.setattr(util, "download", fail_on_call) + monkeypatch.setattr(file, "download", fail_on_call) monkeypatch.setattr("cibuildwheel.linux.OCIContainer", ignore_context_call) monkeypatch.setattr( "cibuildwheel.linux.build_in_container", mock.Mock(spec=linux.build_in_container) ) - monkeypatch.setattr("cibuildwheel.util.print_new_wheels", ignore_context_call) + monkeypatch.setattr("cibuildwheel.__main__.print_new_wheels", ignore_context_call) @pytest.mark.usefixtures("mock_build_container", "fake_package_dir") diff --git a/unit_test/options_test.py b/unit_test/options_test.py index 937e3039b..99b484044 100644 --- a/unit_test/options_test.py +++ b/unit_test/options_test.py @@ -15,7 +15,7 @@ Options, _get_pinned_container_images, ) -from cibuildwheel.util import EnableGroup +from cibuildwheel.selector import EnableGroup PYPROJECT_1 = """ [tool.cibuildwheel] diff --git a/unit_test/utils_test.py b/unit_test/utils_test.py index c26d9bbf3..b1ae13055 100644 --- a/unit_test/utils_test.py +++ b/unit_test/utils_test.py @@ -7,15 +7,15 @@ import pytest from cibuildwheel import errors -from cibuildwheel.util import ( +from cibuildwheel.ci 
import fix_ansi_codes_for_github_actions +from cibuildwheel.util.file import copy_test_sources +from cibuildwheel.util.helpers import ( FlexibleVersion, - copy_test_sources, - find_compatible_wheel, - fix_ansi_codes_for_github_actions, format_safe, parse_key_value_string, prepare_command, ) +from cibuildwheel.util.packaging import find_compatible_wheel def test_format_safe(): diff --git a/unit_test/validate_schema_test.py b/unit_test/validate_schema_test.py index c0dfad454..f322102b7 100644 --- a/unit_test/validate_schema_test.py +++ b/unit_test/validate_schema_test.py @@ -7,12 +7,13 @@ import pytest import validate_pyproject.api +from cibuildwheel.util import resources + DIR = Path(__file__).parent.resolve() def test_validate_default_schema(): - filepath = DIR.parent / "cibuildwheel/resources/defaults.toml" - with filepath.open("rb") as f: + with resources.DEFAULTS.open("rb") as f: example = tomllib.load(f) validator = validate_pyproject.api.Validator() diff --git a/unit_test/wheel_print_test.py b/unit_test/wheel_print_test.py index e97d316d4..fc97cbecf 100644 --- a/unit_test/wheel_print_test.py +++ b/unit_test/wheel_print_test.py @@ -2,7 +2,7 @@ import pytest -from cibuildwheel.util import print_new_wheels +from cibuildwheel.__main__ import print_new_wheels def test_printout_wheels(tmp_path, capsys):